text stringlengths 6–947k | repo_name stringlengths 5–100 | path stringlengths 4–231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6–947k | score float64 0–0.34
---|---|---|---|---|---|---
import src.game_utils.function_proxy as check
from src.basic_functions import *
"""
This file is the one you'll be working on
read the documentation of the functions to know
what it must be able to do.
"""
def move_snake():
"""
    This function controls how the snake moves.
    Uses an edited version of a previously implemented function in the Snake class.
"""
move_snake_head_to_next()
def grow_snake(body):
"""
    This function is responsible for growing the snake when it eats food.
    :param body: the snake body to grow
"""
body.append(body[-1]) # adds a cube at the last place in the body
    # where the added cube will follow the previous cube and so on
def frame_logic():
"""
Controls Frame Logic
"""
snake = get_snake()
snake.move()
body = snake.body
    if body[0] == get_food_position():  # if the snake ate the food
        food_location(body)  # change the food location, making sure it doesn't spawn on the snake body
increase_score()
snake.grow()
    elif body[0] in body[1:] or is_out_of_screen(body[0]):  # checks if the snake ate itself or went off-screen
game_over()
def food_location(body):
"""
:param body: Snake body to avoid
:return: None
"""
rnd_pnt = random_point()
while rnd_pnt in body:
rnd_pnt = random_point()
change_food_location(rnd_pnt)
def submit_your_functions():
    check.proton_frame_logic = frame_logic
    check.proton_grow_snake = grow_snake
    check.proton_move_snake = move_snake
    check.proton_change_food_location = food_location
| RagaiAhmed/PSP | src/tasks.py | Python | apache-2.0 | 1,617 | 0.005566 |
from collections import UserDict
class PathDict(UserDict):
    """Dict that normalizes dotted string keys: 'a.b.c' becomes the tuple ('a', 'b', 'c')."""
    def __normalize_key(self, key):
tkey = key
if isinstance(key, str) and '.' in key:
tkey = tuple(key.split('.'))
return tkey
def __setitem__(self, key, value):
tkey = self.__normalize_key(key)
return super().__setitem__(tkey, value)
def __contains__(self, item):
tkey = self.__normalize_key(item)
return super().__contains__(tkey)
def __getitem__(self, item):
tkey = self.__normalize_key(item)
return super().__getitem__(tkey)
def __delitem__(self, key):
tkey = self.__normalize_key(key)
return super().__delitem__(tkey)
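# A minimal usage sketch (added for illustration, not part of the original file):
# dotted string keys are normalized to tuples, so 'a.b.c' and ('a', 'b', 'c')
# address the same entry.
if __name__ == '__main__':
    d = PathDict()
    d['a.b.c'] = 1
    assert 'a.b.c' in d and d[('a', 'b', 'c')] == 1
    del d['a.b.c']
    assert 'a.b.c' not in d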
| UKTradeInvestment/export-wins-data | fdi/tests/util.py | Python | gpl-3.0 | 706 | 0 |
## Copyright (c) 2003 Henk Punt
## Permission is hereby granted, free of charge, to any person obtaining
## a copy of this software and associated documentation files (the
## "Software"), to deal in the Software without restriction, including
## without limitation the rights to use, copy, modify, merge, publish,
## distribute, sublicense, and/or sell copies of the Software, and to
## permit persons to whom the Software is furnished to do so, subject to
## the following conditions:
## The above copyright notice and this permission notice shall be
## included in all copies or substantial portions of the Software.
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
## NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
## LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
## OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
## WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from ctypes import *
#TODO auto ie/comctl detection
WIN32_IE = 0x0550
#TODO: auto unicode selection,
#if unicode:
# CreateWindowEx = windll.user32.CreateWindowExW
#else:
# CreateWindowEx = windll.user32.CreateWindowExA
#etc, etc
DWORD = c_ulong
HANDLE = c_ulong
UINT = c_uint
BOOL = c_int
HWND = HANDLE
HINSTANCE = HANDLE
HICON = HANDLE
HDC = HANDLE
HCURSOR = HANDLE
HBRUSH = HANDLE
HMENU = HANDLE
HBITMAP = HANDLE
HIMAGELIST = HANDLE
HGDIOBJ = HANDLE
HMETAFILE = HANDLE
ULONG = DWORD
ULONG_PTR = DWORD
UINT_PTR = DWORD
LONG_PTR = DWORD
INT = c_int
LPCTSTR = c_char_p
LPTSTR = c_char_p
PSTR = c_char_p
LPCSTR = c_char_p
LPCWSTR = c_wchar_p
LPSTR = c_char_p
LPWSTR = c_wchar_p
PVOID = c_void_p
USHORT = c_ushort
WORD = c_ushort
ATOM = WORD
SHORT = c_short
LPARAM = c_ulong
WPARAM = c_uint
LPVOID = c_void_p
LONG = c_long
BYTE = c_byte
TCHAR = c_char #TODO depends on unicode/wide conventions
DWORD_PTR = c_ulong #TODO what is this exactly?
INT_PTR = c_ulong #TODO what is this exactly?
COLORREF = c_ulong
CLIPFORMAT = WORD
FLOAT = c_float
CHAR = c_char
WCHAR = c_wchar
FXPT16DOT16 = c_long
FXPT2DOT30 = c_long
LCSCSTYPE = c_long
LCSGAMUTMATCH = c_long
COLOR16 = USHORT
LRESULT = LONG_PTR
#### Windows version detection ##############################
class OSVERSIONINFO(Structure):
_fields_ = [("dwOSVersionInfoSize", DWORD),
("dwMajorVersion", DWORD),
("dwMinorVersion", DWORD),
("dwBuildNumber", DWORD),
("dwPlatformId", DWORD),
("szCSDVersion", TCHAR * 128)]
def isMajorMinor(self, major, minor):
return (self.dwMajorVersion, self.dwMinorVersion) == (major, minor)
GetVersion = windll.kernel32.GetVersionExA
versionInfo = OSVERSIONINFO()
versionInfo.dwOSVersionInfoSize = sizeof(versionInfo)
GetVersion(byref(versionInfo))
def MAKELONG(w1, w2):
return w1 | (w2 << 16)
MAKELPARAM = MAKELONG
def RGB(r,g,b):
return r | (g<<8) | (b<<16)
##### Windows Callback functions ################################
WNDPROC = WINFUNCTYPE(c_int, HWND, UINT, WPARAM, LPARAM)
DialogProc = WINFUNCTYPE(c_int, HWND, UINT, WPARAM, LPARAM)
CBTProc = WINFUNCTYPE(c_int, c_int, c_int, c_int)
MessageProc = CBTProc
EnumChildProc = WINFUNCTYPE(c_int, HWND, LPARAM)
MSGBOXCALLBACK = WINFUNCTYPE(c_int, HWND, LPARAM) #TODO look up real def
class WNDCLASSEX(Structure):
_fields_ = [("cbSize", UINT),
("style", UINT),
("lpfnWndProc", WNDPROC),
("cbClsExtra", INT),
("cbWndExtra", INT),
("hInstance", HINSTANCE),
("hIcon", HICON),
("hCursor", HCURSOR),
("hbrBackground", HBRUSH),
("lpszMenuName", LPCTSTR),
("lpszClassName", LPCTSTR),
("hIconSm", HICON)]
class POINT(Structure):
_fields_ = [("x", LONG),
("y", LONG)]
def __str__(self):
return "POINT {x: %d, y: %d}" % (self.x, self.y)
POINTL = POINT
class POINTS(Structure):
_fields_ = [("x", SHORT),
("y", SHORT)]
PtInRect = windll.user32.PtInRect
class RECT(Structure):
_fields_ = [("left", LONG),
("top", LONG),
("right", LONG),
("bottom", LONG)]
def __str__(self):
return "RECT {left: %d, top: %d, right: %d, bottom: %d}" % (self.left, self.top,
self.right, self.bottom)
def getHeight(self):
return self.bottom - self.top
height = property(getHeight, None, None, "")
def getWidth(self):
return self.right - self.left
width = property(getWidth, None, None, "")
def getSize(self):
return self.width, self.height
size = property(getSize, None, None, "")
def ContainsPoint(self, pt):
"""determines if this RECT contains the given POINT pt
returns True if pt is in this rect
"""
return bool(PtInRect(byref(self), pt))
RECTL = RECT
class SIZE(Structure):
_fields_ = [('cx', LONG),
('cy', LONG)]
SIZEL = SIZE
##class MSG(Structure):
## _fields_ = [("hWnd", HWND),
## ("message", UINT),
## ("wParam", WPARAM),
## ("lParam", LPARAM),
## ("time", DWORD),
## ("pt", POINT)]
## def __str__(self):
## return "MSG {%d %d %d %d %d %s}" % (self.hWnd, self.message, self.wParam, self.lParam,
## self.time, str(self.pt))
#Hack: we need to use the same MSG type as ctypes uses!
from ctypes.wintypes import MSG
class ACCEL(Structure):
_fields_ = [("fVirt", BYTE),
("key", WORD),
("cmd", WORD)]
class CREATESTRUCT(Structure):
_fields_ = [("lpCreateParams", LPVOID),
("hInstance", HINSTANCE),
("hMenu", HMENU),
("hwndParent", HWND),
("cx", INT),
("cy", INT),
("x", INT),
("y", INT),
("style", LONG),
("lpszName", LPCTSTR),
("lpszClass", LPCTSTR),
("dwExStyle", DWORD)]
class NMHDR(Structure):
_fields_ = [("hwndFrom", HWND),
("idFrom", UINT),
("code", UINT)]
class PAINTSTRUCT(Structure):
_fields_ = [("hdc", HDC),
("fErase", BOOL),
("rcPaint", RECT),
("fRestore", BOOL),
("fIncUpdate", BOOL),
("rgbReserved", c_char * 32)]
class MENUITEMINFO(Structure):
_fields_ = [("cbSize", UINT),
("fMask", UINT),
("fType", UINT),
("fState", UINT),
("wID", UINT),
("hSubMenu", HMENU),
("hbmpChecked", HBITMAP),
("hbmpUnchecked", HBITMAP),
("dwItemData", ULONG_PTR),
("dwTypeData", LPTSTR),
("cch", UINT),
("hbmpItem", HBITMAP)]
class DLGTEMPLATE(Structure):
_pack_ = 2
_fields_ = [
("style", DWORD),
("exStyle", DWORD),
("cDlgItems", WORD),
("x", c_short),
("y", c_short),
("cx", c_short),
("cy", c_short)
]
class DLGITEMTEMPLATE(Structure):
_pack_ = 2
_fields_ = [
("style", DWORD),
("exStyle", DWORD),
("x", c_short),
("y", c_short),
("cx", c_short),
("cy", c_short),
("id", WORD)
]
class COPYDATASTRUCT(Structure):
_fields_ = [
("dwData", ULONG_PTR),
("cbData", DWORD),
("lpData", PVOID)]
def LOWORD(dword):
return dword & 0x0000ffff
def HIWORD(dword):
return dword >> 16
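# Illustrative round trip (added note): MAKELONG(0x1234, 0x5678) == 0x56781234,
# so LOWORD recovers 0x1234 and HIWORD recovers 0x5678.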
TRUE = 1
FALSE = 0
NULL = 0
IDI_APPLICATION = 32512
SW_SHOW = 5
SW_SHOWNORMAL = 1
SW_HIDE = 0
EN_CHANGE = 768
MSGS = [('WM_NULL', 0),
('WM_CREATE', 1),
('WM_CANCELMODE', 31),
('WM_CAPTURECHANGED', 533),
('WM_CLOSE', 16),
('WM_COMMAND', 273),
('WM_DESTROY', 2),
('WM_ERASEBKGND', 20),
('WM_GETFONT', 49),
('WM_INITDIALOG', 272),
('WM_INITMENUPOPUP', 279),
('WM_KEYDOWN', 256),
('WM_KEYFIRST', 256),
('WM_KEYLAST', 264),
('WM_KEYUP', 257),
('WM_LBUTTONDBLCLK', 515),
('WM_LBUTTONDOWN', 513),
('WM_LBUTTONUP', 514),
('WM_MBUTTONDBLCLK', 521),
('WM_MBUTTONDOWN', 519),
('WM_MBUTTONUP', 520),
('WM_MENUSELECT', 287),
('WM_MOUSEFIRST', 512),
('WM_MOUSEHOVER', 673),
('WM_MOUSELEAVE', 675),
('WM_MOUSEMOVE', 512),
('WM_MOVE', 3),
('WM_NCCREATE', 129),
('WM_NCDESTROY', 130),
('WM_NOTIFY', 78),
('WM_PAINT', 15),
('WM_RBUTTONDBLCLK', 518),
('WM_RBUTTONDOWN', 516),
('WM_RBUTTONUP', 517),
('WM_SETCURSOR', 32),
('WM_SETFONT', 48),
('WM_SETREDRAW', 11),
('WM_SIZE', 5),
('WM_SYSKEYDOWN', 260),
('WM_SYSKEYUP', 261),
('WM_USER', 1024),
('WM_WINDOWPOSCHANGED', 71),
('WM_WINDOWPOSCHANGING', 70),
('WM_SETTEXT', 12),
('WM_GETTEXT', 13),
('WM_GETTEXTLENGTH', 14),
('WM_ACTIVATE', 6),
('WM_HSCROLL', 276),
('WM_VSCROLL', 277),
('WM_CTLCOLORBTN', 309),
('WM_CTLCOLORDLG', 310),
('WM_CTLCOLOREDIT', 307),
('WM_CTLCOLORLISTBOX', 308),
('WM_CTLCOLORMSGBOX', 306),
('WM_CTLCOLORSCROLLBAR', 311),
('WM_CTLCOLORSTATIC', 312),
('WM_TIMER', 0x0113),
('WM_CONTEXTMENU', 0x007B),
('WM_COPYDATA', 0x004A)
]
#insert wm_* msgs as constants in this module:
for key, val in MSGS:
exec('%s = %d' % (key, val)) #TODO without using 'exec'?
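# (Added note: an exec-free alternative here would be `globals()[key] = val`,
# which creates the same module-level constants.)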
BN_CLICKED = 0
VK_DOWN = 40
VK_LEFT = 37
VK_RIGHT = 39
VK_DELETE = 0x2E
CS_HREDRAW = 2
CS_VREDRAW = 1
WHITE_BRUSH = 0
BLACK_BRUSH = 4
MIIM_STATE = 1
MIIM_ID = 2
MIIM_SUBMENU = 4
MIIM_CHECKMARKS = 8
MIIM_TYPE = 16
MIIM_DATA = 32
MIIM_STRING = 64
MIIM_BITMAP = 128
MIIM_FTYPE = 256
MFT_BITMAP = 4
MFT_MENUBARBREAK = 32
MFT_MENUBREAK = 64
MFT_OWNERDRAW = 256
MFT_RADIOCHECK = 512
MFT_RIGHTJUSTIFY = 0x4000
MFT_SEPARATOR = 0x800
MFT_RIGHTORDER = 0x2000L
MFT_STRING = 0
MF_ENABLED = 0
MF_GRAYED = 1
MF_DISABLED = 2
MF_BITMAP = 4
MF_CHECKED = 8
MF_MENUBARBREAK = 32
MF_MENUBREAK = 64
MF_OWNERDRAW = 256
MF_POPUP = 16
MF_SEPARATOR = 0x800
MF_STRING = 0
MF_UNCHECKED = 0
MF_DEFAULT = 4096
MF_SYSMENU = 0x2000
MF_HELP = 0x4000
MF_END = 128
MF_RIGHTJUSTIFY = 0x4000
MF_MOUSESELECT = 0x8000
MF_INSERT = 0
MF_CHANGE = 128
MF_APPEND = 256
MF_DELETE = 512
MF_REMOVE = 4096
MF_USECHECKBITMAPS = 512
MF_UNHILITE = 0
MF_HILITE = 128
MF_BYCOMMAND = 0
MF_BYPOSITION = 1024
LOCALE_SYSTEM_DEFAULT = 0x800
MFS_GRAYED = 0x00000003L
MFS_DISABLED = MFS_GRAYED
MFS_CHECKED = MF_CHECKED
MFS_HILITE = MF_HILITE
MFS_ENABLED = MF_ENABLED
MFS_UNCHECKED = MF_UNCHECKED
MFS_UNHILITE = MF_UNHILITE
MFS_DEFAULT = MF_DEFAULT
WS_BORDER = 0x800000
WS_CAPTION = 0xc00000
WS_CHILD = 0x40000000
WS_CHILDWINDOW = 0x40000000
WS_CLIPCHILDREN = 0x2000000
WS_CLIPSIBLINGS = 0x4000000
WS_DISABLED = 0x8000000
WS_DLGFRAME = 0x400000
WS_GROUP = 0x20000
WS_HSCROLL = 0x100000
WS_ICONIC = 0x20000000
WS_MAXIMIZE = 0x1000000
WS_MAXIMIZEBOX = 0x10000
WS_MINIMIZE = 0x20000000
WS_MINIMIZEBOX = 0x20000
WS_OVERLAPPED = 0
WS_OVERLAPPEDWINDOW = 0xcf0000
WS_POPUP = 0x80000000L
WS_POPUPWINDOW = 0x80880000
WS_SIZEBOX = 0x40000
WS_SYSMENU = 0x80000
WS_TABSTOP = 0x10000
WS_THICKFRAME = 0x40000
WS_TILED = 0
WS_TILEDWINDOW = 0xcf0000
WS_VISIBLE = 0x10000000
WS_VSCROLL = 0x200000
WS_EX_TOOLWINDOW = 128
WS_EX_LEFT = 0
WS_EX_LTRREADING = 0
WS_EX_RIGHTSCROLLBAR = 0
WS_EX_WINDOWEDGE = 256
WS_EX_STATICEDGE = 0x20000
WS_EX_CLIENTEDGE = 512
WS_EX_OVERLAPPEDWINDOW = 0x300
WS_EX_APPWINDOW = 0x40000
WA_INACTIVE = 0
WA_ACTIVE = 1
WA_CLICKACTIVE = 2
RB_SETBARINFO = WM_USER + 4
RB_GETBANDCOUNT = WM_USER + 12
RB_INSERTBANDA = WM_USER + 1
RB_INSERTBANDW = WM_USER + 10
RB_INSERTBAND = RB_INSERTBANDA
RBBIM_STYLE = 1
RBBIM_COLORS = 2
RBBIM_TEXT = 4
RBBIM_IMAGE = 8
RBBIM_CHILD = 16
RBBIM_CHILDSIZE = 32
RBBIM_SIZE = 64
RBBIM_BACKGROUND = 128
RBBIM_ID = 256
RBBIM_IDEALSIZE = 0x00000200
TPM_CENTERALIGN = 4
TPM_LEFTALIGN = 0
TPM_RIGHTALIGN = 8
TPM_LEFTBUTTON = 0
TPM_RIGHTBUTTON = 2
TPM_HORIZONTAL = 0
TPM_VERTICAL = 64
TPM_TOPALIGN = 0
TPM_VCENTERALIGN = 16
TPM_BOTTOMALIGN = 32
TPM_NONOTIFY = 128
TPM_RETURNCMD = 256
TBIF_TEXT = 0x00000002
DT_NOPREFIX = 0x00000800
DT_HIDEPREFIX = 1048576
WH_CBT = 5
WH_MSGFILTER = (-1)
I_IMAGENONE = -2
TBSTATE_ENABLED = 4
BTNS_SHOWTEXT = 0x00000040
CW_USEDEFAULT = 0x80000000
COLOR_3DFACE = 15
BF_LEFT = 1
BF_TOP = 2
BF_RIGHT = 4
BF_BOTTOM = 8
BDR_RAISEDOUTER = 1
BDR_SUNKENOUTER = 2
BDR_RAISEDINNER = 4
BDR_SUNKENINNER = 8
BDR_OUTER = 3
BDR_INNER = 0xc
BDR_RAISED = 5
BDR_SUNKEN = 10
EDGE_RAISED = (BDR_RAISEDOUTER|BDR_RAISEDINNER)
EDGE_SUNKEN = (BDR_SUNKENOUTER|BDR_SUNKENINNER)
EDGE_ETCHED = (BDR_SUNKENOUTER|BDR_RAISEDINNER)
EDGE_BUMP = (BDR_RAISEDOUTER|BDR_SUNKENINNER)
IDC_SIZENWSE = 32642
IDC_SIZENESW = 32643
IDC_SIZEWE = 32644
IDC_SIZENS = 32645
IDC_SIZEALL = 32646
IDC_SIZE = 32640
IDC_ARROW = 32512
TCIF_TEXT = 1
TCIF_IMAGE = 2
TCIF_RTLREADING = 4
TCIF_PARAM = 8
TCS_MULTILINE = 512
MK_LBUTTON = 1
MK_RBUTTON = 2
MK_SHIFT = 4
MK_CONTROL = 8
MK_MBUTTON = 16
ILC_COLOR = 0
ILC_COLOR4 = 4
ILC_COLOR8 = 8
ILC_COLOR16 = 16
ILC_COLOR24 = 24
ILC_COLOR32 = 32
ILC_COLORDDB = 254
ILC_MASK = 1
ILC_PALETTE = 2048
IMAGE_BITMAP = 0
IMAGE_ICON = 1
LR_LOADFROMFILE = 16
LR_VGACOLOR = 0x0080
LR_LOADMAP3DCOLORS = 4096
LR_LOADTRANSPARENT = 32
LVSIL_NORMAL = 0
LVSIL_SMALL = 1
LVSIL_STATE = 2
TVSIL_NORMAL = 0
TVSIL_STATE = 2
SRCCOPY = 0xCC0020
GWL_WNDPROC = -4
HWND_BOTTOM = 1
HWND_TOP = 0
HWND_TOPMOST = -1
SWP_DRAWFRAME = 32
SWP_FRAMECHANGED = 32
SWP_HIDEWINDOW = 128
SWP_NOACTIVATE = 16
SWP_NOCOPYBITS = 256
SWP_NOMOVE = 2
SWP_NOSIZE = 1
SWP_NOREDRAW = 8
SWP_NOZORDER = 4
SWP_SHOWWINDOW = 64
SWP_NOOWNERZORDER = 512
SWP_NOREPOSITION = 512
SWP_NOSENDCHANGING = 1024
SWP_DEFERERASE = 8192
SWP_ASYNCWINDOWPOS = 16384
DCX_WINDOW = 1
DCX_CACHE = 2
DCX_PARENTCLIP = 32
DCX_CLIPSIBLINGS = 16
DCX_CLIPCHILDREN = 8
DCX_NORESETATTRS = 4
DCX_LOCKWINDOWUPDATE = 0x400
DCX_EXCLUDERGN = 64
DCX_INTERSECTRGN = 128
DCX_VALIDATE = 0x200000
GCL_STYLE = -26
SB_HORZ = 0
SB_VERT = 1
SB_CTL = 2
SB_BOTH = 3
SB_LINEUP = 0
SB_LINELEFT = 0
SB_LINEDOWN = 1
SB_LINERIGHT = 1
SB_PAGEUP = 2
SB_PAGELEFT = 2
SB_PAGEDOWN = 3
SB_PAGERIGHT = 3
SB_THUMBPOSITION = 4
SB_THUMBTRACK = 5
SB_TOP = 6
SB_LEFT = 6
SB_BOTTOM = 7
SB_RIGHT = 7
SB_ENDSCROLL = 8
MB_OK = 0x00000000
MB_OKCANCEL = 0x00000001
MB_ABORTRETRYIGNORE = 0x00000002
MB_YESNOCANCEL = 0x00000003
MB_YESNO = 0x00000004
MB_RETRYCANCEL = 0x00000005
MB_ICONASTERISK = 64
MB_ICONEXCLAMATION = 0x30
MB_ICONWARNING = 0x30
MB_ICONERROR = 16
MB_ICONHAND = 16
MB_ICONQUESTION = 32
MB_ICONINFORMATION = 64
MB_ICONSTOP = 16
MB_ICONMASK = 240
IDOK = 1
IDCANCEL = 2
IDABORT = 3
IDRETRY = 4
IDIGNORE = 5
IDYES = 6
IDNO = 7
IDCLOSE = 8
IDHELP = 9
COLOR_3DDKSHADOW = 21
COLOR_3DFACE = 15
COLOR_3DHILIGHT = 20
COLOR_3DHIGHLIGHT = 20
COLOR_3DLIGHT = 22
COLOR_BTNHILIGHT = 20
COLOR_3DSHADOW = 16
COLOR_ACTIVEBORDER = 10
COLOR_ACTIVECAPTION = 2
COLOR_APPWORKSPACE = 12
COLOR_BACKGROUND = 1
COLOR_DESKTOP = 1
COLOR_BTNFACE = 15
COLOR_BTNHIGHLIGHT = 20
COLOR_BTNSHADOW = 16
COLOR_BTNTEXT = 18
COLOR_CAPTIONTEXT = 9
COLOR_GRAYTEXT = 17
COLOR_HIGHLIGHT = 13
COLOR_HIGHLIGHTTEXT = 14
COLOR_INACTIVEBORDER = 11
COLOR_INACTIVECAPTION = 3
COLOR_INACTIVECAPTIONTEXT = 19
COLOR_INFOBK = 24
COLOR_INFOTEXT = 23
COLOR_MENU = 4
COLOR_MENUTEXT = 7
COLOR_SCROLLBAR = 0
COLOR_WINDOW = 5
COLOR_WINDOWFRAME = 6
COLOR_WINDOWTEXT = 8
CTLCOLOR_MSGBOX = 0
CTLCOLOR_EDIT = 1
CTLCOLOR_LISTBOX = 2
CTLCOLOR_BTN = 3
CTLCOLOR_DLG = 4
CTLCOLOR_SCROLLBAR = 5
CTLCOLOR_STATIC = 6
CTLCOLOR_MAX = 7
GMEM_FIXED = 0x0000
GMEM_MOVEABLE = 0x0002
GMEM_NOCOMPACT = 0x0010
GMEM_NODISCARD = 0x0020
GMEM_ZEROINIT = 0x0040
GMEM_MODIFY = 0x0080
GMEM_DISCARDABLE = 0x0100
GMEM_NOT_BANKED = 0x1000
GMEM_SHARE = 0x2000
GMEM_DDESHARE = 0x2000
GMEM_NOTIFY = 0x4000
GMEM_LOWER = GMEM_NOT_BANKED
GMEM_VALID_FLAGS = 0x7F72
GMEM_INVALID_HANDLE= 0x8000
RT_DIALOG = "5"
CF_TEXT = 1
BS_PUSHBUTTON = 0x00L
BS_DEFPUSHBUTTON = 0x01L
BS_GROUPBOX = 0x7
PUSHBUTTON = 0x80
EDITTEXT = 0x81
LTEXT = 0x82
LISTBOX = 0x83
SCROLLBAR = 0x84
COMBOBOX = 0x85
ES_MULTILINE = 4
ES_AUTOVSCROLL = 0x40L
ES_AUTOHSCROLL = 0x80L
ES_READONLY = 0x800
CP_ACP = 0
DS_SETFONT = 0x40
DS_MODALFRAME = 0x80
SYNCHRONIZE = (0x00100000L)
STANDARD_RIGHTS_REQUIRED = (0x000F0000L)
EVENT_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED|SYNCHRONIZE|0x3)
MAX_PATH = 260
def GET_XY_LPARAM(lParam):
    x = LOWORD(lParam)
    if x >= 32768:  # sign-extend the low 16 bits (0x8000..0xFFFF are negative)
        x = x - 65536
    y = HIWORD(lParam)
    if y >= 32768:  # sign-extend the high 16 bits
        y = y - 65536
    return x, y
def GET_POINT_LPARAM(lParam):
x, y = GET_XY_LPARAM(lParam)
return POINT(x, y)
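# Added usage note: e.g. for a WM_MOUSEMOVE message, GET_POINT_LPARAM(lParam)
# yields the cursor position as a POINT; the sign extension above handles
# negative coordinates, which can occur while the mouse is captured.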
FVIRTKEY = 0x01
FNOINVERT = 0x02
FSHIFT = 0x04
FCONTROL = 0x08
FALT = 0x10
def ValidHandle(value):
if value == 0:
raise WinError()
else:
return value
def Fail(value):
if value == -1:
raise WinError()
else:
return value
GetModuleHandle = windll.kernel32.GetModuleHandleA
GetCurrentProcess = windll.kernel32.GetCurrentProcess
GetCurrentProcessId = windll.kernel32.GetCurrentProcessId
PostQuitMessage= windll.user32.PostQuitMessage
DefWindowProc = windll.user32.DefWindowProcA
CallWindowProc = windll.user32.CallWindowProcA
GetDCEx = windll.user32.GetDCEx
GetDC = windll.user32.GetDC
ReleaseDC = windll.user32.ReleaseDC
LoadIcon = windll.user32.LoadIconA
DestroyIcon = windll.user32.DestroyIcon
LoadCursor = windll.user32.LoadCursorA
LoadCursor.restype = ValidHandle
LoadImage = windll.user32.LoadImageA
LoadImage.restype = ValidHandle
RegisterClassEx = windll.user32.RegisterClassExA
SetCursor = windll.user32.SetCursor
CreateWindowEx = windll.user32.CreateWindowExA
CreateWindowEx.restype = ValidHandle
ShowWindow = windll.user32.ShowWindow
UpdateWindow = windll.user32.UpdateWindow
GetMessage = windll.user32.GetMessageA
TranslateMessage = windll.user32.TranslateMessage
DispatchMessage = windll.user32.DispatchMessageA
GetWindowRect = windll.user32.GetWindowRect
MoveWindow = windll.user32.MoveWindow
DestroyWindow = windll.user32.DestroyWindow
CloseWindow = windll.user32.CloseWindow
CreateMenu = windll.user32.CreateMenu
CreatePopupMenu = windll.user32.CreatePopupMenu
DestroyMenu = windll.user32.DestroyMenu
AppendMenu = windll.user32.AppendMenuA
EnableMenuItem = windll.user32.EnableMenuItem
SendMessage = windll.user32.SendMessageA
PostMessage = windll.user32.PostMessageA
GetClientRect = windll.user32.GetClientRect
GetWindowRect = windll.user32.GetWindowRect
IsDialogMessage = windll.user32.IsDialogMessage
RegisterWindowMessage = windll.user32.RegisterWindowMessageA
GetParent = windll.user32.GetParent
SetWindowLong = windll.user32.SetWindowLongA
SetClassLong = windll.user32.SetClassLongA
GetClassLong = windll.user32.GetClassLongA
SetWindowPos = windll.user32.SetWindowPos
InvalidateRect = windll.user32.InvalidateRect
BeginPaint = windll.user32.BeginPaint
EndPaint = windll.user32.EndPaint
SetCapture = windll.user32.SetCapture
GetCapture = windll.user32.GetCapture
ReleaseCapture = windll.user32.ReleaseCapture
ScreenToClient = windll.user32.ScreenToClient
ClientToScreen = windll.user32.ClientToScreen
GetMessagePos = windll.user32.GetMessagePos
BeginDeferWindowPos = windll.user32.BeginDeferWindowPos
DeferWindowPos = windll.user32.DeferWindowPos
EndDeferWindowPos = windll.user32.EndDeferWindowPos
CreateAcceleratorTable = windll.user32.CreateAcceleratorTableA
DestroyAcceleratorTable = windll.user32.DestroyAcceleratorTable
TranslateAccelerator = windll.user32.TranslateAccelerator
ExpandEnvironmentStrings = windll.kernel32.ExpandEnvironmentStringsA
GetModuleHandle = windll.kernel32.GetModuleHandleA
GetModuleHandle.restype = ValidHandle
LoadLibrary = windll.kernel32.LoadLibraryA
LoadLibrary.restype = ValidHandle
FindResource = windll.kernel32.FindResourceA
FindResource.restype = ValidHandle
FindWindow = windll.user32.FindWindowA
GetForegroundWindow = windll.user32.GetForegroundWindow
ChildWindowFromPoint = windll.user32.ChildWindowFromPoint
TrackPopupMenuEx = windll.user32.TrackPopupMenuEx
GetMenuItemCount = windll.user32.GetMenuItemCount
GetMenuItemCount.restype = Fail
GetMenuItemInfo = windll.user32.GetMenuItemInfoA
GetMenuItemInfo.restype = ValidHandle
GetSubMenu = windll.user32.GetSubMenu
SetMenuItemInfo = windll.user32.SetMenuItemInfoA
SetWindowsHookEx = windll.user32.SetWindowsHookExA
CallNextHookEx = windll.user32.CallNextHookEx
UnhookWindowsHookEx = windll.user32.UnhookWindowsHookEx
GetCurrentThreadId = windll.kernel32.GetCurrentThreadId
GetModuleFileName = windll.kernel32.GetModuleFileNameA
GetTempPath = windll.kernel32.GetTempPathA
MessageBox = windll.user32.MessageBoxA
SetWindowText = windll.user32.SetWindowTextA
GetFocus = windll.user32.GetFocus
GlobalAlloc = windll.kernel32.GlobalAlloc
GlobalLock = windll.kernel32.GlobalLock
GlobalUnlock = windll.kernel32.GlobalUnlock
GlobalFree = windll.kernel32.GlobalFree
OpenClipboard = windll.user32.OpenClipboard
EmptyClipboard = windll.user32.EmptyClipboard
SetClipboardData = windll.user32.SetClipboardData
GetClipboardData = windll.user32.GetClipboardData
RegisterClipboardFormat = windll.user32.RegisterClipboardFormatA
CloseClipboard = windll.user32.CloseClipboard
EnumClipboardFormats = windll.user32.EnumClipboardFormats
IsClipboardFormatAvailable = windll.user32.IsClipboardFormatAvailable
DialogBoxParam = windll.user32.DialogBoxParamA
GetDlgItem = windll.user32.GetDlgItem
GetClassName = windll.user32.GetClassNameA
EndDialog = windll.user32.EndDialog
ShowScrollBar = windll.user32.ShowScrollBar
GetDesktopWindow = windll.user32.GetDesktopWindow
SetFocus = windll.user32.SetFocus
MultiByteToWideChar = windll.kernel32.MultiByteToWideChar
CreateDialogIndirectParam = windll.user32.CreateDialogIndirectParamA
DialogBoxIndirectParam = windll.user32.DialogBoxIndirectParamA
EnumChildWindows = windll.user32.EnumChildWindows
GetMenu = windll.user32.GetMenu
SetTimer = windll.user32.SetTimer
KillTimer = windll.user32.KillTimer
IsWindowVisible = windll.user32.IsWindowVisible
IsIconic = windll.user32.IsIconic
GetCursorPos = windll.user32.GetCursorPos
SetForegroundWindow = windll.user32.SetForegroundWindow
SetMenuDefaultItem = windll.user32.SetMenuDefaultItem
GetClassInfo = windll.user32.GetClassInfoA
OpenEvent = windll.kernel32.OpenEventA
CreateEvent = windll.kernel32.CreateEventA
LockWindowUpdate = windll.user32.LockWindowUpdate
| toymachine/venster | venster/windows.py | Python | mit | 23,583 | 0.018318 |
# This is the interface for adb
import subprocess
import logging
import re
from adapter import Adapter
import time
import sys
import os
class ADBException(Exception):
"""
Exception in ADB connection
"""
pass
class ADB(Adapter):
"""
interface of ADB
    send adb commands via this interface; see:
http://developer.android.com/tools/help/adb.html
"""
UP = 0
DOWN = 1
DOWN_AND_UP = 2
MODEL_PROPERTY = "ro.product.model"
VERSION_SDK_PROPERTY = 'ro.build.version.sdk'
VERSION_RELEASE_PROPERTY = 'ro.build.version.release'
RO_SECURE_PROPERTY = 'ro.secure'
RO_DEBUGGABLE_PROPERTY = 'ro.debuggable'
def __init__(self, device=None):
"""
        initiate an ADB connection from a serial number
        the serial number should appear in the output of `adb devices`
:param device: instance of Device
:return:
"""
self.logger = logging.getLogger(self.__class__.__name__)
if device is None:
from droidbot.device import Device
device = Device()
self.device = device
self.cmd_prefix = ['adb', "-s", device.serial]
def run_cmd(self, extra_args):
"""
run an adb command and return the output
:return: output of adb command
@param extra_args: arguments to run in adb
"""
        if isinstance(extra_args, (str, unicode)):
extra_args = extra_args.split()
if not isinstance(extra_args, list):
msg = "invalid arguments: %s\nshould be list or str, %s given" % (extra_args, type(extra_args))
self.logger.warning(msg)
raise ADBException(msg)
args = [] + self.cmd_prefix
args += extra_args
self.logger.debug('command:')
self.logger.debug(args)
try:
r = subprocess.check_output(args).strip()
        except subprocess.CalledProcessError:  # this might mean the device/emulator crashed
            os._exit(1)
self.logger.debug('return:')
self.logger.debug(r)
return r
def shell(self, extra_args):
"""
run an `adb shell` command
@param extra_args:
@return: output of adb shell command
"""
        if isinstance(extra_args, (str, unicode)):
extra_args = extra_args.split()
if not isinstance(extra_args, list):
msg = "invalid arguments: %s\nshould be list or str, %s given" % (extra_args, type(extra_args))
self.logger.warning(msg)
raise ADBException(msg)
shell_extra_args = ['shell'] + extra_args
return self.run_cmd(shell_extra_args)
def check_connectivity(self):
"""
check if adb is connected
:return: True for connected
"""
r = self.run_cmd("get-state")
return r.startswith("device")
def connect(self):
"""
connect adb
"""
self.logger.debug("connected")
def disconnect(self):
"""
disconnect adb
"""
print "[CONNECTION] %s is disconnected" % self.__class__.__name__
def get_property(self, property):
"""
get the value of property
@param property:
@return:
"""
return self.shell(["getprop", property])
def get_model_number(self):
"""
Get device model number. e.g. SM-G935F
"""
return self.get_property(ADB.MODEL_PROPERTY)
def get_sdk_version(self):
"""
Get version of SDK, e.g. 18, 20
"""
try:
return int(self.get_property(ADB.VERSION_SDK_PROPERTY))
except ValueError:
return self.get_property(ADB.VERSION_SDK_PROPERTY)
def get_release_version(self):
"""
Get release version, e.g. 4.3, 6.0
"""
return self.get_property(ADB.VERSION_RELEASE_PROPERTY)
def get_ro_secure(self):
"""
get ro.secure value
@return: 0/1
"""
return int(self.get_property(ADB.RO_SECURE_PROPERTY))
def get_ro_debuggable(self):
"""
get ro.debuggable value
@return: 0/1
"""
return int(self.get_property(ADB.RO_DEBUGGABLE_PROPERTY))
# The following methods are originally from androidviewclient project.
# https://github.com/dtmilano/AndroidViewClient.
def get_display_info(self):
"""
Gets C{mDefaultViewport} and then C{deviceWidth} and C{deviceHeight} values from dumpsys.
This is a method to obtain display dimensions and density
"""
display_info = {}
logical_display_re = re.compile(".*DisplayViewport\{valid=true, .*orientation=(?P<orientation>\d+),"
" .*deviceWidth=(?P<width>\d+), deviceHeight=(?P<height>\d+).*")
dumpsys_display_result = self.shell("dumpsys display")
if dumpsys_display_result is not None:
for line in dumpsys_display_result.splitlines():
m = logical_display_re.search(line, 0)
if m:
for prop in ['width', 'height', 'orientation']:
display_info[prop] = int(m.group(prop))
if 'width' not in display_info or 'height' not in display_info:
physical_display_re = re.compile('Physical size: (?P<width>\d+)x(?P<height>\d+)')
m = physical_display_re.search(self.shell('wm size'))
if m:
for prop in ['width', 'height']:
display_info[prop] = int(m.group(prop))
if 'width' not in display_info or 'height' not in display_info:
# This could also be mSystem or mOverscanScreen
display_re = re.compile('\s*mUnrestrictedScreen=\((?P<x>\d+),(?P<y>\d+)\) (?P<width>\d+)x(?P<height>\d+)')
# This is known to work on older versions (i.e. API 10) where mrestrictedScreen is not available
display_width_height_re = re.compile('\s*DisplayWidth=(?P<width>\d+) *DisplayHeight=(?P<height>\d+)')
for line in self.shell('dumpsys window').splitlines():
m = display_re.search(line, 0)
if not m:
m = display_width_height_re.search(line, 0)
if m:
for prop in ['width', 'height']:
display_info[prop] = int(m.group(prop))
if 'orientation' not in display_info:
surface_orientation_re = re.compile("SurfaceOrientation:\s+(\d+)")
output = self.shell("dumpsys input")
m = surface_orientation_re.search(output)
if m:
display_info['orientation'] = int(m.group(1))
density = None
float_re = re.compile(r"[-+]?\d*\.\d+|\d+")
d = self.get_property('ro.sf.lcd_density')
if float_re.match(d):
density = float(d)
else:
d = self.get_property('qemu.sf.lcd_density')
if float_re.match(d):
density = float(d)
else:
physicalDensityRE = re.compile('Physical density: (?P<density>[\d.]+)', re.MULTILINE)
m = physicalDensityRE.search(self.shell('wm density'))
if m:
density = float(m.group('density'))
if density is not None:
display_info['density'] = density
display_info_keys = {'width', 'height', 'orientation', 'density'}
if not display_info_keys.issuperset(display_info):
            self.logger.warning("getDisplayInfo failed to get: %s" % (display_info_keys - set(display_info.keys())))
return display_info
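    # Added note: a typical (hypothetical) return value would be
    # {'width': 1080, 'height': 1920, 'orientation': 0, 'density': 480.0}.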
def get_enabled_accessibility_services(self):
"""
Get enabled accessibility services
:return: the enabled service names, each service name is in <package_name>/<service_name> format
"""
r = self.shell("settings get secure enabled_accessibility_services")
r = re.sub(r'(?m)^WARNING:.*\n?', '', r)
return r.strip().split(":") if r.strip() != '' else []
def disable_accessibility_service(self, service_name):
"""
Disable an accessibility service
:param service_name: the service to disable, in <package_name>/<service_name> format
"""
service_names = self.get_enabled_accessibility_services()
if service_name in service_names:
service_names.remove(service_name)
self.shell("settings put secure enabled_accessibility_services %s" % ":".join(service_names))
def enable_accessibility_service(self, service_name):
"""
Enable an accessibility service
:param service_name: the service to enable, in <package_name>/<service_name> format
"""
service_names = self.get_enabled_accessibility_services()
if service_name not in service_names:
service_names.append(service_name)
self.shell("settings put secure enabled_accessibility_services %s" % ":".join(service_names))
self.shell("settings put secure accessibility_enabled 1")
def enable_accessibility_service_db(self, service_name):
"""
Enable an accessibility service
:param service_name: the service to enable, in <package_name>/<service_name> format
"""
subprocess.check_output("adb shell \"sqlite3 -batch /data/data/com.android.providers.settings/databases/settings.db \\\"DELETE FROM secure WHERE name='enabled_accessibility_services' OR name='accessibility_enabled' OR name='touch_exploration_granted_accessibility_services' OR name='touch_exploration_enabled'; INSERT INTO secure (name, value) VALUES ('enabled_accessibility_services','" + service_name + "'), ('accessibility_enabled','1'), ('touch_exploration_granted_accessibility_services','" + service_name + "'), ('touch_exploration_enabled','1')\\\";\"", shell=True)
self.shell("stop")
time.sleep(1)
self.shell("start")
def get_installed_apps(self):
"""
Get the package names and apk paths of installed apps on the device
:return: a dict, each key is a package name of an app and each value is the file path to the apk
"""
app_lines = self.shell("pm list packages -f").splitlines()
app_line_re = re.compile('package:(?P<apk_path>.+)=(?P<package>[^=]+)')
package_to_path = {}
for app_line in app_lines:
m = app_line_re.match(app_line)
if m:
package_to_path[m.group('package')] = m.group('apk_path')
return package_to_path
def get_display_density(self):
display_info = self.get_display_info()
if 'density' in display_info:
return display_info['density']
else:
return -1.0
def __transform_point_by_orientation(self, (x, y), orientation_orig, orientation_dest):
if orientation_orig != orientation_dest:
if orientation_dest == 1:
_x = x
x = self.get_display_info()['width'] - y
y = _x
elif orientation_dest == 3:
_x = x
x = y
y = self.get_display_info()['height'] - _x
return x, y
def get_orientation(self):
display_info = self.get_display_info()
if 'orientation' in display_info:
return display_info['orientation']
else:
return -1
def unlock(self):
"""
Unlock the screen of the device
"""
self.shell("input keyevent MENU")
self.shell("input keyevent BACK")
def press(self, key_code):
"""
Press a key
"""
self.shell("input keyevent %s" % key_code)
def touch(self, x, y, orientation=-1, eventType=DOWN_AND_UP):
if orientation == -1:
orientation = self.get_orientation()
self.shell("input tap %d %d" %
self.__transform_point_by_orientation((x, y), orientation, self.get_orientation()))
def long_touch(self, x, y, duration=2000, orientation=-1):
"""
Long touches at (x, y)
@param duration: duration in ms
@param orientation: the orientation (-1: undefined)
This workaround was suggested by U{HaMi<http://stackoverflow.com/users/2571957/hami>}
"""
self.drag((x, y), (x, y), duration, orientation)
def drag(self, (x0, y0), (x1, y1), duration, orientation=-1):
"""
        Sends a drag event in px (actually implemented with the C{input swipe} command).
        @param (x0, y0): starting point in pixels
        @param (x1, y1): ending point in pixels
@param duration: duration of the event in ms
@param orientation: the orientation (-1: undefined)
"""
if orientation == -1:
orientation = self.get_orientation()
(x0, y0) = self.__transform_point_by_orientation((x0, y0), orientation, self.get_orientation())
(x1, y1) = self.__transform_point_by_orientation((x1, y1), orientation, self.get_orientation())
version = self.get_sdk_version()
if version <= 15:
self.logger.error("drag: API <= 15 not supported (version=%d)" % version)
elif version <= 17:
self.shell("input swipe %d %d %d %d" % (x0, y0, x1, y1))
else:
self.shell("input touchscreen swipe %d %d %d %d %d" % (x0, y0, x1, y1, duration))
def type(self, text):
if isinstance(text, str):
escaped = text.replace("%s", "\\%s")
encoded = escaped.replace(" ", "%s")
else:
encoded = str(text)
# TODO find out which characters can be dangerous, and handle non-English characters
self.shell("input text %s" % encoded)
| nastya/droidbot | droidbot/adapter/adb.py | Python | mit | 13,744 | 0.003783 |
# -*- coding: Latin-1 -*-
"""Graphviz's dot language Python interface.
This module provides with a full interface to create handle modify
and process graphs in Graphviz's dot language.
References:
pydot Homepage: http://code.google.com/p/pydot/
Graphviz: http://www.graphviz.org/
DOT Language: http://www.graphviz.org/doc/info/lang.html
Programmed and tested with Graphviz 2.26.3 and Python 2.6 on OSX 10.6.4
Copyright (c) 2005-2011 Ero Carrera <ero.carrera@gmail.com>
Distributed under MIT license [http://opensource.org/licenses/mit-license.html].
"""
__revision__ = "$LastChangedRevision: 25 $"
__author__ = 'Ero Carrera'
__version__ = '1.0.%d' % int( __revision__[21:-2] )
__license__ = 'MIT'
import os
import re
import subprocess
import tempfile
import copy
try:
import dot_parser
except Exception, e:
print "Couldn't import dot_parser, loading of dot files will not be possible."
GRAPH_ATTRIBUTES = set( ['Damping', 'K', 'URL', 'aspect', 'bb', 'bgcolor',
'center', 'charset', 'clusterrank', 'colorscheme', 'comment', 'compound',
'concentrate', 'defaultdist', 'dim', 'dimen', 'diredgeconstraints',
'dpi', 'epsilon', 'esep', 'fontcolor', 'fontname', 'fontnames',
'fontpath', 'fontsize', 'id', 'label', 'labeljust', 'labelloc',
'landscape', 'layers', 'layersep', 'layout', 'levels', 'levelsgap',
'lheight', 'lp', 'lwidth', 'margin', 'maxiter', 'mclimit', 'mindist',
'mode', 'model', 'mosek', 'nodesep', 'nojustify', 'normalize', 'nslimit',
'nslimit1', 'ordering', 'orientation', 'outputorder', 'overlap',
'overlap_scaling', 'pack', 'packmode', 'pad', 'page', 'pagedir',
'quadtree', 'quantum', 'rankdir', 'ranksep', 'ratio', 'remincross',
'repulsiveforce', 'resolution', 'root', 'rotate', 'searchsize', 'sep',
'showboxes', 'size', 'smoothing', 'sortv', 'splines', 'start',
'stylesheet', 'target', 'truecolor', 'viewport', 'voro_margin',
# for subgraphs
'rank' ] )
EDGE_ATTRIBUTES = set( ['URL', 'arrowhead', 'arrowsize', 'arrowtail',
'color', 'colorscheme', 'comment', 'constraint', 'decorate', 'dir',
'edgeURL', 'edgehref', 'edgetarget', 'edgetooltip', 'fontcolor',
'fontname', 'fontsize', 'headURL', 'headclip', 'headhref', 'headlabel',
'headport', 'headtarget', 'headtooltip', 'href', 'id', 'label',
'labelURL', 'labelangle', 'labeldistance', 'labelfloat', 'labelfontcolor',
'labelfontname', 'labelfontsize', 'labelhref', 'labeltarget',
'labeltooltip', 'layer', 'len', 'lhead', 'lp', 'ltail', 'minlen',
'nojustify', 'penwidth', 'pos', 'samehead', 'sametail', 'showboxes',
'style', 'tailURL', 'tailclip', 'tailhref', 'taillabel', 'tailport',
'tailtarget', 'tailtooltip', 'target', 'tooltip', 'weight',
'rank' ] )
NODE_ATTRIBUTES = set( ['URL', 'color', 'colorscheme', 'comment',
'distortion', 'fillcolor', 'fixedsize', 'fontcolor', 'fontname',
'fontsize', 'group', 'height', 'id', 'image', 'imagescale', 'label',
'labelloc', 'layer', 'margin', 'nojustify', 'orientation', 'penwidth',
'peripheries', 'pin', 'pos', 'rects', 'regular', 'root', 'samplepoints',
'shape', 'shapefile', 'showboxes', 'sides', 'skew', 'sortv', 'style',
'target', 'tooltip', 'vertices', 'width', 'z',
# The following are attributes dot2tex
'texlbl', 'texmode' ] )
CLUSTER_ATTRIBUTES = set( ['K', 'URL', 'bgcolor', 'color', 'colorscheme',
'fillcolor', 'fontcolor', 'fontname', 'fontsize', 'label', 'labeljust',
'labelloc', 'lheight', 'lp', 'lwidth', 'nojustify', 'pencolor',
'penwidth', 'peripheries', 'sortv', 'style', 'target', 'tooltip'] )
#
# Extended version of ASPN's Python Cookbook Recipe:
# Frozen dictionaries.
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/414283
#
# This version freezes dictionaries used as values within dictionaries.
#
class frozendict(dict):
def _blocked_attribute(obj):
raise AttributeError, "A frozendict cannot be modified."
_blocked_attribute = property(_blocked_attribute)
__delitem__ = __setitem__ = clear = _blocked_attribute
pop = popitem = setdefault = update = _blocked_attribute
def __new__(cls, *args, **kw):
new = dict.__new__(cls)
args_ = []
for arg in args:
if isinstance(arg, dict):
arg = copy.copy(arg)
for k, v in arg.iteritems():
if isinstance(v, frozendict):
arg[k] = v
elif isinstance(v, dict):
arg[k] = frozendict(v)
elif isinstance(v, list):
v_ = list()
for elm in v:
if isinstance(elm, dict):
v_.append( frozendict(elm) )
else:
v_.append( elm )
arg[k] = tuple(v_)
args_.append( arg )
else:
args_.append( arg )
dict.__init__(new, *args_, **kw)
return new
def __init__(self, *args, **kw):
pass
def __hash__(self):
try:
return self._cached_hash
except AttributeError:
h = self._cached_hash = hash(tuple(sorted(self.iteritems())))
return h
def __repr__(self):
return "frozendict(%s)" % dict.__repr__(self)
dot_keywords = ['graph', 'subgraph', 'digraph', 'node', 'edge', 'strict']
id_re_alpha_nums = re.compile('^[_a-zA-Z][a-zA-Z0-9_:,]*$')
id_re_num = re.compile('^[0-9,]+$')
id_re_with_port = re.compile('^([^:]*):([^:]*)$')
id_re_dbl_quoted = re.compile('^\".*\"$', re.S)
id_re_html = re.compile('^<.*>$', re.S)
def needs_quotes( s ):
"""Checks whether a string is a dot language ID.
    It will check whether the string is composed solely
    of characters allowed in an ID.
If the string is one of the reserved keywords it will
need quotes too.
"""
if s in dot_keywords:
return False
chars = [ord(c) for c in s if ord(c)>0x7f or ord(c)==0]
if chars and not id_re_dbl_quoted.match(s):
return True
for test in [id_re_alpha_nums, id_re_num, id_re_dbl_quoted, id_re_html]:
if test.match(s):
return False
m = id_re_with_port.match(s)
if m:
return needs_quotes(m.group(1)) or needs_quotes(m.group(2))
return True
def quote_if_necessary(s):
if isinstance(s, bool):
if s is True:
return 'True'
return 'False'
if not isinstance( s, basestring ):
return s
if needs_quotes(s):
replace = {'"' : r'\"',
"\n" : r'\n',
"\r" : r'\r'}
for (a,b) in replace.items():
s = s.replace(a, b)
return '"' + s + '"'
return s
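# Added examples (illustrative): quote_if_necessary('node1') returns 'node1'
# unchanged, while quote_if_necessary('node A') returns '"node A"' because a
# space is not a valid dot ID character.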
def graph_from_dot_data(data):
"""Load graph as defined by data in DOT format.
The data is assumed to be in DOT format. It will
be parsed and a Dot class will be returned,
representing the graph.
"""
return dot_parser.parse_dot_data(data)
def graph_from_dot_file(path):
"""Load graph as defined by a DOT file.
The file is assumed to be in DOT format. It will
be loaded, parsed and a Dot class will be returned,
representing the graph.
"""
fd = file(path, 'rb')
data = fd.read()
fd.close()
return graph_from_dot_data(data)
def graph_from_edges(edge_list, node_prefix='', directed=False):
"""Creates a basic graph out of an edge list.
The edge list has to be a list of tuples representing
the nodes connected by the edge.
The values can be anything: bool, int, float, str.
If the graph is undirected by default, it is only
calculated from one of the symmetric halves of the matrix.
"""
if directed:
graph = Dot(graph_type='digraph')
else:
graph = Dot(graph_type='graph')
for edge in edge_list:
if isinstance(edge[0], str):
src = node_prefix + edge[0]
else:
src = node_prefix + str(edge[0])
if isinstance(edge[1], str):
dst = node_prefix + edge[1]
else:
dst = node_prefix + str(edge[1])
e = Edge( src, dst )
graph.add_edge(e)
return graph
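# Added usage sketch (illustrative, assuming the Dot class defined later in
# this module): a three-node directed chain.
#   g = graph_from_edges([('a', 'b'), ('b', 'c')], directed=True)
#   g.to_string()  # yields a digraph with edges a -> b and b -> c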
def graph_from_adjacency_matrix(matrix, node_prefix= u'', directed=False):
"""Creates a basic graph out of an adjacency matrix.
The matrix has to be a list of rows of values
representing an adjacency matrix.
The values can be anything: bool, int, float, as long
as they can evaluate to True or False.
"""
node_orig = 1
if directed:
graph = Dot(graph_type='digraph')
else:
graph = Dot(graph_type='graph')
for row in matrix:
if not directed:
skip = matrix.index(row)
r = row[skip:]
else:
skip = 0
r = row
node_dest = skip+1
for e in r:
if e:
                graph.add_edge(
                    Edge( node_prefix + str(node_orig),
                          node_prefix + str(node_dest)) )
node_dest += 1
node_orig += 1
return graph
def graph_from_incidence_matrix(matrix, node_prefix='', directed=False):
"""Creates a basic graph out of an incidence matrix.
The matrix has to be a list of rows of values
representing an incidence matrix.
The values can be anything: bool, int, float, as long
as they can evaluate to True or False.
"""
node_orig = 1
if directed:
graph = Dot(graph_type='digraph')
else:
graph = Dot(graph_type='graph')
for row in matrix:
nodes = []
c = 1
for node in row:
if node:
nodes.append(c*node)
c += 1
nodes.sort()
if len(nodes) == 2:
            graph.add_edge(
                Edge( node_prefix + str(abs(nodes[0])),
                      node_prefix + str(nodes[1]) ))
if not directed:
graph.set_simplify(True)
return graph
def __find_executables(path):
"""Used by find_graphviz
path - single directory as a string
If any of the executables are found, it will return a dictionary
containing the program names as keys and their paths as values.
Otherwise returns None
"""
success = False
progs = {'dot': '', 'twopi': '', 'neato': '', 'circo': '', 'fdp': '', 'sfdp': ''}
was_quoted = False
path = path.strip()
if path.startswith('"') and path.endswith('"'):
path = path[1:-1]
was_quoted = True
if os.path.isdir(path) :
for prg in progs.iterkeys():
if progs[prg]:
continue
if os.path.exists( os.path.join(path, prg) ):
if was_quoted:
progs[prg] = '"' + os.path.join(path, prg) + '"'
else:
progs[prg] = os.path.join(path, prg)
success = True
elif os.path.exists( os.path.join(path, prg + '.exe') ):
if was_quoted:
progs[prg] = '"' + os.path.join(path, prg + '.exe') + '"'
else:
progs[prg] = os.path.join(path, prg + '.exe')
success = True
if success:
return progs
else:
return None
# The multi-platform version of this 'find_graphviz' function was
# contributed by Peter Cock
#
def find_graphviz():
"""Locate Graphviz's executables in the system.
Tries three methods:
First: Windows Registry (Windows only)
This requires Mark Hammond's pywin32 is installed.
Secondly: Search the path
It will look for 'dot', 'twopi' and 'neato' in all the directories
specified in the PATH environment variable.
Thirdly: Default install location (Windows only)
It will look for 'dot', 'twopi' and 'neato' in the default install
location under the "Program Files" directory.
It will return a dictionary containing the program names as keys
and their paths as values.
If this fails, it returns None.
"""
# Method 1 (Windows only)
#
if os.sys.platform == 'win32':
HKEY_LOCAL_MACHINE = 0x80000002
KEY_QUERY_VALUE = 0x0001
RegOpenKeyEx = None
RegQueryValueEx = None
RegCloseKey = None
try:
import win32api, win32con
RegOpenKeyEx = win32api.RegOpenKeyEx
RegQueryValueEx = win32api.RegQueryValueEx
RegCloseKey = win32api.RegCloseKey
except ImportError:
            # Print a message suggesting they install these?
#
pass
try:
import ctypes
def RegOpenKeyEx(key, subkey, opt, sam):
result = ctypes.c_uint(0)
ctypes.windll.advapi32.RegOpenKeyExA(key, subkey, opt, sam, ctypes.byref(result))
return result.value
def RegQueryValueEx( hkey, valuename ):
data_type = ctypes.c_uint(0)
data_len = ctypes.c_uint(1024)
data = ctypes.create_string_buffer( 1024 )
res = ctypes.windll.advapi32.RegQueryValueExA(hkey, valuename, 0,
ctypes.byref(data_type), data, ctypes.byref(data_len))
return data.value
RegCloseKey = ctypes.windll.advapi32.RegCloseKey
except ImportError:
            # Print a message suggesting they install these?
#
pass
if RegOpenKeyEx is not None:
# Get the GraphViz install path from the registry
#
hkey = None
potentialKeys = [
"SOFTWARE\\ATT\\Graphviz",
"SOFTWARE\\AT&T Research Labs\\Graphviz",
]
for potentialKey in potentialKeys:
try:
hkey = RegOpenKeyEx( HKEY_LOCAL_MACHINE,
potentialKey, 0, KEY_QUERY_VALUE )
if hkey is not None:
path = RegQueryValueEx( hkey, "InstallPath" )
RegCloseKey( hkey )
                    # The registry variable might exist, left by old installations
                    # but with no value; in those cases we keep searching...
if not path:
continue
# Now append the "bin" subdirectory:
#
path = os.path.join(path, "bin")
progs = __find_executables(path)
if progs is not None :
#print "Used Windows registry"
return progs
except Exception, excp:
#raise excp
pass
else:
break
# Method 2 (Linux, Windows etc)
#
if os.environ.has_key('PATH'):
for path in os.environ['PATH'].split(os.pathsep):
progs = __find_executables(path)
if progs is not None :
#print "Used path"
return progs
# Method 3 (Windows only)
#
if os.sys.platform == 'win32':
# Try and work out the equivalent of "C:\Program Files" on this
# machine (might be on drive D:, or in a different language)
#
if os.environ.has_key('PROGRAMFILES'):
# Note, we could also use the win32api to get this
# information, but win32api may not be installed.
path = os.path.join(os.environ['PROGRAMFILES'], 'ATT', 'GraphViz', 'bin')
else:
#Just in case, try the default...
path = r"C:\Program Files\att\Graphviz\bin"
progs = __find_executables(path)
if progs is not None :
#print "Used default install location"
return progs
for path in (
'/usr/bin', '/usr/local/bin',
'/opt/local/bin',
'/opt/bin', '/sw/bin', '/usr/share',
'/Applications/Graphviz.app/Contents/MacOS/' ):
progs = __find_executables(path)
if progs is not None :
#print "Used path"
return progs
# Failed to find GraphViz
#
return None
class Common:
"""Common information to several classes.
Should not be directly used, several classes are derived from
this one.
"""
def __getstate__(self):
dict = copy.copy(self.obj_dict)
return dict
def __setstate__(self, state):
self.obj_dict = state
def __get_attribute__(self, attr):
"""Look for default attributes for this node"""
attr_val = self.obj_dict['attributes'].get(attr, None)
if attr_val is None:
# get the defaults for nodes/edges
default_node_name = self.obj_dict['type']
# The defaults for graphs are set on a node named 'graph'
if default_node_name in ('subgraph', 'digraph', 'cluster'):
default_node_name = 'graph'
defaults = self.get_parent_graph().get_node( default_node_name )
# Multiple defaults could be set by having repeated 'graph [...]'
# 'node [...]', 'edge [...]' statements. In such case, if the
# same attribute is set in different statements, only the first
# will be returned. In order to get all, one would call the
# get_*_defaults() methods and handle those. Or go node by node
# (of the ones specifying defaults) and modify the attributes
# individually.
#
if not isinstance(defaults, (list, tuple)):
defaults = [defaults]
for default in defaults:
attr_val = default.obj_dict['attributes'].get(attr, None)
if attr_val:
return attr_val
else:
return attr_val
return None
def set_parent_graph(self, parent_graph):
self.obj_dict['parent_graph'] = parent_graph
def get_parent_graph(self):
return self.obj_dict.get('parent_graph', None)
def set(self, name, value):
"""Set an attribute value by name.
Given an attribute 'name' it will set its value to 'value'.
There's always the possibility of using the methods:
set_'name'(value)
which are defined for all the existing attributes.
"""
self.obj_dict['attributes'][name] = value
def get(self, name):
"""Get an attribute value by name.
Given an attribute 'name' it will get its value.
There's always the possibility of using the methods:
get_'name'()
which are defined for all the existing attributes.
"""
return self.obj_dict['attributes'].get(name, None)
def get_attributes(self):
""""""
return self.obj_dict['attributes']
def set_sequence(self, seq):
self.obj_dict['sequence'] = seq
def get_sequence(self):
return self.obj_dict['sequence']
def create_attribute_methods(self, obj_attributes):
#for attr in self.obj_dict['attributes']:
for attr in obj_attributes:
# Generate all the Setter methods.
#
self.__setattr__( 'set_'+attr, lambda x, a=attr : self.obj_dict['attributes'].__setitem__(a, x) )
# Generate all the Getter methods.
#
self.__setattr__('get_'+attr, lambda a=attr : self.__get_attribute__(a))
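    # Added note: after create_attribute_methods(NODE_ATTRIBUTES) runs, calls
    # such as node.set_label('x') become available and are equivalent to
    # node.set('label', 'x'); node.get_label() additionally falls back to the
    # graph-level defaults via __get_attribute__.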
class Error(Exception):
"""General error handling class.
"""
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
class InvocationException(Exception):
"""To indicate that a ploblem occurred while running any of the GraphViz executables.
"""
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
class Node(object, Common):
"""A graph node.
This class represents a graph's node with all its attributes.
node(name, attribute=value, ...)
name: node's name
All the attributes defined in the Graphviz dot language should
be supported.
"""
def __init__(self, name = '', obj_dict = None, **attrs):
#
# Nodes will take attributes of all other types because the defaults
# for any GraphViz object are dealt with as if they were Node definitions
#
if obj_dict is not None:
self.obj_dict = obj_dict
else:
self.obj_dict = dict()
# Copy the attributes
#
self.obj_dict[ 'attributes' ] = dict( attrs )
self.obj_dict[ 'type' ] = 'node'
self.obj_dict[ 'parent_graph' ] = None
self.obj_dict[ 'parent_node_list' ] = None
self.obj_dict[ 'sequence' ] = None
# Remove the compass point
#
port = None
if isinstance(name, basestring) and not name.startswith('"'):
idx = name.find(':')
if idx > 0:
name, port = name[:idx], name[idx:]
if isinstance(name, (long, int)):
name = str(name)
self.obj_dict['name'] = quote_if_necessary( name )
self.obj_dict['port'] = port
self.create_attribute_methods(NODE_ATTRIBUTES)
def set_name(self, node_name):
"""Set the node's name."""
self.obj_dict['name'] = node_name
def get_name(self):
"""Get the node's name."""
return self.obj_dict['name']
def get_port(self):
"""Get the node's port."""
return self.obj_dict['port']
def add_style(self, style):
styles = self.obj_dict['attributes'].get('style', None)
if not styles and style:
styles = [ style ]
else:
styles = styles.split(',')
styles.append( style )
self.obj_dict['attributes']['style'] = ','.join( styles )
def to_string(self):
"""Returns a string representation of the node in dot language.
"""
# RMF: special case defaults for node, edge and graph properties.
#
node = quote_if_necessary(self.obj_dict['name'])
node_attr = list()
for attr, value in self.obj_dict['attributes'].iteritems():
if value is not None:
node_attr.append( '%s=%s' % (attr, quote_if_necessary(value) ) )
else:
node_attr.append( attr )
        # No point in having nodes setting any defaults if they don't set
        # any attributes...
#
if node in ('graph', 'node', 'edge') and len(node_attr) == 0:
return ''
node_attr = ', '.join(node_attr)
if node_attr:
node += ' [' + node_attr + ']'
return node + ';'
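# Added example (illustrative): Node('a', shape='box').to_string() produces
# 'a [shape=box];', while the pseudo-nodes 'graph', 'node' and 'edge' emit
# nothing unless they carry default attributes.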
class Edge(object, Common ):
"""A graph edge.
This class represents a graph's edge with all its attributes.
edge(src, dst, attribute=value, ...)
src: source node's name
dst: destination node's name
All the attributes defined in the Graphviz dot language should
be supported.
Attributes can be set through the dynamically generated methods:
set_[attribute name], i.e. set_label, set_fontname
or directly by using the instance's special dictionary:
Edge.obj_dict['attributes'][attribute name], i.e.
edge_instance.obj_dict['attributes']['label']
edge_instance.obj_dict['attributes']['fontname']
"""
def __init__(self, src='', dst='', obj_dict=None, **attrs):
if isinstance(src, (list, tuple)) and dst == '':
src, dst = src
if obj_dict is not None:
self.obj_dict = obj_dict
else:
self.obj_dict = dict()
# Copy the attributes
#
self.obj_dict[ 'attributes' ] = dict( attrs )
self.obj_dict[ 'type' ] = 'edge'
self.obj_dict[ 'parent_graph' ] = None
self.obj_dict[ 'parent_edge_list' ] = None
self.obj_dict[ 'sequence' ] = None
if isinstance(src, Node):
src = src.get_name()
if isinstance(dst, Node):
dst = dst.get_name()
points = ( quote_if_necessary( src) , quote_if_necessary( dst) )
self.obj_dict['points'] = points
self.create_attribute_methods(EDGE_ATTRIBUTES)
def get_source(self):
"""Get the edges source node name."""
return self.obj_dict['points'][0]
def get_destination(self):
"""Get the edge's destination node name."""
return self.obj_dict['points'][1]
def __hash__(self):
return hash( hash(self.get_source()) + hash(self.get_destination()) )
    def __eq__(self, edge):
        """Compare two edges.
        If the parent graph is directed, edges are equal only when
        their sources and destinations match, so A->B != B->A.
        If the parent graph is undirected, any edge
        connecting two nodes is equal to any other
        edge connecting the same nodes, A->B == B->A
        """
        if not isinstance(edge, Edge):
            raise Error, "Can't compare an edge to a non-edge object."
if self.get_parent_graph().get_top_graph_type() == 'graph':
# If the graph is undirected, the edge has neither
# source nor destination.
#
if ( ( self.get_source() == edge.get_source() and self.get_destination() == edge.get_destination() ) or
( edge.get_source() == self.get_destination() and edge.get_destination() == self.get_source() ) ):
return True
else:
if self.get_source()==edge.get_source() and self.get_destination()==edge.get_destination() :
return True
return False
def parse_node_ref(self, node_str):
if not isinstance(node_str, str):
return node_str
if node_str.startswith('"') and node_str.endswith('"'):
return node_str
node_port_idx = node_str.rfind(':')
if node_port_idx>0 and node_str[0]=='"' and node_str[node_port_idx-1]=='"':
return node_str
if node_port_idx>0:
a = node_str[:node_port_idx]
b = node_str[node_port_idx+1:]
node = quote_if_necessary(a)
node += ':'+quote_if_necessary(b)
return node
return node_str
def to_string(self):
"""Returns a string representation of the edge in dot language.
"""
src = self.parse_node_ref( self.get_source() )
dst = self.parse_node_ref( self.get_destination() )
if isinstance(src, frozendict):
edge = [ Subgraph(obj_dict=src).to_string() ]
elif isinstance(src, (int, long)):
edge = [ str(src) ]
else:
edge = [ src ]
if (self.get_parent_graph() and
self.get_parent_graph().get_top_graph_type() and
self.get_parent_graph().get_top_graph_type() == 'digraph' ):
edge.append( '->' )
else:
edge.append( '--' )
if isinstance(dst, frozendict):
edge.append( Subgraph(obj_dict=dst).to_string() )
elif isinstance(dst, (int, long)):
edge.append( str(dst) )
else:
edge.append( dst )
edge_attr = list()
for attr, value in self.obj_dict['attributes'].iteritems():
if value is not None:
edge_attr.append( '%s=%s' % (attr, quote_if_necessary(value) ) )
else:
edge_attr.append( attr )
edge_attr = ', '.join(edge_attr)
if edge_attr:
edge.append( ' [' + edge_attr + ']' )
return ' '.join(edge) + ';'
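# Added example (illustrative): with no parent graph set, Edge('a', 'b').to_string()
# renders the undirected form 'a -- b;'; inside a digraph it renders 'a -> b;'.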
class Graph(object, Common):
"""Class representing a graph in Graphviz's dot language.
This class implements the methods to work on a representation
of a graph in Graphviz's dot language.
graph( graph_name='G', graph_type='digraph',
strict=False, suppress_disconnected=False, attribute=value, ...)
graph_name:
the graph's name
graph_type:
can be 'graph' or 'digraph'
    suppress_disconnected:
        defaults to False; when set to True, nodes with no incoming
        or outgoing edges are removed from the graph.
    simplify:
        if True it will avoid displaying equal edges, i.e.
        only one edge between two nodes, removing the
        duplicated ones.
All the attributes defined in the Graphviz dot language should
be supported.
Attributes can be set through the dynamically generated methods:
set_[attribute name], i.e. set_size, set_fontname
or using the instance's attributes:
Graph.obj_dict['attributes'][attribute name], i.e.
graph_instance.obj_dict['attributes']['label']
graph_instance.obj_dict['attributes']['fontname']
"""
def __init__(self, graph_name='G', obj_dict=None, graph_type='digraph', strict=False,
suppress_disconnected=False, simplify=False, **attrs):
if obj_dict is not None:
self.obj_dict = obj_dict
else:
self.obj_dict = dict()
self.obj_dict['attributes'] = dict(attrs)
if graph_type not in ['graph', 'digraph']:
            raise Error, 'Invalid type "%s". Accepted graph types are: graph, digraph' % graph_type
self.obj_dict['name'] = graph_name
self.obj_dict['type'] = graph_type
self.obj_dict['strict'] = strict
self.obj_dict['suppress_disconnected'] = suppress_disconnected
self.obj_dict['simplify'] = simplify
self.obj_dict['current_child_sequence'] = 1
self.obj_dict['nodes'] = dict()
self.obj_dict['edges'] = dict()
self.obj_dict['subgraphs'] = dict()
self.set_parent_graph(self)
self.create_attribute_methods(GRAPH_ATTRIBUTES)
def get_graph_type(self):
return self.obj_dict['type']
def get_top_graph_type(self):
parent = self
while True:
parent_ = parent.get_parent_graph()
if parent_ == parent:
break
parent = parent_
return parent.obj_dict['type']
def set_graph_defaults(self, **attrs):
self.add_node( Node('graph', **attrs) )
def get_graph_defaults(self, **attrs):
graph_nodes = self.get_node('graph')
if isinstance( graph_nodes, (list, tuple)):
return [ node.get_attributes() for node in graph_nodes ]
return graph_nodes.get_attributes()
def set_node_defaults(self, **attrs):
self.add_node( Node('node', **attrs) )
def get_node_defaults(self, **attrs):
graph_nodes = self.get_node('node')
if isinstance( graph_nodes, (list, tuple)):
return [ node.get_attributes() for node in graph_nodes ]
return graph_nodes.get_attributes()
def set_edge_defaults(self, **attrs):
self.add_node( Node('edge', **attrs) )
def get_edge_defaults(self, **attrs):
graph_nodes = self.get_node('edge')
if isinstance( graph_nodes, (list, tuple)):
return [ node.get_attributes() for node in graph_nodes ]
return graph_nodes.get_attributes()
def set_simplify(self, simplify):
"""Set whether to simplify or not.
        If True, equal (parallel) edges are displayed only
        once, i.e. a single edge between two nodes, and the
        duplicates are removed.
"""
self.obj_dict['simplify'] = simplify
def get_simplify(self):
"""Get whether to simplify or not.
Refer to set_simplify for more information.
"""
return self.obj_dict['simplify']
def set_type(self, graph_type):
"""Set the graph's type, 'graph' or 'digraph'."""
self.obj_dict['type'] = graph_type
def get_type(self):
"""Get the graph's type, 'graph' or 'digraph'."""
return self.obj_dict['type']
def set_name(self, graph_name):
"""Set the graph's name."""
self.obj_dict['name'] = graph_name
def get_name(self):
"""Get the graph's name."""
return self.obj_dict['name']
def set_strict(self, val):
"""Set graph to 'strict' mode.
This option is only valid for top level graphs.
"""
self.obj_dict['strict'] = val
    def get_strict(self):
"""Get graph's 'strict' mode (True, False).
This option is only valid for top level graphs.
"""
return self.obj_dict['strict']
def set_suppress_disconnected(self, val):
"""Suppress disconnected nodes in the output graph.
This option will skip nodes in the graph with no incoming or outgoing
        edges. This option also works for subgraphs and has effect only in the
current graph/subgraph.
"""
self.obj_dict['suppress_disconnected'] = val
    def get_suppress_disconnected(self):
"""Get if suppress disconnected is set.
Refer to set_suppress_disconnected for more information.
"""
return self.obj_dict['suppress_disconnected']
def get_next_sequence_number(self):
seq = self.obj_dict['current_child_sequence']
self.obj_dict['current_child_sequence'] += 1
return seq
def add_node(self, graph_node):
"""Adds a node object to the graph.
It takes a node object as its only argument and returns
None.
"""
if not isinstance(graph_node, Node):
raise TypeError('add_node() received a non node class object: ' + str(graph_node))
node = self.get_node(graph_node.get_name())
if not node:
self.obj_dict['nodes'][graph_node.get_name()] = [ graph_node.obj_dict ]
#self.node_dict[graph_node.get_name()] = graph_node.attributes
graph_node.set_parent_graph(self.get_parent_graph())
else:
self.obj_dict['nodes'][graph_node.get_name()].append( graph_node.obj_dict )
graph_node.set_sequence(self.get_next_sequence_number())
def del_node(self, name, index=None):
"""Delete a node from the graph.
Given a node's name all node(s) with that same name
will be deleted if 'index' is not specified or set
to None.
If there are several nodes with that same name and
'index' is given, only the node in that position
will be deleted.
'index' should be an integer specifying the position
of the node to delete. If index is larger than the
number of nodes with that name, no action is taken.
If nodes are deleted it returns True. If no action
is taken it returns False.
"""
if isinstance(name, Node):
name = name.get_name()
if self.obj_dict['nodes'].has_key(name):
if index is not None and index < len(self.obj_dict['nodes'][name]):
del self.obj_dict['nodes'][name][index]
return True
else:
del self.obj_dict['nodes'][name]
return True
return False
def get_node(self, name):
"""Retrieve a node from the graph.
Given a node's name the corresponding Node
instance will be returned.
If one or more nodes exist with that name a list of
Node instances is returned.
An empty list is returned otherwise.
"""
match = list()
if self.obj_dict['nodes'].has_key(name):
match.extend( [ Node( obj_dict = obj_dict ) for obj_dict in self.obj_dict['nodes'][name] ])
return match
def get_nodes(self):
"""Get the list of Node instances."""
return self.get_node_list()
def get_node_list(self):
"""Get the list of Node instances.
This method returns the list of Node instances
composing the graph.
"""
node_objs = list()
for node, obj_dict_list in self.obj_dict['nodes'].iteritems():
node_objs.extend( [ Node( obj_dict = obj_d ) for obj_d in obj_dict_list ] )
return node_objs
def add_edge(self, graph_edge):
"""Adds an edge object to the graph.
        It takes an edge object as its only argument and returns
None.
"""
if not isinstance(graph_edge, Edge):
raise TypeError('add_edge() received a non edge class object: ' + str(graph_edge))
edge_points = ( graph_edge.get_source(), graph_edge.get_destination() )
if self.obj_dict['edges'].has_key(edge_points):
edge_list = self.obj_dict['edges'][edge_points]
edge_list.append(graph_edge.obj_dict)
else:
self.obj_dict['edges'][edge_points] = [ graph_edge.obj_dict ]
graph_edge.set_sequence( self.get_next_sequence_number() )
graph_edge.set_parent_graph( self.get_parent_graph() )
def del_edge(self, src_or_list, dst=None, index=None):
"""Delete an edge from the graph.
Given an edge's (source, destination) node names all
matching edges(s) will be deleted if 'index' is not
specified or set to None.
If there are several matching edges and 'index' is
given, only the edge in that position will be deleted.
'index' should be an integer specifying the position
of the edge to delete. If index is larger than the
number of matching edges, no action is taken.
If edges are deleted it returns True. If no action
is taken it returns False.
"""
if isinstance( src_or_list, (list, tuple)):
if dst is not None and isinstance(dst, (int, long)):
index = dst
src, dst = src_or_list
else:
src, dst = src_or_list, dst
if isinstance(src, Node):
src = src.get_name()
if isinstance(dst, Node):
dst = dst.get_name()
if self.obj_dict['edges'].has_key( (src, dst) ):
if index is not None and index < len(self.obj_dict['edges'][(src, dst)]):
del self.obj_dict['edges'][(src, dst)][index]
return True
else:
del self.obj_dict['edges'][(src, dst)]
return True
return False
    def get_edge(self, src_or_list, dst=None):
        """Retrieve an edge from the graph.
Given an edge's source and destination the corresponding
Edge instance(s) will be returned.
If one or more edges exist with that source and destination
a list of Edge instances is returned.
An empty list is returned otherwise.
"""
if isinstance( src_or_list, (list, tuple)) and dst is None:
edge_points = tuple(src_or_list)
edge_points_reverse = (edge_points[1], edge_points[0])
else:
edge_points = (src_or_list, dst)
edge_points_reverse = (dst, src_or_list)
match = list()
if self.obj_dict['edges'].has_key( edge_points ) or (
self.get_top_graph_type() == 'graph' and self.obj_dict['edges'].has_key( edge_points_reverse )):
edges_obj_dict = self.obj_dict['edges'].get(
edge_points,
self.obj_dict['edges'].get( edge_points_reverse, None ))
for edge_obj_dict in edges_obj_dict:
match.append( Edge( edge_points[0], edge_points[1], obj_dict = edge_obj_dict ) )
return match
def get_edges(self):
return self.get_edge_list()
def get_edge_list(self):
"""Get the list of Edge instances.
This method returns the list of Edge instances
composing the graph.
"""
edge_objs = list()
for edge, obj_dict_list in self.obj_dict['edges'].iteritems():
edge_objs.extend( [ Edge( obj_dict = obj_d ) for obj_d in obj_dict_list ] )
return edge_objs
    def add_subgraph(self, sgraph):
        """Adds a subgraph object to the graph.
It takes a subgraph object as its only argument and returns
None.
"""
if not isinstance(sgraph, Subgraph) and not isinstance(sgraph, Cluster):
raise TypeError('add_subgraph() received a non subgraph class object:' + str(sgraph))
if self.obj_dict['subgraphs'].has_key(sgraph.get_name()):
sgraph_list = self.obj_dict['subgraphs'][ sgraph.get_name() ]
sgraph_list.append( sgraph.obj_dict )
else:
self.obj_dict['subgraphs'][ sgraph.get_name() ] = [ sgraph.obj_dict ]
sgraph.set_sequence( self.get_next_sequence_number() )
sgraph.set_parent_graph( self.get_parent_graph() )
    def get_subgraph(self, name):
        """Retrieve a subgraph from the graph.
Given a subgraph's name the corresponding
Subgraph instance will be returned.
If one or more subgraphs exist with the same name, a list of
Subgraph instances is returned.
An empty list is returned otherwise.
"""
match = list()
if self.obj_dict['subgraphs'].has_key( name ):
sgraphs_obj_dict = self.obj_dict['subgraphs'].get( name )
for obj_dict_list in sgraphs_obj_dict:
#match.extend( Subgraph( obj_dict = obj_d ) for obj_d in obj_dict_list )
match.append( Subgraph( obj_dict = obj_dict_list ) )
return match
def get_subgraphs(self):
return self.get_subgraph_list()
def get_subgraph_list(self):
"""Get the list of Subgraph instances.
This method returns the list of Subgraph instances
in the graph.
"""
sgraph_objs = list()
for sgraph, obj_dict_list in self.obj_dict['subgraphs'].iteritems():
sgraph_objs.extend( [ Subgraph( obj_dict = obj_d ) for obj_d in obj_dict_list ] )
return sgraph_objs
def set_parent_graph(self, parent_graph):
self.obj_dict['parent_graph'] = parent_graph
for obj_list in self.obj_dict['nodes'].itervalues():
for obj in obj_list:
obj['parent_graph'] = parent_graph
for obj_list in self.obj_dict['edges'].itervalues():
for obj in obj_list:
obj['parent_graph'] = parent_graph
for obj_list in self.obj_dict['subgraphs'].itervalues():
for obj in obj_list:
Graph(obj_dict=obj).set_parent_graph(parent_graph)
def to_string(self):
"""Returns a string representation of the graph in dot language.
        It will return the graph and all its subelements in string form.
"""
graph = list()
if self.obj_dict.get('strict', None) is not None:
if self==self.get_parent_graph() and self.obj_dict['strict']:
graph.append('strict ')
if self.obj_dict['name'] == '':
if 'show_keyword' in self.obj_dict and self.obj_dict['show_keyword']:
graph.append( 'subgraph {\n' )
else:
graph.append( '{\n' )
else:
graph.append( '%s %s {\n' % (self.obj_dict['type'], self.obj_dict['name']) )
for attr in self.obj_dict['attributes'].iterkeys():
if self.obj_dict['attributes'].get(attr, None) is not None:
val = self.obj_dict['attributes'].get(attr)
if val is not None:
graph.append( '%s=%s' % (attr, quote_if_necessary(val)) )
else:
graph.append( attr )
graph.append( ';\n' )
edges_done = set()
edge_obj_dicts = list()
for e in self.obj_dict['edges'].itervalues():
edge_obj_dicts.extend(e)
if edge_obj_dicts:
edge_src_set, edge_dst_set = zip( *[obj['points'] for obj in edge_obj_dicts] )
edge_src_set, edge_dst_set = set(edge_src_set), set(edge_dst_set)
else:
edge_src_set, edge_dst_set = set(), set()
node_obj_dicts = list()
for e in self.obj_dict['nodes'].itervalues():
node_obj_dicts.extend(e)
sgraph_obj_dicts = list()
for sg in self.obj_dict['subgraphs'].itervalues():
sgraph_obj_dicts.extend(sg)
obj_list = [ (obj['sequence'], obj) for obj in (edge_obj_dicts + node_obj_dicts + sgraph_obj_dicts) ]
obj_list.sort()
for idx, obj in obj_list:
if obj['type'] == 'node':
node = Node(obj_dict=obj)
if self.obj_dict.get('suppress_disconnected', False):
if (node.get_name() not in edge_src_set and
node.get_name() not in edge_dst_set):
continue
graph.append( node.to_string()+'\n' )
elif obj['type'] == 'edge':
edge = Edge(obj_dict=obj)
if self.obj_dict.get('simplify', False) and edge in edges_done:
continue
graph.append( edge.to_string() + '\n' )
edges_done.add(edge)
else:
sgraph = Subgraph(obj_dict=obj)
graph.append( sgraph.to_string()+'\n' )
graph.append( '}\n' )
return ''.join(graph)
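# A minimal usage sketch (not part of the original module; defined only for
# illustration and never called): it assumes the Node(name, **attrs)
# constructor defined earlier in the module, builds a two-node digraph and
# renders it with to_string().
def _example_graph_to_string():
    g = Graph(graph_name='G', graph_type='digraph')
    g.add_node(Node('a', shape='box'))
    g.add_node(Node('b'))
    g.add_edge(Edge('a', 'b', label='a to b'))
    # The result is roughly:
    #   digraph G {
    #   a [shape=box];
    #   b;
    #   a -> b [label="a to b"];
    #   }
    return g.to_string()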
class Subgraph(Graph):
"""Class representing a subgraph in Graphviz's dot language.
This class implements the methods to work on a representation
of a subgraph in Graphviz's dot language.
subgraph(graph_name='subG', suppress_disconnected=False, attribute=value, ...)
graph_name:
the subgraph's name
    suppress_disconnected:
        defaults to False. If set to True, disconnected
        nodes will be removed from the subgraph.
All the attributes defined in the Graphviz dot language should
be supported.
Attributes can be set through the dynamically generated methods:
set_[attribute name], i.e. set_size, set_fontname
or using the instance's attributes:
Subgraph.obj_dict['attributes'][attribute name], i.e.
subgraph_instance.obj_dict['attributes']['label']
subgraph_instance.obj_dict['attributes']['fontname']
"""
# RMF: subgraph should have all the attributes of graph so it can be passed
# as a graph to all methods
#
def __init__(self, graph_name='', obj_dict=None, suppress_disconnected=False,
simplify=False, **attrs):
Graph.__init__(self, graph_name=graph_name, obj_dict=obj_dict,
suppress_disconnected=suppress_disconnected, simplify=simplify, **attrs)
if obj_dict is None:
self.obj_dict['type'] = 'subgraph'
class Cluster(Graph):
"""Class representing a cluster in Graphviz's dot language.
This class implements the methods to work on a representation
of a cluster in Graphviz's dot language.
cluster(graph_name='subG', suppress_disconnected=False, attribute=value, ...)
graph_name:
        the cluster's name (the prefix 'cluster_' will always be prepended)
    suppress_disconnected:
        defaults to False. If set to True, disconnected
        nodes will be removed from the cluster.
All the attributes defined in the Graphviz dot language should
be supported.
Attributes can be set through the dynamically generated methods:
set_[attribute name], i.e. set_color, set_fontname
or using the instance's attributes:
Cluster.obj_dict['attributes'][attribute name], i.e.
cluster_instance.obj_dict['attributes']['label']
cluster_instance.obj_dict['attributes']['fontname']
"""
def __init__(self, graph_name='subG', obj_dict=None, suppress_disconnected=False,
simplify=False, **attrs):
Graph.__init__(self, graph_name=graph_name, obj_dict=obj_dict,
suppress_disconnected=suppress_disconnected, simplify=simplify, **attrs)
if obj_dict is None:
self.obj_dict['type'] = 'subgraph'
self.obj_dict['name'] = 'cluster_'+graph_name
self.create_attribute_methods(CLUSTER_ATTRIBUTES)
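# A small sketch (defined only for illustration, never called): Cluster
# prepends 'cluster_' to the supplied name, which is how Graphviz recognizes
# a subgraph as a cluster.
def _example_cluster_name():
    c = Cluster('0', label='first')
    return c.get_name()  # returns 'cluster_0'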
class Dot(Graph):
"""A container for handling a dot language file.
This class implements methods to write and process
a dot language file. It is a derived class of
the base class 'Graph'.
"""
def __init__(self, *argsl, **argsd):
Graph.__init__(self, *argsl, **argsd)
self.shape_files = list()
self.progs = None
self.formats = ['canon', 'cmap', 'cmapx', 'cmapx_np', 'dia', 'dot',
'fig', 'gd', 'gd2', 'gif', 'hpgl', 'imap', 'imap_np', 'ismap',
'jpe', 'jpeg', 'jpg', 'mif', 'mp', 'pcl', 'pdf', 'pic', 'plain',
'plain-ext', 'png', 'ps', 'ps2', 'svg', 'svgz', 'vml', 'vmlz',
'vrml', 'vtx', 'wbmp', 'xdot', 'xlib' ]
self.prog = 'dot'
# Automatically creates all the methods enabling the creation
# of output in any of the supported formats.
for frmt in self.formats:
self.__setattr__(
'create_'+frmt,
lambda f=frmt, prog=self.prog : self.create(format=f, prog=prog))
f = self.__dict__['create_'+frmt]
f.__doc__ = '''Refer to the docstring accompanying the 'create' method for more information.'''
for frmt in self.formats+['raw']:
self.__setattr__(
'write_'+frmt,
lambda path, f=frmt, prog=self.prog : self.write(path, format=f, prog=prog))
f = self.__dict__['write_'+frmt]
f.__doc__ = '''Refer to the docstring accompanying the 'write' method for more information.'''
def __getstate__(self):
dict = copy.copy(self.obj_dict)
return dict
def __setstate__(self, state):
self.obj_dict = state
def set_shape_files(self, file_paths):
"""Add the paths of the required image files.
If the graph needs graphic objects to be used as shapes or otherwise
those need to be in the same folder as the graph is going to be rendered
from. Alternatively the absolute path to the files can be specified when
including the graphics in the graph.
The files in the location pointed to by the path(s) specified as arguments
to this method will be copied to the same temporary location where the
graph is going to be rendered.
"""
if isinstance( file_paths, basestring ):
self.shape_files.append( file_paths )
if isinstance( file_paths, (list, tuple) ):
self.shape_files.extend( file_paths )
def set_prog(self, prog):
"""Sets the default program.
Sets the default program in charge of processing
the dot file into a graph.
"""
self.prog = prog
    def set_graphviz_executables(self, paths):
        """This method allows one to manually specify the location of the GraphViz executables.
The argument to this method should be a dictionary where the keys are as follows:
{'dot': '', 'twopi': '', 'neato': '', 'circo': '', 'fdp': ''}
and the values are the paths to the corresponding executable, including the name
of the executable itself.
"""
self.progs = paths
def write(self, path, prog=None, format='raw'):
"""Writes a graph to a file.
Given a filename 'path' it will open/create and truncate
such file and write on it a representation of the graph
defined by the dot object and in the format specified by
'format'.
The format 'raw' is used to dump the string representation
of the Dot object, without further processing.
        The output can be processed by any of the graphviz tools, as
        specified in 'prog', which defaults to 'dot'.
Returns True or False according to the success of the write
operation.
There's also the preferred possibility of using:
write_'format'(path, prog='program')
which are automatically defined for all the supported formats.
[write_ps(), write_gif(), write_dia(), ...]
"""
if prog is None:
prog = self.prog
dot_fd = file(path, "w+b")
if format == 'raw':
dot_fd.write(self.to_string())
else:
dot_fd.write(self.create(prog, format))
dot_fd.close()
return True
def create(self, prog=None, format='ps'):
"""Creates and returns a Postscript representation of the graph.
create will write the graph to a temporary dot file and process
        it with the program given by 'prog' (which defaults to 'dot'),
        reading the output and returning it as a string if the
        operation is successful.
On failure None is returned.
There's also the preferred possibility of using:
create_'format'(prog='program')
which are automatically defined for all the supported formats.
[create_ps(), create_gif(), create_dia(), ...]
        If 'prog' is a list instead of a string the first item is expected
to be the program name, followed by any optional command-line
arguments for it:
[ 'twopi', '-Tdot', '-s10' ]
"""
if prog is None:
prog = self.prog
if isinstance(prog, (list, tuple)):
            prog, args = prog[0], list(prog[1:])
else:
args = []
if self.progs is None:
self.progs = find_graphviz()
if self.progs is None:
raise InvocationException(
'GraphViz\'s executables not found' )
if not self.progs.has_key(prog):
raise InvocationException(
'GraphViz\'s executable "%s" not found' % prog )
if not os.path.exists( self.progs[prog] ) or not os.path.isfile( self.progs[prog] ):
raise InvocationException(
'GraphViz\'s executable "%s" is not a file or doesn\'t exist' % self.progs[prog] )
tmp_fd, tmp_name = tempfile.mkstemp()
os.close(tmp_fd)
self.write(tmp_name)
tmp_dir = os.path.dirname(tmp_name )
# For each of the image files...
#
for img in self.shape_files:
# Get its data
#
f = file(img, 'rb')
f_data = f.read()
f.close()
# And copy it under a file with the same name in the temporary directory
#
f = file( os.path.join( tmp_dir, os.path.basename(img) ), 'wb' )
f.write(f_data)
f.close()
cmdline = [self.progs[prog], '-T'+format, tmp_name] + args
p = subprocess.Popen(
cmdline,
cwd=tmp_dir,
stderr=subprocess.PIPE, stdout=subprocess.PIPE)
stderr = p.stderr
stdout = p.stdout
stdout_output = list()
while True:
data = stdout.read()
if not data:
break
stdout_output.append(data)
stdout.close()
stdout_output = ''.join(stdout_output)
if not stderr.closed:
stderr_output = list()
while True:
data = stderr.read()
if not data:
break
stderr_output.append(data)
stderr.close()
if stderr_output:
stderr_output = ''.join(stderr_output)
#pid, status = os.waitpid(p.pid, 0)
status = p.wait()
if status != 0 :
raise InvocationException(
'Program terminated with status: %d. stderr follows: %s' % (
status, stderr_output) )
elif stderr_output:
print stderr_output
# For each of the image files...
#
for img in self.shape_files:
# remove it
#
os.unlink( os.path.join( tmp_dir, os.path.basename(img) ) )
os.unlink(tmp_name)
return stdout_output
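# A minimal usage sketch (not part of the original module; defined but never
# called, and it assumes the Graphviz binaries are installed where
# find_graphviz() can locate them):
def _example_dot_render():
    dot = Dot(graph_name='G', graph_type='digraph')
    dot.add_edge(Edge('a', 'b'))
    # 'prog' may be a plain program name or a list carrying extra
    # command-line arguments for it.
    png_bytes = dot.create(prog=['dot', '-Gdpi=72'], format='png')
    dot.write('example.png', prog='dot', format='png')
    return png_bytes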
|
andrewjylee/omniplay
|
logdb/pydot.py
|
Python
|
bsd-2-clause
| 60,152 | 0.01694 |
#!/usr/bin/python3 -S
# -*- coding: utf-8 -*-
"""
`Unit tests for cargo.clients.AioPostgresPool`
--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--
2016 Jared Lunde © The MIT License (MIT)
http://github.com/jaredlunde
"""
import unittest
import psycopg2
from cargo.cursors import *
from cargo.clients import AioPostgresPool, local_client
from unit_tests.aio import configure
class TestAioPostgresPool(unittest.TestCase):
@staticmethod
def setUpClass():
db = configure.db
configure.drop_schema(db, 'cargo_tests', cascade=True, if_exists=True)
configure.create_schema(db, 'cargo_tests')
configure.Plan(configure.Foo()).execute()
@staticmethod
def tearDownClass():
db = configure.db
configure.drop_schema(db, 'cargo_tests', cascade=True, if_exists=True)
local_client.clear()
'''def test_connect(self):
client = AioPostgresPool()
self.assertFalse(client.autocommit)
self.assertIsNone(client._pool)
self.assertDictEqual(client._connection_options, {})
self.assertIsNone(client._schema)
self.assertEqual(client.encoding, None)
self.assertEqual(client.cursor_factory, CNamedTupleCursor)
def test_connection(self):
client = AioPostgresPool(1, 2)
conn = client.get()
self.assertFalse(conn._connection.closed)
conn.close()
self.assertTrue(conn._connection.closed)
client = AioPostgresPool(1, 2)
conn = client.get()
self.assertFalse(conn._connection.closed)
def test_close(self):
client = AioPostgresPool(1, 2)
self.assertTrue(client.closed)
client.connect()
self.assertFalse(client.closed)
client.close()
self.assertTrue(client.closed)
def test_context_manager(self):
with AioPostgresPool(1, 2) as pool:
self.assertFalse(pool.closed)
with pool.get() as connection:
with pool.get() as connection2:
self.assertIsNot(connection, connection2)
with self.assertRaises(psycopg2.pool.PoolError):
with pool.get() as connection3:
pass
with pool.get() as connection4:
self.assertIsNot(connection, connection4)
self.assertIs(connection2.connection,
connection4.connection)
with self.assertRaises(psycopg2.pool.PoolError):
with pool.get() as connection5:
pass
self.assertTrue(pool.closed)
def test_connection_obj(self):
with AioPostgresPool(1, 2) as pool:
with pool.get() as connection:
self.assertIs(connection.autocommit, pool.autocommit)
self.assertIs(connection._dsn, pool._dsn)
self.assertIs(connection._schema, pool._schema)
self.assertIs(connection.encoding, pool.encoding)
self.assertIs(connection.minconn, pool.minconn)
self.assertIs(connection.maxconn, pool.maxconn)
self.assertIs(connection.cursor_factory, pool.cursor_factory)
def test_put(self):
with AioPostgresPool(1, 2) as pool:
conn = pool.get()
self.assertIsNotNone(conn._connection)
conn2 = pool.get()
self.assertIsNot(conn2, conn)
with self.assertRaises(psycopg2.pool.PoolError):
pool.get()
# Put conn obj
pool.put(conn)
conn2 = pool.get()
self.assertIsNotNone(conn2)
# Put raw conn
pool.put(conn2.connection)
conn2 = pool.get()
self.assertIsNotNone(conn2)
self.assertTrue(pool.closed)
self.assertTrue(conn.closed)
self.assertTrue(conn2.closed)
def test_commit(self):
client = AioPostgresPool(1, 2)
conn = client.get()
cur = conn.cursor()
client.apply_schema(cur, 'cargo_tests')
cur.execute("INSERT INTO foo (uid, textfield) VALUES (1, 'bar')")
self.assertIsNone(conn.commit())
cur = conn.cursor()
with self.assertRaises(psycopg2.ProgrammingError):
cur.execute(
"INSERT INTO foo (uid, textfield) VALUES (1, 'bar', 4)")
conn.commit()
with self.assertRaises(psycopg2.InternalError):
cur.execute("INSERT INTO foo (uid, textfield) VALUES (1, 'bar')")
client.put(conn)
def test_rollback(self):
client = AioPostgresPool(1, 2)
conn = client.get()
cur = conn.cursor()
client.apply_schema(cur, 'cargo_tests')
cur.execute("INSERT INTO foo (uid, textfield) VALUES (2, 'bar')")
self.assertIsNone(conn.commit())
cur = conn.cursor()
with self.assertRaises(psycopg2.ProgrammingError):
cur.execute(
"INSERT INTO foo (uid, textfield) VALUES (1, 'bar', 4)")
conn.commit()
with self.assertRaises(psycopg2.InternalError):
cur.execute("INSERT INTO foo (uid, textfield) VALUES (1, 'bar')")
conn.rollback()
cur.execute("INSERT INTO foo (uid, textfield) VALUES (3, 'bar')")
self.assertIsNone(conn.commit())
client.put(conn)
def test_minconn_maxconn(self):
client = AioPostgresPool(10, 12)
self.assertEqual(client.pool.minconn, 10)
self.assertEqual(client.pool.maxconn, 12)'''
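# A minimal sketch of the checkout pattern the disabled tests above exercise
# (kept out of the test run on purpose; the pool sizes are the same ones the
# commented-out tests use):
def _example_pool_checkout():
    with AioPostgresPool(1, 2) as pool:  # minconn=1, maxconn=2
        with pool.get() as conn1, pool.get() as conn2:
            # Both connections are checked out; a third pool.get() here
            # would raise psycopg2.pool.PoolError until one is returned.
            pass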
if __name__ == '__main__':
# Unit test
unittest.main()
|
jaredlunde/cargo-orm
|
unit_tests/aio/AioPostgresPool.py
|
Python
|
mit
| 5,680 | 0 |
import pydev_log
import traceback
import pydevd_resolver
from pydevd_constants import * #@UnusedWildImport
from types import * #@UnusedWildImport
try:
from urllib import quote
except:
from urllib.parse import quote #@UnresolvedImport
try:
from xml.sax.saxutils import escape
def makeValidXmlValue(s):
        return escape(s, {'"': '&quot;'})
except:
#Simple replacement if it's not there.
def makeValidXmlValue(s):
        return s.replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;')
class ExceptionOnEvaluate:
def __init__(self, result):
self.result = result
#------------------------------------------------------------------------------------------------------ resolvers in map
if not sys.platform.startswith("java"):
typeMap = [
#None means that it should not be treated as a compound variable
    #isinstance does not accept a tuple on some versions of python, so, we must declare it expanded
(type(None), None,),
(int, None),
(float, None),
(complex, None),
(str, None),
(tuple, pydevd_resolver.tupleResolver),
(list, pydevd_resolver.tupleResolver),
(dict, pydevd_resolver.dictResolver),
]
try:
typeMap.append((long, None))
except:
pass #not available on all python versions
try:
typeMap.append((unicode, None))
except:
pass #not available on all python versions
try:
typeMap.append((set, pydevd_resolver.setResolver))
except:
pass #not available on all python versions
try:
typeMap.append((frozenset, pydevd_resolver.setResolver))
except:
pass #not available on all python versions
else: #platform is java
from org.python import core #@UnresolvedImport
typeMap = [
(core.PyNone, None),
(core.PyInteger, None),
(core.PyLong, None),
(core.PyFloat, None),
(core.PyComplex, None),
(core.PyString, None),
(core.PyTuple, pydevd_resolver.tupleResolver),
(core.PyList, pydevd_resolver.tupleResolver),
(core.PyDictionary, pydevd_resolver.dictResolver),
(core.PyStringMap, pydevd_resolver.dictResolver),
]
if hasattr(core, 'PyJavaInstance'):
#Jython 2.5b3 removed it.
typeMap.append((core.PyJavaInstance, pydevd_resolver.instanceResolver))
def getType(o):
    """ returns a triple (typeObject, typeString, resolver).
resolver != None means that variable is a container,
and should be displayed as a hierarchy.
Use the resolver to get its attributes.
All container objects should have a resolver.
"""
try:
type_object = type(o)
type_name = type_object.__name__
except:
#This happens for org.python.core.InitModule
return 'Unable to get Type', 'Unable to get Type', None
try:
if type_name == 'org.python.core.PyJavaInstance':
return type_object, type_name, pydevd_resolver.instanceResolver
if type_name == 'org.python.core.PyArray':
return type_object, type_name, pydevd_resolver.jyArrayResolver
for t in typeMap:
if isinstance(o, t[0]):
return type_object, type_name, t[1]
except:
traceback.print_exc()
#no match return default
return type_object, type_name, pydevd_resolver.defaultResolver
def frameVarsToXML(frame_f_locals):
""" dumps frame variables to XML
<var name="var_name" scope="local" type="type" value="value"/>
"""
xml = ""
keys = frame_f_locals.keys()
if hasattr(keys, 'sort'):
keys.sort() #Python 3.0 does not have it
else:
keys = sorted(keys) #Jython 2.1 does not have it
for k in keys:
try:
v = frame_f_locals[k]
xml += varToXML(v, str(k))
except Exception:
traceback.print_exc()
pydev_log.error("Unexpected error, recovered safely.\n")
return xml
def varToXML(val, name, doTrim=True):
""" single variable or dictionary to xml representation """
is_exception_on_eval = isinstance(val, ExceptionOnEvaluate)
if is_exception_on_eval:
v = val.result
else:
v = val
type, typeName, resolver = getType(v)
try:
if hasattr(v, '__class__'):
try:
cName = str(v.__class__)
if cName.find('.') != -1:
cName = cName.split('.')[-1]
elif cName.find("'") != -1: #does not have '.' (could be something like <type 'int'>)
cName = cName[cName.index("'") + 1:]
if cName.endswith("'>"):
cName = cName[:-2]
except:
cName = str(v.__class__)
value = '%s: %s' % (cName, v)
else:
value = str(v)
except:
try:
value = repr(v)
except:
value = 'Unable to get repr for %s' % v.__class__
try:
name = quote(name, '/>_= ') #TODO: Fix PY-5834 without using quote
except:
pass
xml = '<var name="%s" type="%s"' % (makeValidXmlValue(name), makeValidXmlValue(typeName))
if value:
#cannot be too big... communication may not handle it.
if len(value) > MAXIMUM_VARIABLE_REPRESENTATION_SIZE and doTrim:
value = value[0:MAXIMUM_VARIABLE_REPRESENTATION_SIZE]
value += '...'
#fix to work with unicode values
try:
if not IS_PY3K:
if isinstance(value, unicode):
value = value.encode('utf-8')
else:
if isinstance(value, bytes):
                    value = value.decode('utf-8')
except TypeError: #in java, unicode is a function
pass
xmlValue = ' value="%s"' % (makeValidXmlValue(quote(value, '/>_= ')))
else:
xmlValue = ''
if is_exception_on_eval:
xmlCont = ' isErrorOnEval="True"'
else:
if resolver is not None:
xmlCont = ' isContainer="True"'
else:
xmlCont = ''
return ''.join((xml, xmlValue, xmlCont, ' />\n'))
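# A minimal sketch (defined here for illustration only, never executed by
# the debugger): frameVarsToXML() renders each local as a <var .../> element;
# container types additionally get isContainer="True".
def _exampleFrameVarsToXML():
    frame_locals = {'numbers': [1, 2, 3], 'name': 'pydev'}
    # Produces something along the lines of:
    #   <var name="numbers" type="list" value="..." isContainer="True" />
    #   <var name="name" type="str" value="..." />
    return frameVarsToXML(frame_locals)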
|
akiokio/centralfitestoque
|
src/.pycharm_helpers/pydev/pydevd_xml.py
|
Python
|
bsd-2-clause
| 6,287 | 0.009384 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
lasduplicate.py
---------------------
Date : September 2013
Copyright : (C) 2013 by Martin Isenburg
Email : martin near rapidlasso point com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Martin Isenburg'
__date__ = 'September 2013'
__copyright__ = '(C) 2013, Martin Isenburg'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from LAStoolsUtils import LAStoolsUtils
from LAStoolsAlgorithm import LAStoolsAlgorithm
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterFile
class lasduplicate(LAStoolsAlgorithm):
LOWEST_Z = "LOWEST_Z"
UNIQUE_XYZ = "UNIQUE_XYZ"
SINGLE_RETURNS = "SINGLE_RETURNS"
RECORD_REMOVED = "RECORD_REMOVED"
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('lasduplicate')
self.group, self.i18n_group = self.trAlgorithm('LAStools')
self.addParametersVerboseGUI()
self.addParametersPointInputGUI()
self.addParameter(ParameterBoolean(lasduplicate.LOWEST_Z,
self.tr("keep duplicate with lowest z coordinate"), False))
self.addParameter(ParameterBoolean(lasduplicate.UNIQUE_XYZ,
self.tr("only remove duplicates in x y and z"), False))
self.addParameter(ParameterBoolean(lasduplicate.SINGLE_RETURNS,
self.tr("mark surviving duplicate as single return"), False))
self.addParameter(ParameterFile(lasduplicate.RECORD_REMOVED,
self.tr("record removed duplicates to LAS/LAZ file")))
self.addParametersPointOutputGUI()
self.addParametersAdditionalGUI()
def processAlgorithm(self, progress):
commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasduplicate")]
self.addParametersVerboseCommands(commands)
self.addParametersPointInputCommands(commands)
if self.getParameterValue(lasduplicate.LOWEST_Z):
commands.append("-lowest_z")
if self.getParameterValue(lasduplicate.UNIQUE_XYZ):
commands.append("-unique_xyz")
if self.getParameterValue(lasduplicate.SINGLE_RETURNS):
commands.append("-single_returns")
record_removed = self.getParameterValue(lasduplicate.RECORD_REMOVED)
if record_removed is not None and record_removed != "":
commands.append("-record_removed")
commands.append(record_removed)
self.addParametersPointOutputCommands(commands)
self.addParametersAdditionalCommands(commands)
LAStoolsUtils.runLAStools(commands, progress)
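# For illustration only (the paths and file names below are made up): with
# every option enabled, processAlgorithm assembles a command list along the
# lines of
#   ['/opt/LAStools/bin/lasduplicate', '-v', '-i', 'input.laz',
#    '-lowest_z', '-unique_xyz', '-single_returns',
#    '-record_removed', 'removed.laz', '-o', 'output.laz']
# before handing it to LAStoolsUtils.runLAStools().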
|
jarped/QGIS
|
python/plugins/processing/algs/lidar/lastools/lasduplicate.py
|
Python
|
gpl-2.0
| 3,468 | 0.001442 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-03-13 11:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('views', '0014_data_migration'),
]
operations = [
migrations.AlterField(
model_name='view',
name='comment',
field=models.TextField(blank=True, help_text='Additional internal information about this view.', verbose_name='Comment'),
),
migrations.AlterField(
model_name='view',
name='help_lang1',
field=models.TextField(blank=True, help_text='The help text for this view in the primary language.', verbose_name='Help (primary)'),
),
migrations.AlterField(
model_name='view',
name='help_lang2',
field=models.TextField(blank=True, help_text='The help text for this view in the secondary language.', verbose_name='Help (secondary)'),
),
migrations.AlterField(
model_name='view',
name='help_lang3',
field=models.TextField(blank=True, help_text='The help text for this view in the tertiary language.', verbose_name='Help (tertiary)'),
),
migrations.AlterField(
model_name='view',
name='help_lang4',
field=models.TextField(blank=True, help_text='The help text for this view in the quaternary language.', verbose_name='Help (quaternary)'),
),
migrations.AlterField(
model_name='view',
name='help_lang5',
field=models.TextField(blank=True, help_text='The help text for this view in the quinary language.', verbose_name='Help (quinary)'),
),
migrations.AlterField(
model_name='view',
name='key',
field=models.SlugField(blank=True, help_text='The internal identifier of this view.', max_length=128, verbose_name='Key'),
),
migrations.AlterField(
model_name='view',
name='template',
field=models.TextField(blank=True, help_text='The template for this view, written in Django template language.', verbose_name='Template'),
),
migrations.AlterField(
model_name='view',
name='title_lang1',
field=models.CharField(blank=True, help_text='The title for this view in the primary language.', max_length=256, verbose_name='Title (primary)'),
),
migrations.AlterField(
model_name='view',
name='title_lang2',
field=models.CharField(blank=True, help_text='The title for this view in the secondary language.', max_length=256, verbose_name='Title (secondary)'),
),
migrations.AlterField(
model_name='view',
name='title_lang3',
field=models.CharField(blank=True, help_text='The title for this view in the tertiary language.', max_length=256, verbose_name='Title (tertiary)'),
),
migrations.AlterField(
model_name='view',
name='title_lang4',
field=models.CharField(blank=True, help_text='The title for this view in the quaternary language.', max_length=256, verbose_name='Title (quaternary)'),
),
migrations.AlterField(
model_name='view',
name='title_lang5',
field=models.CharField(blank=True, help_text='The title for this view in the quinary language.', max_length=256, verbose_name='Title (quinary)'),
),
migrations.AlterField(
model_name='view',
name='uri',
field=models.URLField(blank=True, help_text='The Uniform Resource Identifier of this view (auto-generated).', max_length=640, verbose_name='URI'),
),
migrations.AlterField(
model_name='view',
name='uri_prefix',
field=models.URLField(blank=True, help_text='The prefix for the URI of this view.', max_length=256, verbose_name='URI Prefix'),
),
]
|
rdmorganiser/rdmo
|
rdmo/views/migrations/0015_remove_null_true.py
|
Python
|
apache-2.0
| 4,097 | 0.003661 |
#!/usr/bin/env python
import sys
from imp import load_source
from os import path
src_path = path.abspath(path.dirname(__file__))
oj_path = path.join(src_path, 'dependencies', 'oj')
sys.path.append(oj_path)
oj = load_source('oj', path.join(oj_path, 'utils', 'run.py'))
oj.run(src_path)
|
NuAge-Solutions/NW
|
oj.py
|
Python
|
gpl-3.0
| 292 | 0 |
# vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Test functionality of group_by, including aggregation_arguments """
import unittest
import pandas as pd
import numpy as np
import math
from sparktkregtests.lib import sparktk_test
class GroupByTest(sparktk_test.SparkTKTestCase):
# Aggregates and names for non-numeric aggregates
# (some aggregates are not defined on integers)
# atk aggregates, then numpy aggregates
pd_cols_str = ['size', '<lambda>', 'max', 'min']
numpy_aggs_str = ['size',
lambda x: pd.Series.nunique(x, False),
'max',
'min']
atk_cols_str = ['_COUNT', '_COUNT_DISTINCT', '_MAX', '_MIN']
pd_cols = ['mean', 'size', '<lambda>', 'max',
'min', 'std', 'nansum', 'var']
numpy_aggs = ['mean',
'size',
lambda x: pd.Series.nunique(x, False),
'max',
'min',
'std',
np.nansum,
'var']
atk_cols = ['_AVG', '_COUNT', '_COUNT_DISTINCT', '_MAX',
'_MIN', '_STDEV', '_SUM', '_VAR']
def setUp(self):
"""Build test frame"""
super(GroupByTest, self).setUp()
# Aggregates to test on strings
self.aggs_str = [self.context.agg.count,
self.context.agg.count_distinct,
self.context.agg.max,
self.context.agg.min]
# Aggregates for numeric columns
self.aggs = [self.context.agg.avg,
self.context.agg.count,
self.context.agg.count_distinct,
self.context.agg.max,
self.context.agg.min,
self.context.agg.stdev,
self.context.agg.sum,
self.context.agg.var]
schema_colors = [("Int32_0_15", int),
("Int32_0_31", int),
("colors", str),
("Int64_0_15", int),
("Int64_0_31", int),
("Float32_0_15", float),
("Float32_0_31", float),
("Float64_0_15", float),
("Float64_0_31", float)]
dataset = self.get_file("colors_32_9cols_128rows.csv")
self.frame = self.context.frame.import_csv(
dataset, schema=schema_colors)
def test_stats_on_string_avg(self):
"""Non-numeric aggregates error on non-numeric column"""
with self.assertRaises(Exception):
self.frame.group_by('colors', {'colors': self.context.agg.avg})
def test_stats_on_string_stdev(self):
"""Non-numeric aggregates error on non-numeric column"""
with self.assertRaises(Exception):
self.frame.group_by('colors', {'colors': self.context.agg.stdev})
def test_stats_on_string_sum(self):
"""Non-numeric aggregates error on non-numeric column"""
with self.assertRaises(Exception):
self.frame.group_by('colors', {'colors': self.context.agg.sum})
def test_stats_on_string_var(self):
"""Non-numeric aggregates error on non-numeric column"""
with self.assertRaises(Exception):
self.frame.group_by('colors', {'colors': self.context.agg.var})
def test_invalid_column_name(self):
"""Aggregate on non-existant column errors"""
with self.assertRaises(Exception):
self.frame.group_by(
'InvalidColumnName', {'colors': self.context.agg.var})
def test_group_int32_standard(self):
"""Test groupby on 1 column, int32"""
stats = self.frame.group_by(['Int32_0_15'], {'Int32_0_31': self.aggs})
self._validate(stats, 'Int32_0_31', ['Int32_0_15'])
def test_group_float32_standard(self):
"""Test groupby on 1 column, float32"""
stats = self.frame.group_by(
['Float32_0_15'], {'Float32_0_31': self.aggs})
self._validate(stats, 'Float32_0_31', ['Float32_0_15'])
def test_group_float64_standard(self):
"""Test groupby on 1 column, float64"""
stats = self.frame.group_by(
['Float64_0_15'], {'Float64_0_31': self.aggs})
self._validate(stats, 'Float64_0_31', ['Float64_0_15'])
def test_group_int64_standard(self):
"""Test groupby on 1 column, int64"""
stats = self.frame.group_by(['Int64_0_15'], {'Int64_0_31': self.aggs})
self._validate(stats, 'Int64_0_31', ['Int64_0_15'])
    def test_group_by_str_standard(self):
"""Test groupby on 1 column, string"""
stats = self.frame.group_by(['colors'], {'Int32_0_31': self.aggs})
self._validate_str(stats, 'Int32_0_31', ['colors'])
def test_group_by_str_agg_str(self):
"""Test groupby on 1 column, string, aggregate is string"""
stats = self.frame.group_by(['colors'], {'colors': self.aggs_str})
self._validate_str(stats, 'colors', ['colors'])
def test_group_int32_multiple_cols(self):
"""Test groupby on multiple columns, int32"""
stats = self.frame.group_by(
['Int32_0_15', 'Int32_0_31'], {'Int32_0_31': self.aggs})
self._validate(stats, 'Int32_0_31', ['Int32_0_15', 'Int32_0_31'])
def test_group_float32_multiple_cols(self):
"""Test groupby on multiple columns, float32"""
stats = self.frame.group_by(
['Float32_0_15', 'Float32_0_31'], {'Float32_0_31': self.aggs})
self._validate(stats, 'Float32_0_31', ['Float32_0_15', 'Float32_0_31'])
def test_group_float64_multiple_cols(self):
"""Test groupby on multiple columns, float64"""
stats = self.frame.group_by(
['Float64_0_15', 'Float64_0_31'], {'Float32_0_31': self.aggs})
self._validate(stats, 'Float32_0_31', ['Float64_0_15', 'Float64_0_31'])
def test_group_int64_multiple_cols(self):
"""Test groupby on multiple columns, int64"""
stats = self.frame.group_by(
['Int64_0_15', 'Int64_0_31'], {'Int64_0_31': self.aggs})
self._validate(stats, 'Int64_0_31', ['Int64_0_15', 'Int64_0_31'])
def test_groupby_str_multiple_cols(self):
"""Test groupby on multiple columns, string"""
stats = self.frame.group_by(
['colors', 'Int32_0_15'], {'colors': self.aggs_str})
self._validate_str(stats, 'colors', ['colors', 'Int32_0_15'])
def test_group_int32_none(self):
"""Test groupby none, int32 aggregate"""
stats = self.frame.group_by(None, {'Int32_0_31': self.aggs})
self._validate_single_group(stats, None, 'Int32_0_31')
def test_group_float32_none(self):
"""Test groupby none, float32 aggregate"""
stats = self.frame.group_by(None, {'Float32_0_31': self.aggs})
self._validate_single_group(stats, None, 'Float32_0_31')
def test_group_float64_none(self):
"""Test groupby none, float64 aggregate"""
stats = self.frame.group_by(None, {'Float64_0_31': self.aggs})
self._validate_single_group(stats, None, 'Float64_0_31')
def test_group_int64_none(self):
"""Test groupby none, int64 aggregate"""
stats = self.frame.group_by(None, {'Int64_0_31': self.aggs})
self._validate_single_group(stats, None, 'Int64_0_31')
def _validate_single_group(self, stats, groupby_cols, aggregator):
# Validate the result of atk groupby and pandas groupby are the same
        # when there is a single group (None)
pd_stats = stats.to_pandas(stats.count())
new_frame = self.frame.to_pandas(self.frame.count())
gb = new_frame.groupby(lambda x: 0)[aggregator].agg(self.numpy_aggs)
int_cols = map(lambda x: aggregator+x, self.atk_cols)
for k, l in zip(int_cols, self.pd_cols):
self.assertAlmostEqual(gb.loc[0][l], pd_stats.loc[0][k], places=4)
def _validate(self, stats, aggregator, groupby_cols):
# Validate atk and pandas groupby are the same,
        # Cast the index to integer, and use all aggregates, as the
        # column being aggregated is numeric
self._validate_helper(
stats, aggregator, groupby_cols, self.numpy_aggs,
self.pd_cols, self.atk_cols, int)
def _validate_str(self, stats, aggregator, groupby_cols):
# Validate atk and pandas groupby are the same,
        # Cast the index to the same value, and use string aggregates,
        # as the column being aggregated is a string
self._validate_helper(
stats, aggregator, groupby_cols, self.numpy_aggs_str,
self.pd_cols_str, self.atk_cols_str, lambda x: x)
def _validate_helper(self, stats, aggregator, groupby_cols,
aggs, pd_cols, atk_cols, mapper):
# Get and compare results of atk and pandas, cast as appropriate
pd_stats = stats.to_pandas(stats.count())
new_frame = self.frame.to_pandas(self.frame.count())
gb = new_frame.groupby(groupby_cols)[aggregator].agg(aggs)
int_cols = map(lambda x: aggregator+x, atk_cols)
for _, i in pd_stats.iterrows():
for k, l in zip(int_cols, pd_cols):
if ((type(i[k]) is np.float64 or type(i[k]) is float) and
math.isnan(i[k])):
self.assertTrue(
math.isnan(
gb.loc[tuple(
map(lambda x: mapper(i[x]),
groupby_cols))][l]))
else:
self.assertAlmostEqual(
gb.loc[tuple(
map(lambda x: mapper(i[x]), groupby_cols))][l],
i[k], places=4)
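# A small sketch (defined for illustration only; the test runner never calls
# it): it pairs sparktk's generated column suffixes with the pandas aggregate
# labels the validators above compare them against.
def _example_agg_column_pairs(aggregated_column='Int32_0_31'):
    atk_names = [aggregated_column + suffix for suffix in GroupByTest.atk_cols]
    return list(zip(atk_names, GroupByTest.pd_cols))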
if __name__ == "__main__":
unittest.main()
|
dmsuehir/spark-tk
|
regression-tests/sparktkregtests/testcases/frames/frame_group_by_test.py
|
Python
|
apache-2.0
| 10,492 | 0.000096 |
import datetime
import re
import requests
from django.conf import settings
from django.utils.timezone import make_aware, utc
from raven.contrib.django.raven_compat.models import client as sentry_client
def get_articles_data(count=8):
payload = {
'consumer_key': settings.POCKET_CONSUMER_KEY,
'access_token': settings.POCKET_ACCESS_TOKEN,
'count': count,
'detailType': 'complete',
}
try:
resp = requests.post(settings.POCKET_API_URL, json=payload, timeout=5)
resp.raise_for_status()
return resp.json()
except Exception:
sentry_client.captureException()
return None
def complete_articles_data(articles):
for _, article in articles:
# id from API should be moved to pocket_id to not conflict w/DB's id
article['pocket_id'] = article['id']
# convert time_shared from unix timestamp to datetime
article['time_shared'] = make_aware(datetime.datetime.fromtimestamp(int(article['time_shared'])), utc)
# remove data points we don't need
del article['comment']
del article['excerpt']
del article['id']
del article['quote']
check_article_image(article)
def check_article_image(article):
"""Determine if external image is available"""
# sanity check to make sure image provided by API actually exists and is https
if article['image_src'] and re.match(r'^https://', article['image_src'], flags=re.I):
try:
resp = requests.get(article['image_src'])
resp.raise_for_status()
except Exception:
sentry_client.captureException()
article['image_src'] = None
else:
article['image_src'] = None
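# A minimal sketch (the data below is made up): check_article_image() mutates
# the article dict in place, dropping any image_src that is not served over
# https. No network call happens for the rejected case.
def _example_check_article_image():
    article = {'image_src': 'http://insecure.example.com/img.png'}
    check_article_image(article)
    assert article['image_src'] is None  # rejected: not https
    return article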
|
sgarrity/bedrock
|
bedrock/pocketfeed/api.py
|
Python
|
mpl-2.0
| 1,744 | 0.00172 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pogoprotos/networking/requests/messages/evolve_pokemon_message.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from pogoprotos.inventory.item import item_id_pb2 as pogoprotos_dot_inventory_dot_item_dot_item__id__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='pogoprotos/networking/requests/messages/evolve_pokemon_message.proto',
package='pogoprotos.networking.requests.messages',
syntax='proto3',
serialized_pb=_b('\nDpogoprotos/networking/requests/messages/evolve_pokemon_message.proto\x12\'pogoprotos.networking.requests.messages\x1a\'pogoprotos/inventory/item/item_id.proto\"q\n\x14\x45volvePokemonMessage\x12\x12\n\npokemon_id\x18\x01 \x01(\x06\x12\x45\n\x1a\x65volution_item_requirement\x18\x02 \x01(\x0e\x32!.pogoprotos.inventory.item.ItemIdb\x06proto3')
,
dependencies=[pogoprotos_dot_inventory_dot_item_dot_item__id__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_EVOLVEPOKEMONMESSAGE = _descriptor.Descriptor(
name='EvolvePokemonMessage',
full_name='pogoprotos.networking.requests.messages.EvolvePokemonMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pokemon_id', full_name='pogoprotos.networking.requests.messages.EvolvePokemonMessage.pokemon_id', index=0,
number=1, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='evolution_item_requirement', full_name='pogoprotos.networking.requests.messages.EvolvePokemonMessage.evolution_item_requirement', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=154,
serialized_end=267,
)
_EVOLVEPOKEMONMESSAGE.fields_by_name['evolution_item_requirement'].enum_type = pogoprotos_dot_inventory_dot_item_dot_item__id__pb2._ITEMID
DESCRIPTOR.message_types_by_name['EvolvePokemonMessage'] = _EVOLVEPOKEMONMESSAGE
EvolvePokemonMessage = _reflection.GeneratedProtocolMessageType('EvolvePokemonMessage', (_message.Message,), dict(
DESCRIPTOR = _EVOLVEPOKEMONMESSAGE,
__module__ = 'pogoprotos.networking.requests.messages.evolve_pokemon_message_pb2'
# @@protoc_insertion_point(class_scope:pogoprotos.networking.requests.messages.EvolvePokemonMessage)
))
_sym_db.RegisterMessage(EvolvePokemonMessage)
# @@protoc_insertion_point(module_scope)
|
bellowsj/aiopogo
|
aiopogo/pogoprotos/networking/requests/messages/evolve_pokemon_message_pb2.py
|
Python
|
mit
| 3,240 | 0.008025 |
from test.UdsTest import UdsTest
from test.stub import StubbedIsoTPSocket
from udsoncan.exceptions import *
import socket
class TestStubbedIsoTPSocket(UdsTest):
def test_open(self):
tpsock = StubbedIsoTPSocket()
self.assertFalse(tpsock.bound)
tpsock.bind(interface='vcan0', rxid=0x100, txid=0x101)
self.assertTrue(tpsock.bound)
tpsock.close()
self.assertFalse(tpsock.bound)
def test_transmit(self):
tpsock1 = StubbedIsoTPSocket()
tpsock2 = StubbedIsoTPSocket(timeout=0.5)
tpsock1.bind(interface='vcan0', rxid=0x200, txid=0x201)
tpsock2.bind(interface='vcan0', rxid=0x201, txid=0x200)
payload1 = b"\x01\x02\x03\x04"
tpsock1.send(payload1)
payload2 = tpsock2.recv()
self.assertEqual(payload1, payload2)
def test_multicast(self):
tpsock1 = StubbedIsoTPSocket()
tpsock2 = StubbedIsoTPSocket(timeout=0.5)
tpsock3 = StubbedIsoTPSocket(timeout=0.5)
tpsock1.bind(interface='vcan0', rxid=0x300, txid=0x301)
tpsock2.bind(interface='vcan0', rxid=0x301, txid=0x300)
tpsock3.bind(interface='vcan0', rxid=0x301, txid=0x300)
payload1 = b"\x01\x02\x03\x04"
tpsock1.send(payload1)
payload2 = tpsock2.recv()
payload3 = tpsock3.recv()
self.assertEqual(payload1, payload2)
self.assertEqual(payload1, payload3)
def test_empty_on_close(self):
tpsock1 = StubbedIsoTPSocket()
tpsock2 = StubbedIsoTPSocket(timeout=0.2)
tpsock1.bind(interface='vcan0', rxid=0x400, txid=0x401)
tpsock2.bind(interface='vcan0', rxid=0x401, txid=0x400)
payload = b"\x01\x02\x03\x04"
tpsock1.send(payload)
tpsock2.close()
with self.assertRaises(socket.timeout):
tpsock2.recv()
def test_no_listener(self):
tpsock1 = StubbedIsoTPSocket()
tpsock2 = StubbedIsoTPSocket(timeout=0.2)
tpsock1.bind(interface='vcan0', rxid=0x400, txid=0x401)
payload = b"\x01\x02\x03\x04"
tpsock1.send(payload)
tpsock2.bind(interface='vcan0', rxid=0x401, txid=0x400)
with self.assertRaises(socket.timeout):
tpsock2.recv()
|
pylessard/python-udsoncan
|
test/test_stubbed_isotpsock.py
|
Python
|
mit
| 2,233 | 0.000896 |
import io,pycurl,sys,os,time
class idctest:
def __init__(self):
self.contents = ''
def body_callback(self,buf):
self.contents = self.contents + buf
def test_gzip(input_url):
t = idctest()
#gzip_test = file("gzip_test.txt", 'w')
c = pycurl.Curl()
c.setopt(pycurl.WRITEFUNCTION,t.body_callback)
c.setopt(pycurl.ENCODING, 'gzip')
c.setopt(pycurl.URL,input_url)
c.setopt(pycurl.MAXREDIRS, 5)
c.perform()
http_code = c.getinfo(pycurl.HTTP_CODE)
dns_resolve = c.getinfo(pycurl.NAMELOOKUP_TIME)
http_conn_time = c.getinfo(pycurl.CONNECT_TIME)
http_pre_trans = c.getinfo(pycurl.PRETRANSFER_TIME)
http_start_trans = c.getinfo(pycurl.STARTTRANSFER_TIME)
http_total_time = c.getinfo(pycurl.TOTAL_TIME)
http_size_download = c.getinfo(pycurl.SIZE_DOWNLOAD)
http_header_size = c.getinfo(pycurl.HEADER_SIZE)
    http_speed_download = c.getinfo(pycurl.SPEED_DOWNLOAD)
    print('HTTP status code: %d' % http_code)
    print('DNS resolution time: %.2f ms' % (dns_resolve * 1000))
    print('Connect time: %.2f ms' % (http_conn_time * 1000))
    print('Pre-transfer time: %.2f ms' % (http_pre_trans * 1000))
    print("Start-transfer time: %.2f ms" % (http_start_trans * 1000))
    print("Total transfer time: %.2f ms" % (http_total_time * 1000))
    print("Downloaded size: %d bytes" % http_size_download)
    print("HTTP header size: %d bytes" % http_header_size)
    print("Average download speed: %d kB/s" % (http_speed_download / 1024))
if __name__ == '__main__':
input_url = sys.argv[1]
test_gzip(input_url)
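# Usage sketch (the URL is only an example):
#   python test_web_speed.py http://example.com/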
|
jinzekid/codehub
|
python/test_web_speed.py
|
Python
|
gpl-3.0
| 1,603 | 0.022163 |
#
# Copyright (c) 2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
|
hustodemon/spacewalk
|
backend/wsgi/__init__.py
|
Python
|
gpl-2.0
| 609 | 0 |
#!/usr/bin/env python3
from mathbind.types import BasicType
class BasicValueType(BasicType):
"""
Represents a basic pure type that can be passed by value, thus excluding arrays and pointers.
Attributes:
- typename (str): basic C typename (int, long long, unsigned, bool, etc)
- c_math_name (str): corresponding Mathematica C type
- math_name (str): corresponding Mathematica type (Integer, Real)
- c_name (str): corresponding C type (int, long long, float).
"""
def __init__(self, typename):
self.typename = typename
type_parts = set(typename.split())
self.c_name = typename
if not type_parts:
raise ValueError
elif {'float', 'double'} & type_parts:
self.c_math_name = 'mreal'
self.math_name = 'Real'
elif 'bool' in type_parts:
self.c_name = 'int'
self.c_math_name = 'mbool'
self.math_name = 'Boolean'
elif not type_parts - {'signed', 'unsigned', 'char', 'int', 'short', 'long'}:
self.c_math_name = 'mint'
self.math_name = 'Integer'
else:
raise ValueError('Unrecognized C type')
@classmethod
def from_str(cls, s):
"""
Tries to build a new BasicValueType from the string specification, failing if
the type is a pointer or array-like.
"""
if '*' in s or '[' in s or ']' in s:
raise ValueError('Not a valid basic C type')
        while '  ' in s:
            s = s.replace('  ', ' ')
return BasicValueType(s.strip())
@classmethod
def from_prototype_cstr(cls, s):
"""
Tries to extract (type, argname) from the string.
"""
        while '  ' in s:
            s = s.replace('  ', ' ')
s = s.strip()
if not s.replace(' ', '').replace('_', '').isalnum():
raise ValueError('Unrecognized characters')
*words, argname = s.split()
return BasicValueType.from_str(' '.join(words)), argname.strip()
def __repr__(self):
return 'BasicValueType(typename=%r)' % self.typename
def __eq__(self, other):
return self.typename == other.typename
def retrieve_cstr(self, argname, index, tab='', suffix=None):
if suffix is None:
suffix = self.default_suffix
form = '{tab}{self.c_name} {argname} = MArgument_get{self.math_name}(Args{suffix}[{index}]);\n'
return form.format(argname=argname, self=self, tab=tab, index=index, suffix=suffix)
def return_cstr(self, func_call, tab='', suffix=None):
if suffix is None:
suffix = self.default_suffix
form = (
'{tab}{self.c_name} return_value{suffix} = {func_call};\n'
'{tab}MArgument_set{self.math_name}(Res{suffix}, return_value{suffix});\n'
)
return form.format(func_call=func_call, tab=tab, self=self, suffix=suffix)
def prototype_cstr(self, argname):
return self.c_name + ' ' + argname
def prototype_return_cstr(self):
"""
Returns a C string representing the declaration in a prototype return.
"""
return self.c_name
@property
def math_convert_f(self):
"""
Returns the Mathematica function responsible for converting values
to this one.
"""
if 'float' in self.typename or 'double' in self.typename:
return 'N'
else:
return 'IntegerPart'
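# A minimal usage sketch (not part of the original module); the type strings
# below are arbitrary examples.
if __name__ == '__main__':
    t = BasicValueType.from_str('unsigned   long')  # extra whitespace is collapsed
    assert (t.c_math_name, t.math_name) == ('mint', 'Integer')
    t2, argname = BasicValueType.from_prototype_cstr('double radius')
    assert (t2.math_name, argname) == ('Real', 'radius')
    print(t.retrieve_cstr('n', 0, suffix=''))
    # -> unsigned long n = MArgument_getInteger(Args[0]);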
|
diogenes1oliveira/mathbind
|
mathbind/types/basicvaluetype.py
|
Python
|
mit
| 3,498 | 0.002287 |
# Copyright 2008-2009 WebDriver committers
# Copyright 2008-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Firefox Profile management."""
import ConfigParser
import logging
import os
import platform
import re
import shutil
import subprocess
import tempfile
import zipfile
import utils
DEFAULT_PORT = 7055
ANONYMOUS_PROFILE_NAME = "WEBDRIVER_ANONYMOUS_PROFILE"
def get_profile_ini():
app_data_dir = utils.get_firefox_app_data_dir()
profile_ini = ConfigParser.SafeConfigParser()
profile_ini.read(os.path.join(app_data_dir, "profiles.ini"))
return profile_ini
class FirefoxProfile(object):
"""Represents a firefox profile."""
profile_ini = get_profile_ini()
def __init__(self, name=ANONYMOUS_PROFILE_NAME, port=DEFAULT_PORT,
template_profile=None, extension_path=None):
"""Creates a FirefoxProfile.
Args:
name: the profile name. A new firefox profile is created if the one
specified doesn't exist.
port: the port webdriver extension listens on for command
template_profile: if not none, the content of the specified profile
will be copied from this directory.
extension_path: the source of the webdriver extension
Usage:
-- Get a profile with a given name:
profile = FirefoxProfile("profile_name")
-- Get a new created profile:
profile = FirefoxProfile()
-- Get a new created profile with content copied from "/some/path":
profile = FirefoxProfile(template_profile="/some/path")
"""
self.name = name
self.port = port
if (extension_path is None):
self.extension_path = os.path.join(os.path.dirname(__file__), '..',
'build_artifacts', 'webdriver-extension.zip')
else:
self.extension_path = extension_path
if name == ANONYMOUS_PROFILE_NAME:
self._create_anonymous_profile(template_profile)
self._refresh_ini()
else:
self.initialize()
def _create_anonymous_profile(self, template_profile):
self.anonymous_profile_dir = tempfile.mkdtemp()
if template_profile is not None and os.path.exists(template_profile):
self._copy_profile_source(template_profile)
self._update_user_preference()
self.add_extension(extension_zip_path=self.extension_path)
self._launch_in_silent()
def initialize(self):
self.remove_lock_file()
self.add_extension(True, extension_zip_path=self.extension_path)
def _copy_profile_source(self, source_path):
"""Copy the profile content from source_path source_path.
"""
logging.info("Copying profile from '%s' to '%s'"
% (source_path, self.path))
try:
shutil.rmtree(self.path)
shutil.copytree(source_path, self.path)
self._launch_in_silent()
except OSError, err:
raise Exception("Errors in copying profile: %s" % err)
def add_extension(self, force_create=True, extension_zip_path=None):
"""Adds the webdriver extension to this profile.
        If force_create is True, the fxdriver extension is updated if a
        new version is accessible. The old extension is untouched if the
        new version is unavailable, but it might be deleted if the new
        version is accessible but the upgrade fails.
        If force_create is False, nothing will happen if the extension
        directory exists and otherwise a new extension will be installed.
        The sources of a new extension are (in the order of preference)
        (1) zipped file webdriver-extension.zip in the current directory,
            which can be created using 'rake firefox_xpi' in
            %webdriver_directory%, and
        (2) zipped files pointed to by extension_zip_path, and
        (3) unzipped files specified by environment variable WEBDRIVER;
            these unzipped files must include the generated xpt files,
            see %webdriver_directory%/firefox/prebuilt, or run
            'rake firefox_xpi' and use the built files generated in
            %webdriver_directory%/build
        The default value of force_create is True. This lets users
        install a new extension from the sources listed above; if
        no files are specified, no installation is performed even when
        force_create is True.
"""
extension_dir = os.path.join(self.path,
"extensions", "fxdriver@googlecode.com")
logging.debug("extension_dir : %s" % extension_dir)
if force_create or not os.path.exists(extension_dir):
extension_source_path = utils.unzip_to_temp_dir(
"webdriver-extension.zip")
if (extension_source_path is None or
not os.path.exists(extension_source_path)):
extension_source_path = utils.unzip_to_temp_dir(
extension_zip_path)
if (extension_source_path is None or
not os.path.exists(extension_source_path)):
webdriver_dir = os.getenv("WEBDRIVER")
if webdriver_dir is not None:
extension_source_path = os.path.join(
webdriver_dir, "firefox", "src", "extension")
if (extension_source_path is None or
not os.path.exists(extension_source_path)):
raise Exception(
"No extension found at %s" % extension_source_path)
logging.debug("extension_source_path : %s" % extension_source_path)
logging.info("Copying extenstion from '%s' to '%s'"
% (extension_source_path, extension_dir))
try:
if os.path.exists(extension_dir):
shutil.rmtree(extension_dir)
else:
                    #copytree()'s behavior on linux makes us write these
                    #two lines to ensure that the parent directory exists,
                    #although it is not required according to the documentation.
os.makedirs(extension_dir)
shutil.rmtree(extension_dir)
shutil.copytree(extension_source_path, extension_dir)
logging.info("Extenstion has been copied from '%s' to '%s'"
% (extension_source_path, extension_dir))
except OSError, err:
logging.info("Fail to install firefox extension. %s" % err)
else:
logging.info("No extension installation required.")
def remove_lock_file(self):
for lock_file in [".parentlock", "lock", "parent.lock"]:
try:
os.remove(os.path.join(self.path, lock_file))
except OSError:
pass
@property
def path(self):
if "anonymous_profile_dir" in self.__dict__:
return self.anonymous_profile_dir
section = self._get_ini_section()
assert section is not None, "Profile doesn't exist in profiles.ini"
return os.path.join(utils.get_firefox_app_data_dir(),
self.profile_ini.get(section, "Path"))
@staticmethod
def _refresh_ini():
FirefoxProfile.profile_ini = get_profile_ini()
def _launch_in_silent(self):
os.environ["XRE_PROFILE_PATH"] = self.anonymous_profile_dir
subprocess.Popen([utils.get_firefox_start_cmd(), "-silent"]).wait()
def _update_user_preference(self):
"""Updates the user.js with the configurations needed by webdriver."""
preference = {}
user_pref_file_name = os.path.join(
self.path, "user.js")
try:
user_pref_file = open(user_pref_file_name)
for line in user_pref_file:
                match = re.match(r'user_pref\("(.*?)", (.*?)\);', line)
if match:
preference[match.group(1)] = match.group(2)
except IOError:
logging.debug("user.js doesn't exist, creating one...")
preference.update(self._get_webdriver_prefs())
preference["webdriver.firefox_port"] = self.port
user_pref_file = open(user_pref_file_name, "w")
for key, value in preference.items():
user_pref_file.write('user_pref("%s", %s);\n' % (key, value))
user_pref_file.close()
logging.info('user_pref after update:')
logging.info(preference)
def _delete_profile_if_exist(self):
section = self._get_ini_section()
if not section:
return
logging.info("deleting %s" % self.path)
shutil.rmtree(self.path)
def _get_ini_section(self):
for section in self.profile_ini.sections():
try:
if self.profile_ini.get(section, "Name") == self.name:
return section
except ConfigParser.NoOptionError:
pass
return None
@staticmethod
def _get_webdriver_prefs():
"""Gets the preferences required by webdriver."""
return {"app.update.auto": "false",
"app.update.enabled": "false",
"browser.download.manager.showWhenStarting": "false",
"browser.EULA.override": "true",
"browser.EULA.3.accepted": "true",
"browser.link.open_external": "2",
"browser.link.open_newwindow": "2",
"browser.safebrowsing.enabled": "false",
"browser.search.update": "false",
"browser.sessionstore.resume_from_crash": "false",
"browser.shell.checkDefaultBrowser": "false",
"browser.startup.page": "0",
"browser.tabs.warnOnClose": "false",
"browser.tabs.warnOnOpen": "false",
"dom.disable_open_during_load": "false",
"extensions.update.enabled": "false",
"extensions.update.notifyUser": "false",
"security.warn_entering_secure": "false",
"security.warn_submit_insecure": "false",
"security.warn_entering_secure.show_once": "false",
"security.warn_entering_weak": "false",
"security.warn_entering_weak.show_once": "false",
"security.warn_leaving_secure": "false",
"security.warn_leaving_secure.show_once": "false",
"security.warn_submit_insecure": "false",
"security.warn_viewing_mixed": "false",
"security.warn_viewing_mixed.show_once": "false",
"signon.rememberSignons": "false",
"startup.homepage_welcome_url": "\"about:blank\"",
"javascript.options.showInConsole": "true",
"browser.dom.window.dump.enabled": "true" ,
}
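# A minimal usage sketch (not part of the original module). Profile names are
# illustrative; a real run needs Firefox installed and the webdriver extension
# zip available, as described in add_extension() above.
#
#     profile = FirefoxProfile()                 # anonymous, temp-dir profile
#     profile = FirefoxProfile("my-profile")     # named profile from profiles.ini
#     profile.add_extension(force_create=False)  # install only if missing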
|
jijeshmohan/webdriver-rb
|
firefox/src/py/firefox_profile.py
|
Python
|
apache-2.0
| 11,601 | 0.003017 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from cinder.image import accelerator
from cinder.tests.unit import test
class fakeEngine(object):
def __init__(self):
pass
def compress_img(self, src, dest, run_as_root):
pass
def decompress_img(self, src, dest, run_as_root):
pass
class TestAccelerator(test.TestCase):
@mock.patch('cinder.image.accelerator.ImageAccel._get_engine')
@mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready',
return_value = True)
def test_compress_img_engine_ready(self, mock_accel_engine_ready,
mock_get_engine):
source = mock.sentinel.source
dest = mock.sentinel.dest
run_as_root = mock.sentinel.run_as_root
mock_engine = mock.Mock(spec=fakeEngine)
mock_get_engine.return_value = mock_engine
accel = accelerator.ImageAccel(source, dest)
accel.compress_img(run_as_root=run_as_root)
mock_engine.compress_img.assert_called()
@mock.patch('cinder.image.accelerator.ImageAccel._get_engine')
@mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready',
return_value = False)
def test_compress_img_engine_not_ready(self, mock_accel_engine_ready,
mock_get_engine):
source = mock.sentinel.source
dest = mock.sentinel.dest
run_as_root = mock.sentinel.run_as_root
mock_engine = mock.Mock(spec=fakeEngine)
mock_get_engine.return_value = mock_engine
accel = accelerator.ImageAccel(source, dest)
accel.compress_img(run_as_root=run_as_root)
mock_engine.compress_img.assert_not_called()
@mock.patch('cinder.image.accelerator.ImageAccel._get_engine')
@mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready',
return_value = True)
def test_decompress_img_engine_ready(self, mock_accel_engine_ready,
mock_get_engine):
source = mock.sentinel.source
dest = mock.sentinel.dest
run_as_root = mock.sentinel.run_as_root
mock_engine = mock.Mock(spec=fakeEngine)
mock_get_engine.return_value = mock_engine
accel = accelerator.ImageAccel(source, dest)
accel.decompress_img(run_as_root=run_as_root)
mock_engine.decompress_img.assert_called()
@mock.patch('cinder.image.accelerator.ImageAccel._get_engine')
@mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready',
return_value = False)
def test_decompress_img_engine_not_ready(self, mock_accel_engine_ready,
mock_get_engine):
source = mock.sentinel.source
dest = mock.sentinel.dest
run_as_root = mock.sentinel.run_as_root
mock_engine = mock.Mock(spec=fakeEngine)
mock_get_engine.return_value = mock_engine
accel = accelerator.ImageAccel(source, dest)
accel.decompress_img(run_as_root=run_as_root)
mock_engine.decompress_img.assert_not_called()
|
mahak/cinder
|
cinder/tests/unit/image/test_accelerator.py
|
Python
|
apache-2.0
| 3,676 | 0.002176 |
"""
Copyright 2008, 2009, 2011 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import os
import gtk
from . import Bars, Actions, Preferences, Utils
from .BlockTreeWindow import BlockTreeWindow
from .Constants import \
NEW_FLOGRAPH_TITLE, DEFAULT_REPORTS_WINDOW_WIDTH
from .Dialogs import TextDisplay, MessageDialogHelper
from .NotebookPage import NotebookPage
from ..core import Messages
MAIN_WINDOW_TITLE_TMPL = """\
#if not $saved
*#slurp
#end if
#if $basename
$basename#slurp
#else
$new_flowgraph_title#slurp
#end if
#if $read_only
(read only)#slurp
#end if
#if $dirname
- $dirname#slurp
#end if
- $platform_name#slurp
"""
PAGE_TITLE_MARKUP_TMPL = """\
#set $foreground = $saved and 'black' or 'red'
<span foreground="$foreground">$encode($title or $new_flowgraph_title)</span>#slurp
#if $read_only
(ro)#slurp
#end if
"""
############################################################
# Main window
############################################################
class MainWindow(gtk.Window):
"""The topmost window with menus, the tool bar, and other major windows."""
def __init__(self, platform, action_handler_callback):
"""
        MainWindow constructor
Setup the menu, toolbar, flowgraph editor notebook, block selection window...
"""
self._platform = platform
gen_opts = platform.blocks['options'].get_param('generate_options')
generate_mode_default = gen_opts.get_value()
generate_modes = [
(o.get_key(), o.get_name(), o.get_key() == generate_mode_default)
for o in gen_opts.get_options()]
# load preferences
Preferences.load(platform)
#setup window
gtk.Window.__init__(self, gtk.WINDOW_TOPLEVEL)
vbox = gtk.VBox()
self.hpaned = gtk.HPaned()
self.add(vbox)
#create the menu bar and toolbar
self.add_accel_group(Actions.get_accel_group())
self.menu_bar = Bars.MenuBar(generate_modes, action_handler_callback)
vbox.pack_start(self.menu_bar, False)
self.tool_bar = Bars.Toolbar(generate_modes, action_handler_callback )
vbox.pack_start(self.tool_bar, False)
vbox.pack_start(self.hpaned)
#create the notebook
self.notebook = gtk.Notebook()
self.page_to_be_closed = None
self.current_page = None
self.notebook.set_show_border(False)
self.notebook.set_scrollable(True) #scroll arrows for page tabs
self.notebook.connect('switch-page', self._handle_page_change)
#setup containers
self.flow_graph_vpaned = gtk.VPaned()
#flow_graph_box.pack_start(self.scrolled_window)
self.flow_graph_vpaned.pack1(self.notebook)
self.hpaned.pack1(self.flow_graph_vpaned)
        self.btwin = BlockTreeWindow(platform, self.get_flow_graph)
self.hpaned.pack2(self.btwin, False) #dont allow resize
#create the reports window
self.text_display = TextDisplay()
#house the reports in a scrolled window
self.reports_scrolled_window = gtk.ScrolledWindow()
self.reports_scrolled_window.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.reports_scrolled_window.add(self.text_display)
self.reports_scrolled_window.set_size_request(-1, DEFAULT_REPORTS_WINDOW_WIDTH)
self.flow_graph_vpaned.pack2(self.reports_scrolled_window, False) #dont allow resize
#load preferences and show the main window
self.resize(*Preferences.main_window_size())
self.flow_graph_vpaned.set_position(Preferences.reports_window_position())
self.hpaned.set_position(Preferences.blocks_window_position())
self.show_all()
self.reports_scrolled_window.hide()
self.btwin.hide()
############################################################
# Event Handlers
############################################################
def _quit(self, window, event):
"""
Handle the delete event from the main window.
Generated by pressing X to close, alt+f4, or right click+close.
This method in turns calls the state handler to quit.
Returns:
true
"""
Actions.APPLICATION_QUIT()
return True
def _handle_page_change(self, notebook, page, page_num):
"""
Handle a page change. When the user clicks on a new tab,
reload the flow graph to update the vars window and
call handle states (select nothing) to update the buttons.
Args:
notebook: the notebook
page: new page
page_num: new page number
"""
self.current_page = self.notebook.get_nth_page(page_num)
Messages.send_page_switch(self.current_page.get_file_path())
Actions.PAGE_CHANGE()
############################################################
# Report Window
############################################################
def add_report_line(self, line):
"""
Place line at the end of the text buffer, then scroll its window all the way down.
Args:
line: the new text
"""
self.text_display.insert(line)
############################################################
# Pages: create and close
############################################################
def new_page(self, file_path='', show=False):
"""
Create a new notebook page.
Set the tab to be selected.
Args:
file_path: optional file to load into the flow graph
show: true if the page should be shown after loading
"""
#if the file is already open, show the open page and return
if file_path and file_path in self._get_files(): #already open
page = self.notebook.get_nth_page(self._get_files().index(file_path))
self._set_page(page)
return
try: #try to load from file
if file_path: Messages.send_start_load(file_path)
flow_graph = self._platform.get_new_flow_graph()
flow_graph.grc_file_path = file_path
#print flow_graph
page = NotebookPage(
self,
flow_graph=flow_graph,
file_path=file_path,
)
if file_path: Messages.send_end_load()
except Exception, e: #return on failure
Messages.send_fail_load(e)
if isinstance(e, KeyError) and str(e) == "'options'":
# This error is unrecoverable, so crash gracefully
exit(-1)
return
#add this page to the notebook
self.notebook.append_page(page, page.get_tab())
try: self.notebook.set_tab_reorderable(page, True)
except: pass #gtk too old
self.notebook.set_tab_label_packing(page, False, False, gtk.PACK_START)
#only show if blank or manual
if not file_path or show: self._set_page(page)
def close_pages(self):
"""
Close all the pages in this notebook.
Returns:
true if all closed
"""
open_files = filter(lambda file: file, self._get_files()) #filter blank files
open_file = self.get_page().get_file_path()
#close each page
for page in sorted(self.get_pages(), key=lambda p: p.get_saved()):
self.page_to_be_closed = page
closed = self.close_page(False)
if not closed:
break
if self.notebook.get_n_pages(): return False
#save state before closing
Preferences.set_open_files(open_files)
Preferences.file_open(open_file)
Preferences.main_window_size(self.get_size())
Preferences.reports_window_position(self.flow_graph_vpaned.get_position())
Preferences.blocks_window_position(self.hpaned.get_position())
Preferences.save()
return True
def close_page(self, ensure=True):
"""
Close the current page.
If the notebook becomes empty, and ensure is true,
call new page upon exit to ensure that at least one page exists.
Args:
ensure: boolean
"""
if not self.page_to_be_closed: self.page_to_be_closed = self.get_page()
#show the page if it has an executing flow graph or is unsaved
if self.page_to_be_closed.get_proc() or not self.page_to_be_closed.get_saved():
self._set_page(self.page_to_be_closed)
#unsaved? ask the user
if not self.page_to_be_closed.get_saved():
response = self._save_changes() # return value is either OK, CLOSE, or CANCEL
if response == gtk.RESPONSE_OK:
Actions.FLOW_GRAPH_SAVE() #try to save
if not self.page_to_be_closed.get_saved(): #still unsaved?
self.page_to_be_closed = None #set the page to be closed back to None
return False
elif response == gtk.RESPONSE_CANCEL:
self.page_to_be_closed = None
return False
#stop the flow graph if executing
if self.page_to_be_closed.get_proc(): Actions.FLOW_GRAPH_KILL()
#remove the page
self.notebook.remove_page(self.notebook.page_num(self.page_to_be_closed))
if ensure and self.notebook.get_n_pages() == 0: self.new_page() #no pages, make a new one
self.page_to_be_closed = None #set the page to be closed back to None
return True
############################################################
# Misc
############################################################
def update(self):
"""
Set the title of the main window.
Set the titles on the page tabs.
Show/hide the reports window.
Args:
title: the window title
"""
gtk.Window.set_title(self, Utils.parse_template(MAIN_WINDOW_TITLE_TMPL,
basename=os.path.basename(self.get_page().get_file_path()),
dirname=os.path.dirname(self.get_page().get_file_path()),
new_flowgraph_title=NEW_FLOGRAPH_TITLE,
read_only=self.get_page().get_read_only(),
saved=self.get_page().get_saved(),
platform_name=self._platform.config.name,
)
)
#set tab titles
for page in self.get_pages(): page.set_markup(
Utils.parse_template(PAGE_TITLE_MARKUP_TMPL,
#get filename and strip out file extension
title=os.path.splitext(os.path.basename(page.get_file_path()))[0],
read_only=page.get_read_only(), saved=page.get_saved(),
new_flowgraph_title=NEW_FLOGRAPH_TITLE,
)
)
#show/hide notebook tabs
self.notebook.set_show_tabs(len(self.get_pages()) > 1)
def update_pages(self):
"""
Forces a reload of all the pages in this notebook.
"""
for page in self.get_pages():
success = page.get_flow_graph().reload()
            if success:  # flag the page as unsaved only when the reload succeeded
page.set_saved(False)
def get_page(self):
"""
Get the selected page.
Returns:
the selected page
"""
return self.current_page
def get_flow_graph(self):
"""
Get the selected flow graph.
Returns:
the selected flow graph
"""
return self.get_page().get_flow_graph()
def get_focus_flag(self):
"""
Get the focus flag from the current page.
Returns:
the focus flag
"""
return self.get_page().get_drawing_area().get_focus_flag()
############################################################
# Helpers
############################################################
def _set_page(self, page):
"""
Set the current page.
Args:
page: the page widget
"""
self.current_page = page
self.notebook.set_current_page(self.notebook.page_num(self.current_page))
def _save_changes(self):
"""
Save changes to flow graph?
Returns:
the response_id (see buttons variable below)
"""
buttons = (
'Close without saving', gtk.RESPONSE_CLOSE,
gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
gtk.STOCK_SAVE, gtk.RESPONSE_OK
)
return MessageDialogHelper(
gtk.MESSAGE_QUESTION, gtk.BUTTONS_NONE, 'Unsaved Changes!',
'Would you like to save changes before closing?', gtk.RESPONSE_OK, buttons
)
def _get_files(self):
"""
Get the file names for all the pages, in order.
Returns:
list of file paths
"""
return map(lambda page: page.get_file_path(), self.get_pages())
def get_pages(self):
"""
Get a list of all pages in the notebook.
Returns:
list of pages
"""
return [self.notebook.get_nth_page(page_num) for page_num in range(self.notebook.get_n_pages())]
|
glennlive/gnuradio-wg-grc
|
grc/gui/MainWindow.py
|
Python
|
gpl-3.0
| 13,903 | 0.006258 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._monitoring_settings_operations import build_get_request, build_update_patch_request_initial, build_update_put_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class MonitoringSettingsOperations:
"""MonitoringSettingsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.appplatform.v2020_11_01_preview.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def get(
self,
resource_group_name: str,
service_name: str,
**kwargs: Any
) -> "_models.MonitoringSettingResource":
"""Get the Monitoring Setting and its properties.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param service_name: The name of the Service resource.
:type service_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MonitoringSettingResource, or the result of cls(response)
:rtype: ~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.MonitoringSettingResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
service_name=service_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default'} # type: ignore
async def _update_put_initial(
self,
resource_group_name: str,
service_name: str,
monitoring_setting_resource: "_models.MonitoringSettingResource",
**kwargs: Any
) -> "_models.MonitoringSettingResource":
cls = kwargs.pop('cls', None) # type: ClsType["_models.MonitoringSettingResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(monitoring_setting_resource, 'MonitoringSettingResource')
request = build_update_put_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
service_name=service_name,
content_type=content_type,
json=_json,
template_url=self._update_put_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_put_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default'} # type: ignore
@distributed_trace_async
async def begin_update_put(
self,
resource_group_name: str,
service_name: str,
monitoring_setting_resource: "_models.MonitoringSettingResource",
**kwargs: Any
) -> AsyncLROPoller["_models.MonitoringSettingResource"]:
"""Update the Monitoring Setting.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param service_name: The name of the Service resource.
:type service_name: str
:param monitoring_setting_resource: Parameters for the update operation.
:type monitoring_setting_resource:
~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either MonitoringSettingResource or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.MonitoringSettingResource"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_put_initial(
resource_group_name=resource_group_name,
service_name=service_name,
monitoring_setting_resource=monitoring_setting_resource,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default'} # type: ignore
async def _update_patch_initial(
self,
resource_group_name: str,
service_name: str,
monitoring_setting_resource: "_models.MonitoringSettingResource",
**kwargs: Any
) -> "_models.MonitoringSettingResource":
cls = kwargs.pop('cls', None) # type: ClsType["_models.MonitoringSettingResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(monitoring_setting_resource, 'MonitoringSettingResource')
request = build_update_patch_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
service_name=service_name,
content_type=content_type,
json=_json,
template_url=self._update_patch_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_patch_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default'} # type: ignore
@distributed_trace_async
async def begin_update_patch(
self,
resource_group_name: str,
service_name: str,
monitoring_setting_resource: "_models.MonitoringSettingResource",
**kwargs: Any
) -> AsyncLROPoller["_models.MonitoringSettingResource"]:
"""Update the Monitoring Setting.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param service_name: The name of the Service resource.
:type service_name: str
:param monitoring_setting_resource: Parameters for the update operation.
:type monitoring_setting_resource:
~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either MonitoringSettingResource or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.MonitoringSettingResource"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_patch_initial(
resource_group_name=resource_group_name,
service_name=service_name,
monitoring_setting_resource=monitoring_setting_resource,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_patch.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/monitoringSettings/default'} # type: ignore
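# A minimal usage sketch (not part of the generated module). The client class,
# credential, and resource names below are assumptions/placeholders; any
# azure.identity async credential would do.
#
#     from azure.identity.aio import DefaultAzureCredential
#     from azure.mgmt.appplatform.v2020_11_01_preview.aio import AppPlatformManagementClient
#
#     async def enable_monitoring(resource):
#         async with AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
#             poller = await client.monitoring_settings.begin_update_put(
#                 "my-resource-group", "my-spring-service", resource)
#             return await poller.result()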
|
Azure/azure-sdk-for-python
|
sdk/appplatform/azure-mgmt-appplatform/azure/mgmt/appplatform/v2020_11_01_preview/aio/operations/_monitoring_settings_operations.py
|
Python
|
mit
| 16,766 | 0.005189 |
"""followers
Revision ID: 2356a38169ea
Revises: 288cd3dc5a8
Create Date: 2013-12-31 16:10:34.500006
"""
# revision identifiers, used by Alembic.
revision = '2356a38169ea'
down_revision = '288cd3dc5a8'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('follows',
sa.Column('follower_id', sa.Integer(), nullable=False),
sa.Column('followed_id', sa.Integer(), nullable=False),
sa.Column('timestamp', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['followed_id'], ['users.id'], ),
sa.ForeignKeyConstraint(['follower_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('follower_id', 'followed_id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('follows')
### end Alembic commands ###
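# Typical invocation (assuming alembic.ini is configured for this project):
#     alembic upgrade 2356a38169ea    # create the follows table
#     alembic downgrade 288cd3dc5a8   # drop it again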
|
mrcatacroquer/Bridge
|
migrations/versions/2356a38169ea_followers.py
|
Python
|
mit
| 941 | 0.012752 |
'''
Created on Apr 19, 2012
@package: superdesk media archive
@copyright: 2012 Sourcefabric o.p.s.
@license: http://www.gnu.org/licenses/gpl-3.0.txt
@author: Gabriel Nistor
SQL Alchemy based implementation for the image data API.
'''
from ally.cdm.spec import ICDM
from ally.container import wire
from ally.container.ioc import injected
from ally.container.support import setup
from superdesk.media_archive.api.image_data import IImageDataService, QImageData
from superdesk.media_archive.core.impl.meta_service_base import \
MetaDataServiceBaseAlchemy
from superdesk.media_archive.core.spec import IMetaDataReferencer, \
IThumbnailManager
from superdesk.media_archive.meta.image_data import ImageDataMapped
# --------------------------------------------------------------------
@injected
@setup(IImageDataService, name='imageDataService')
class ImageDataServiceAlchemy(MetaDataServiceBaseAlchemy, IMetaDataReferencer, IImageDataService):
'''
@see: IImageDataService
'''
cdmArchiveImage = ICDM; wire.entity('cdmArchiveImage')
thumbnailManager = IThumbnailManager; wire.entity('thumbnailManager')
def __init__(self):
assert isinstance(self.cdmArchiveImage, ICDM), 'Invalid archive CDM %s' % self.cdmArchiveImage
assert isinstance(self.thumbnailManager, IThumbnailManager), 'Invalid thumbnail manager %s' % self.thumbnailManager
MetaDataServiceBaseAlchemy.__init__(self, ImageDataMapped, QImageData, self, self.cdmArchiveImage, self.thumbnailManager)
|
superdesk/Live-Blog
|
plugins/media-archive-image/superdesk/media_archive/impl/image_data.py
|
Python
|
agpl-3.0
| 1,525 | 0.007213 |
#!/usr/bin/env python
from time import sleep
class ButtonListener():
"""
Service that polls the button status device and calls a
    callback function for each button pressed.
Callback function should return a boolean to show whether
or not the listening should continue.
"""
def __init__(self, button_callback, device_filename="/dev/buttons", num_buttons=8, *args, **kw):
self.button_callback = button_callback
self.button_device = open(device_filename, "r")
self.num_buttons = num_buttons
self.last_state = {"0": 0}
def listen(self):
while True:
raw_state = [ord(ch) for ch in self.button_device.read(self.num_buttons)]
state = dict(zip(range(0, len(raw_state)), raw_state))
for (button, isup) in state.iteritems():
if isup:
state[button] = 1
else:
state[button] = 0
if not isup and button in self.last_state and self.last_state[button]:
if not self.button_callback(button):
return
self.last_state = state
sleep(0.2)
if __name__ == "__main__":
def print_button(button):
print("Button %s pressed" % button)
return True
service = ButtonListener(print_button)
service.listen()
|
kd0kfo/pi_lcd_controller
|
python/picontroller/button_listener.py
|
Python
|
gpl-3.0
| 1,373 | 0.00437 |
# Multi-threaded Mandelbrot Fractal (Do not run using IDLE!)
# FB - 201104306
import threading
from PIL import Image
w = 512 # image width
h = 512 # image height
image = Image.new("RGB", (w, h))
wh = w * h
maxIt = 256 # max number of iterations allowed
# drawing region (xa < xb & ya < yb)
xa = -2.0
xb = 1.0
ya = -1.5
yb = 1.5
xd = xb - xa
yd = yb - ya
numThr = 5 # number of threads to run
# lock = threading.Lock()
class ManFrThread(threading.Thread):
def __init__ (self, k):
self.k = k
threading.Thread.__init__(self)
def run(self):
# each thread only calculates its own share of pixels
        for i in range(self.k, wh, numThr):
kx = i % w
ky = int(i / w)
a = xa + xd * kx / (w - 1.0)
b = ya + yd * ky / (h - 1.0)
x = a
y = b
for kc in range(maxIt):
x0 = x * x - y * y + a
y = 2.0 * x * y + b
x = x0
if x * x + y * y > 4:
# various color palettes can be created here
red = (kc % 8) * 32
green = (16 - kc % 16) * 16
blue = (kc % 16) * 16
# lock.acquire()
global image
image.putpixel((kx, ky), (red, green, blue))
# lock.release()
break
if __name__ == "__main__":
tArr = []
for k in range(numThr): # create all threads
tArr.append(ManFrThread(k))
for k in range(numThr): # start all threads
tArr[k].start()
for k in range(numThr): # wait until all threads finished
tArr[k].join()
image.save("MandelbrotFractal.png", "PNG")
|
ActiveState/code
|
recipes/Python/577680_Multithreaded_Mandelbrot_Fractal/recipe-577680.py
|
Python
|
mit
| 1,749 | 0.009148 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
###########################################################################
## ##
## Copyrights Frédéric Rodrigo 2016 ##
## ##
## This program is free software: you can redistribute it and/or modify ##
## it under the terms of the GNU General Public License as published by ##
## the Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ##
## GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with this program. If not, see <http://www.gnu.org/licenses/>. ##
## ##
###########################################################################
from Analyser_Osmosis import Analyser_Osmosis
sql10 = """
CREATE TEMP TABLE turn_lanes_ends AS
SELECT
DISTINCT ON (id)
ends(nodes) AS id
FROM
ways
WHERE
tags != ''::hstore AND
tags?'highway' AND
(
tags->'highway' = 'motorway' OR
(tags->'highway' = 'trunk' AND tags->'oneway' = 'yes')
) AND
tags?'turn:lanes'
"""
sql11 = """
CREATE INDEX idx_turn_lanes_ends_id ON turn_lanes_ends(id);
"""
sql12 = """
CREATE TEMP TABLE turn_lanes_steps AS
SELECT
turn_lanes_ends.id AS nid,
CASE ways.tags->'oneway'
WHEN '-1' THEN turn_lanes_ends.id != ways.nodes[1]
ELSE turn_lanes_ends.id = ways.nodes[1]
END AS start_end,
ways.id,
ways.tags
FROM
ways
JOIN turn_lanes_ends ON
turn_lanes_ends.id = ways.nodes[1] OR
turn_lanes_ends.id = ways.nodes[array_length(ways.nodes, 1)]
WHERE
ways.tags != ''::hstore AND
ways.tags?'highway' AND
(NOT ways.tags?'access' OR ways.tags->'access' != 'no')
"""
sql13 = """
CREATE TEMP TABLE sum_turn_lanes_steps AS
SELECT
nid,
start_end,
SUM(CASE
WHEN tags->'lanes' ~ E'^[0-9]+$' THEN (tags->'lanes')::integer
WHEN tags?'turn:lanes' THEN array_length(string_to_array(tags->'turn:lanes', '|'), 1)
WHEN tags->'highway' IN ('motorway', 'trunk') THEN 2
ELSE 1
END) AS lanes,
SUM(array_length(string_to_array(tags->'turn:lanes', 'slight_'), 1) - 1) AS lanes_slight,
SUM(array_length(string_to_array(tags->'turn:lanes', 'merge_to_'), 1) - 1) AS lanes_merge_to
FROM
turn_lanes_steps
GROUP BY
nid,
start_end
HAVING
BOOL_AND(tags->'highway' IN ('motorway', 'motorway_link') OR (tags->'highway' IN ('trunk', 'trunk_link') AND tags->'oneway' = 'yes'))
"""
sql14 = """
SELECT
nid,
ST_AsText(nodes.geom),
lin_lanes,
lin_lanes_merge_to,
lin_lanes_slight,
lout_lanes,
lout_lanes_merge_to,
lout_lanes_slight
FROM
(
SELECT
lin.nid,
lin.lanes AS lin_lanes,
lin.lanes_merge_to AS lin_lanes_merge_to,
lin.lanes_slight AS lin_lanes_slight,
lout.lanes AS lout_lanes,
lout.lanes_merge_to AS lout_lanes_merge_to,
lout.lanes_slight AS lout_lanes_slight
FROM
sum_turn_lanes_steps AS lin
JOIN sum_turn_lanes_steps AS lout ON
lin.nid = lout.nid AND
(
(
(lin.lanes_merge_to = 0 OR lin.lanes_merge_to IS NULL) AND
lout.lanes < lin.lanes
) OR (
lin.lanes_merge_to > 0 AND
NOT (
lout.lanes - lin.lanes_slight <= lin.lanes AND
lout.lanes - lin.lanes_slight - lout.lanes_merge_to <= lin.lanes - lin.lanes_merge_to + lout.lanes_slight
)
)
)
WHERE
NOT lin.start_end AND
lout.start_end
ORDER BY
1 -- Just to force the query planner to does not merge sub and main request
) AS t
JOIN nodes ON
nodes.id = nid AND
(NOT nodes.tags?'highway' OR nodes.tags->'highway' != 'traffic_signals')
"""
class Analyser_Osmosis_Highway_Turn_Lanes(Analyser_Osmosis):
def __init__(self, config, logger = None):
Analyser_Osmosis.__init__(self, config, logger)
self.classs[1] = {"item":"3160", "level": 2, "tag": ["highway", "fix:chair"], "desc": T_(u"Bad lanes number or lanes:turn before and after this node") }
def analyser_osmosis(self):
self.run(sql10)
self.run(sql11)
self.run(sql12)
self.run(sql13)
self.run(sql14, lambda res: {"class":1, "data":[self.node, self.positionAsText], "text": {"en": "lanes in %s(-%s+%s), lanes out %s(-%s+%s)" % (res[2], res[3] or 0, res[4] or 0, res[5], res[6] or 0, res[7] or 0) }})
|
tyndare/osmose-backend
|
analysers/analyser_osmosis_highway_turn_lanes.py
|
Python
|
gpl-3.0
| 4,965 | 0.006448 |
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
A tool to generate a tasks_list.sh file for running fmriprep
on subjects downloaded with datalad with sample_openfmri.py
"""
import os
import glob
CMDLINE = """\
{fmriprep_cmd} {bids_dir}/{dataset_dir} {output_dir}/{dataset_dir} participant \
-w {dataset_dir}/work --participant_label {participant_label} \
"""
def get_parser():
"""Build parser object"""
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter
parser = ArgumentParser(
description='OpenfMRI participants sampler, for FMRIPREP\'s testing purposes',
formatter_class=RawTextHelpFormatter)
parser.add_argument('openfmri_dir', action='store',
help='the root folder of a the openfmri dataset')
parser.add_argument('output_dir', action='store',
help='the directory where outputs should be stored')
parser.add_argument('sample_file', action='store',
help='a YAML file containing the subsample schedule')
# optional arguments
parser.add_argument('--anat-only', action='store_true', default=False,
help='run only anatomical workflow')
parser.add_argument('--nthreads', action='store', type=int,
help='number of total threads')
parser.add_argument('--omp_nthreads', action='store', type=int,
help='number of threads for OMP-based interfaces')
parser.add_argument('--mem-gb', action='store', type=int,
help='available memory in GB')
parser.add_argument('--tasks-list-file', default='tasks_list.sh',
action='store', help='write output file')
parser.add_argument('-t', '--tasks-filter', action='store', nargs='*',
help='run only specific tasks')
parser.add_argument('--cmd-call', action='store', help='command to be run')
return parser
def main():
"""Entry point"""
import yaml
opts = get_parser().parse_args()
with open(opts.sample_file) as sfh:
sampledict = yaml.load(sfh)
cmdline = CMDLINE
if opts.anat_only:
cmdline += ' --anat-only'
    if opts.nthreads:
        cmdline += ' --nthreads %d' % opts.nthreads
    if opts.omp_nthreads:
        cmdline += ' --omp-nthreads %d' % opts.omp_nthreads
    if opts.mem_gb:
        cmdline += ' --mem_mb %d' % (opts.mem_gb * 1000)
    if opts.tasks_filter:
        cmdline += ' -t %s' % ' '.join(opts.tasks_filter)
fmriprep_cmd = 'fmriprep'
if opts.cmd_call is None:
singularity_dir = os.getenv('SINGULARITY_BIN')
singularity_img = sorted(
glob.glob(os.path.join(singularity_dir, 'poldracklab_fmriprep_*')))
if singularity_img:
fmriprep_cmd = 'singularity run %s' % singularity_img[-1]
task_cmds = []
# Try to make this Python 2 compatible
try:
os.makedirs(opts.output_dir)
except OSError as e:
if e.errno != os.errno.EEXIST:
raise
for dset, sublist in sampledict.items():
for sub in sublist:
cmd = cmdline.format(
fmriprep_cmd=fmriprep_cmd,
bids_dir=opts.openfmri_dir,
dataset_dir=dset,
output_dir=opts.output_dir,
participant_label=sub,
)
task_cmds.append(cmd)
with open(opts.tasks_list_file, 'w') as tlfile:
tlfile.write('\n'.join(task_cmds))
if __name__ == '__main__':
main()
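# Example invocation (paths and sample file are placeholders):
#     python sample_openfmri_tasks_list.py /data/openfmri /data/out sample.yml \
#         --nthreads 8 --tasks-list-file tasks_list.sh
# The generated tasks_list.sh contains one fmriprep command per sampled
# participant, suitable for a job array or GNU parallel.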
|
oesteban/preprocessing-workflow
|
fmriprep/cli/sample_openfmri_tasks_list.py
|
Python
|
bsd-3-clause
| 3,643 | 0.000549 |
from distutils.core import setup
from program_version import RELEASE
setup(name='program',
version=RELEASE,
description='A self updating program example',
author='Mr Snow',
author_email='ninja@snow.com',
url='https://github.com/mr-ninja-snow/Self-Updating-Python-Program.git',
packages=[],
)
|
mr-ninja-snow/Self-Updating-Python-Program
|
setup.py
|
Python
|
gpl-3.0
| 334 | 0.005988 |
"""
Due is a learning, modular, action-oriented dialogue agent. `Agents` are the
entities that can take part in Episodes (:mod:`due.episode`), receiving and
issuing Events (:mod:`due.event`).
"""
import uuid
from abc import ABCMeta, abstractmethod
from datetime import datetime
from due.event import Event
from due import episode
from due.util.python import dynamic_import
class Agent(metaclass=ABCMeta):
"""
Participants in an Episodes are called Agents. An Agent models an unique
identity through its ID, and can be served on a number of channels using
packages in :mod:`due.serve`.
Most importantly, Agent classes implement Natural Language Understanding
(NLU) and Generation (NLG) models, which are the core of the whole
conversational experience; they are meant to learn from Episodes coming from
a corpus, as well as from live conversations with humans or other agents.
:param agent_id: an unique ID for the Agent
:type agent_id: `str`
:param name: a human-friendly name for the Agent
:type name: `str`
"""
def __init__(self, agent_id=None):
self.id = agent_id if agent_id is not None else str(uuid.uuid1())
@abstractmethod
def save(self):
"""
Returns the Agent as an object. This object can be loaded with
:func:`Agent.load` and can be (de)serialized using the
:mod:`due.persistence` module.
A saved Agent must be a dictionary containing exactly the following items:
* `version`: version of the class who saved the agent (often `due.__version__`)
* `class`: absolute import name of the Agent class (eg. `due.models.dummy.DummyAgent`)
* `data`: saved agent data. Will be passed to the Agent constructor's `_data` parameter
:return: an object representing the Agent
:rtype: object
"""
pass
@staticmethod
def load(saved_agent):
"""
Loads an Agent from an object that was produced with the :meth:`Agent.save`
method.
:param saved_agent: an Agent, as it was saved by :meth:`Agent.save`
:type saved_agent: object
:return: an Agent
:rtype: `due.agent.Agent`
"""
class_ = dynamic_import(saved_agent['class'])
return class_(_data=saved_agent['data'])
@abstractmethod
def learn_episodes(self, episodes):
"""
Submit a list of Episodes for the :class:`Agent` to learn.
:param episodes: a list of episodes
:type episodes: `list` of :class:`due.episode.Episode`
"""
pass
def learn_episode(self, episode):
"""
Submit an Episode for the Agent to learn. By default, this just wraps a
call to :meth:`Agent.learn_episode`
:param episode: an Episode
:type episode: :class:`due.episode.Episode`
"""
self.learn_episodes([episode])
@abstractmethod
def new_episode_callback(self, new_episode):
"""
This is a callback method that is invoked whenever the Agent is invited
to join a new conversation (Episode) with another one.
Note that this is an **abstract method**: subclasses of :class:`Agent`
must implement their own.
:param new_episode: the new Episode that the other Agent has created
:type new_episode: :class:`due.episode.Episode`
"""
pass
def start_episode(self, other):
"""
Create a new :class:`due.episode.Episode` to engage another Agent in a
new conversation.
		:param other: The Agent you are inviting to the conversation.
		:type other: :class:`due.agent.Agent`
:return: a new Episode object
:rtype: :class:`due.episode.LiveEpisode`
"""
result = episode.LiveEpisode(self, other)
other.new_episode_callback(result)
return result
def event_callback(self, event, episode):
"""
This is a callback method that is invoked whenever a new Event is acted
in an Episode. This method acts as a proxy to specific Event type
handlers:
* :meth:`Agent.utterance_callback` (:class:`due.event.Event.Type.Utterance`)
* :meth:`Agent.action_callback` (:class:`due.event.Event.Type.Action`)
* :meth:`Agent.leave_callback` (:class:`due.event.Event.Type.Leave`)
:param event: The new Event
:type event: :class:`due.event.Event`
:param episode: The Episode where the Event was acted
:type episode: :class:`due.episode.Episode`
:return: A list of response Events
:rtype: `list` of :class:`due.event.Event`
"""
		if event.type == Event.Type.Utterance:
			result = self.utterance_callback(episode)
		elif event.type == Event.Type.Action:
			result = self.action_callback(episode)
		elif event.type == Event.Type.Leave:
			result = self.leave_callback(episode)
		else:
			result = None
if not result:
result = []
return result
@abstractmethod
def utterance_callback(self, episode):
"""
This is a callback method that is invoked whenever a new Utterance
Event is acted in an Episode.
:param episode: the Episode where the Utterance was acted
:type episode: `due.episode.Episode`
:return: A list of response Events
:rtype: `list` of :class:`due.event.Event`
"""
pass
@abstractmethod
def action_callback(self, episode):
"""
This is a callback method that is invoked whenever a new Action Event
is acted in an Episode.
:param episode: the Episode where the Action was acted
:type episode: `due.episode.Episode`
:return: A list of response Events
:rtype: `list` of :class:`due.event.Event`
"""
pass
@abstractmethod
def leave_callback(self, episode):
"""
This is a callback method that is invoked whenever a new Leave Event is
acted in an Episode.
:param episode: the Episode where the Leave Event was acted
:type episode: `due.episode.Episode`
:return: A list of response Events
:rtype: `list` of :class:`due.event.Event`
"""
pass
def act_events(self, events, episode):
"""
Act a sequence of Events in the given Episode.
:param events: a list of Events
:type events: `list` of :class:`due.event.Event`
:param episode: an Episode
:type episode: :class:`due.episode.Episode`
"""
for e in events:
if e.type == Event.Type.Action:
e.payload.run()
episode.add_event(e)
def say(self, sentence, episode):
"""
Create an Event out of the given sentence and act the new Event in
		the given Episode. Subclasses of :class:`Agent` may need to extend this
		implementation with some output operation (eg. print on screen,
		broadcast to a jabber chat...).
:param sentence: A sentence
:type sentence: :class:`str`
:param episode: An Episode
:type episode: :class:`due.episode.Episode`
"""
utterance_event = Event(Event.Type.Utterance, datetime.now(), self.id, sentence)
episode.add_event(utterance_event)
def do(self, action, episode):
"""
		Create an Event out of the given Action and act the new Event in the
		given Episode.
		:param action: An Action
		:type action: :class:`due.action.Action`
		:param episode: An Episode
		:type episode: :class:`due.episode.Episode`
"""
action.run()
action_event = Event(Event.Type.Action, datetime.now(), self.id, action)
episode.add_event(action_event)
def leave(self, episode):
"""
Acts a new Leave Event in the given Episode.
:param episode: One of the Agent's active episodes
:type episode: :class:`due.episode.Episode`
"""
leave_event = Event(Event.Type.Leave, datetime.now(), self.id, None)
episode.add_event(leave_event)
def __str__(self):
return f"<Agent: {self.id}>"
|
dario-chiappetta/Due
|
due/agent.py
|
Python
|
gpl-3.0
| 7,123 | 0.026534 |
from django import forms
# future use
|
DemocracyFoundation/Epitome
|
Agora/forms.py
|
Python
|
agpl-3.0
| 40 | 0.025 |
#Copyright (c) 2014 Sony Computer Entertainment America LLC. See License.txt.
import sys
sys.path.append("./CommonTestScripts")
import Test
doc = atfDocService.OpenNewDocument(editor)
#===================== 0: root ==================================
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count")
package = editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count")
print "Trying to add objects that cannot be a child of the root"
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a font")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding an animation")
#===================== 1: Package ==================================
print "Adding children to a package"
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count")
form = editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), package.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count after adding form")
shader = editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), package.DomNode)
Test.Equal(2, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count after adding shader")
texture = editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), package.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count after adding texture")
font = editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), package.DomNode)
Test.Equal(4, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count after adding font")
packageChildCount = 4
print "Trying to add objects that cannot be a child of a package"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), package.DomNode)
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count does not increase after adding package")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), package.DomNode)
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count does not increase after adding sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), package.DomNode)
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count does not increase after adding text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), package.DomNode)
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count does not increase after adding animation")
#===================== 2: Form ==================================
print "Adding children to a form"
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count")
sprite = editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), form.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count after adding sprite")
text = editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), form.DomNode)
Test.Equal(2, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count after adding text")
animation = editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count after adding animation")
print "Trying to add objects that cannot be a child of a form"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count does not increase after adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count does not increase after adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count does not increase after adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count does not increase after adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count does not increase after adding a font")
#===================== 3: Shader ==================================
print "Verify cannot add children to a shader"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a font")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding an animation")
#===================== 4: Texture ==================================
print "Verify cannot add children to a texture"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a font")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding an animation")
#===================== 5: Font ==================================
print "Verify cannot add children to a font"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a font")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding an animation")
#===================== 6: Sprite ==================================
print "Adding children to a sprite"
Test.Equal(2, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count (starts with a transform and an empty ref)")
spriteUnderSprite = editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), sprite.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count after adding sprite")
textUnderSprite = editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), sprite.DomNode)
Test.Equal(4, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count after adding text")
animationUnderSprite = editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count after adding animation")
#must be added as ref:
shaderUnderSprite = editingContext.InsertAsRef[UIShader](DomNode(UISchema.UIShaderType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not change after adding shader as ref")
#refs will be added as real objects to the package
packageChildCount = packageChildCount + 1
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count increases after adding a ref")
print "Trying to add objects that cannot be a child of a sprite"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not increase when adding a form")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not increase when adding a font")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not increase when adding a shader")
#===================== 7: Text ==================================
print "Adding children to a text"
Test.Equal(2, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count (starts with a transform and an empty ref)")
spriteUnderText = editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), text.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count after adding sprite")
textUnderText = editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), text.DomNode)
Test.Equal(4, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count after adding text")
animationUnderText = editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count after adding animation")
#must be added as ref:
fontUnderText = editingContext.InsertAsRef[UIFont](DomNode(UISchema.UIFontType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not change after adding font as ref")
packageChildCount = packageChildCount + 1
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count increases after adding a ref")
print "Trying to add objects that cannot be a child of a text"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not increase when adding a font")
#===================== 8: Animation ==================================
print "Verify cannot add children to an animation"
animCount = Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode))
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a font")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding an animation")
print Test.SUCCESS
|
mindbaffle/ATF
|
Test/FunctionalTests/DomTreeEditorTestScripts/AddAllItems.py
|
Python
|
apache-2.0
| 20,874 | 0.007857 |
"""Makes figure with GridRad and MYRORSS predictors."""
import argparse
import numpy
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as pyplot
from gewittergefahr.gg_utils import soundings
from gewittergefahr.gg_utils import radar_utils
from gewittergefahr.gg_utils import time_conversion
from gewittergefahr.gg_utils import file_system_utils
from gewittergefahr.deep_learning import cnn
from gewittergefahr.deep_learning import testing_io
from gewittergefahr.deep_learning import training_validation_io as trainval_io
from gewittergefahr.plotting import plotting_utils
from gewittergefahr.plotting import radar_plotting
from gewittergefahr.plotting import imagemagick_utils
from gewittergefahr.scripts import plot_input_examples as plot_examples
SEPARATOR_STRING = '\n\n' + '*' * 50 + '\n\n'
MINOR_SEPARATOR_STRING = '\n\n' + '-' * 50 + '\n\n'
TIME_FORMAT = '%Y-%m-%d-%H%M%S'
DUMMY_TARGET_NAME = 'tornado_lead-time=0000-3600sec_distance=00000-10000m'
SOUNDING_FIELD_NAMES = [
soundings.U_WIND_NAME, soundings.V_WIND_NAME,
soundings.TEMPERATURE_NAME, soundings.SPECIFIC_HUMIDITY_NAME,
soundings.PRESSURE_NAME
]
SOUNDING_HEIGHTS_M_AGL = soundings.DEFAULT_HEIGHT_LEVELS_M_AGL
NUM_GRIDRAD_ROWS = 32
NUM_GRIDRAD_COLUMNS = 32
RADAR_HEIGHTS_M_AGL = numpy.array([3000], dtype=int)
GRIDRAD_FIELD_NAMES = [
radar_utils.REFL_NAME, radar_utils.SPECTRUM_WIDTH_NAME,
radar_utils.VORTICITY_NAME, radar_utils.DIVERGENCE_NAME
]
NUM_MYRORSS_ROWS = 64
NUM_MYRORSS_COLUMNS = 64
MYRORSS_SHEAR_FIELD_NAMES = [
radar_utils.LOW_LEVEL_SHEAR_NAME, radar_utils.MID_LEVEL_SHEAR_NAME
]
COLOUR_BAR_LENGTH = 0.8
DEFAULT_FONT_SIZE = 45
TITLE_FONT_SIZE = 45
COLOUR_BAR_FONT_SIZE = 45
SOUNDING_FONT_SIZE = 45
PANEL_LETTER_FONT_SIZE = 75
pyplot.rc('font', size=DEFAULT_FONT_SIZE)
pyplot.rc('axes', titlesize=DEFAULT_FONT_SIZE)
pyplot.rc('axes', labelsize=DEFAULT_FONT_SIZE)
pyplot.rc('xtick', labelsize=DEFAULT_FONT_SIZE)
pyplot.rc('ytick', labelsize=DEFAULT_FONT_SIZE)
pyplot.rc('legend', fontsize=DEFAULT_FONT_SIZE)
pyplot.rc('figure', titlesize=DEFAULT_FONT_SIZE)
X_LABEL_COORD_NORMALIZED = -0.02
Y_LABEL_COORD_NORMALIZED = 0.85
FIGURE_RESOLUTION_DPI = 300
CONCAT_FIGURE_SIZE_PX = int(1e7)
GRIDRAD_DIR_ARG_NAME = 'gridrad_example_dir_name'
GRIDRAD_ID_ARG_NAME = 'gridrad_full_id_string'
GRIDRAD_TIME_ARG_NAME = 'gridrad_time_string'
MYRORSS_DIR_ARG_NAME = 'myrorss_example_dir_name'
MYRORSS_ID_ARG_NAME = 'myrorss_full_id_string'
MYRORSS_TIME_ARG_NAME = 'myrorss_time_string'
OUTPUT_DIR_ARG_NAME = 'output_dir_name'
GRIDRAD_DIR_HELP_STRING = (
'Name of top-level directory with GridRad examples. Files therein will be '
'found by `input_examples.find_example_file` and read by '
'`input_examples.read_example_file`.')
GRIDRAD_ID_HELP_STRING = 'Full ID of GridRad storm object.'
GRIDRAD_TIME_HELP_STRING = (
'Valid time (format "yyyy-mm-dd-HHMMSS") of GridRad storm object.')
MYRORSS_DIR_HELP_STRING = 'Same as `{0:s}` but for MYRORSS.'.format(
GRIDRAD_DIR_ARG_NAME)
MYRORSS_ID_HELP_STRING = 'Same as `{0:s}` but for MYRORSS.'.format(
GRIDRAD_ID_ARG_NAME)
MYRORSS_TIME_HELP_STRING = 'Same as `{0:s}` but for MYRORSS.'.format(
GRIDRAD_TIME_ARG_NAME)
OUTPUT_DIR_HELP_STRING = (
'Name of output directory. Figures will be saved here.')
INPUT_ARG_PARSER = argparse.ArgumentParser()
INPUT_ARG_PARSER.add_argument(
'--' + GRIDRAD_DIR_ARG_NAME, type=str, required=True,
help=GRIDRAD_DIR_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
'--' + GRIDRAD_ID_ARG_NAME, type=str, required=True,
help=GRIDRAD_ID_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
'--' + GRIDRAD_TIME_ARG_NAME, type=str, required=True,
help=GRIDRAD_TIME_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
'--' + MYRORSS_DIR_ARG_NAME, type=str, required=True,
help=MYRORSS_DIR_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
'--' + MYRORSS_ID_ARG_NAME, type=str, required=True,
help=MYRORSS_ID_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
'--' + MYRORSS_TIME_ARG_NAME, type=str, required=True,
help=MYRORSS_TIME_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
'--' + OUTPUT_DIR_ARG_NAME, type=str, required=True,
help=OUTPUT_DIR_HELP_STRING)
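# Example invocation; every path, storm ID, and time below is illustrative
# only and must be replaced with real values:
#
#     python make_predictor_figure.py \
#         --gridrad_example_dir_name /data/gridrad_examples \
#         --gridrad_full_id_string "012345_2011-04-27" \
#         --gridrad_time_string 2011-04-27-200000 \
#         --myrorss_example_dir_name /data/myrorss_examples \
#         --myrorss_full_id_string "678901_2011-04-27" \
#         --myrorss_time_string 2011-04-27-200000 \
#         --output_dir_name /data/predictor_figure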
def _read_one_example(
top_example_dir_name, full_storm_id_string, storm_time_unix_sec,
source_name, radar_field_name, include_sounding):
"""Reads one example (storm object).
T = number of input tensors to model
H_s = number of heights in sounding
:param top_example_dir_name: See documentation at top of file.
:param full_storm_id_string: Full storm ID.
:param storm_time_unix_sec: Valid time of storm.
:param source_name: Radar source (must be accepted by
`radar_utils.check_data_source`).
:param radar_field_name: Name of radar field (must be accepted by
`radar_utils.check_field_name`).
:param include_sounding: Boolean flag.
:return: predictor_matrices: length-T list of numpy arrays, where
the [i]th array is the [i]th input tensor to the model. The first axis
of each array has length = 1.
:return: model_metadata_dict: See doc for `cnn.write_model_metadata`.
    :return: sounding_pressures_pa: length-H_s numpy array of pressures. If
        soundings were not read, this is None.
"""
if source_name == radar_utils.GRIDRAD_SOURCE_ID:
num_radar_rows = NUM_GRIDRAD_ROWS
num_radar_columns = NUM_GRIDRAD_COLUMNS
else:
num_radar_rows = NUM_MYRORSS_ROWS
num_radar_columns = NUM_MYRORSS_COLUMNS
training_option_dict = dict()
training_option_dict[trainval_io.RADAR_FIELDS_KEY] = [radar_field_name]
training_option_dict[trainval_io.RADAR_HEIGHTS_KEY] = RADAR_HEIGHTS_M_AGL
training_option_dict[trainval_io.SOUNDING_FIELDS_KEY] = (
SOUNDING_FIELD_NAMES if include_sounding else None
)
training_option_dict[trainval_io.SOUNDING_HEIGHTS_KEY] = (
SOUNDING_HEIGHTS_M_AGL
)
training_option_dict[trainval_io.NUM_ROWS_KEY] = num_radar_rows
training_option_dict[trainval_io.NUM_COLUMNS_KEY] = num_radar_columns
training_option_dict[trainval_io.NORMALIZATION_TYPE_KEY] = None
training_option_dict[trainval_io.TARGET_NAME_KEY] = DUMMY_TARGET_NAME
training_option_dict[trainval_io.BINARIZE_TARGET_KEY] = False
training_option_dict[trainval_io.SAMPLING_FRACTIONS_KEY] = None
training_option_dict[trainval_io.REFLECTIVITY_MASK_KEY] = None
training_option_dict[trainval_io.UPSAMPLE_REFLECTIVITY_KEY] = False
model_metadata_dict = {
cnn.TRAINING_OPTION_DICT_KEY: training_option_dict,
cnn.LAYER_OPERATIONS_KEY: None,
}
print(MINOR_SEPARATOR_STRING)
example_dict = testing_io.read_predictors_specific_examples(
top_example_dir_name=top_example_dir_name,
desired_full_id_strings=[full_storm_id_string],
desired_times_unix_sec=numpy.array([storm_time_unix_sec], dtype=int),
option_dict=model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY],
layer_operation_dicts=None
)
predictor_matrices = example_dict[testing_io.INPUT_MATRICES_KEY]
sounding_pressure_matrix_pa = example_dict[
testing_io.SOUNDING_PRESSURES_KEY]
if sounding_pressure_matrix_pa is None:
sounding_pressures_pa = None
else:
sounding_pressures_pa = sounding_pressure_matrix_pa[0, ...]
return predictor_matrices, model_metadata_dict, sounding_pressures_pa
def _run(gridrad_example_dir_name, gridrad_full_id_string, gridrad_time_string,
myrorss_example_dir_name, myrorss_full_id_string, myrorss_time_string,
output_dir_name):
"""Makes figure with GridRad and MYRORSS predictors.
This is effectively the main method.
:param gridrad_example_dir_name: See documentation at top of file.
:param gridrad_full_id_string: Same.
:param gridrad_time_string: Same.
:param myrorss_example_dir_name: Same.
:param myrorss_full_id_string: Same.
:param myrorss_time_string: Same.
:param output_dir_name: Same.
"""
file_system_utils.mkdir_recursive_if_necessary(
directory_name=output_dir_name)
gridrad_time_unix_sec = time_conversion.string_to_unix_sec(
gridrad_time_string, TIME_FORMAT)
myrorss_time_unix_sec = time_conversion.string_to_unix_sec(
myrorss_time_string, TIME_FORMAT)
letter_label = None
num_gridrad_fields = len(GRIDRAD_FIELD_NAMES)
panel_file_names = [None] * num_gridrad_fields * 2
for j in range(num_gridrad_fields):
these_predictor_matrices, this_metadata_dict = _read_one_example(
top_example_dir_name=gridrad_example_dir_name,
full_storm_id_string=gridrad_full_id_string,
storm_time_unix_sec=gridrad_time_unix_sec,
source_name=radar_utils.GRIDRAD_SOURCE_ID,
radar_field_name=GRIDRAD_FIELD_NAMES[j], include_sounding=False
)[:2]
print(MINOR_SEPARATOR_STRING)
this_handle_dict = plot_examples.plot_one_example(
list_of_predictor_matrices=these_predictor_matrices,
model_metadata_dict=this_metadata_dict, pmm_flag=False,
example_index=0, plot_sounding=False, allow_whitespace=True,
plot_panel_names=False, add_titles=False, label_colour_bars=False,
colour_bar_font_size=COLOUR_BAR_FONT_SIZE,
colour_bar_length=COLOUR_BAR_LENGTH)
this_title_string = radar_plotting.fields_and_heights_to_names(
field_names=[GRIDRAD_FIELD_NAMES[j]],
heights_m_agl=RADAR_HEIGHTS_M_AGL[[0]], include_units=True
)[0]
this_title_string = this_title_string.replace('\n', ' ').replace(
' km AGL', ' km')
this_title_string = 'GridRad {0:s}{1:s}'.format(
this_title_string[0].lower(), this_title_string[1:]
)
this_figure_object = this_handle_dict[
plot_examples.RADAR_FIGURES_KEY][0]
this_axes_object = this_handle_dict[
plot_examples.RADAR_AXES_KEY][0][0, 0]
this_figure_object.suptitle('')
this_axes_object.set_title(
this_title_string, fontsize=TITLE_FONT_SIZE)
# this_axes_object.set_yticklabels(
# this_axes_object.get_yticks(), color=ALMOST_WHITE_COLOUR
# )
if letter_label is None:
letter_label = 'a'
else:
letter_label = chr(ord(letter_label) + 1)
plotting_utils.label_axes(
axes_object=this_axes_object,
label_string='({0:s})'.format(letter_label),
font_size=PANEL_LETTER_FONT_SIZE,
x_coord_normalized=X_LABEL_COORD_NORMALIZED,
y_coord_normalized=Y_LABEL_COORD_NORMALIZED
)
panel_file_names[j * 2] = '{0:s}/gridrad_{1:s}.jpg'.format(
output_dir_name, GRIDRAD_FIELD_NAMES[j].replace('_', '-')
)
print('Saving figure to: "{0:s}"...'.format(panel_file_names[j * 2]))
this_figure_object.savefig(
panel_file_names[j * 2], dpi=FIGURE_RESOLUTION_DPI,
pad_inches=0, bbox_inches='tight'
)
pyplot.close(this_figure_object)
print(SEPARATOR_STRING)
num_myrorss_shear_fields = len(MYRORSS_SHEAR_FIELD_NAMES)
for j in range(num_myrorss_shear_fields):
(these_predictor_matrices, this_metadata_dict, these_pressures_pascals
) = _read_one_example(
top_example_dir_name=myrorss_example_dir_name,
full_storm_id_string=myrorss_full_id_string,
storm_time_unix_sec=myrorss_time_unix_sec,
source_name=radar_utils.MYRORSS_SOURCE_ID,
radar_field_name=MYRORSS_SHEAR_FIELD_NAMES[j],
include_sounding=j == 0)
print(MINOR_SEPARATOR_STRING)
this_handle_dict = plot_examples.plot_one_example(
list_of_predictor_matrices=these_predictor_matrices,
model_metadata_dict=this_metadata_dict, pmm_flag=False,
example_index=0, plot_sounding=j == 0,
sounding_pressures_pascals=these_pressures_pascals,
allow_whitespace=True, plot_panel_names=False, add_titles=False,
label_colour_bars=False, colour_bar_font_size=COLOUR_BAR_FONT_SIZE,
colour_bar_length=COLOUR_BAR_LENGTH,
sounding_font_size=SOUNDING_FONT_SIZE)
if j == 0:
this_axes_object = this_handle_dict[plot_examples.SOUNDING_AXES_KEY]
this_axes_object.set_title('Proximity sounding')
letter_label = chr(ord(letter_label) + 1)
plotting_utils.label_axes(
axes_object=this_axes_object,
label_string='({0:s})'.format(letter_label),
font_size=PANEL_LETTER_FONT_SIZE,
x_coord_normalized=X_LABEL_COORD_NORMALIZED,
y_coord_normalized=Y_LABEL_COORD_NORMALIZED
)
this_figure_object = this_handle_dict[
plot_examples.SOUNDING_FIGURE_KEY]
panel_file_names[1] = '{0:s}/sounding.jpg'.format(output_dir_name)
print('Saving figure to: "{0:s}"...'.format(panel_file_names[1]))
this_figure_object.savefig(
panel_file_names[1], dpi=FIGURE_RESOLUTION_DPI,
pad_inches=0, bbox_inches='tight'
)
pyplot.close(this_figure_object)
this_title_string = radar_plotting.fields_and_heights_to_names(
field_names=[radar_utils.REFL_NAME],
heights_m_agl=RADAR_HEIGHTS_M_AGL[[0]], include_units=True
)[0]
this_title_string = this_title_string.replace('\n', ' ').replace(
' km AGL', ' km')
this_title_string = 'MYRORSS {0:s}{1:s}'.format(
this_title_string[0].lower(), this_title_string[1:]
)
this_figure_object = this_handle_dict[
plot_examples.RADAR_FIGURES_KEY][0]
this_axes_object = this_handle_dict[
plot_examples.RADAR_AXES_KEY][0][0, 0]
this_figure_object.suptitle('')
this_axes_object.set_title(
this_title_string, fontsize=TITLE_FONT_SIZE)
letter_label = chr(ord(letter_label) + 1)
plotting_utils.label_axes(
axes_object=this_axes_object,
label_string='({0:s})'.format(letter_label),
font_size=PANEL_LETTER_FONT_SIZE,
x_coord_normalized=X_LABEL_COORD_NORMALIZED,
y_coord_normalized=Y_LABEL_COORD_NORMALIZED
)
panel_file_names[3] = '{0:s}/myrorss_{1:s}.jpg'.format(
output_dir_name, radar_utils.REFL_NAME.replace('_', '-')
)
print('Saving figure to: "{0:s}"...'.format(panel_file_names[3]))
this_figure_object.savefig(
panel_file_names[3], dpi=FIGURE_RESOLUTION_DPI,
pad_inches=0, bbox_inches='tight'
)
pyplot.close(this_figure_object)
this_title_string = radar_plotting.fields_and_heights_to_names(
field_names=[MYRORSS_SHEAR_FIELD_NAMES[j]],
heights_m_agl=RADAR_HEIGHTS_M_AGL[[0]], include_units=True
)[0]
this_title_string = this_title_string.split('\n')[0]
this_title_string = 'MYRORSS {0:s}{1:s}'.format(
this_title_string[0].lower(), this_title_string[1:]
)
this_figure_object = this_handle_dict[
plot_examples.RADAR_FIGURES_KEY][1]
this_axes_object = this_handle_dict[
plot_examples.RADAR_AXES_KEY][1][0, 0]
this_figure_object.suptitle('')
this_axes_object.set_title(
this_title_string, fontsize=TITLE_FONT_SIZE)
letter_label = chr(ord(letter_label) + 1)
plotting_utils.label_axes(
axes_object=this_axes_object,
label_string='({0:s})'.format(letter_label),
font_size=PANEL_LETTER_FONT_SIZE,
x_coord_normalized=X_LABEL_COORD_NORMALIZED,
y_coord_normalized=Y_LABEL_COORD_NORMALIZED
)
panel_file_names[5 + j * 2] = '{0:s}/myrorss_{1:s}.jpg'.format(
output_dir_name, MYRORSS_SHEAR_FIELD_NAMES[j].replace('_', '-')
)
print('Saving figure to: "{0:s}"...'.format(
panel_file_names[5 + j * 2]
))
this_figure_object.savefig(
panel_file_names[5 + j * 2], dpi=FIGURE_RESOLUTION_DPI,
pad_inches=0, bbox_inches='tight'
)
pyplot.close(this_figure_object)
        if j != num_myrorss_shear_fields - 1:
print(SEPARATOR_STRING)
concat_file_name = '{0:s}/predictors.jpg'.format(output_dir_name)
print('Concatenating panels to: "{0:s}"...'.format(concat_file_name))
imagemagick_utils.concatenate_images(
input_file_names=panel_file_names, output_file_name=concat_file_name,
num_panel_rows=4, num_panel_columns=2)
imagemagick_utils.resize_image(
input_file_name=concat_file_name, output_file_name=concat_file_name,
output_size_pixels=CONCAT_FIGURE_SIZE_PX)
if __name__ == '__main__':
INPUT_ARG_OBJECT = INPUT_ARG_PARSER.parse_args()
_run(
gridrad_example_dir_name=getattr(
INPUT_ARG_OBJECT, GRIDRAD_DIR_ARG_NAME),
gridrad_full_id_string=getattr(INPUT_ARG_OBJECT, GRIDRAD_ID_ARG_NAME),
gridrad_time_string=getattr(INPUT_ARG_OBJECT, GRIDRAD_TIME_ARG_NAME),
myrorss_example_dir_name=getattr(
INPUT_ARG_OBJECT, MYRORSS_DIR_ARG_NAME),
myrorss_full_id_string=getattr(INPUT_ARG_OBJECT, MYRORSS_ID_ARG_NAME),
myrorss_time_string=getattr(INPUT_ARG_OBJECT, MYRORSS_TIME_ARG_NAME),
output_dir_name=getattr(INPUT_ARG_OBJECT, OUTPUT_DIR_ARG_NAME)
)
|
thunderhoser/GewitterGefahr
|
gewittergefahr/prediction_paper_2019/make_predictor_figure.py
|
Python
|
mit
| 17,810 | 0.000842 |
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import time
import re
import copy
import sys
import os
import json
try:
from ClusterShell.NodeSet import NodeSet, NodeSetParseRangeError
except ImportError:
NodeSet = None
from shinken.macroresolver import MacroResolver
from shinken.log import logger
try:
stdout_encoding = sys.stdout.encoding
safe_stdout = (stdout_encoding == 'UTF-8')
except Exception, exp:
logger.error('Encoding detection error= %s', exp)
safe_stdout = False
# ########## Strings #############
# Try to print strings, but if there is a UTF-8 error, fall back to simple
# ASCII mode (e.g. if the terminal does not have en_US.UTF8 as LANG)
def safe_print(*args):
l = []
for e in args:
        # If we got a str, convert it to unicode; if we cannot print
        # UTF-8, fall back to ASCII mode
if isinstance(e, str):
if safe_stdout:
s = unicode(e, 'utf8', errors='ignore')
else:
s = e.decode('ascii', 'replace').encode('ascii', 'replace').\
decode('ascii', 'replace')
l.append(s)
# Same for unicode, but skip the unicode pass
elif isinstance(e, unicode):
if safe_stdout:
s = e
else:
s = e.encode('ascii', 'replace')
l.append(s)
# Other types can be directly convert in unicode
else:
l.append(unicode(e))
# Ok, now print it :)
print u' '.join(l)
def split_semicolon(line, maxsplit=None):
"""Split a line on semicolons characters but not on the escaped semicolons
"""
# Split on ';' character
splitted_line = line.split(';')
splitted_line_size = len(splitted_line)
    # if maxsplit is not specified, we set it to the number of parts
if maxsplit is None or 0 > maxsplit:
maxsplit = splitted_line_size
# Join parts to the next one, if ends with a '\'
# because we mustn't split if the semicolon is escaped
i = 0
while i < splitted_line_size - 1:
# for each part, check if its ends with a '\'
ends = splitted_line[i].endswith('\\')
if ends:
# remove the last character '\'
splitted_line[i] = splitted_line[i][:-1]
        # append the next part to the current one if the current is not the
        # last and ends with '\', or if there are more than maxsplit parts
if (ends or i >= maxsplit) and i < splitted_line_size - 1:
splitted_line[i] = ";".join([splitted_line[i], splitted_line[i + 1]])
# delete the next part
del splitted_line[i + 1]
splitted_line_size -= 1
# increase i only if we don't have append because after append the new
# string can end with '\'
else:
i += 1
return splitted_line
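# Doctest-style sketch: the escaped semicolon in the last part is preserved.
#
#     >>> split_semicolon(r'cmd;arg1;arg\;2')
#     ['cmd', 'arg1', 'arg;2']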
# Json-ify the objects
def jsonify_r(obj):
res = {}
cls = obj.__class__
if not hasattr(cls, 'properties'):
try:
json.dumps(obj)
return obj
except Exception, exp:
return None
properties = cls.properties.keys()
if hasattr(cls, 'running_properties'):
properties += cls.running_properties.keys()
for prop in properties:
if not hasattr(obj, prop):
continue
v = getattr(obj, prop)
# Maybe the property is not jsonable
try:
if isinstance(v, set):
v = list(v)
if isinstance(v, list):
v = sorted(v)
json.dumps(v)
res[prop] = v
except Exception, exp:
if isinstance(v, list):
lst = []
for _t in v:
t = getattr(_t.__class__, 'my_type', '')
if t == 'CommandCall':
try:
lst.append(_t.call)
except Exception:
pass
continue
if t and hasattr(_t, t + '_name'):
lst.append(getattr(_t, t + '_name'))
else:
pass
# print "CANNOT MANAGE OBJECT", _t, type(_t), t
res[prop] = lst
else:
t = getattr(v.__class__, 'my_type', '')
if t == 'CommandCall':
try:
res[prop] = v.call
except Exception:
pass
continue
if t and hasattr(v, t + '_name'):
res[prop] = getattr(v, t + '_name')
# else:
# print "CANNOT MANAGE OBJECT", v, type(v), t
return res
# ################################## TIME ##################################
# @memoized
def get_end_of_day(year, month_id, day):
end_time = (year, month_id, day, 23, 59, 59, 0, 0, -1)
end_time_epoch = time.mktime(end_time)
return end_time_epoch
# @memoized
def print_date(t):
return time.asctime(time.localtime(t))
# @memoized
def get_day(t):
return int(t - get_sec_from_morning(t))
# Same but for week day
def get_wday(t):
t_lt = time.localtime(t)
return t_lt.tm_wday
# @memoized
def get_sec_from_morning(t):
t_lt = time.localtime(t)
h = t_lt.tm_hour
m = t_lt.tm_min
s = t_lt.tm_sec
return h * 3600 + m * 60 + s
# @memoized
def get_start_of_day(year, month_id, day):
start_time = (year, month_id, day, 00, 00, 00, 0, 0, -1)
try:
start_time_epoch = time.mktime(start_time)
except OverflowError:
# Windows mktime sometimes crashes on (1970, 1, 1, ...)
start_time_epoch = 0.0
return start_time_epoch
# change a time in seconds like 3600 into a format: 0d 1h 0m 0s
def format_t_into_dhms_format(t):
s = t
m, s = divmod(s, 60)
h, m = divmod(m, 60)
d, h = divmod(h, 24)
return '%sd %sh %sm %ss' % (d, h, m, s)
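# Quick sanity check: 90061 seconds is one day, one hour, one minute and one
# second, so format_t_into_dhms_format(90061) returns '1d 1h 1m 1s'.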
# ################################ Pythonization ###########################
# First convert to float, so that for example '25.0' can be handled as the int 25
def to_int(val):
return int(float(val))
def to_float(val):
return float(val)
def to_char(val):
return val[0]
def to_split(val, split_on_coma=True):
if isinstance(val, list):
return val
if not split_on_coma:
return [val]
val = val.split(',')
if val == ['']:
val = []
return val
def list_split(val, split_on_coma=True):
if not split_on_coma:
return val
new_val = []
for x in val:
new_val.extend(x.split(','))
return new_val
def to_best_int_float(val):
i = int(float(val))
f = float(val)
# If the f is a .0 value,
# best match is int
if i == f:
return i
return f
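# Examples: to_best_int_float('25.0') -> 25 (an int), while
# to_best_int_float('25.5') -> 25.5 (a float).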
# bool('0') = true, so...
def to_bool(val):
if val == '1' or val == 'on' or val == 'true' or val == 'True':
return True
else:
return False
def from_bool_to_string(b):
if b:
return '1'
else:
return '0'
def from_bool_to_int(b):
if b:
return 1
else:
return 0
def from_list_to_split(val):
val = ','.join(['%s' % v for v in val])
return val
def from_float_to_int(val):
val = int(val)
return val
# Functions for brok_transformations
# They take 2 parameters: ref, and a value
# ref is the item like a service, and value
# if the value to preprocess
# Just a comma-separated string of all the names
def to_list_string_of_names(ref, tab):
return ",".join([e.get_name() for e in tab])
# Just a list of names
def to_list_of_names(ref, tab):
return [e.get_name() for e in tab]
# This will give a string if the value exists
# or '' if not
def to_name_if_possible(ref, value):
if value:
return value.get_name()
return ''
# take a list of hosts and return a list
# of all host_names
def to_hostnames_list(ref, tab):
r = []
for h in tab:
if hasattr(h, 'host_name'):
r.append(h.host_name)
return r
# Will create a dict with 2 lists:
# *services: all services of the tab
# *hosts: all hosts of the tab
def to_svc_hst_distinct_lists(ref, tab):
r = {'hosts': [], 'services': []}
for e in tab:
cls = e.__class__
if cls.my_type == 'service':
name = e.get_dbg_name()
r['services'].append(name)
else:
name = e.get_dbg_name()
r['hosts'].append(name)
return r
# Will expand the value with macros from the
# host/service ref before brok it
def expand_with_macros(ref, value):
return MacroResolver().resolve_simple_macros_in_string(value, ref.get_data_for_checks())
# Just get the string name of the object
# (like for realm)
def get_obj_name(obj):
# Maybe we do not have a real object but already a string. If so
# return the string
if isinstance(obj, basestring):
return obj
return obj.get_name()
# Same as before, but call with object,prop instead of just value
# But if we got an attribute error, return ''
def get_obj_name_two_args_and_void(obj, value):
try:
return value.get_name()
except AttributeError:
return ''
# Get the full name if there is one
def get_obj_full_name(obj):
try:
return obj.get_full_name()
except Exception:
return obj.get_name()
# return the list of keys of the custom dict
# but without the _ before
def get_customs_keys(d):
return [k[1:] for k in d.keys()]
# return the values of the dict
def get_customs_values(d):
return d.values()
# Checks that a parameter has a unique value. If it's a list, the last
# value set wins.
def unique_value(val):
if isinstance(val, list):
if val:
return val[-1]
else:
return ''
else:
return val
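# Example: unique_value(['eth0', 'eth1']) -> 'eth1' (last value wins), while
# unique_value('eth0') -> 'eth0'.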
# ##################### Sorting ################
def scheduler_no_spare_first(x, y):
if x.spare and not y.spare:
return 1
elif x.spare and y.spare:
return 0
else:
return -1
# -1 is x first, 0 equal, 1 is y first
def alive_then_spare_then_deads(x, y):
# First are alive
if x.alive and not y.alive:
return -1
    if y.alive and not x.alive:
        return 1
# if not alive both, I really don't care...
if not x.alive and not y.alive:
return -1
# Ok, both are alive... now spare after no spare
if not x.spare:
return -1
# x is a spare, so y must be before, even if
# y is a spare
if not y.spare:
return 1
return 0
# -1 is x first, 0 equal, 1 is y first
def sort_by_ids(x, y):
if x.id < y.id:
return -1
if x.id > y.id:
return 1
# So is equal
return 0
# From a tab of values, get the avg, min, max,
# ignoring the lowest and highest ~5% of values
# (which are too distinct from the main body)
# when the tab is large enough
def nighty_five_percent(t):
t2 = copy.copy(t)
t2.sort()
l = len(t)
    # Empty tab: nothing meaningful to compute
if l == 0:
return (None, None, None)
t_reduce = t2
    # only trim the tails if we got more than
    # 100 elements, otherwise it makes no sense
if l > 100:
offset = int(l * 0.05)
t_reduce = t_reduce[offset:-offset]
reduce_len = len(t_reduce)
reduce_sum = sum(t_reduce)
reduce_avg = float(reduce_sum) / reduce_len
reduce_max = max(t_reduce)
reduce_min = min(t_reduce)
return (reduce_avg, reduce_min, reduce_max)
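# Example: nighty_five_percent([1, 2, 3, 4]) -> (2.5, 1, 4); with 100 elements
# or fewer no trimming occurs, so this is simply (avg, min, max).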
# #################### Cleaning ##############
def strip_and_uniq(tab):
new_tab = set()
for elt in tab:
val = elt.strip()
if (val != ''):
new_tab.add(val)
return list(new_tab)
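# Example: strip_and_uniq([' a ', 'b', 'a', '']) returns ['a', 'b'] (in
# arbitrary order, since a set is used for deduplication).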
# ################### Pattern change application (mainly for host) #######
def expand_xy_pattern(pattern):
ns = NodeSet(str(pattern))
if len(ns) > 1:
for elem in ns:
for a in expand_xy_pattern(elem):
yield a
else:
yield pattern
# This function is used to generate all pattern change as
# recursive list.
# for example, for a [(1,3),(1,4),(1,5)] xy_couples,
# it will generate a 60 item list with:
# Rule: [1, '[1-5]', [1, '[1-4]', [1, '[1-3]', []]]]
# Rule: [1, '[1-5]', [1, '[1-4]', [2, '[1-3]', []]]]
# ...
def got_generation_rule_pattern_change(xy_couples):
res = []
xy_cpl = xy_couples
if xy_couples == []:
return []
(x, y) = xy_cpl[0]
for i in xrange(x, y + 1):
n = got_generation_rule_pattern_change(xy_cpl[1:])
if n != []:
for e in n:
res.append([i, '[%d-%d]' % (x, y), e])
else:
res.append([i, '[%d-%d]' % (x, y), []])
return res
# this function apply a recursive pattern change
# generate by the got_generation_rule_pattern_change
# function.
# It take one entry of this list, and apply
# recursively the change to s like:
# s = "Unit [1-3] Port [1-4] Admin [1-5]"
# rule = [1, '[1-5]', [2, '[1-4]', [3, '[1-3]', []]]]
# output = Unit 3 Port 2 Admin 1
def apply_change_recursive_pattern_change(s, rule):
# print "Try to change %s" % s, 'with', rule
# new_s = s
(i, m, t) = rule
# print "replace %s by %s" % (r'%s' % m, str(i)), 'in', s
s = s.replace(r'%s' % m, str(i))
# print "And got", s
if t == []:
return s
return apply_change_recursive_pattern_change(s, t)
# For service generator, get dict from a _custom properties
# as _disks C$(80%!90%),D$(80%!90%)$,E$(80%!90%)$
# return {'C': '80%!90%', 'D': '80%!90%', 'E': '80%!90%'}
# And if we have a key that look like [X-Y] we will expand it
# into Y-X+1 keys
GET_KEY_VALUE_SEQUENCE_ERROR_NOERROR = 0
GET_KEY_VALUE_SEQUENCE_ERROR_SYNTAX = 1
GET_KEY_VALUE_SEQUENCE_ERROR_NODEFAULT = 2
GET_KEY_VALUE_SEQUENCE_ERROR_NODE = 3
def get_key_value_sequence(entry, default_value=None):
array1 = []
array2 = []
conf_entry = entry
# match a key$(value1..n)$
keyval_pattern_txt = r"""
\s*(?P<key>[^,]+?)(?P<values>(\$\(.*?\)\$)*)(?:[,]|$)
"""
keyval_pattern = re.compile('(?x)' + keyval_pattern_txt)
# match a whole sequence of key$(value1..n)$
all_keyval_pattern = re.compile('(?x)^(' + keyval_pattern_txt + ')+$')
# match a single value
value_pattern = re.compile('(?:\$\((?P<val>.*?)\)\$)')
# match a sequence of values
all_value_pattern = re.compile('^(?:\$\(.*?\)\$)+$')
if all_keyval_pattern.match(conf_entry):
for mat in re.finditer(keyval_pattern, conf_entry):
r = {'KEY': mat.group('key')}
# The key is in mat.group('key')
# If there are also value(s)...
if mat.group('values'):
if all_value_pattern.match(mat.group('values')):
# If there are multiple values, loop over them
valnum = 1
for val in re.finditer(value_pattern, mat.group('values')):
r['VALUE' + str(valnum)] = val.group('val')
valnum += 1
else:
# Value syntax error
return (None, GET_KEY_VALUE_SEQUENCE_ERROR_SYNTAX)
else:
r['VALUE1'] = None
array1.append(r)
else:
# Something is wrong with the values. (Maybe unbalanced '$(')
# TODO: count opening and closing brackets in the pattern
return (None, GET_KEY_VALUE_SEQUENCE_ERROR_SYNTAX)
# now fill the empty values with the default value
for r in array1:
if r['VALUE1'] is None:
if default_value is None:
return (None, GET_KEY_VALUE_SEQUENCE_ERROR_NODEFAULT)
else:
r['VALUE1'] = default_value
r['VALUE'] = r['VALUE1']
# Now create new one but for [X-Y] matchs
# array1 holds the original entries. Some of the keys may contain wildcards
# array2 is filled with originals and inflated wildcards
if NodeSet is None:
# The pattern that will say if we have a [X-Y] key.
pat = re.compile('\[(\d*)-(\d*)\]')
for r in array1:
key = r['KEY']
orig_key = r['KEY']
# We have no choice, we cannot use NodeSet, so we use the
# simple regexp
if NodeSet is None:
m = pat.search(key)
got_xy = (m is not None)
else: # Try to look with a nodeset check directly
try:
ns = NodeSet(str(key))
# If we have more than 1 element, we have a xy thing
got_xy = (len(ns) != 1)
            except NodeSetParseRangeError:
                return (None, GET_KEY_VALUE_SEQUENCE_ERROR_NODE)
        # Now we've got our X-Y couples. If any were found,
        # the key was a "key generator"
if got_xy:
# Ok 2 cases: we have the NodeSet lib or not.
            # if not, we use the dumb algo (quick, but handles fewer
            # cases, like /N or , in patterns)
            if NodeSet is None:  # use the old algo
still_loop = True
xy_couples = [] # will get all X-Y couples
while still_loop:
m = pat.search(key)
                    if m is not None:  # we've found one X-Y
(x, y) = m.groups()
(x, y) = (int(x), int(y))
xy_couples.append((x, y))
                        # We must search if we've got other X-Y, so
                        # we delete this one, and loop
key = key.replace('[%d-%d]' % (x, y), 'Z' * 10)
else: # no more X-Y in it
still_loop = False
# Now we have our xy_couples, we can manage them
# We search all pattern change rules
rules = got_generation_rule_pattern_change(xy_couples)
# Then we apply them all to get ours final keys
for rule in rules:
res = apply_change_recursive_pattern_change(orig_key, rule)
new_r = {}
for key in r:
new_r[key] = r[key]
new_r['KEY'] = res
array2.append(new_r)
else:
# The key was just a generator, we can remove it
# keys_to_del.append(orig_key)
# We search all pattern change rules
# rules = got_generation_rule_pattern_change(xy_couples)
nodes_set = expand_xy_pattern(orig_key)
new_keys = list(nodes_set)
# Then we apply them all to get ours final keys
for new_key in new_keys:
# res = apply_change_recursive_pattern_change(orig_key, rule)
new_r = {}
for key in r:
new_r[key] = r[key]
new_r['KEY'] = new_key
array2.append(new_r)
else:
# There were no wildcards
array2.append(r)
# t1 = time.time()
# print "***********Diff", t1 -t0
return (array2, GET_KEY_VALUE_SEQUENCE_ERROR_NOERROR)
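# Illustrative call with plain keys (no [X-Y] pattern, so NodeSet expansion is
# not involved):
#
#     get_key_value_sequence('C$(80%!90%)$,D$(80%!90%)$')
#
# returns ([{'KEY': 'C', 'VALUE': '80%!90%', 'VALUE1': '80%!90%'},
#           {'KEY': 'D', 'VALUE': '80%!90%', 'VALUE1': '80%!90%'}],
#          GET_KEY_VALUE_SEQUENCE_ERROR_NOERROR)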
# ############################## Files management #######################
# We got a file like /tmp/toto/toto2/bob.png and we want to be sure the dir
# /tmp/toto/toto2/ really exists so we can copy into it. Try to create it if
# needed, and return True/False depending on success
def expect_file_dirs(root, path):
dirs = os.path.normpath(path).split('/')
dirs = [d for d in dirs if d != '']
# We will create all directory until the last one
# so we are doing a mkdir -p .....
# TODO: and windows????
tmp_dir = root
for d in dirs:
_d = os.path.join(tmp_dir, d)
logger.info('Verify the existence of file %s', _d)
if not os.path.exists(_d):
try:
os.mkdir(_d)
except Exception:
return False
tmp_dir = _d
return True
# ####################### Services/hosts search filters #######################
# Filters used in services or hosts find_by_filter method
# Return callback functions which are passed host or service instances, and
# should return a boolean value that indicates if the instance matched the
# filter
def filter_any(name):
def inner_filter(host):
return True
return inner_filter
def filter_none(name):
def inner_filter(host):
return False
return inner_filter
def filter_host_by_name(name):
def inner_filter(host):
if host is None:
return False
return host.host_name == name
return inner_filter
def filter_host_by_regex(regex):
host_re = re.compile(regex)
def inner_filter(host):
if host is None:
return False
return host_re.match(host.host_name) is not None
return inner_filter
def filter_host_by_group(group):
def inner_filter(host):
if host is None:
return False
return group in [g.hostgroup_name for g in host.hostgroups]
return inner_filter
def filter_host_by_tag(tpl):
def inner_filter(host):
if host is None:
return False
return tpl in [t.strip() for t in host.tags]
return inner_filter
def filter_service_by_name(name):
def inner_filter(service):
if service is None:
return False
return service.service_description == name
return inner_filter
def filter_service_by_regex_name(regex):
host_re = re.compile(regex)
def inner_filter(service):
if service is None:
return False
return host_re.match(service.service_description) is not None
return inner_filter
def filter_service_by_host_name(host_name):
def inner_filter(service):
if service is None or service.host is None:
return False
return service.host.host_name == host_name
return inner_filter
def filter_service_by_regex_host_name(regex):
host_re = re.compile(regex)
def inner_filter(service):
if service is None or service.host is None:
return False
return host_re.match(service.host.host_name) is not None
return inner_filter
def filter_service_by_hostgroup_name(group):
def inner_filter(service):
if service is None or service.host is None:
return False
return group in [g.hostgroup_name for g in service.host.hostgroups]
return inner_filter
def filter_service_by_host_tag_name(tpl):
def inner_filter(service):
if service is None or service.host is None:
return False
return tpl in [t.strip() for t in service.host.tags]
return inner_filter
def filter_service_by_servicegroup_name(group):
def inner_filter(service):
if service is None:
return False
return group in [g.servicegroup_name for g in service.servicegroups]
return inner_filter
def filter_host_by_bp_rule_label(label):
def inner_filter(host):
if host is None:
return False
return label in host.labels
return inner_filter
def filter_service_by_host_bp_rule_label(label):
def inner_filter(service):
if service is None or service.host is None:
return False
return label in service.host.labels
return inner_filter
def filter_service_by_bp_rule_label(label):
def inner_filter(service):
if service is None:
return False
return label in service.labels
return inner_filter
def is_complex_expr(expr):
for m in '()&|!*':
if m in expr:
return True
return False
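# Hedged examples of the intended behaviour (inputs are illustrative):
#     is_complex_expr('(h1|h2)&g1')  # True: contains '(', '&' and '|'
#     is_complex_expr('localhost')   # False: no meta-character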
|
staute/shinken_deb
|
shinken/util.py
|
Python
|
agpl-3.0
| 24,533 | 0.001141 |
# -*- coding: utf-8 -*-
from PyQt4 import QtCore, QtGui
from acq4.util.DataManager import *
#import acq4.Manager as Manager
import acq4.pyqtgraph as pg
#from acq4.pyqtgraph.MultiPlotWidget import MultiPlotWidget
#from acq4.pyqtgraph.ImageView import ImageView
from acq4.util.DictView import *
import acq4.util.metaarray as metaarray
import weakref
class FileDataView(QtGui.QSplitter):
def __init__(self, parent):
QtGui.QSplitter.__init__(self, parent)
#self.manager = Manager.getManager()
self.setOrientation(QtCore.Qt.Vertical)
self.current = None
self.currentType = None
self.widgets = []
self.dictWidget = None
#self.plots = []
def setCurrentFile(self, file):
#print "=============== set current file ============"
if file is self.current:
return
## What if we just want to update the data display?
#self.clear()
if file is None:
self.current = None
return
if file.isDir():
## Sequence or not?
return
else:
typ = file.fileType()
if typ is None:
return
else:
image = False
with pg.BusyCursor():
data = file.read()
if typ == 'ImageFile':
image = True
elif typ == 'MetaArray':
if data.ndim == 2 and not data.axisHasColumns(0) and not data.axisHasColumns(1):
image = True
elif data.ndim > 2:
image = True
else:
return
with pg.BusyCursor():
if image:
if self.currentType == 'image' and len(self.widgets) > 0:
try:
self.widgets[0].setImage(data, autoRange=False)
except:
print "widget types:", map(type, self.widgets)
raise
else:
self.clear()
w = pg.ImageView(self)
#print "add image:", w.ui.roiPlot.plotItem
#self.plots = [weakref.ref(w.ui.roiPlot.plotItem)]
self.addWidget(w)
w.setImage(data)
self.widgets.append(w)
self.currentType = 'image'
else:
self.clear()
w = pg.MultiPlotWidget(self)
self.addWidget(w)
w.plot(data)
self.currentType = 'plot'
self.widgets.append(w)
#print "add mplot:", w.mPlotItem.plots
#self.plots = [weakref.ref(p[0]) for p in w.mPlotItem.plots]
if (hasattr(data, 'implements') and data.implements('MetaArray')):
if self.dictWidget is None:
w = DictView(data._info)
self.dictWidget = w
#w.setText(str(data._info[-1]))
self.addWidget(w)
self.widgets.append(w)
h = self.size().height()
self.setSizes([h*0.8, h*0.2])
else:
self.dictWidget.setData(data._info)
def clear(self):
for w in self.widgets:
w.close()
w.setParent(None)
self.widgets = []
self.dictWidget = None
|
mgraupe/acq4
|
acq4/modules/DataManager/FileDataView.py
|
Python
|
mit
| 3,567 | 0.00841 |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mining RPCs
- getmininginfo
- getblocktemplate proposal mode
- submitblock"""
import copy
from binascii import b2a_hex
from decimal import Decimal
from test_framework.blocktools import create_coinbase
from test_framework.mininode import CBlock
from test_framework.test_framework import IoPTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
def b2x(b):
return b2a_hex(b).decode('ascii')
def assert_template(node, block, expect, rehash=True):
if rehash:
block.hashMerkleRoot = block.calc_merkle_root()
rsp = node.getblocktemplate({'data': b2x(block.serialize()), 'mode': 'proposal'})
assert_equal(rsp, expect)
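# Hedged usage sketch, mirroring the checks in the test below (names are from
# this file):
#     assert_template(node, block, None)                          # valid proposal
#     assert_template(node, bad_block, 'bad-txnmrklroot', False)  # bad merkle root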
class MiningTest(IoPTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = False
def run_test(self):
node = self.nodes[0]
self.log.info('getmininginfo')
mining_info = node.getmininginfo()
assert_equal(mining_info['blocks'], 200)
assert_equal(mining_info['chain'], 'regtest')
assert_equal(mining_info['currentblocktx'], 0)
assert_equal(mining_info['currentblockweight'], 0)
assert_equal(mining_info['difficulty'], Decimal('4.656542373906925E-10'))
assert_equal(mining_info['networkhashps'], Decimal('0.003333333333333334'))
assert_equal(mining_info['pooledtx'], 0)
# Mine a block to leave initial block download
node.generate(1)
tmpl = node.getblocktemplate()
self.log.info("getblocktemplate: Test capability advertised")
assert 'proposal' in tmpl['capabilities']
assert 'coinbasetxn' not in tmpl
coinbase_tx = create_coinbase(height=int(tmpl["height"]) + 1)
# sequence numbers must not be max for nLockTime to have effect
coinbase_tx.vin[0].nSequence = 2 ** 32 - 2
coinbase_tx.rehash()
block = CBlock()
block.nVersion = tmpl["version"]
block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
block.nTime = tmpl["curtime"]
block.nBits = int(tmpl["bits"], 16)
block.nNonce = 0
block.vtx = [coinbase_tx]
self.log.info("getblocktemplate: Test valid block")
assert_template(node, block, None)
self.log.info("submitblock: Test block decode failure")
assert_raises_rpc_error(-22, "Block decode failed", node.submitblock, b2x(block.serialize()[:-15]))
self.log.info("getblocktemplate: Test bad input hash for coinbase transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx[0].vin[0].prevout.hash += 1
bad_block.vtx[0].rehash()
assert_template(node, bad_block, 'bad-cb-missing')
self.log.info("submitblock: Test invalid coinbase transaction")
assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, b2x(bad_block.serialize()))
self.log.info("getblocktemplate: Test truncated final transaction")
assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(block.serialize()[:-1]), 'mode': 'proposal'})
self.log.info("getblocktemplate: Test duplicate transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx.append(bad_block.vtx[0])
assert_template(node, bad_block, 'bad-txns-duplicate')
self.log.info("getblocktemplate: Test invalid transaction")
bad_block = copy.deepcopy(block)
bad_tx = copy.deepcopy(bad_block.vtx[0])
bad_tx.vin[0].prevout.hash = 255
bad_tx.rehash()
bad_block.vtx.append(bad_tx)
assert_template(node, bad_block, 'bad-txns-inputs-missingorspent')
self.log.info("getblocktemplate: Test nonfinal transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx[0].nLockTime = 2 ** 32 - 1
bad_block.vtx[0].rehash()
assert_template(node, bad_block, 'bad-txns-nonfinal')
self.log.info("getblocktemplate: Test bad tx count")
# The tx count is immediately after the block header
TX_COUNT_OFFSET = 80
bad_block_sn = bytearray(block.serialize())
assert_equal(bad_block_sn[TX_COUNT_OFFSET], 1)
bad_block_sn[TX_COUNT_OFFSET] += 1
assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(bad_block_sn), 'mode': 'proposal'})
self.log.info("getblocktemplate: Test bad bits")
bad_block = copy.deepcopy(block)
bad_block.nBits = 469762303 # impossible in the real world
assert_template(node, bad_block, 'bad-diffbits')
self.log.info("getblocktemplate: Test bad merkle root")
bad_block = copy.deepcopy(block)
bad_block.hashMerkleRoot += 1
assert_template(node, bad_block, 'bad-txnmrklroot', False)
self.log.info("getblocktemplate: Test bad timestamps")
bad_block = copy.deepcopy(block)
bad_block.nTime = 2 ** 31 - 1
assert_template(node, bad_block, 'time-too-new')
bad_block.nTime = 0
assert_template(node, bad_block, 'time-too-old')
self.log.info("getblocktemplate: Test not best block")
bad_block = copy.deepcopy(block)
bad_block.hashPrevBlock = 123
assert_template(node, bad_block, 'inconclusive-not-best-prevblk')
if __name__ == '__main__':
MiningTest().main()
|
Jcing95/iop-hd
|
test/functional/mining.py
|
Python
|
mit
| 5,574 | 0.002153 |
"""
ship_crew.py
Generates a minimal ship crew based on tonnage.
python crew -s 400
"""
from __future__ import print_function
import random
import sys
sys.path.append(".")
from character import Character
import character_tools
def get_career():
return random.choice(['Scouts', 'Navy', 'Merchants'])
def create_crew(size):
for c in range(int(size/400)):
create_crewman("Pilot")
create_crewman("Navg")
for c in range(int(size/300)):
create_crewman("Eng")
def create_crewman(role):
if role == "Eng":
skill = "Engineering"
elif role == "Navg":
skill = "Navgigation"
elif role == "Helm":
skill = "Pilot"
else:
skill = "Computer"
crew = Character()
crew.generate_basic()
crew.run_career(get_career())
character_tools.add_skill(crew, skill)
print(role, end=" ")
crew.display()
print("")
|
makhidkarun/py_tools
|
lib/ship_crew.py
|
Python
|
gpl-3.0
| 852 | 0.026995 |
# -*- coding: utf-8 -*-
"""
<DefineSource>
@Date : Fri Nov 14 13:20:38 2014 \n
@Author : Erwan Ledoux \n\n
</DefineSource>
A Grouper establishes a group of parenting nodes for which
each level is set in an equivalent hdf5 structure.
"""
#<DefineAugmentation>
import ShareYourSystem as SYS
BaseModuleStr="ShareYourSystem.Hdformaters.Hdformater"
DecorationModuleStr="ShareYourSystem.Standards.Classors.Classer"
SYS.setSubModule(globals())
#</DefineAugmentation>
#<ImportSpecificModules>
import functools
from ShareYourSystem.Standards.Classors import Doer
from ShareYourSystem.Functers import Switcher
#</ImportSpecificModules>
#<DefineFunctions>
def getGroupedPathStrWithPathStrsList(_PathStrsList):
#Reduce
PathStr=functools.reduce(
lambda _TotalPathStr,_PathStr:
_TotalPathStr+_PathStr
if (len(_TotalPathStr)>0 and _TotalPathStr[-1]=='/') and (len(_PathStr)>0 and _PathStr[0]!='/'
) or (len(_TotalPathStr)>0 and _TotalPathStr[-1]!='/') and (len(_PathStr)>0 and _PathStr[0]=='/')
else
_TotalPathStr[:-1]+_PathStr
if (len(_TotalPathStr)>0 and _TotalPathStr[-1]=='/') and (len(_PathStr)>0 and _PathStr[0]=='/'
)
else _TotalPathStr+'/'+_PathStr
if '/' not in [_PathStr,_TotalPathStr]
else "",
_PathStrsList
)
#Maybe add / at the beginning
if (len(PathStr)>0 and PathStr[0]!='/') or PathStr=="":
PathStr='/'+PathStr
#Return
return PathStr
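#Hedged examples of the joining behaviour above (inputs are illustrative):
#	getGroupedPathStrWithPathStrsList(['/a/','/b']) -> '/a/b'
#	getGroupedPathStrWithPathStrsList(['a','b']) -> '/a/b'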
#</DefineFunctions>
#<DefineClass>
@DecorationClass()
class GrouperClass(BaseClass):
#Definition
RepresentingKeyStrsList=[
'GroupedParentVariable',
'GroupedInt',
'GroupedKeyStr',
'GroupedDeriveParentersList',
'GroupedPathStrsList',
'GroupedPathStr'
]
#@Hooker.HookerClass(**{'HookingAfterVariablesList':[{'CallingVariable':BaseClass.__init__}]})
def default_init(
self,
_GroupedParentVariable=None,
_GroupedInt=-1,
_GroupedKeyStr="",
_GroupedDeriveParentersList=None,
_GroupedPathStrsList=None,
_GroupedPathStr="/",
**_KwargVariablesDict
):
#Call the parent __init__ method
BaseClass.__init__(self,**_KwargVariablesDict)
#set
self.HdformatingFileKeyStr=SYS.InflectEngine.plural(
Doer.getDoStrWithDoerStr(
self.__class__.NameStr
)
)+'.hdf5'
def do_group(self):
#debug
'''
self.debug(('self.',self,['ParentingNodeStr']))
'''
#Parent
self.parent()
#Check
if len(self.ParentedDeriveParentersList)>0:
UppestParentPointer=self.ParentedDeriveParentersList[-1]
else:
UppestParentPointer=self
#Then get also from the UppestParentPointer its UppestGroupedParentVariable
if hasattr(UppestParentPointer,'GroupedDeriveParentersList'):
if len(UppestParentPointer.GroupedDeriveParentersList)>0:
				UppestGroupedParentVariable=UppestParentPointer.GroupedDeriveParentersList[-1]
else:
UppestGroupedParentVariable=UppestParentPointer
#Definition of the Link
		HdformatedFileVariableKeyStr="HdformatedFileVariable"
#debug
#self.debug('UppestParentPointer.GroupingPathStr is '+UppestParentPointer.GroupingPathStr)
#Point on the FilePointer of the uppest grouped Parent
self.__setattr__(
HdformatedFileVariableKeyStr,
getattr(
UppestGroupedParentVariable,
"HdformatedFileVariable"
)
)
#Get it definitely !
FilePointer=getattr(self,HdformatedFileVariableKeyStr)
#debug
#print('FilePointer is ',FilePointer)
#Create a group in the hdf5 file
if FilePointer!=None:
#debug
'''
self.debug(('self.',self,['NodedPathStr']))
'''
#set the GroupedPathStr
self.GroupedPathStr=getGroupedPathStrWithPathStrsList(
[
UppestGroupedParentVariable.GroupedPathStr,
self.ParentedNodePathStr
]
)
#debug
'''
self.debug(('self.',self,['GroupedPathStr']))
'''
#Check if the Path exists
if self.GroupedPathStr not in FilePointer:
#set all the intermediate Paths before
PathStrsList=self.GroupedPathStr.split('/')[1:]
ParsingChildPathStr="/"
#set the PathStr from the top to the down (integrativ loop)
for ChildPathStr in PathStrsList:
#Go deeper
NewParsingChildPathStr=ParsingChildPathStr+ChildPathStr
#Create the group if not already
if NewParsingChildPathStr not in FilePointer:
if self.HdformatingModuleStr=="tables":
FilePointer.create_group(ParsingChildPathStr,ChildPathStr)
elif self.HdformatingModuleStr=="h5py":
Group=FilePointer[ParsingChildPathStr]
Group.create_group(ChildPathStr)
#Prepare the next group
ParsingChildPathStr=NewParsingChildPathStr+'/'
#Return self
return self
#</DefineClass>
|
Ledoux/ShareYourSystem
|
Pythonlogy/draft/Noders/Grouper/Drafts/__init__ copy.py
|
Python
|
mit
| 4,755 | 0.04837 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_exchange_rates_adjustment
import wizard
|
sysadminmatmoz/odoo-clearcorp
|
account_exchange_rates_adjustment/__init__.py
|
Python
|
agpl-3.0
| 1,086 | 0 |
# -*- coding: utf-8 -*-
msg = {
'en': {
'isbn-formatting': u'Robot: Formatting ISBN',
},
# Author: Csisc
# Author: Lloffiwr
# Author: Xqt
'qqq': {
'isbn-formatting': u'Edit summary when the bot fixes [http://en.wikipedia.org/wiki/International_Standard_Book_Number ISBN] number formatting.',
},
# Author: Csisc
'aeb': {
'isbn-formatting': u'روبوت: تنسيق ISBN',
},
# Author: Naudefj
'af': {
'isbn-formatting': u'Robot: Formatteer ISBN',
},
# Author: Als-Holder
'als': {
'isbn-formatting': u'Bot: ISBN formatiert',
},
# Author: Meno25
# Author: OsamaK
'ar': {
'isbn-formatting': u'روبوت: تنسيق ISBN',
},
# Author: Esbardu
# Author: Xuacu
'ast': {
'isbn-formatting': u'Robó: Formatiando l\'ISBN',
},
# Author: Khutuck
'az': {
'isbn-formatting': u'Bot: ISBN formatlandırılır',
},
# Author: E THP
'azb': {
'isbn-formatting': u'بوت:شابکلری ایستاندارد ائتمک',
},
# Author: Sagan
'ba': {
'isbn-formatting': u'Робот: ISBN үҙгәртеү',
},
# Author: Mucalexx
'bar': {
'isbn-formatting': u'Bot: Formaatir ISBN',
},
# Author: Yury Tarasievich
'be': {
'isbn-formatting': u'робат аформіў ISBN',
},
# Author: Jim-by
'be-x-old': {
'isbn-formatting': u'Робат: фарматаваньне ISBN',
},
# Author: DCLXVI
'bg': {
'isbn-formatting': u'Робот: Форматиране на ISBN',
},
# Author: Riemogerz
'bjn': {
'isbn-formatting': u'Bot: Mampurmat ISBN',
},
# Author: Bellayet
# Author: Wikitanvir
'bn': {
'isbn-formatting': u'বট: আইএসবিএন নম্বরের ফরম্যাট ঠিক করছে',
},
# Author: Fulup
'br': {
'isbn-formatting': u'Robot : O furmadiñ an ISBN',
},
# Author: CERminator
# Author: Edinwiki
'bs': {
'isbn-formatting': u'Bot: Oblikovanje ISBN',
},
# Author: SMP
'ca': {
'isbn-formatting': u'Robot: Format de l\'ISBN',
},
# Author: Asoxor
'ckb': {
'isbn-formatting': u'ڕۆبۆت: ڕاستکردنەوەی شێوازی ISBN',
},
# Author: Dontlietome7
'cs': {
'isbn-formatting': u'Robot: Formátování ISBN',
},
# Author: Salam
'cv': {
'isbn-formatting': u'робот: ISBN улӑштарни',
},
# Author: Lloffiwr
# Author: Xxglennxx
'cy': {
'isbn-formatting': u'Robot: Yn fformatio\'r rhif ISBN',
},
# Author: Peter Alberti
'da': {
'isbn-formatting': u'Robot: Formaterer ISBN',
},
'de': {
'isbn-formatting': u'Bot: Formatiere ISBN',
},
# Author: Eruedin
'de-ch': {
'isbn-formatting': u'Bot: Formatiere ISBN',
},
# Author: Erdemaslancan
'diq': {
'isbn-formatting': u'Boti ISBN\'i timar kerd',
},
# Author: Evropi
'el': {
'isbn-formatting': u'Ρομπότ: Μορφοποίηση ISBN',
},
# Author: Mihxil
# Author: Objectivesea
'eo': {
'isbn-formatting': u'Roboto: Aranĝas la ISBN',
},
# Author: Dferg
# Author: Invadinado
# Author: Xqt
'es': {
'isbn-formatting': u'Bot: Estandarizando ISBN',
},
# Author: Pikne
'et': {
'isbn-formatting': u'Robot: ISBN vormindatud',
},
# Author: An13sa
# Author: Xabier Armendaritz
'eu': {
'isbn-formatting': u'Robota: ISBNari formatua ematen',
},
# Author: ZxxZxxZ
'fa': {
'isbn-formatting': u'ربات: استانداردسازی شابک',
},
# Author: Crt
'fi': {
'isbn-formatting': u'Botti muotoili ISBN-tunnuksen',
},
# Author: EileenSanda
'fo': {
'isbn-formatting': u'Bottur: Formaterar ISBN',
},
# Author: Sherbrooke
'fr': {
'isbn-formatting': u'Robot : Mise en forme du ISBN',
},
# Author: ChrisPtDe
'frp': {
'isbn-formatting': u'Robot : misa en fôrma du ISBN',
},
# Author: Murma174
'frr': {
'isbn-formatting': u'Bot: Formatiare ISBN',
},
# Author: Klenje
'fur': {
'isbn-formatting': u'Robot: o formati il codiç ISBN',
},
# Author: Toliño
'gl': {
'isbn-formatting': u'Bot: Dou formato ISBN',
},
# Author: Jetlag
'hak': {
'isbn-formatting': u'機械人:格式化ISBN',
},
# Author: YaronSh
'he': {
'isbn-formatting': u'בוט: מעצב מסת״ב',
},
# Author: Ex13
'hr': {
'isbn-formatting': u'Bot: Oblikovanje ISBN',
},
# Author: Michawiki
'hsb': {
'isbn-formatting': u'Boćik: ISBN so formatuje',
},
# Author: Dani
'hu': {
'isbn-formatting': u'Bot: ISBN formázása',
},
# Author: Xelgen
'hy': {
'isbn-formatting': u'Ռոբոտը ուղղում է ԳՄՍՀի (ISBN) ձևաչափը',
},
# Author: McDutchie
'ia': {
'isbn-formatting': u'Robot: Formatation ISBN',
},
# Author: IvanLanin
'id': {
'isbn-formatting': u'Bot: Memformat ISBN',
},
# Author: Renan
'ie': {
'isbn-formatting': u'Machine: Formatant ISBN',
},
# Author: Lam-ang
'ilo': {
'isbn-formatting': u'Robot: Agiporpormat ti ISBN',
},
# Author: Snævar
'is': {
'isbn-formatting': u'Vélmenni: Forsnið ISBN',
},
# Author: Beta16
'it': {
'isbn-formatting': u'Bot: Formatto ISBN',
},
# Author: Fryed-peach
# Author: Shirayuki
'ja': {
'isbn-formatting': u'ロボットによる: ISBN の整形',
},
# Author: NoiX180
'jv': {
'isbn-formatting': u'Bot: Mormat ISBN',
},
# Author: 아라
'ko': {
'isbn-formatting': u'로봇: ISBN 형식 지정',
},
# Author: Purodha
'ksh': {
'isbn-formatting': u'Bot: ISBN zerääsch jemaat.',
},
# Author: George Animal
'ku': {
'isbn-formatting': u'Robot:ISBN\'ê format bike',
},
# Author: Robby
'lb': {
'isbn-formatting': u'Bot: ISBN formatéiert',
},
# Author: Pahles
'li': {
'isbn-formatting': u'Robot: ISBN opgemaak',
},
# Author: Hugo.arg
'lt': {
'isbn-formatting': u'Robotas: Formatuojamas ISBN',
},
# Author: Karlis
'lv': {
'isbn-formatting': u'Robots: ISBN formatējums',
},
# Author: StefanusRA
'map-bms': {
'isbn-formatting': u'Bot: Mbeneri format ISBN',
},
# Author: Jagwar
'mg': {
'isbn-formatting': u'Rôbô : manao formatage ny ISBN',
},
# Author: Luthfi94
'min': {
'isbn-formatting': u'Bot: Mamformat ISBN',
},
# Author: Bjankuloski06
'mk': {
'isbn-formatting': u'Робот: Форматирам ISBN',
},
# Author: Praveenp
'ml': {
'isbn-formatting': u'യന്ത്രം: ഐ.എസ്.ബി.എൻ. ശൈലി ശരിയാക്കുന്നു',
},
# Author: Htt
'mr': {
'isbn-formatting': u'सांगकाम्या: आयएसबीएन स्वरूपण',
},
# Author: Kurniasan
'ms': {
'isbn-formatting': u'Bot: Memformatkan ISBN',
},
# Author: Chrisportelli
'mt': {
'isbn-formatting': u'Bot: Format ISBN',
},
# Author: Lionslayer
'my': {
'isbn-formatting': u'ရိုဘော့ - ISBN နံပါတ်ကို ပုံစံချနေသည်',
},
# Author: Slomox
'nds': {
'isbn-formatting': u'Bot: ISBN-Format',
},
# Author: Servien
'nds-nl': {
'isbn-formatting': u'Bot: ISBN op-emaakt',
},
# Author: RajeshPandey
'ne': {
'isbn-formatting': u'रोबोट: ISBN मिलाउँदै',
},
'nl': {
'isbn-formatting': u'Robot: ISBN opgemaakt',
},
# Author: Harald Khan
# Author: Njardarlogar
'nn': {
'isbn-formatting': u'robot: formaterer ISBN',
},
# Author: Jon Harald Søby
'no': {
'isbn-formatting': u'robot: Formaterer ISBN',
},
# Author: Bouron
'os': {
'isbn-formatting': u'Робот: фæивта ISBN',
},
# Author: Sp5uhe
'pl': {
'isbn-formatting': u'Robot sformatował numer ISBN',
},
# Author: Borichèt
'pms': {
'isbn-formatting': u'Trigomiro: Formassion ëd l\'ISBN',
},
# Author: Hamilton Abreu
'pt': {
'isbn-formatting': u'Robô: A formatar o ISBN',
},
# Author: Hamilton Abreu
# Author: Helder.wiki
# Author: 555
'pt-br': {
'isbn-formatting': u'Bot: Formatando ISBN',
},
# Author: Minisarm
'ro': {
'isbn-formatting': u'Robot: Formatat codul ISBN',
},
# Author: Volkov
# Author: Александр Сигачёв
'ru': {
'isbn-formatting': u'бот: преобразование ISBN',
},
# Author: Gazeb
'rue': {
'isbn-formatting': u'Робот: Форматованя ISBN',
},
# Author: Avicennasis
'sco': {
'isbn-formatting': u'Robot: Formatting ISBN',
},
# Author: බිඟුවා
'si': {
'isbn-formatting': u'රොබෝ: ISBN ආකෘතිකරණය',
},
# Author: Wizzard
'sk': {
'isbn-formatting': u'Robot: Formátovanie ISBN',
},
# Author: Dbc334
'sl': {
'isbn-formatting': u'Robot: Oblikovanje ISBN',
},
# Author: Abshirdheere
'so': {
'isbn-formatting': u'Bot: Habayn ISBN',
},
# Author: Vinie007
'sq': {
'isbn-formatting': u'Robot: ISBN Formatimi',
},
# Author: Rancher
'sr': {
'isbn-formatting': u'Робот: обликовање ISBN-а',
},
# Author: Rancher
'sr-el': {
'isbn-formatting': u'Robot: oblikovanje ISBN-a',
},
# Author: Ainali
'sv': {
'isbn-formatting': u'Robot: Formaterar ISBN',
},
# Author: Przemub
'szl': {
'isbn-formatting': u'Robot: ISBN',
},
# Author: செல்வா
'ta': {
'isbn-formatting': u'தானியங்கி: ISBN ஐ வடிவமைத்தல்',
},
# Author: Horus
# Author: Nullzero
'th': {
'isbn-formatting': u'โรบอต: การจัดรูปแบบเลขมาตรฐานสากลประจำหนังสือ',
},
# Author: AnakngAraw
'tl': {
'isbn-formatting': u'Robot: Inaayos ang anyo ng ISBN',
},
# Author: Гусейн
'tly': {
'isbn-formatting': u'Робот: ISBN симо дәгиш карде',
},
# Author: Emperyan
# Author: Khutuck
# Author: Stultiwikia
# Author: Vito Genovese
'tr': {
'isbn-formatting': u'Bot: ISBN biçimlendiriliyor',
},
# Author: Zahidulla
'tt': {
'isbn-formatting': u'Робот: ISBN үзгәртеп кору',
},
# Author: Тест
'uk': {
'isbn-formatting': u'Робот: Форматування ISBN',
},
# Author: CoderSI
'uz': {
'isbn-formatting': u'Bot: ISBNni formatlash',
},
# Author: Alunardon90
# Author: GatoSelvadego
'vec': {
'isbn-formatting': u'Robot: Formatasion del ISBN',
},
# Author: Minh Nguyen
'vi': {
'isbn-formatting': u'Bot: Định dạng ISBN',
},
# Author: Harvzsf
'war': {
'isbn-formatting': u'Robot: Ginfoformat an ISBN',
},
# Author: פוילישער
'yi': {
'isbn-formatting': u'באט: פֿארמאַטירן ISBN',
},
# Author: Hydra
'zh': {
'isbn-formatting': u'机器人:ISBN格式化',
},
# Author: Andrew971218
# Author: Simon Shek
'zh-hant': {
'isbn-formatting': u'機械人:格式化ISBN',
},
# Author: Justincheng12345
'zh-hk': {
'isbn-formatting': u'機械人將ISBN格式化',
},
}
|
legoktm/pywikipedia-rewrite
|
scripts/i18n/isbn.py
|
Python
|
mit
| 10,492 | 0.047954 |
#
# Copyright 2010 (C) Norwegian University of Science and Technology
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details. You should have received a copy of the GNU General Public License
# along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""View controller for PortAdmin"""
import simplejson
import logging
from operator import or_ as OR
from django.http import HttpResponse
from django.template import RequestContext, Context
from django.shortcuts import render_to_response
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.db.models import Q
from nav.django.utils import get_account
from nav.web.utils import create_title
from nav.models.manage import Netbox, Interface
from nav.web.portadmin.utils import (get_and_populate_livedata,
find_and_populate_allowed_vlans,
get_aliastemplate, get_ifaliasformat,
save_to_database,
check_format_on_ifalias,
find_allowed_vlans_for_user_on_netbox,
find_allowed_vlans_for_user,
filter_vlans, fetch_voice_vlans,
should_check_access_rights,
mark_detained_interfaces)
from nav.Snmp.errors import SnmpError, TimeOutException
from nav.portadmin.snmputils import SNMPFactory
from .forms import SearchForm
_logger = logging.getLogger("nav.web.portadmin")
def get_base_context(additional_paths=None, form=None):
"""Returns a base context for portadmin
:type additional_paths: list of tuple
"""
navpath = [('Home', '/'), ('PortAdmin', reverse('portadmin-index'))]
if additional_paths:
navpath += additional_paths
form = form if form else SearchForm()
return {
'header': {'name': 'PortAdmin',
'description': 'Configure interfaces on ip devices'},
'navpath': navpath,
'title': create_title(navpath),
'form': form
}
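# Hedged usage sketch (the sysname is an assumption): get_base_context appends
# the extra crumbs to the navpath, so
#     get_base_context([('example-sw.example.org',)])['navpath'][-1]
# is ('example-sw.example.org',).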
def index(request):
"""View for showing main page"""
netboxes = []
interfaces = []
if 'query' in request.GET:
form = SearchForm(request.GET)
if form.is_valid():
netboxes, interfaces = search(form.cleaned_data['query'])
if len(netboxes) == 1 and not interfaces:
return search_by_sysname(request, netboxes[0].sysname)
elif len(interfaces) == 1 and not netboxes:
return search_by_interfaceid(request, interfaces[0].id)
else:
form = SearchForm()
context = get_base_context(form=form)
context['netboxes'] = netboxes
context['interfaces'] = interfaces
return render_to_response('portadmin/base.html',
context,
RequestContext(request))
def search(query):
"""Search for something in portadmin"""
netbox_filters = [
Q(sysname__icontains=query),
Q(ip=query)
]
netboxes = Netbox.objects.filter(
reduce(OR, netbox_filters)).order_by('sysname')
interfaces = Interface.objects.filter(
ifalias__icontains=query).order_by('netbox__sysname', 'ifname')
return netboxes, interfaces
def search_by_ip(request, ip):
"""View for showing a search done by ip-address"""
info_dict = get_base_context()
account = get_account(request)
try:
netbox = Netbox.objects.get(ip=ip)
except Netbox.DoesNotExist, do_not_exist_ex:
_logger.error("Netbox with ip %s not found; DoesNotExist = %s",
ip, do_not_exist_ex)
messages.error(request,
'Could not find netbox with ip-address %s' % str(ip))
return render_to_response('portadmin/base.html',
info_dict,
RequestContext(request))
else:
interfaces = netbox.get_swports_sorted()
info_dict = populate_infodict(request, account, netbox, interfaces)
return render_to_response(
'portadmin/netbox.html',
info_dict,
RequestContext(request))
def search_by_sysname(request, sysname):
"""View for showing a search done by sysname"""
info_dict = get_base_context()
account = get_account(request)
try:
netbox = Netbox.objects.get(sysname=sysname)
except Netbox.DoesNotExist, do_not_exist_ex:
_logger.error("Netbox %s not found; DoesNotExist = %s",
sysname, do_not_exist_ex)
messages.error(request,
'Could not find netbox with sysname %s' % sysname)
return render_to_response('portadmin/base.html',
info_dict,
RequestContext(request))
else:
interfaces = netbox.get_swports_sorted()
info_dict = populate_infodict(request, account, netbox, interfaces)
return render_to_response('portadmin/netbox.html',
info_dict,
RequestContext(request))
def search_by_interfaceid(request, interfaceid):
"""View for showing a search done by interface id"""
info_dict = get_base_context()
account = get_account(request)
try:
interface = Interface.objects.get(id=interfaceid)
except Interface.DoesNotExist, do_not_exist_ex:
_logger.error("Interface %s not found; DoesNotExist = %s",
interfaceid, do_not_exist_ex)
messages.error(request,
'Could not find interface with id %s' %
str(interfaceid))
return render_to_response('portadmin/base.html',
info_dict,
RequestContext(request))
else:
netbox = interface.netbox
interfaces = [interface]
info_dict = populate_infodict(request, account, netbox, interfaces)
return render_to_response('portadmin/netbox.html',
info_dict,
RequestContext(request))
def populate_infodict(request, account, netbox, interfaces):
"""Populate a dictionary used in every http response"""
allowed_vlans = []
voice_vlan = None
readonly = False
try:
fac = get_and_populate_livedata(netbox, interfaces)
allowed_vlans = find_and_populate_allowed_vlans(account, netbox,
interfaces, fac)
voice_vlan = fetch_voice_vlan_for_netbox(request, fac)
mark_detained_interfaces(interfaces)
except TimeOutException:
readonly = True
messages.error(request, "Timeout when contacting %s. Values displayed "
"are from database" % netbox.sysname)
if not netbox.read_only:
messages.error(request, "Read only community not set")
except SnmpError:
readonly = True
messages.error(request, "SNMP error when contacting %s. Values "
"displayed are from database" % netbox.sysname)
if check_read_write(netbox, request):
readonly = True
ifaliasformat = get_ifaliasformat()
aliastemplate = ''
if ifaliasformat:
tmpl = get_aliastemplate()
aliastemplate = tmpl.render(Context({'ifaliasformat': ifaliasformat}))
save_to_database(interfaces)
if voice_vlan:
set_voice_vlan_attribute(voice_vlan, interfaces)
info_dict = get_base_context([(netbox.sysname, )])
info_dict.update({'interfaces': interfaces,
'netbox': netbox,
'voice_vlan': voice_vlan,
'allowed_vlans': allowed_vlans,
'account': account,
'readonly': readonly,
'aliastemplate': aliastemplate})
return info_dict
def fetch_voice_vlan_for_netbox(request, factory):
"""Fetch the voice vlan for this netbox
There may be multiple voice vlans configured. Pick the one that exists
on this netbox. If multiple vlans exist, we cannot know which one to use.
"""
voice_vlans = fetch_voice_vlans()
if not voice_vlans:
return
voice_vlans_on_netbox = list(set(voice_vlans) &
set(factory.get_available_vlans()))
if not voice_vlans_on_netbox:
# Should this be reported? At the moment I do not think so.
return
if len(voice_vlans_on_netbox) > 1:
messages.error(request, 'Multiple voice vlans configured on this '
'netbox')
return
return voice_vlans_on_netbox[0]
def set_voice_vlan_attribute(voice_vlan, interfaces):
"""Set an attribute on the interfaces to indicate voice vlan behavior"""
if voice_vlan:
for interface in interfaces:
if not interface.trunk:
continue
allowed_vlans = interface.swportallowedvlan.get_allowed_vlans()
interface.voice_activated = (len(allowed_vlans) == 1 and
voice_vlan in allowed_vlans)
def check_read_write(netbox, request):
"""Add a message to user explaining why he can't edit anything
:returns: flag indicating readonly or not
"""
if not netbox.read_write:
messages.error(request,
"Write community not set for this device, "
"changes cannot be saved")
return True
return False
def save_interfaceinfo(request):
"""Set ifalias and/or vlan on netbox
messages: created from the results from the messages framework
interfaceid must be a part of the request
ifalias, vlan and voicevlan are all optional
"""
if request.method == 'POST':
interface = Interface.objects.get(pk=request.POST.get('interfaceid'))
account = get_account(request)
# Skip a lot of queries if access_control is not turned on
if should_check_access_rights(account):
_logger.info('Checking access rights for %s', account)
if interface.vlan in [v.vlan for v in
find_allowed_vlans_for_user_on_netbox(
account, interface.netbox)]:
set_interface_values(account, interface, request)
else:
# Should only happen if user tries to avoid gui restrictions
messages.error(request, 'Not allowed to edit this interface')
else:
set_interface_values(account, interface, request)
else:
messages.error(request, 'Wrong request type')
result = {"messages": build_ajax_messages(request)}
return response_based_on_result(result)
def set_interface_values(account, interface, request):
"""Use snmp to set the values in the request on the netbox"""
try:
fac = SNMPFactory.get_instance(interface.netbox)
except SnmpError, error:
_logger.error('Error getting snmpfactory instance %s: %s',
interface.netbox, error)
messages.info(request, 'Could not connect to netbox')
else:
# Order is important here, set_voice need to be before set_vlan
set_voice_vlan(fac, interface, request)
set_ifalias(account, fac, interface, request)
set_vlan(account, fac, interface, request)
set_admin_status(fac, interface, request)
write_to_memory(fac)
save_to_database([interface])
def build_ajax_messages(request):
"""Create a structure suitable for converting to json from messages"""
ajax_messages = []
for message in messages.get_messages(request):
ajax_messages.append({
'level': message.level,
'message': message.message,
'extra_tags': message.tags
})
return ajax_messages
def set_ifalias(account, fac, interface, request):
"""Set ifalias on netbox if it is requested"""
if 'ifalias' in request.POST:
ifalias = request.POST.get('ifalias')
if check_format_on_ifalias(ifalias):
try:
fac.set_if_alias(interface.ifindex, ifalias)
interface.ifalias = ifalias
_logger.info('%s: %s:%s - ifalias set to "%s"' % (
account.login, interface.netbox.get_short_sysname(),
interface.ifname, ifalias))
except SnmpError, error:
_logger.error('Error setting ifalias: %s', error)
messages.error(request, "Error setting ifalias: %s" % error)
else:
messages.error(request, "Wrong format on ifalias")
def set_vlan(account, fac, interface, request):
"""Set vlan on netbox if it is requested"""
if 'vlan' in request.POST:
vlan = int(request.POST.get('vlan'))
# If the voice_vlan flag is flagged we need to take some extra care
voice_activated = request.POST.get('voice_activated', False)
try:
# If Cisco and voice vlan, we have to set native vlan instead of
# access vlan
if interface.netbox.type.vendor.id == 'cisco' and voice_activated:
fac.set_native_vlan(interface, vlan)
else:
fac.set_vlan(interface.ifindex, vlan)
interface.vlan = vlan
_logger.info('%s: %s:%s - vlan set to %s' % (
account.login, interface.netbox.get_short_sysname(),
interface.ifname, vlan))
except (SnmpError, TypeError), error:
_logger.error('Error setting vlan: %s', error)
messages.error(request, "Error setting vlan: %s" % error)
def set_voice_vlan(fac, interface, request):
"""Set voicevlan on interface
A voice vlan is a normal vlan that is defined by the user of NAV as
a vlan that is used only for ip telephone traffic.
To set a voice vlan we have to make sure the interface is configured
to tag both the voicevlan and the "access-vlan".
"""
if 'voicevlan' in request.POST:
voice_vlan = fetch_voice_vlan_for_netbox(request, fac)
# Either the voicevlan is turned off or turned on
turn_on_voice_vlan = request.POST.get('voicevlan') == 'true'
account = get_account(request)
try:
if turn_on_voice_vlan:
_logger.info('%s: %s:%s - %s', account.login,
interface.netbox.get_short_sysname(),
interface.ifname, 'voice vlan enabled')
fac.set_voice_vlan(interface, voice_vlan)
else:
_logger.info('%s: %s:%s - %s', account.login,
interface.netbox.get_short_sysname(),
interface.ifname, 'voice vlan disabled')
fac.set_access(interface, interface.vlan)
except (SnmpError, ValueError) as error:
messages.error(request, "Error setting voicevlan: %s" % error)
def set_admin_status(fac, interface, request):
"""Set admin status for the interface
:type fac: nav.portadmin.snmputils.SNMPFactory
:type request: django.http.HttpRequest
"""
status_up = '1'
status_down = '2'
account = request.account
if 'ifadminstatus' in request.POST:
adminstatus = request.POST['ifadminstatus']
try:
if adminstatus == status_up:
_logger.info('%s: Setting ifadminstatus for %s to %s',
account.login, interface, 'up')
fac.set_if_up(interface.ifindex)
elif adminstatus == status_down:
_logger.info('%s: Setting ifadminstatus for %s to %s',
account.login, interface, 'down')
fac.set_if_down(interface.ifindex)
except (SnmpError, ValueError) as error:
messages.error(request, "Error setting ifadminstatus: %s" % error)
def write_to_memory(fac):
"""Write changes on netbox to memory using snmp"""
try:
fac.write_mem()
except SnmpError, error:
_logger.error('Error doing write mem on %s: %s' % (fac.netbox, error))
def response_based_on_result(result):
"""Return response based on content of result
result: dict containing result and message keys
"""
if result['messages']:
return HttpResponse(simplejson.dumps(result), status=500,
mimetype="application/json")
else:
return HttpResponse(simplejson.dumps(result),
mimetype="application/json")
def render_trunk_edit(request, interfaceid):
"""Controller for rendering trunk edit view"""
interface = Interface.objects.get(pk=interfaceid)
agent = SNMPFactory().get_instance(interface.netbox)
if request.method == 'POST':
try:
handle_trunk_edit(request, agent, interface)
except SnmpError, error:
messages.error(request, 'Error editing trunk: %s' % error)
else:
messages.success(request, 'Trunk edit successful')
account = request.account
netbox = interface.netbox
check_read_write(netbox, request)
try:
vlans = agent.get_netbox_vlans() # All vlans on this netbox
native_vlan, trunked_vlans = agent.get_native_and_trunked_vlans(
interface)
except SnmpError:
vlans = native_vlan = trunked_vlans = allowed_vlans = None
messages.error(request, 'Error getting trunk information')
else:
if should_check_access_rights(account):
allowed_vlans = find_allowed_vlans_for_user_on_netbox(
account, interface.netbox, agent)
else:
allowed_vlans = vlans
extra_path = [(netbox.sysname,
reverse('portadmin-sysname',
kwargs={'sysname': netbox.sysname})),
("Trunk %s" % interface,)]
context = get_base_context(extra_path)
context.update({'interface': interface, 'available_vlans': vlans,
'native_vlan': native_vlan, 'trunked_vlans': trunked_vlans,
'allowed_vlans': allowed_vlans})
return render_to_response('portadmin/trunk_edit.html',
context,
RequestContext(request))
def handle_trunk_edit(request, agent, interface):
"""Edit a trunk"""
native_vlan = int(request.POST.get('native_vlan'))
trunked_vlans = [int(vlan) for vlan in request.POST.getlist('trunk_vlans')]
if should_check_access_rights(get_account(request)):
        # A user can avoid the form restrictions by sending a forged POST
        # request. Make sure only the allowed vlans are set.
old_native, old_trunked = agent.get_native_and_trunked_vlans(interface)
allowed_vlans = [v.vlan for v in
find_allowed_vlans_for_user(get_account(request))]
trunked_vlans = filter_vlans(trunked_vlans, old_trunked, allowed_vlans)
native_vlan = (native_vlan if native_vlan in allowed_vlans
else old_native)
_logger.info('Interface %s - native: %s, trunk: %s', interface,
native_vlan, trunked_vlans)
if trunked_vlans:
agent.set_trunk(interface, native_vlan, trunked_vlans)
else:
agent.set_access(interface, native_vlan)
def restart_interface(request):
"""Restart the interface by setting admin status to down and up"""
if request.method == 'POST':
try:
interface = Interface.objects.get(
pk=request.POST.get('interfaceid'))
except Interface.DoesNotExist:
return HttpResponse(status=404)
try:
fac = SNMPFactory.get_instance(interface.netbox)
except SnmpError, error:
_logger.error('Error getting snmpfactory instance when '
'restarting interface %s: %s',
interface.netbox, error)
return HttpResponse(status=500)
# Restart interface so that client fetches new address
fac.restart_if(interface.ifindex)
return HttpResponse()
return HttpResponse(status=400)
|
alexanderfefelov/nav
|
python/nav/web/portadmin/views.py
|
Python
|
gpl-2.0
| 20,814 | 0 |
# (c) 2016, Allen Sanabria <asanabria@linuxdynasty.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from os import path, walk
import re
from ansible.errors import AnsibleError
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native, to_text
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
TRANSFERS_FILES = False
VALID_FILE_EXTENSIONS = ['yaml', 'yml', 'json']
VALID_DIR_ARGUMENTS = ['dir', 'depth', 'files_matching', 'ignore_files', 'extensions']
VALID_FILE_ARGUMENTS = ['file', '_raw_params']
VALID_ALL = ['name']
def _set_dir_defaults(self):
if not self.depth:
self.depth = 0
if self.files_matching:
self.matcher = re.compile(r'{0}'.format(self.files_matching))
else:
self.matcher = None
if not self.ignore_files:
self.ignore_files = list()
if isinstance(self.ignore_files, str):
self.ignore_files = self.ignore_files.split()
elif isinstance(self.ignore_files, dict):
return {
'failed': True,
'message': '{0} must be a list'.format(self.ignore_files)
}
def _set_args(self):
""" Set instance variables based on the arguments that were passed """
self.return_results_as_name = self._task.args.get('name', None)
self.source_dir = self._task.args.get('dir', None)
self.source_file = self._task.args.get('file', None)
if not self.source_dir and not self.source_file:
self.source_file = self._task.args.get('_raw_params')
self.depth = self._task.args.get('depth', None)
self.files_matching = self._task.args.get('files_matching', None)
self.ignore_files = self._task.args.get('ignore_files', None)
self.valid_extensions = self._task.args.get('extensions', self.VALID_FILE_EXTENSIONS)
# convert/validate extensions list
if isinstance(self.valid_extensions, string_types):
self.valid_extensions = list(self.valid_extensions)
if not isinstance(self.valid_extensions, list):
raise AnsibleError('Invalid type for "extensions" option, it must be a list')
def run(self, tmp=None, task_vars=None):
""" Load yml files recursively from a directory.
"""
if task_vars is None:
task_vars = dict()
self.show_content = True
self.included_files = []
# Validate arguments
dirs = 0
files = 0
for arg in self._task.args:
if arg in self.VALID_DIR_ARGUMENTS:
dirs += 1
elif arg in self.VALID_FILE_ARGUMENTS:
files += 1
elif arg in self.VALID_ALL:
pass
else:
raise AnsibleError('{0} is not a valid option in debug'.format(arg))
if dirs and files:
raise AnsibleError("Your are mixing file only and dir only arguments, these are incompatible")
# set internal vars from args
self._set_args()
results = dict()
if self.source_dir:
self._set_dir_defaults()
self._set_root_dir()
if path.exists(self.source_dir):
for root_dir, filenames in self._traverse_dir_depth():
failed, err_msg, updated_results = (self._load_files_in_dir(root_dir, filenames))
if failed:
break
results.update(updated_results)
else:
failed = True
err_msg = ('{0} directory does not exist'.format(self.source_dir))
else:
try:
self.source_file = self._find_needle('vars', self.source_file)
failed, err_msg, updated_results = (
self._load_files(self.source_file)
)
if not failed:
results.update(updated_results)
except AnsibleError as e:
failed = True
err_msg = to_native(e)
if self.return_results_as_name:
scope = dict()
scope[self.return_results_as_name] = results
results = scope
result = super(ActionModule, self).run(tmp, task_vars)
if failed:
result['failed'] = failed
result['message'] = err_msg
result['ansible_included_var_files'] = self.included_files
result['ansible_facts'] = results
result['_ansible_no_log'] = not self.show_content
return result
def _set_root_dir(self):
if self._task._role:
if self.source_dir.split('/')[0] == 'vars':
path_to_use = (
path.join(self._task._role._role_path, self.source_dir)
)
if path.exists(path_to_use):
self.source_dir = path_to_use
else:
path_to_use = (
path.join(
self._task._role._role_path, 'vars', self.source_dir
)
)
self.source_dir = path_to_use
else:
current_dir = (
"/".join(self._task._ds._data_source.split('/')[:-1])
)
self.source_dir = path.join(current_dir, self.source_dir)
def _traverse_dir_depth(self):
""" Recursively iterate over a directory and sort the files in
            alphabetical order. Do not iterate past the set depth.
The default depth is unlimited.
"""
current_depth = 0
sorted_walk = list(walk(self.source_dir))
sorted_walk.sort(key=lambda x: x[0])
for current_root, current_dir, current_files in sorted_walk:
current_depth += 1
if current_depth <= self.depth or self.depth == 0:
current_files.sort()
yield (current_root, current_files)
else:
break
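    # Hedged illustration (the layout is an assumption): with source_dir='vars'
    # containing vars/a.yml and vars/sub/b.yml, depth=1 yields only
    # ('vars', ['a.yml']); depth=0 (the default) walks the whole tree.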
def _ignore_file(self, filename):
""" Return True if a file matches the list of ignore_files.
Args:
filename (str): The filename that is being matched against.
Returns:
Boolean
"""
for file_type in self.ignore_files:
try:
if re.search(r'{0}$'.format(file_type), filename):
return True
except Exception:
err_msg = 'Invalid regular expression: {0}'.format(file_type)
raise AnsibleError(err_msg)
return False
def _is_valid_file_ext(self, source_file):
""" Verify if source file has a valid extension
Args:
source_file (str): The full path of source file or source file.
Returns:
Bool
"""
file_ext = path.splitext(source_file)
return bool(len(file_ext) > 1 and file_ext[-1][1:] in self.valid_extensions)
def _load_files(self, filename, validate_extensions=False):
""" Loads a file and converts the output into a valid Python dict.
Args:
filename (str): The source file.
Returns:
Tuple (bool, str, dict)
"""
results = dict()
failed = False
err_msg = ''
if validate_extensions and not self._is_valid_file_ext(filename):
failed = True
err_msg = ('{0} does not have a valid extension: {1}' .format(filename, ', '.join(self.valid_extensions)))
else:
b_data, show_content = self._loader._get_file_contents(filename)
data = to_text(b_data, errors='surrogate_or_strict')
self.show_content = show_content
data = self._loader.load(data, show_content)
if not data:
data = dict()
if not isinstance(data, dict):
failed = True
err_msg = ('{0} must be stored as a dictionary/hash' .format(filename))
else:
self.included_files.append(filename)
results.update(data)
return failed, err_msg, results
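    # Hedged usage sketch (the filename is an assumption):
    #     failed, err_msg, data = self._load_files('vars/common.yml',
    #                                              validate_extensions=True)
    # 'data' is the parsed dict on success; 'err_msg' explains any failure.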
def _load_files_in_dir(self, root_dir, var_files):
""" Load the found yml files and update/overwrite the dictionary.
Args:
root_dir (str): The base directory of the list of files that is being passed.
var_files: (list): List of files to iterate over and load into a dictionary.
Returns:
Tuple (bool, str, dict)
"""
results = dict()
failed = False
err_msg = ''
for filename in var_files:
stop_iter = False
# Never include main.yml from a role, as that is the default included by the role
if self._task._role:
if filename == 'main.yml':
stop_iter = True
continue
filepath = path.join(root_dir, filename)
if self.files_matching:
if not self.matcher.search(filename):
stop_iter = True
if not stop_iter and not failed:
if path.exists(filepath) and not self._ignore_file(filename):
failed, err_msg, loaded_data = self._load_files(filepath, validate_extensions=True)
if not failed:
results.update(loaded_data)
return failed, err_msg, results
|
Tatsh-ansible/ansible
|
lib/ansible/plugins/action/include_vars.py
|
Python
|
gpl-3.0
| 10,206 | 0.001372 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('setlist', '0012_remove_show_leg'),
]
operations = [
migrations.CreateModel(
name='Show2',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('venue', models.ForeignKey(to='setlist.Venue', to_field='id')),
('tour', models.ForeignKey(to='setlist.Tour', to_field='id')),
('date', models.DateField(db_index=True)),
('setlist', models.TextField(default=b'', blank=True)),
('notes', models.TextField(default=b'', blank=True)),
('source', models.TextField(default=b'', blank=True)),
],
options={
},
bases=(models.Model,),
),
]
|
tylereaves/26md
|
setlist/migrations/0013_show2.py
|
Python
|
bsd-3-clause
| 970 | 0.002062 |
# Force loading views
from oioioi.disqualification.views import disqualification_fragment
app_name = 'disqualification'
urlpatterns = ()
|
sio2project/oioioi
|
oioioi/disqualification/urls.py
|
Python
|
gpl-3.0
| 139 | 0 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# CowBots -- Error detection bots for CKAN-of-Worms
# By: Emmanuel Raviart <emmanuel@raviart.com>
#
# Copyright (C) 2013 Etalab
# http://github.com/etalab/cowbots
#
# This file is part of CowBots.
#
# CowBots is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# CowBots is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Detect changes in CKAN-of-Worms objects and notify by email when some patterns are detected."""
import argparse
import ConfigParser
import email.header
import logging
import os
import re
import smtplib
import sys
from biryani1 import baseconv, custom_conv, jsonconv, netconv, states
import mako.lookup
app_dir = os.path.dirname(os.path.abspath(__file__))
app_name = os.path.splitext(os.path.basename(__file__))[0]
conf = None
conv = custom_conv(baseconv, jsonconv, netconv, states)
headers = None
line_re = re.compile(u"""(?P<indent>\s*)(?P<header>([-*]|=>|\[\d+\]|PS\s*\d*\s* ?:)\s*|)(?P<content>[^\s].*)$""")
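# Hedged example of what line_re captures (input is illustrative):
#     m = line_re.match(u'  - some content')
#     m.group('indent') == u'  '; m.group('header') == u'- '
#     m.group('content') == u'some content'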
log = logging.getLogger(app_name)
templates_lookup = None
# Converters
cow_response_to_value = conv.pipe(
conv.make_input_to_json(),
conv.not_none,
conv.test_isinstance(dict),
conv.struct(
dict(
apiVersion = conv.pipe(
conv.test_equals('1.0'),
conv.not_none,
),
context = conv.noop,
method = conv.pipe(
conv.test_isinstance(basestring),
conv.not_none,
),
params = conv.test_isinstance(dict),
url = conv.pipe(
conv.make_input_to_url(full = True),
conv.not_none,
),
value = conv.noop,
),
),
conv.function(lambda response: response['value']),
)
# Functions
def account_created(account):
log.debug(u'Notifying account creation: "{}".'.format(u' - '.join(
fragment
for fragment in [
account.get('fullname'),
account.get('name'),
account.get('email'),
]
if fragment is not None
)))
template = templates_lookup.get_template('new-account.mako')
message = template.render_unicode(
ckan_of_worms_url = conf['ckan_of_worms.site_url'],
account = account,
encoding = 'utf-8',
from_email = conf['from_email'],
qp = lambda s: to_quoted_printable(s, 'utf-8'),
to_emails = conf['admin_email'],
weckan_url = conf['weckan.site_url'],
wiki_url = conf['wiki.site_url'],
youckan_url = conf['youckan.site_url'],
).strip()
send_email(message)
def article_edited(article):
log.debug(u'Notifying article update: "{}".'.format(article['title']))
template = templates_lookup.get_template('edit-article.mako')
message = template.render_unicode(
article = article,
ckan_of_worms_url = conf['ckan_of_worms.site_url'],
encoding = 'utf-8',
from_email = conf['from_email'],
qp = lambda s: to_quoted_printable(s, 'utf-8'),
to_emails = conf['admin_email'],
weckan_url = conf['weckan.site_url'],
wiki_url = conf['wiki.site_url'],
youckan_url = conf['youckan.site_url'],
).strip()
send_email(message)
def dataset_created(dataset):
log.debug(u'Notifying dataset creation: "{}".'.format(dataset['name']))
template = templates_lookup.get_template('new-dataset.mako')
message = template.render_unicode(
ckan_of_worms_url = conf['ckan_of_worms.site_url'],
dataset = dataset,
encoding = 'utf-8',
from_email = conf['from_email'],
qp = lambda s: to_quoted_printable(s, 'utf-8'),
to_emails = conf['admin_email'],
weckan_url = conf['weckan.site_url'],
wiki_url = conf['wiki.site_url'],
youckan_url = conf['youckan.site_url'],
).strip()
send_email(message)
def group_created(group):
log.debug(u'Notifying group creation: "{}".'.format(group['name']))
template = templates_lookup.get_template('new-group.mako')
message = template.render_unicode(
ckan_of_worms_url = conf['ckan_of_worms.site_url'],
encoding = 'utf-8',
from_email = conf['from_email'],
group = group,
qp = lambda s: to_quoted_printable(s, 'utf-8'),
to_emails = conf['admin_email'],
weckan_url = conf['weckan.site_url'],
wiki_url = conf['wiki.site_url'],
youckan_url = conf['youckan.site_url'],
).strip()
send_email(message)
def main():
parser = argparse.ArgumentParser(description = __doc__)
parser.add_argument('config', help = 'path of configuration file')
parser.add_argument('-f', '--fedmsg', action = 'store_true', help = 'poll fedmsg events')
parser.add_argument('-v', '--verbose', action = 'store_true', help = 'increase output verbosity')
global args
args = parser.parse_args()
logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout)
config_parser = ConfigParser.SafeConfigParser(dict(
here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))),
))
config_parser.read(args.config)
global conf
conf = conv.check(conv.pipe(
conv.test_isinstance(dict),
conv.struct(
{
'admin_email': conv.pipe(
conv.function(lambda emails: set(emails.split())),
conv.uniform_sequence(
conv.pipe(
conv.input_to_email,
conv.test_email(),
),
constructor = lambda emails: sorted(set(emails)),
drop_none_items = True,
),
conv.empty_to_none,
conv.not_none,
),
'ckan_of_worms.site_url': conv.pipe(
conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True,
full = True),
conv.not_none,
),
'from_email': conv.pipe(
conv.input_to_email,
conv.test_email(),
conv.not_none,
),
'user_agent': conv.pipe(
conv.cleanup_line,
conv.not_none,
),
'weckan.site_url': conv.pipe(
conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True,
full = True),
conv.not_none,
),
'wiki.site_url': conv.pipe(
conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True,
full = True),
conv.not_none,
),
'youckan.site_url': conv.pipe(
conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True,
full = True),
conv.not_none,
),
},
default = 'drop',
),
conv.not_none,
))(dict(config_parser.items('CowBots-Email-Changes')), conv.default_state)
cache_dir = os.path.join(app_dir, 'cache')
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
global headers
headers = {
'User-Agent': conf['user_agent'],
}
global templates_lookup
templates_lookup = mako.lookup.TemplateLookup(
directories = [os.path.join(app_dir, 'email-changes-templates')],
input_encoding = 'utf-8',
module_directory = os.path.join(cache_dir, 'email-changes-templates'),
)
if args.fedmsg:
import fedmsg
fedmsg_conf = conv.check(conv.struct(
dict(
environment = conv.pipe(
conv.empty_to_none,
conv.test_in(['dev', 'prod', 'stg']),
),
modname = conv.pipe(
conv.empty_to_none,
conv.test(lambda value: value == value.strip('.'),
error = 'Value must not begin or end with a "."'),
conv.default('ckan_of_worms'),
),
# name = conv.pipe(
# conv.empty_to_none,
# conv.default('ckan_of_worms.{}'.format(hostname)),
# ),
topic_prefix = conv.pipe(
conv.empty_to_none,
conv.test(lambda value: value == value.strip('.'),
error = 'Value must not begin or end with a "."'),
),
),
default = 'drop',
))(dict(config_parser.items('fedmsg')))
# Read in the config from /etc/fedmsg.d/.
fedmsg_config = fedmsg.config.load_config([], None)
# Disable a warning about not sending. We know. We only want to tail.
fedmsg_config['mute'] = True
# Disable timing out so that we can tail forever. This is deprecated
# and will disappear in future versions.
fedmsg_config['timeout'] = 0
# For the time being, don't require message to be signed.
fedmsg_config['validate_signatures'] = False
for key, value in fedmsg_conf.iteritems():
if value is not None:
fedmsg_config[key] = value
ckan_of_worms_topic_prefix = '{}.{}.ckan_of_worms.'.format(fedmsg_config['topic_prefix'],
fedmsg_config['environment'])
wiki_topic_prefix = '{}.{}.wiki.'.format(fedmsg_config['topic_prefix'], fedmsg_config['environment'])
for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config):
if topic.startswith(ckan_of_worms_topic_prefix):
kind, action = topic[len(ckan_of_worms_topic_prefix):].split('.')
if kind == 'account':
if action == 'create':
account_created(message['msg'])
elif kind == 'dataset':
if action == 'create':
dataset_created(message['msg'])
elif kind == 'group':
if action == 'create':
group_created(message['msg'])
elif kind == 'organization':
if action == 'create':
organization_created(message['msg'])
elif kind == 'related':
if action == 'create':
related_created(message['msg'])
elif topic.startswith(wiki_topic_prefix):
kind, action = topic[len(wiki_topic_prefix):].split('.')
if kind == 'article':
if action == 'edit':
article_edited(message['msg'])
elif kind == 'upload':
if action == 'complete':
upload_completed(message['msg'])
else:
log.debug(u'Ignoring message: {}, {}'.format(topic, name))
else:
pass # TODO
return 0
def organization_created(organization):
log.debug(u'Notifying organization creation: "{}".'.format(organization['name']))
template = templates_lookup.get_template('new-organization.mako')
message = template.render_unicode(
ckan_of_worms_url = conf['ckan_of_worms.site_url'],
encoding = 'utf-8',
from_email = conf['from_email'],
organization = organization,
qp = lambda s: to_quoted_printable(s, 'utf-8'),
to_emails = conf['admin_email'],
weckan_url = conf['weckan.site_url'],
wiki_url = conf['wiki.site_url'],
youckan_url = conf['youckan.site_url'],
).strip()
send_email(message)
def related_created(activity):
dataset = activity['target']
log.debug(u'Notifying related link creation: "{}".'.format(dataset['name']))
template = templates_lookup.get_template('new-related.mako')
message = template.render_unicode(
ckan_of_worms_url = conf['ckan_of_worms.site_url'],
dataset = dataset,
encoding = 'utf-8',
from_email = conf['from_email'],
owner = activity.get('actor'),
qp = lambda s: to_quoted_printable(s, 'utf-8'),
related = activity['object'],
to_emails = conf['admin_email'],
weckan_url = conf['weckan.site_url'],
wiki_url = conf['wiki.site_url'],
youckan_url = conf['youckan.site_url'],
).strip()
send_email(message)
def send_email(message):
# Rewrap message.
in_header = True
message_lines = []
for line in message.splitlines():
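        # Glue French punctuation (':', '[', '«', '»') to its word with
        # non-breaking spaces so the wrapper below never splits around it.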
line = line.rstrip().replace(u' :', u' :').replace(u' [', u' [').replace(u'« ', u'« ').replace(
u' »', u' »')
if not line:
in_header = False
if in_header or len(line) <= 72:
message_lines.append(line)
else:
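            # line_re (defined earlier in this file) is expected to split a
            # line into 'indent', 'header' (e.g. a bullet) and 'content' groups.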
match = line_re.match(line)
assert match is not None
line_prefix = match.group('indent') + match.group('header')
line_len = len(line_prefix)
line_words = []
for word in match.group('content').split(' '):
if line_len > len(line_prefix) and line_len + len(word) > 72:
message_lines.append(line_prefix + u' '.join(line_words))
line_prefix = match.group('indent') + u' ' * len(match.group('header'))
line_len = len(line_prefix)
line_words = []
if line_len > 0:
line_len += 1
line_len += len(word)
line_words.append(word)
if line_words:
message_lines.append(line_prefix + u' '.join(line_words))
message = u'\r\n'.join(message_lines).replace(u' ', u' ').encode('utf-8')
server = smtplib.SMTP('localhost')
try:
server.sendmail(conf['from_email'], conf['admin_email'], message)
except smtplib.SMTPRecipientsRefused:
log.exception(u'Skipping email to {0}, because an exception occurred:'.format(conf['admin_email']))
server.quit()
def to_quoted_printable(s, encoding):
assert isinstance(s, unicode)
quoted_words = []
for word in s.split(' '):
try:
word = str(word)
except UnicodeEncodeError:
word = str(email.header.Header(word.encode(encoding), encoding))
quoted_words.append(word)
return ' '.join(quoted_words)
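# Illustrative behaviour of to_quoted_printable (added note, not in the
# original file; assumes the Python 2 semantics used throughout this script):
#   to_quoted_printable(u'Caf\xe9 ok', 'utf-8')  ->  '=?utf-8?b?Q2Fmw6k=?= ok'
# ASCII words pass through unchanged; non-ASCII words become RFC 2047
# encoded-words via email.header.Header (the exact encoded form may vary).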
def upload_completed(upload):
log.debug(u'Notifying upload completed: "{}".'.format(upload['title']))
template = templates_lookup.get_template('complete-upload.mako')
message = template.render_unicode(
ckan_of_worms_url = conf['ckan_of_worms.site_url'],
encoding = 'utf-8',
from_email = conf['from_email'],
qp = lambda s: to_quoted_printable(s, 'utf-8'),
to_emails = conf['admin_email'],
upload = upload,
weckan_url = conf['weckan.site_url'],
wiki_url = conf['wiki.site_url'],
youckan_url = conf['youckan.site_url'],
).strip()
send_email(message)
if __name__ == '__main__':
sys.exit(main())
|
etalab/cowbots
|
email_changes.py
|
Python
|
agpl-3.0
| 16,025 | 0.016047 |
import json
import requests
class Building():
"""Building Client."""
# Service Setup
config = {
'schema': 'http',
'host': 'localhost',
'port': '9202',
'endpoint': 'api/v1/buildings'
}
@classmethod
def base_url(cls):
"""Form the base url for the service."""
return "{schema}://{host}:{port}/{endpoint}".format(**cls.config)
@classmethod
def configure(cls, options={}):
cls.config.update(options)
@classmethod
def get_all(cls):
"""Return all buildings."""
r = requests.get(cls.base_url())
if r.status_code == 200:
return r.json()
else:
return None
@classmethod
def get(cls, code):
"""Return an building."""
r = requests.get(cls.base_url() + '/' + code)
if r.status_code == 200:
return r.json()
else:
return None
@classmethod
def create(cls, attrs):
"""Create an building with the attributes passed in attrs dict."""
r = requests.post(cls.base_url(), data=json.dumps(attrs))
if r.status_code == 200:
return r.json()
else:
return None
@classmethod
def update(cls, code, attrs):
"""Update the building identified by code with attrs dict."""
r = requests.put(cls.base_url() + '/' + code, data=json.dumps(attrs))
if r.status_code == 200:
return r.json()
else:
return None
@classmethod
def delete(cls, code):
"""Delete the building identified by code."""
r = requests.delete(cls.base_url() + '/' + code)
return r.status_code == 204
@classmethod
def delete_all(cls):
"""Delete all buildings."""
r = requests.delete(cls.base_url())
return r.status_code == 204
@classmethod
def bulk_load(cls, json_string):
"""Bulk loads an array of buildings."""
h = {
'Content-Type': 'application/json'
}
return requests.post(cls.base_url(), data=json_string, headers=h)
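# Illustrative usage sketch (added; not part of the original client). It
# assumes a buildings service is reachable at the configured endpoint and
# that 'code' and 'name' are valid attributes -- both are hypothetical here.
if __name__ == '__main__':
    Building.configure({'host': 'buildings.example.com'})
    print(Building.create({'code': 'B1', 'name': 'Main Hall'}))
    print(Building.get('B1'))
    Building.delete('B1')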
|
Foris/darwined-core-python-clients
|
darwined_core_python_clients/physical/buildings.py
|
Python
|
mit
| 2,117 | 0 |
#!/usr/bin/env python3
# Define a variable a
a = 100
# Check whether a is greater than or equal to 0
if a >= 0:
    # If it is, run this branch
    print('a is positive, a =', a)
else:
    # Otherwise, run this branch
    print('a is negative, a =', a)
#
# Escape characters
#
print("I'm OK")
print('I\'m OK')
print('I\'m\tlearning\nPython')
# Use r'' so the string inside needs no escaping // but this does not work --> print(r'I'm OK')
print(r'\\n\\')
# If there are many line breaks you can use '''...''' instead; tried it, that does not work here
print("line1\nline2\nline3")
#print(r'line1...line2...line3')
# True (note the capitalization)
print(3 > 2)
# False (note the capitalization)
print(2 > 3)
# and or not
# and: True only when both operands are true, otherwise False
print("3 > 2 and 2 > 1 -->",3 > 2 and 2 > 1)
print("3 > 2 and 1 > 2 -->",3 > 2 and 1 > 2)
print("2 > 3 and 1 > 2 -->",2 > 3 and 1 > 2)
# or: True when at least one operand is true, otherwise False
print("3 > 2 or 2 > 1 -->",3 > 2 or 2 > 1)
print("3 > 2 or 1 > 2 -->",3 > 2 or 1 > 2)
print("2 > 3 or 1 > 2 -->",2 > 3 or 1 > 2)
# not: negates the value
print("not 3 > 2 -->",not 3 > 2)
print("not 2 > 3 -->",not 2 > 3)
# None is a special value in Python; it should not be read as 0,
# because 0 is meaningful while None is a distinct empty value
#
# Variables
#
a = 0
a_007 = "A_007"
answer = True
a = "ABC"
x = 2
x = x + 10
print(x)
b = a
a = "XYZ"
print(b)
#
# In Python, variable names written in all caps conventionally denote constants
#
PI = 3.14159265359
# Python has two kinds of division
#1 true division
print("10 / 3 --> ",10 / 3)
#2 --> floor division, which keeps only the integer part of the result
print("10 // 3 --> ",10 // 3)
# Modulo (remainder)
print("10 % 3 -->",10 % 3)
|
CrazyDaiDai/learningPython
|
hellow.py
|
Python
|
gpl-3.0
| 1,589 | 0.019119 |
__author__ = 'mdavid'
|
netkicorp/wns-api-server
|
netki/util/__init__.py
|
Python
|
bsd-3-clause
| 23 | 0.043478 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Case',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
('name', models.CharField(max_length=300)),
('dateStart', models.DateTimeField(verbose_name='Start case')),
('dateFinish', models.DateTimeField(verbose_name='Finish case')),
('description', models.CharField(max_length=4000)),
],
),
migrations.CreateModel(
name='Disease',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
('title', models.CharField(max_length=1000)),
('dateCreation', models.DateTimeField(verbose_name='date published')),
('description', models.CharField(max_length=4000)),
],
),
]
|
Omrigan/diseases
|
diseases/migrations/0001_initial.py
|
Python
|
mit
| 1,149 | 0.003481 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-06 13:24
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('badges', '0003_badgedesign_bg_color'),
]
operations = [
migrations.AlterField(
model_name='badgedesign',
name='bg_color',
field=models.CharField(default='#FFFFFF', help_text='E.g. #00ff00', max_length=7, validators=[django.core.validators.RegexValidator('^#[a-fA-F0-9]{6}$')], verbose_name='Background color'),
),
]
|
helfertool/helfertool
|
src/badges/migrations/0004_auto_20160306_1424.py
|
Python
|
agpl-3.0
| 638 | 0.001567 |
# coding=utf-8
'''
xssbot must provide the following features:
1. Visit a specified URL
2. Intercept alert and similar dialogs
3. Intercept in-page navigation
4. Pin the configured cookies (not implemented)
   The EditThisCookie Chrome extension achieves this through the extension-only
   API chrome.cookies.onChanged.addListener, but the
   https://chromedevtools.github.io/devtools-protocol/ documentation offers no
   comparable functionality.
'''
from chromeremote import ChromeTabThread as ChromeTab
class XssbotTab(ChromeTab):
    # Allow each page to run for 10 seconds
TAB_TIMEOUT = 10
def __init__(self, url, host, port):
super(XssbotTab, self).__init__(host, port)
self.opened = False
self.url = url
self.initjs = '''
window.alert =function(){};
window.confirm =function(){};
window.prompt = function(){};
window.open= function(){};
'''
def run(self):
def processNavigation(para):
            # Only allow the first navigation (the one we trigger); cancel all others
if self.opened:
response = 'CancelAndIgnore'
else:
self.opened = True
response = 'Proceed'
self.Page.processNavigation(
response=response, navigationId=para['navigationId'])
def javascriptDialogOpening(para):
            # After the rewrite above there should be no dialogs; close any that still appear
self.Page.handleJavaScriptDialog(accept=False, promptText='')
self.open_tab()
self.Page.enable()
self.register_event("Page.navigationRequested", processNavigation)
self.register_event("Page.javascriptDialogOpening",
javascriptDialogOpening)
        # Require every navigation to be approved by processNavigation
self.Page.setControlNavigations(enabled=True)
self.Page.addScriptToEvaluateOnLoad(
scriptSource=self.initjs, identifier='rewrite')
        # Navigate to the configured URL
self.Page.navigate(url=self.url)
super(XssbotTab, self).run()
if __name__ == '__main__':
tab = XssbotTab(
'https://github.com/BugScanTeam/chromeremote', '127.0.0.1', 9222)
tab.start()
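# Illustrative batch driver (added sketch, not in the original file). It
# assumes Chrome was started with --remote-debugging-port=9222; the URL is a
# hypothetical scan target.
# for url in ['http://target.example/?q=%3Cscript%3Ealert(1)%3C%2Fscript%3E']:
#     XssbotTab(url, '127.0.0.1', 9222).start()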
|
sadnoodles/chromeremote
|
examples/xssbot.py
|
Python
|
gpl-3.0
| 2,168 | 0 |
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from ggrc import db
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import validates
from .categorization import Categorization
from .mixins import deferred, Base, Hierarchical
class CategorizedPublishable(object):
def __init__(self, attr_name, type_name):
self.attr_name = attr_name
self.type_name = type_name
@property
def rel_class(self):
import ggrc.models
return getattr(ggrc.models, self.type_name)
def __call__(self, updater, obj, json_obj):
return updater.query_for(self.rel_class, json_obj, self.attr_name, True)
class CategoryBase(Hierarchical, Base, db.Model):
_table_plural = 'category_bases'
__tablename__ = 'categories'
type = db.Column(db.String)
name = deferred(db.Column(db.String), 'CategoryBase')
lft = deferred(db.Column(db.Integer), 'CategoryBase')
rgt = deferred(db.Column(db.Integer), 'CategoryBase')
scope_id = deferred(db.Column(db.Integer), 'CategoryBase')
depth = deferred(db.Column(db.Integer), 'CategoryBase')
required = deferred(db.Column(db.Boolean), 'CategoryBase')
__mapper_args__ = {
'polymorphic_on': type
}
categorizations = db.relationship(
'ggrc.models.categorization.Categorization',
backref='category',
cascade='all, delete-orphan',
)
@validates('type')
def validates_type(self, key, value):
return self.__class__.__name__
# REST properties
_publish_attrs = [
'name',
'type',
'required',
#'scope_id',
]
_sanitize_html = [
'name',
]
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(CategoryBase, cls).eager_query()
return query.options()
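# Illustrative subclass sketch (added; not part of the original module).
# Because `type` is the polymorphic discriminator, a concrete category only
# needs a polymorphic identity; the class name below is hypothetical.
# class ControlCategory(CategoryBase):
#     __mapper_args__ = {'polymorphic_identity': 'ControlCategory'}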
|
NejcZupec/ggrc-core
|
src/ggrc/models/category.py
|
Python
|
apache-2.0
| 1,823 | 0.013165 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Yanis Guenane <yanis+ansible@guenane.org>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: openssl_csr
version_added: '2.4'
short_description: Generate OpenSSL Certificate Signing Request (CSR)
description:
- This module allows one to (re)generate OpenSSL certificate signing requests.
- It uses the pyOpenSSL python library to interact with openssl. This module supports
the subjectAltName, keyUsage, extendedKeyUsage, basicConstraints and OCSP Must Staple
extensions.
- "Please note that the module regenerates existing CSR if it doesn't match the module's
options, or if it seems to be corrupt. If you are concerned that this could overwrite
your existing CSR, consider using the I(backup) option."
- The module can use the cryptography Python library, or the pyOpenSSL Python
library. By default, it tries to detect which one is available. This can be
overridden with the I(select_crypto_backend) option. Please note that the
    PyOpenSSL backend was deprecated in Ansible 2.9 and will be removed in Ansible 2.13.
requirements:
- Either cryptography >= 1.3
- Or pyOpenSSL >= 0.15
author:
- Yanis Guenane (@Spredzy)
options:
state:
description:
- Whether the certificate signing request should exist or not, taking action if the state is different from what is stated.
type: str
default: present
choices: [ absent, present ]
digest:
description:
- The digest used when signing the certificate signing request with the private key.
type: str
default: sha256
privatekey_path:
description:
- The path to the private key to use when signing the certificate signing request.
- Either I(privatekey_path) or I(privatekey_content) must be specified if I(state) is C(present), but not both.
type: path
privatekey_content:
description:
- The content of the private key to use when signing the certificate signing request.
- Either I(privatekey_path) or I(privatekey_content) must be specified if I(state) is C(present), but not both.
type: str
version_added: "2.10"
privatekey_passphrase:
description:
- The passphrase for the private key.
- This is required if the private key is password protected.
type: str
version:
description:
- The version of the certificate signing request.
- "The only allowed value according to L(RFC 2986,https://tools.ietf.org/html/rfc2986#section-4.1)
is 1."
- This option will no longer accept unsupported values from Ansible 2.14 on.
type: int
default: 1
force:
description:
- Should the certificate signing request be forced regenerated by this ansible module.
type: bool
default: no
path:
description:
- The name of the file into which the generated OpenSSL certificate signing request will be written.
type: path
required: true
subject:
description:
- Key/value pairs that will be present in the subject name field of the certificate signing request.
- If you need to specify more than one value with the same key, use a list as value.
type: dict
version_added: '2.5'
country_name:
description:
- The countryName field of the certificate signing request subject.
type: str
aliases: [ C, countryName ]
state_or_province_name:
description:
- The stateOrProvinceName field of the certificate signing request subject.
type: str
aliases: [ ST, stateOrProvinceName ]
locality_name:
description:
- The localityName field of the certificate signing request subject.
type: str
aliases: [ L, localityName ]
organization_name:
description:
- The organizationName field of the certificate signing request subject.
type: str
aliases: [ O, organizationName ]
organizational_unit_name:
description:
- The organizationalUnitName field of the certificate signing request subject.
type: str
aliases: [ OU, organizationalUnitName ]
common_name:
description:
- The commonName field of the certificate signing request subject.
type: str
aliases: [ CN, commonName ]
email_address:
description:
- The emailAddress field of the certificate signing request subject.
type: str
aliases: [ E, emailAddress ]
subject_alt_name:
description:
- SAN extension to attach to the certificate signing request.
- This can either be a 'comma separated string' or a YAML list.
- Values must be prefixed by their options. (i.e., C(email), C(URI), C(DNS), C(RID), C(IP), C(dirName),
C(otherName) and the ones specific to your CA)
      - Note that if no SAN is specified but a common name is, the common
        name will be added as a SAN, unless C(useCommonNameForSAN) is
        set to I(false).
- More at U(https://tools.ietf.org/html/rfc5280#section-4.2.1.6).
type: list
elements: str
aliases: [ subjectAltName ]
subject_alt_name_critical:
description:
- Should the subjectAltName extension be considered as critical.
type: bool
aliases: [ subjectAltName_critical ]
use_common_name_for_san:
description:
- If set to C(yes), the module will fill the common name in for
C(subject_alt_name) with C(DNS:) prefix if no SAN is specified.
type: bool
default: yes
version_added: '2.8'
aliases: [ useCommonNameForSAN ]
key_usage:
description:
- This defines the purpose (e.g. encipherment, signature, certificate signing)
of the key contained in the certificate.
type: list
elements: str
aliases: [ keyUsage ]
key_usage_critical:
description:
- Should the keyUsage extension be considered as critical.
type: bool
aliases: [ keyUsage_critical ]
extended_key_usage:
description:
- Additional restrictions (e.g. client authentication, server authentication)
on the allowed purposes for which the public key may be used.
type: list
elements: str
aliases: [ extKeyUsage, extendedKeyUsage ]
extended_key_usage_critical:
description:
- Should the extkeyUsage extension be considered as critical.
type: bool
aliases: [ extKeyUsage_critical, extendedKeyUsage_critical ]
basic_constraints:
description:
- Indicates basic constraints, such as if the certificate is a CA.
type: list
elements: str
version_added: '2.5'
aliases: [ basicConstraints ]
basic_constraints_critical:
description:
- Should the basicConstraints extension be considered as critical.
type: bool
version_added: '2.5'
aliases: [ basicConstraints_critical ]
ocsp_must_staple:
description:
- Indicates that the certificate should contain the OCSP Must Staple
extension (U(https://tools.ietf.org/html/rfc7633)).
type: bool
version_added: '2.5'
aliases: [ ocspMustStaple ]
ocsp_must_staple_critical:
description:
- Should the OCSP Must Staple extension be considered as critical
- Note that according to the RFC, this extension should not be marked
as critical, as old clients not knowing about OCSP Must Staple
are required to reject such certificates
(see U(https://tools.ietf.org/html/rfc7633#section-4)).
type: bool
version_added: '2.5'
aliases: [ ocspMustStaple_critical ]
select_crypto_backend:
description:
- Determines which crypto backend to use.
- The default choice is C(auto), which tries to use C(cryptography) if available, and falls back to C(pyopenssl).
- If set to C(pyopenssl), will try to use the L(pyOpenSSL,https://pypi.org/project/pyOpenSSL/) library.
- If set to C(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
- Please note that the C(pyopenssl) backend has been deprecated in Ansible 2.9, and will be removed in Ansible 2.13.
From that point on, only the C(cryptography) backend will be available.
type: str
default: auto
choices: [ auto, cryptography, pyopenssl ]
version_added: '2.8'
backup:
description:
- Create a backup file including a timestamp so you can get the original
CSR back if you overwrote it with a new one by accident.
type: bool
default: no
version_added: "2.8"
create_subject_key_identifier:
description:
- Create the Subject Key Identifier from the public key.
- "Please note that commercial CAs can ignore the value, respectively use a value of
their own choice instead. Specifying this option is mostly useful for self-signed
certificates or for own CAs."
- Note that this is only supported if the C(cryptography) backend is used!
type: bool
default: no
version_added: "2.9"
subject_key_identifier:
description:
- The subject key identifier as a hex string, where two bytes are separated by colons.
- "Example: C(00:11:22:33:44:55:66:77:88:99:aa:bb:cc:dd:ee:ff:00:11:22:33)"
- "Please note that commercial CAs ignore this value, respectively use a value of their
own choice. Specifying this option is mostly useful for self-signed certificates
or for own CAs."
- Note that this option can only be used if I(create_subject_key_identifier) is C(no).
- Note that this is only supported if the C(cryptography) backend is used!
type: str
version_added: "2.9"
authority_key_identifier:
description:
- The authority key identifier as a hex string, where two bytes are separated by colons.
- "Example: C(00:11:22:33:44:55:66:77:88:99:aa:bb:cc:dd:ee:ff:00:11:22:33)"
- If specified, I(authority_cert_issuer) must also be specified.
- "Please note that commercial CAs ignore this value, respectively use a value of their
own choice. Specifying this option is mostly useful for self-signed certificates
or for own CAs."
- Note that this is only supported if the C(cryptography) backend is used!
- The C(AuthorityKeyIdentifier) will only be added if at least one of I(authority_key_identifier),
I(authority_cert_issuer) and I(authority_cert_serial_number) is specified.
type: str
version_added: "2.9"
authority_cert_issuer:
description:
- Names that will be present in the authority cert issuer field of the certificate signing request.
- Values must be prefixed by their options. (i.e., C(email), C(URI), C(DNS), C(RID), C(IP), C(dirName),
C(otherName) and the ones specific to your CA)
- "Example: C(DNS:ca.example.org)"
- If specified, I(authority_key_identifier) must also be specified.
- "Please note that commercial CAs ignore this value, respectively use a value of their
own choice. Specifying this option is mostly useful for self-signed certificates
or for own CAs."
- Note that this is only supported if the C(cryptography) backend is used!
- The C(AuthorityKeyIdentifier) will only be added if at least one of I(authority_key_identifier),
I(authority_cert_issuer) and I(authority_cert_serial_number) is specified.
type: list
elements: str
version_added: "2.9"
authority_cert_serial_number:
description:
- The authority cert serial number.
- Note that this is only supported if the C(cryptography) backend is used!
- "Please note that commercial CAs ignore this value, respectively use a value of their
own choice. Specifying this option is mostly useful for self-signed certificates
or for own CAs."
- The C(AuthorityKeyIdentifier) will only be added if at least one of I(authority_key_identifier),
I(authority_cert_issuer) and I(authority_cert_serial_number) is specified.
type: int
version_added: "2.9"
return_content:
description:
- If set to C(yes), will return the (current or generated) CSR's content as I(csr).
type: bool
default: no
version_added: "2.10"
extends_documentation_fragment:
- files
notes:
- If the certificate signing request already exists it will be checked whether subjectAltName,
keyUsage, extendedKeyUsage and basicConstraints only contain the requested values, whether
OCSP Must Staple is as requested, and if the request was signed by the given private key.
seealso:
- module: openssl_certificate
- module: openssl_dhparam
- module: openssl_pkcs12
- module: openssl_privatekey
- module: openssl_publickey
'''
EXAMPLES = r'''
- name: Generate an OpenSSL Certificate Signing Request
openssl_csr:
path: /etc/ssl/csr/www.ansible.com.csr
privatekey_path: /etc/ssl/private/ansible.com.pem
common_name: www.ansible.com
- name: Generate an OpenSSL Certificate Signing Request with an inline key
openssl_csr:
path: /etc/ssl/csr/www.ansible.com.csr
privatekey_content: "{{ private_key_content }}"
common_name: www.ansible.com
- name: Generate an OpenSSL Certificate Signing Request with a passphrase protected private key
openssl_csr:
path: /etc/ssl/csr/www.ansible.com.csr
privatekey_path: /etc/ssl/private/ansible.com.pem
privatekey_passphrase: ansible
common_name: www.ansible.com
- name: Generate an OpenSSL Certificate Signing Request with Subject information
openssl_csr:
path: /etc/ssl/csr/www.ansible.com.csr
privatekey_path: /etc/ssl/private/ansible.com.pem
country_name: FR
organization_name: Ansible
email_address: jdoe@ansible.com
common_name: www.ansible.com
- name: Generate an OpenSSL Certificate Signing Request with subjectAltName extension
openssl_csr:
path: /etc/ssl/csr/www.ansible.com.csr
privatekey_path: /etc/ssl/private/ansible.com.pem
subject_alt_name: 'DNS:www.ansible.com,DNS:m.ansible.com'
- name: Generate an OpenSSL CSR with subjectAltName extension with dynamic list
openssl_csr:
path: /etc/ssl/csr/www.ansible.com.csr
privatekey_path: /etc/ssl/private/ansible.com.pem
subject_alt_name: "{{ item.value | map('regex_replace', '^', 'DNS:') | list }}"
with_dict:
dns_server:
- www.ansible.com
- m.ansible.com
- name: Force regenerate an OpenSSL Certificate Signing Request
openssl_csr:
path: /etc/ssl/csr/www.ansible.com.csr
privatekey_path: /etc/ssl/private/ansible.com.pem
force: yes
common_name: www.ansible.com
- name: Generate an OpenSSL Certificate Signing Request with special key usages
openssl_csr:
path: /etc/ssl/csr/www.ansible.com.csr
privatekey_path: /etc/ssl/private/ansible.com.pem
common_name: www.ansible.com
key_usage:
- digitalSignature
- keyAgreement
extended_key_usage:
- clientAuth
- name: Generate an OpenSSL Certificate Signing Request with OCSP Must Staple
openssl_csr:
path: /etc/ssl/csr/www.ansible.com.csr
privatekey_path: /etc/ssl/private/ansible.com.pem
common_name: www.ansible.com
ocsp_must_staple: yes
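# Added illustration (not in the original EXAMPLES); the subject key
# identifier below reuses the placeholder hex string from the documentation.
- name: Generate an OpenSSL Certificate Signing Request with an explicit subject key identifier
  openssl_csr:
    path: /etc/ssl/csr/www.ansible.com.csr
    privatekey_path: /etc/ssl/private/ansible.com.pem
    common_name: www.ansible.com
    subject_key_identifier: '00:11:22:33:44:55:66:77:88:99:aa:bb:cc:dd:ee:ff:00:11:22:33'
    select_crypto_backend: cryptography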
'''
RETURN = r'''
privatekey:
description:
- Path to the TLS/SSL private key the CSR was generated for
- Will be C(none) if the private key has been provided in I(privatekey_content).
returned: changed or success
type: str
sample: /etc/ssl/private/ansible.com.pem
filename:
description: Path to the generated Certificate Signing Request
returned: changed or success
type: str
sample: /etc/ssl/csr/www.ansible.com.csr
subject:
description: A list of the subject tuples attached to the CSR
returned: changed or success
type: list
elements: list
sample: "[('CN', 'www.ansible.com'), ('O', 'Ansible')]"
subjectAltName:
description: The alternative names this CSR is valid for
returned: changed or success
type: list
elements: str
sample: [ 'DNS:www.ansible.com', 'DNS:m.ansible.com' ]
keyUsage:
description: Purpose for which the public key may be used
returned: changed or success
type: list
elements: str
sample: [ 'digitalSignature', 'keyAgreement' ]
extendedKeyUsage:
description: Additional restriction on the public key purposes
returned: changed or success
type: list
elements: str
sample: [ 'clientAuth' ]
basicConstraints:
description: Indicates if the certificate belongs to a CA
returned: changed or success
type: list
elements: str
sample: ['CA:TRUE', 'pathLenConstraint:0']
ocsp_must_staple:
description: Indicates whether the certificate has the OCSP
Must Staple feature enabled
returned: changed or success
type: bool
sample: false
backup_file:
description: Name of backup file created.
returned: changed and if I(backup) is C(yes)
type: str
sample: /path/to/www.ansible.com.csr.2019-03-09@11:22~
csr:
description: The (current or generated) CSR's content.
returned: if I(state) is C(present) and I(return_content) is C(yes)
type: str
version_added: "2.10"
'''
import abc
import binascii
import os
import traceback
from distutils.version import LooseVersion
from ansible.module_utils import crypto as crypto_utils
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils._text import to_native, to_bytes, to_text
from ansible.module_utils.compat import ipaddress as compat_ipaddress
MINIMAL_PYOPENSSL_VERSION = '0.15'
MINIMAL_CRYPTOGRAPHY_VERSION = '1.3'
PYOPENSSL_IMP_ERR = None
try:
import OpenSSL
from OpenSSL import crypto
PYOPENSSL_VERSION = LooseVersion(OpenSSL.__version__)
except ImportError:
PYOPENSSL_IMP_ERR = traceback.format_exc()
PYOPENSSL_FOUND = False
else:
PYOPENSSL_FOUND = True
if OpenSSL.SSL.OPENSSL_VERSION_NUMBER >= 0x10100000:
# OpenSSL 1.1.0 or newer
OPENSSL_MUST_STAPLE_NAME = b"tlsfeature"
OPENSSL_MUST_STAPLE_VALUE = b"status_request"
else:
# OpenSSL 1.0.x or older
OPENSSL_MUST_STAPLE_NAME = b"1.3.6.1.5.5.7.1.24"
OPENSSL_MUST_STAPLE_VALUE = b"DER:30:03:02:01:05"
CRYPTOGRAPHY_IMP_ERR = None
try:
import cryptography
import cryptography.x509
import cryptography.x509.oid
import cryptography.exceptions
import cryptography.hazmat.backends
import cryptography.hazmat.primitives.serialization
import cryptography.hazmat.primitives.hashes
CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
except ImportError:
CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
CRYPTOGRAPHY_FOUND = False
else:
CRYPTOGRAPHY_FOUND = True
CRYPTOGRAPHY_MUST_STAPLE_NAME = cryptography.x509.oid.ObjectIdentifier("1.3.6.1.5.5.7.1.24")
CRYPTOGRAPHY_MUST_STAPLE_VALUE = b"\x30\x03\x02\x01\x05"
class CertificateSigningRequestError(crypto_utils.OpenSSLObjectError):
pass
class CertificateSigningRequestBase(crypto_utils.OpenSSLObject):
def __init__(self, module):
super(CertificateSigningRequestBase, self).__init__(
module.params['path'],
module.params['state'],
module.params['force'],
module.check_mode
)
self.digest = module.params['digest']
self.privatekey_path = module.params['privatekey_path']
self.privatekey_content = module.params['privatekey_content']
if self.privatekey_content is not None:
self.privatekey_content = self.privatekey_content.encode('utf-8')
self.privatekey_passphrase = module.params['privatekey_passphrase']
self.version = module.params['version']
self.subjectAltName = module.params['subject_alt_name']
self.subjectAltName_critical = module.params['subject_alt_name_critical']
self.keyUsage = module.params['key_usage']
self.keyUsage_critical = module.params['key_usage_critical']
self.extendedKeyUsage = module.params['extended_key_usage']
self.extendedKeyUsage_critical = module.params['extended_key_usage_critical']
self.basicConstraints = module.params['basic_constraints']
self.basicConstraints_critical = module.params['basic_constraints_critical']
self.ocspMustStaple = module.params['ocsp_must_staple']
self.ocspMustStaple_critical = module.params['ocsp_must_staple_critical']
self.create_subject_key_identifier = module.params['create_subject_key_identifier']
self.subject_key_identifier = module.params['subject_key_identifier']
self.authority_key_identifier = module.params['authority_key_identifier']
self.authority_cert_issuer = module.params['authority_cert_issuer']
self.authority_cert_serial_number = module.params['authority_cert_serial_number']
self.request = None
self.privatekey = None
self.csr_bytes = None
self.return_content = module.params['return_content']
if self.create_subject_key_identifier and self.subject_key_identifier is not None:
module.fail_json(msg='subject_key_identifier cannot be specified if create_subject_key_identifier is true')
self.backup = module.params['backup']
self.backup_file = None
self.subject = [
('C', module.params['country_name']),
('ST', module.params['state_or_province_name']),
('L', module.params['locality_name']),
('O', module.params['organization_name']),
('OU', module.params['organizational_unit_name']),
('CN', module.params['common_name']),
('emailAddress', module.params['email_address']),
]
if module.params['subject']:
self.subject = self.subject + crypto_utils.parse_name_field(module.params['subject'])
self.subject = [(entry[0], entry[1]) for entry in self.subject if entry[1]]
if not self.subjectAltName and module.params['use_common_name_for_san']:
for sub in self.subject:
if sub[0] in ('commonName', 'CN'):
self.subjectAltName = ['DNS:%s' % sub[1]]
break
if self.subject_key_identifier is not None:
try:
self.subject_key_identifier = binascii.unhexlify(self.subject_key_identifier.replace(':', ''))
except Exception as e:
raise CertificateSigningRequestError('Cannot parse subject_key_identifier: {0}'.format(e))
if self.authority_key_identifier is not None:
try:
self.authority_key_identifier = binascii.unhexlify(self.authority_key_identifier.replace(':', ''))
except Exception as e:
raise CertificateSigningRequestError('Cannot parse authority_key_identifier: {0}'.format(e))
@abc.abstractmethod
def _generate_csr(self):
pass
def generate(self, module):
'''Generate the certificate signing request.'''
if not self.check(module, perms_required=False) or self.force:
result = self._generate_csr()
if self.backup:
self.backup_file = module.backup_local(self.path)
if self.return_content:
self.csr_bytes = result
crypto_utils.write_file(module, result)
self.changed = True
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
@abc.abstractmethod
def _load_private_key(self):
pass
@abc.abstractmethod
def _check_csr(self):
pass
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
state_and_perms = super(CertificateSigningRequestBase, self).check(module, perms_required)
self._load_private_key()
if not state_and_perms:
return False
return self._check_csr()
def remove(self, module):
if self.backup:
self.backup_file = module.backup_local(self.path)
super(CertificateSigningRequestBase, self).remove(module)
def dump(self):
'''Serialize the object into a dictionary.'''
result = {
'privatekey': self.privatekey_path,
'filename': self.path,
'subject': self.subject,
'subjectAltName': self.subjectAltName,
'keyUsage': self.keyUsage,
'extendedKeyUsage': self.extendedKeyUsage,
'basicConstraints': self.basicConstraints,
'ocspMustStaple': self.ocspMustStaple,
'changed': self.changed
}
if self.backup_file:
result['backup_file'] = self.backup_file
if self.return_content:
if self.csr_bytes is None:
self.csr_bytes = crypto_utils.load_file_if_exists(self.path, ignore_errors=True)
result['csr'] = self.csr_bytes.decode('utf-8') if self.csr_bytes else None
return result
class CertificateSigningRequestPyOpenSSL(CertificateSigningRequestBase):
def __init__(self, module):
if module.params['create_subject_key_identifier']:
module.fail_json(msg='You cannot use create_subject_key_identifier with the pyOpenSSL backend!')
for o in ('subject_key_identifier', 'authority_key_identifier', 'authority_cert_issuer', 'authority_cert_serial_number'):
if module.params[o] is not None:
module.fail_json(msg='You cannot use {0} with the pyOpenSSL backend!'.format(o))
super(CertificateSigningRequestPyOpenSSL, self).__init__(module)
def _generate_csr(self):
req = crypto.X509Req()
req.set_version(self.version - 1)
subject = req.get_subject()
for entry in self.subject:
if entry[1] is not None:
# Workaround for https://github.com/pyca/pyopenssl/issues/165
nid = OpenSSL._util.lib.OBJ_txt2nid(to_bytes(entry[0]))
if nid == 0:
raise CertificateSigningRequestError('Unknown subject field identifier "{0}"'.format(entry[0]))
res = OpenSSL._util.lib.X509_NAME_add_entry_by_NID(subject._name, nid, OpenSSL._util.lib.MBSTRING_UTF8, to_bytes(entry[1]), -1, -1, 0)
if res == 0:
raise CertificateSigningRequestError('Invalid value for subject field identifier "{0}": {1}'.format(entry[0], entry[1]))
extensions = []
if self.subjectAltName:
altnames = ', '.join(self.subjectAltName)
try:
extensions.append(crypto.X509Extension(b"subjectAltName", self.subjectAltName_critical, altnames.encode('ascii')))
except OpenSSL.crypto.Error as e:
raise CertificateSigningRequestError(
'Error while parsing Subject Alternative Names {0} (check for missing type prefix, such as "DNS:"!): {1}'.format(
', '.join(["{0}".format(san) for san in self.subjectAltName]), str(e)
)
)
if self.keyUsage:
usages = ', '.join(self.keyUsage)
extensions.append(crypto.X509Extension(b"keyUsage", self.keyUsage_critical, usages.encode('ascii')))
if self.extendedKeyUsage:
usages = ', '.join(self.extendedKeyUsage)
extensions.append(crypto.X509Extension(b"extendedKeyUsage", self.extendedKeyUsage_critical, usages.encode('ascii')))
if self.basicConstraints:
usages = ', '.join(self.basicConstraints)
extensions.append(crypto.X509Extension(b"basicConstraints", self.basicConstraints_critical, usages.encode('ascii')))
if self.ocspMustStaple:
extensions.append(crypto.X509Extension(OPENSSL_MUST_STAPLE_NAME, self.ocspMustStaple_critical, OPENSSL_MUST_STAPLE_VALUE))
if extensions:
req.add_extensions(extensions)
req.set_pubkey(self.privatekey)
req.sign(self.privatekey, self.digest)
self.request = req
return crypto.dump_certificate_request(crypto.FILETYPE_PEM, self.request)
def _load_private_key(self):
try:
self.privatekey = crypto_utils.load_privatekey(
path=self.privatekey_path,
content=self.privatekey_content,
passphrase=self.privatekey_passphrase
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
raise CertificateSigningRequestError(exc)
def _normalize_san(self, san):
# Apparently OpenSSL returns 'IP address' not 'IP' as specifier when converting the subjectAltName to string
# although it won't accept this specifier when generating the CSR. (https://github.com/openssl/openssl/issues/4004)
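        # Illustrative normalization (added comment):
        #   'IP Address:0:0:0:0:0:0:0:1'  ->  'IP:::1'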
if san.startswith('IP Address:'):
san = 'IP:' + san[len('IP Address:'):]
if san.startswith('IP:'):
ip = compat_ipaddress.ip_address(san[3:])
san = 'IP:{0}'.format(ip.compressed)
return san
def _check_csr(self):
def _check_subject(csr):
subject = [(OpenSSL._util.lib.OBJ_txt2nid(to_bytes(sub[0])), to_bytes(sub[1])) for sub in self.subject]
current_subject = [(OpenSSL._util.lib.OBJ_txt2nid(to_bytes(sub[0])), to_bytes(sub[1])) for sub in csr.get_subject().get_components()]
if not set(subject) == set(current_subject):
return False
return True
def _check_subjectAltName(extensions):
altnames_ext = next((ext for ext in extensions if ext.get_short_name() == b'subjectAltName'), '')
altnames = [self._normalize_san(altname.strip()) for altname in
to_text(altnames_ext, errors='surrogate_or_strict').split(',') if altname.strip()]
if self.subjectAltName:
if (set(altnames) != set([self._normalize_san(to_text(name)) for name in self.subjectAltName]) or
altnames_ext.get_critical() != self.subjectAltName_critical):
return False
else:
if altnames:
return False
return True
def _check_keyUsage_(extensions, extName, expected, critical):
usages_ext = [ext for ext in extensions if ext.get_short_name() == extName]
if (not usages_ext and expected) or (usages_ext and not expected):
return False
elif not usages_ext and not expected:
return True
else:
current = [OpenSSL._util.lib.OBJ_txt2nid(to_bytes(usage.strip())) for usage in str(usages_ext[0]).split(',')]
expected = [OpenSSL._util.lib.OBJ_txt2nid(to_bytes(usage)) for usage in expected]
return set(current) == set(expected) and usages_ext[0].get_critical() == critical
def _check_keyUsage(extensions):
usages_ext = [ext for ext in extensions if ext.get_short_name() == b'keyUsage']
if (not usages_ext and self.keyUsage) or (usages_ext and not self.keyUsage):
return False
elif not usages_ext and not self.keyUsage:
return True
else:
# OpenSSL._util.lib.OBJ_txt2nid() always returns 0 for all keyUsage values
# (since keyUsage has a fixed bitfield for these values and is not extensible).
# Therefore, we create an extension for the wanted values, and compare the
# data of the extensions (which is the serialized bitfield).
expected_ext = crypto.X509Extension(b"keyUsage", False, ', '.join(self.keyUsage).encode('ascii'))
return usages_ext[0].get_data() == expected_ext.get_data() and usages_ext[0].get_critical() == self.keyUsage_critical
        def _check_extendedKeyUsage(extensions):
return _check_keyUsage_(extensions, b'extendedKeyUsage', self.extendedKeyUsage, self.extendedKeyUsage_critical)
def _check_basicConstraints(extensions):
return _check_keyUsage_(extensions, b'basicConstraints', self.basicConstraints, self.basicConstraints_critical)
def _check_ocspMustStaple(extensions):
oms_ext = [ext for ext in extensions if to_bytes(ext.get_short_name()) == OPENSSL_MUST_STAPLE_NAME and to_bytes(ext) == OPENSSL_MUST_STAPLE_VALUE]
if OpenSSL.SSL.OPENSSL_VERSION_NUMBER < 0x10100000:
# Older versions of libssl don't know about OCSP Must Staple
oms_ext.extend([ext for ext in extensions if ext.get_short_name() == b'UNDEF' and ext.get_data() == b'\x30\x03\x02\x01\x05'])
if self.ocspMustStaple:
return len(oms_ext) > 0 and oms_ext[0].get_critical() == self.ocspMustStaple_critical
else:
return len(oms_ext) == 0
def _check_extensions(csr):
extensions = csr.get_extensions()
return (_check_subjectAltName(extensions) and _check_keyUsage(extensions) and
                    _check_extendedKeyUsage(extensions) and _check_basicConstraints(extensions) and
_check_ocspMustStaple(extensions))
def _check_signature(csr):
try:
return csr.verify(self.privatekey)
except crypto.Error:
return False
try:
csr = crypto_utils.load_certificate_request(self.path, backend='pyopenssl')
except Exception as dummy:
return False
return _check_subject(csr) and _check_extensions(csr) and _check_signature(csr)
class CertificateSigningRequestCryptography(CertificateSigningRequestBase):
def __init__(self, module):
super(CertificateSigningRequestCryptography, self).__init__(module)
self.cryptography_backend = cryptography.hazmat.backends.default_backend()
self.module = module
if self.version != 1:
module.warn('The cryptography backend only supports version 1. (The only valid value according to RFC 2986.)')
def _generate_csr(self):
csr = cryptography.x509.CertificateSigningRequestBuilder()
try:
csr = csr.subject_name(cryptography.x509.Name([
cryptography.x509.NameAttribute(crypto_utils.cryptography_name_to_oid(entry[0]), to_text(entry[1])) for entry in self.subject
]))
except ValueError as e:
raise CertificateSigningRequestError(e)
if self.subjectAltName:
csr = csr.add_extension(cryptography.x509.SubjectAlternativeName([
crypto_utils.cryptography_get_name(name) for name in self.subjectAltName
]), critical=self.subjectAltName_critical)
if self.keyUsage:
params = crypto_utils.cryptography_parse_key_usage_params(self.keyUsage)
csr = csr.add_extension(cryptography.x509.KeyUsage(**params), critical=self.keyUsage_critical)
if self.extendedKeyUsage:
usages = [crypto_utils.cryptography_name_to_oid(usage) for usage in self.extendedKeyUsage]
csr = csr.add_extension(cryptography.x509.ExtendedKeyUsage(usages), critical=self.extendedKeyUsage_critical)
if self.basicConstraints:
params = {}
ca, path_length = crypto_utils.cryptography_get_basic_constraints(self.basicConstraints)
csr = csr.add_extension(cryptography.x509.BasicConstraints(ca, path_length), critical=self.basicConstraints_critical)
if self.ocspMustStaple:
try:
# This only works with cryptography >= 2.1
csr = csr.add_extension(cryptography.x509.TLSFeature([cryptography.x509.TLSFeatureType.status_request]), critical=self.ocspMustStaple_critical)
except AttributeError as dummy:
csr = csr.add_extension(
cryptography.x509.UnrecognizedExtension(CRYPTOGRAPHY_MUST_STAPLE_NAME, CRYPTOGRAPHY_MUST_STAPLE_VALUE),
critical=self.ocspMustStaple_critical
)
if self.create_subject_key_identifier:
csr = csr.add_extension(
cryptography.x509.SubjectKeyIdentifier.from_public_key(self.privatekey.public_key()),
critical=False
)
elif self.subject_key_identifier is not None:
csr = csr.add_extension(cryptography.x509.SubjectKeyIdentifier(self.subject_key_identifier), critical=False)
if self.authority_key_identifier is not None or self.authority_cert_issuer is not None or self.authority_cert_serial_number is not None:
issuers = None
if self.authority_cert_issuer is not None:
issuers = [crypto_utils.cryptography_get_name(n) for n in self.authority_cert_issuer]
csr = csr.add_extension(
cryptography.x509.AuthorityKeyIdentifier(self.authority_key_identifier, issuers, self.authority_cert_serial_number),
critical=False
)
digest = None
if crypto_utils.cryptography_key_needs_digest_for_signing(self.privatekey):
if self.digest == 'sha256':
digest = cryptography.hazmat.primitives.hashes.SHA256()
elif self.digest == 'sha384':
digest = cryptography.hazmat.primitives.hashes.SHA384()
elif self.digest == 'sha512':
digest = cryptography.hazmat.primitives.hashes.SHA512()
elif self.digest == 'sha1':
digest = cryptography.hazmat.primitives.hashes.SHA1()
elif self.digest == 'md5':
digest = cryptography.hazmat.primitives.hashes.MD5()
# FIXME
else:
raise CertificateSigningRequestError('Unsupported digest "{0}"'.format(self.digest))
try:
self.request = csr.sign(self.privatekey, digest, self.cryptography_backend)
except TypeError as e:
if str(e) == 'Algorithm must be a registered hash algorithm.' and digest is None:
self.module.fail_json(msg='Signing with Ed25519 and Ed448 keys requires cryptography 2.8 or newer.')
raise
return self.request.public_bytes(cryptography.hazmat.primitives.serialization.Encoding.PEM)
def _load_private_key(self):
try:
if self.privatekey_content is not None:
content = self.privatekey_content
else:
with open(self.privatekey_path, 'rb') as f:
content = f.read()
self.privatekey = cryptography.hazmat.primitives.serialization.load_pem_private_key(
content,
None if self.privatekey_passphrase is None else to_bytes(self.privatekey_passphrase),
backend=self.cryptography_backend
)
except Exception as e:
raise CertificateSigningRequestError(e)
def _check_csr(self):
def _check_subject(csr):
subject = [(crypto_utils.cryptography_name_to_oid(entry[0]), entry[1]) for entry in self.subject]
current_subject = [(sub.oid, sub.value) for sub in csr.subject]
return set(subject) == set(current_subject)
def _find_extension(extensions, exttype):
return next(
(ext for ext in extensions if isinstance(ext.value, exttype)),
None
)
def _check_subjectAltName(extensions):
current_altnames_ext = _find_extension(extensions, cryptography.x509.SubjectAlternativeName)
current_altnames = [str(altname) for altname in current_altnames_ext.value] if current_altnames_ext else []
altnames = [str(crypto_utils.cryptography_get_name(altname)) for altname in self.subjectAltName] if self.subjectAltName else []
if set(altnames) != set(current_altnames):
return False
if altnames:
if current_altnames_ext.critical != self.subjectAltName_critical:
return False
return True
def _check_keyUsage(extensions):
current_keyusage_ext = _find_extension(extensions, cryptography.x509.KeyUsage)
if not self.keyUsage:
return current_keyusage_ext is None
elif current_keyusage_ext is None:
return False
params = crypto_utils.cryptography_parse_key_usage_params(self.keyUsage)
for param in params:
if getattr(current_keyusage_ext.value, '_' + param) != params[param]:
return False
if current_keyusage_ext.critical != self.keyUsage_critical:
return False
return True
        def _check_extendedKeyUsage(extensions):
current_usages_ext = _find_extension(extensions, cryptography.x509.ExtendedKeyUsage)
current_usages = [str(usage) for usage in current_usages_ext.value] if current_usages_ext else []
usages = [str(crypto_utils.cryptography_name_to_oid(usage)) for usage in self.extendedKeyUsage] if self.extendedKeyUsage else []
if set(current_usages) != set(usages):
return False
if usages:
if current_usages_ext.critical != self.extendedKeyUsage_critical:
return False
return True
def _check_basicConstraints(extensions):
bc_ext = _find_extension(extensions, cryptography.x509.BasicConstraints)
current_ca = bc_ext.value.ca if bc_ext else False
current_path_length = bc_ext.value.path_length if bc_ext else None
ca, path_length = crypto_utils.cryptography_get_basic_constraints(self.basicConstraints)
# Check CA flag
if ca != current_ca:
return False
# Check path length
if path_length != current_path_length:
return False
# Check criticality
if self.basicConstraints:
if bc_ext.critical != self.basicConstraints_critical:
return False
return True
def _check_ocspMustStaple(extensions):
try:
# This only works with cryptography >= 2.1
tlsfeature_ext = _find_extension(extensions, cryptography.x509.TLSFeature)
has_tlsfeature = True
except AttributeError as dummy:
tlsfeature_ext = next(
(ext for ext in extensions if ext.value.oid == CRYPTOGRAPHY_MUST_STAPLE_NAME),
None
)
has_tlsfeature = False
if self.ocspMustStaple:
if not tlsfeature_ext or tlsfeature_ext.critical != self.ocspMustStaple_critical:
return False
if has_tlsfeature:
return cryptography.x509.TLSFeatureType.status_request in tlsfeature_ext.value
else:
return tlsfeature_ext.value.value == CRYPTOGRAPHY_MUST_STAPLE_VALUE
else:
return tlsfeature_ext is None
def _check_subject_key_identifier(extensions):
ext = _find_extension(extensions, cryptography.x509.SubjectKeyIdentifier)
if self.create_subject_key_identifier or self.subject_key_identifier is not None:
if not ext or ext.critical:
return False
if self.create_subject_key_identifier:
digest = cryptography.x509.SubjectKeyIdentifier.from_public_key(self.privatekey.public_key()).digest
return ext.value.digest == digest
else:
return ext.value.digest == self.subject_key_identifier
else:
return ext is None
def _check_authority_key_identifier(extensions):
ext = _find_extension(extensions, cryptography.x509.AuthorityKeyIdentifier)
if self.authority_key_identifier is not None or self.authority_cert_issuer is not None or self.authority_cert_serial_number is not None:
if not ext or ext.critical:
return False
aci = None
csr_aci = None
if self.authority_cert_issuer is not None:
aci = [str(crypto_utils.cryptography_get_name(n)) for n in self.authority_cert_issuer]
if ext.value.authority_cert_issuer is not None:
csr_aci = [str(n) for n in ext.value.authority_cert_issuer]
return (ext.value.key_identifier == self.authority_key_identifier
and csr_aci == aci
and ext.value.authority_cert_serial_number == self.authority_cert_serial_number)
else:
return ext is None
def _check_extensions(csr):
extensions = csr.extensions
return (_check_subjectAltName(extensions) and _check_keyUsage(extensions) and
                    _check_extendedKeyUsage(extensions) and _check_basicConstraints(extensions) and
_check_ocspMustStaple(extensions) and _check_subject_key_identifier(extensions) and
_check_authority_key_identifier(extensions))
def _check_signature(csr):
if not csr.is_signature_valid:
return False
# To check whether public key of CSR belongs to private key,
# encode both public keys and compare PEMs.
key_a = csr.public_key().public_bytes(
cryptography.hazmat.primitives.serialization.Encoding.PEM,
cryptography.hazmat.primitives.serialization.PublicFormat.SubjectPublicKeyInfo
)
key_b = self.privatekey.public_key().public_bytes(
cryptography.hazmat.primitives.serialization.Encoding.PEM,
cryptography.hazmat.primitives.serialization.PublicFormat.SubjectPublicKeyInfo
)
return key_a == key_b
try:
csr = crypto_utils.load_certificate_request(self.path, backend='cryptography')
except Exception as dummy:
return False
return _check_subject(csr) and _check_extensions(csr) and _check_signature(csr)
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', default='present', choices=['absent', 'present']),
digest=dict(type='str', default='sha256'),
privatekey_path=dict(type='path'),
privatekey_content=dict(type='str'),
privatekey_passphrase=dict(type='str', no_log=True),
version=dict(type='int', default=1),
force=dict(type='bool', default=False),
path=dict(type='path', required=True),
subject=dict(type='dict'),
country_name=dict(type='str', aliases=['C', 'countryName']),
state_or_province_name=dict(type='str', aliases=['ST', 'stateOrProvinceName']),
locality_name=dict(type='str', aliases=['L', 'localityName']),
organization_name=dict(type='str', aliases=['O', 'organizationName']),
organizational_unit_name=dict(type='str', aliases=['OU', 'organizationalUnitName']),
common_name=dict(type='str', aliases=['CN', 'commonName']),
email_address=dict(type='str', aliases=['E', 'emailAddress']),
subject_alt_name=dict(type='list', elements='str', aliases=['subjectAltName']),
subject_alt_name_critical=dict(type='bool', default=False, aliases=['subjectAltName_critical']),
use_common_name_for_san=dict(type='bool', default=True, aliases=['useCommonNameForSAN']),
key_usage=dict(type='list', elements='str', aliases=['keyUsage']),
key_usage_critical=dict(type='bool', default=False, aliases=['keyUsage_critical']),
extended_key_usage=dict(type='list', elements='str', aliases=['extKeyUsage', 'extendedKeyUsage']),
extended_key_usage_critical=dict(type='bool', default=False, aliases=['extKeyUsage_critical', 'extendedKeyUsage_critical']),
basic_constraints=dict(type='list', elements='str', aliases=['basicConstraints']),
basic_constraints_critical=dict(type='bool', default=False, aliases=['basicConstraints_critical']),
ocsp_must_staple=dict(type='bool', default=False, aliases=['ocspMustStaple']),
ocsp_must_staple_critical=dict(type='bool', default=False, aliases=['ocspMustStaple_critical']),
backup=dict(type='bool', default=False),
create_subject_key_identifier=dict(type='bool', default=False),
subject_key_identifier=dict(type='str'),
authority_key_identifier=dict(type='str'),
authority_cert_issuer=dict(type='list', elements='str'),
authority_cert_serial_number=dict(type='int'),
select_crypto_backend=dict(type='str', default='auto', choices=['auto', 'cryptography', 'pyopenssl']),
return_content=dict(type='bool', default=False),
),
required_together=[('authority_cert_issuer', 'authority_cert_serial_number')],
required_if=[('state', 'present', ['privatekey_path', 'privatekey_content'], True)],
mutually_exclusive=(
['privatekey_path', 'privatekey_content'],
),
add_file_common_args=True,
supports_check_mode=True,
)
if module.params['version'] != 1:
module.deprecate('The version option will only support allowed values from Ansible 2.14 on. '
'Currently, only the value 1 is allowed by RFC 2986',
version='2.14', collection_name='ansible.builtin')
base_dir = os.path.dirname(module.params['path']) or '.'
if not os.path.isdir(base_dir):
module.fail_json(name=base_dir, msg='The directory %s does not exist or the file is not a directory' % base_dir)
backend = module.params['select_crypto_backend']
if backend == 'auto':
        # Detect which backends are usable
can_use_cryptography = CRYPTOGRAPHY_FOUND and CRYPTOGRAPHY_VERSION >= LooseVersion(MINIMAL_CRYPTOGRAPHY_VERSION)
can_use_pyopenssl = PYOPENSSL_FOUND and PYOPENSSL_VERSION >= LooseVersion(MINIMAL_PYOPENSSL_VERSION)
# First try cryptography, then pyOpenSSL
if can_use_cryptography:
backend = 'cryptography'
elif can_use_pyopenssl:
backend = 'pyopenssl'
# Success?
if backend == 'auto':
module.fail_json(msg=("Can't detect any of the required Python libraries "
"cryptography (>= {0}) or PyOpenSSL (>= {1})").format(
MINIMAL_CRYPTOGRAPHY_VERSION,
MINIMAL_PYOPENSSL_VERSION))
try:
if backend == 'pyopenssl':
if not PYOPENSSL_FOUND:
module.fail_json(msg=missing_required_lib('pyOpenSSL >= {0}'.format(MINIMAL_PYOPENSSL_VERSION)),
exception=PYOPENSSL_IMP_ERR)
try:
getattr(crypto.X509Req, 'get_extensions')
except AttributeError:
module.fail_json(msg='You need to have PyOpenSSL>=0.15 to generate CSRs')
module.deprecate('The module is using the PyOpenSSL backend. This backend has been deprecated',
version='2.13', collection_name='ansible.builtin')
csr = CertificateSigningRequestPyOpenSSL(module)
elif backend == 'cryptography':
if not CRYPTOGRAPHY_FOUND:
module.fail_json(msg=missing_required_lib('cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
exception=CRYPTOGRAPHY_IMP_ERR)
csr = CertificateSigningRequestCryptography(module)
if module.params['state'] == 'present':
if module.check_mode:
result = csr.dump()
result['changed'] = module.params['force'] or not csr.check(module)
module.exit_json(**result)
csr.generate(module)
else:
if module.check_mode:
result = csr.dump()
result['changed'] = os.path.exists(module.params['path'])
module.exit_json(**result)
csr.remove(module)
result = csr.dump()
module.exit_json(**result)
except crypto_utils.OpenSSLObjectError as exc:
module.fail_json(msg=to_native(exc))
if __name__ == "__main__":
main()
|
azaghal/ansible
|
test/support/integration/plugins/modules/openssl_csr.py
|
Python
|
gpl-3.0
| 53,940 | 0.004301 |
# TwilioQuest version 3.1.26
# Works in:
# 3.1.26
# bog standard main function
def main():
print("functions")
hail_friend()
print("function arguments")
hail_friend("Operator")
print("function return values")
print(f"{add_numbers(45, -1)}")
# functions the tasks demand
def add_numbers(num1, num2):
return num1 + num2
def hail_friend(name=None):
# use default value to pass Function challenge
    if name is None:
print("Hail, friend!")
else:
# use given value to pass argument challenge
print(f"Hail, {name}!")
# standard main guard
if __name__ == "__main__":
main()
|
greysondn/gamesolutions
|
twilioquest/python/codepath/functions.py
|
Python
|
mit
| 644 | 0.006211 |
#! /usr/bin/env python
# Copyright (C) 2011-2012 Free Software Foundation, Inc.
#
# This file is part of GDB.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""copyright.py
This script updates the list of years in the copyright notices in
most files maintained by the GDB project.
Usage: cd src/gdb && python copyright.py
Always review the output of this script before committing it!
A useful command to review the output is:
% filterdiff -x \*.c -x \*.cc -x \*.h -x \*.exp updates.diff
This removes the bulk of the changes which are most likely to be correct.
"""
import datetime
import os
import os.path
import subprocess
# A list of prefixes that start a multi-line comment. These prefixes
# should not be repeated when wrapping long lines.
MULTILINE_COMMENT_PREFIXES = (
'/*', # C/C++
'<!--', # XML
'{', # Pascal
)
def get_update_list():
"""Return the list of files to update.
Assumes that the current working directory when called is the root
of the GDB source tree (NOT the gdb/ subdirectory!). The names of
the files are relative to that root directory.
"""
result = []
for gdb_dir in ('gdb', 'sim', 'include/gdb'):
for root, dirs, files in os.walk(gdb_dir, topdown=True):
for dirname in dirs:
reldirname = "%s/%s" % (root, dirname)
if (dirname in EXCLUDE_ALL_LIST
or reldirname in EXCLUDE_LIST
or reldirname in NOT_FSF_LIST
or reldirname in BY_HAND):
# Prune this directory from our search list.
dirs.remove(dirname)
for filename in files:
relpath = "%s/%s" % (root, filename)
if (filename in EXCLUDE_ALL_LIST
or relpath in EXCLUDE_LIST
or relpath in NOT_FSF_LIST
or relpath in BY_HAND):
# Ignore this file.
pass
else:
result.append(relpath)
return result
def update_files(update_list):
"""Update the copyright header of the files in the given list.
We use gnulib's update-copyright script for that.
"""
# Tell the update-copyright script that we do not want it to
# repeat the prefixes in MULTILINE_COMMENT_PREFIXES.
os.environ['MULTILINE_COMMENT_PREFIXES'] = \
'\n'.join(MULTILINE_COMMENT_PREFIXES)
# We want to use year intervals in the copyright notices.
os.environ['UPDATE_COPYRIGHT_USE_INTERVALS'] = '1'
# Perform the update, and save the output in a string.
update_cmd = ['bash', 'gdb/gnulib/extra/update-copyright'] + update_list
p = subprocess.Popen(update_cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
update_out = p.communicate()[0]
# Process the output. Typically, a lot of files do not have
# a copyright notice :-(. The update-copyright script prints
# a well defined warning when it did not find the copyright notice.
# For each of those, do a sanity check and see if they may in fact
# have one. For the files that are found not to have one, we filter
# the line out from the output, since there is nothing more to do,
# short of looking at each file and seeing which notice is appropriate.
# Too much work! (~4,000 files listed as of 2012-01-03).
update_out = update_out.splitlines()
warning_string = ': warning: copyright statement not found'
warning_len = len(warning_string)
for line in update_out:
if line.endswith('\n'):
line = line[:-1]
if line.endswith(warning_string):
filename = line[:-warning_len]
if may_have_copyright_notice(filename):
print line
else:
# Unrecognized file format. !?!
print "*** " + line
def may_have_copyright_notice(filename):
    """Check whether the given file may have a copyright notice.
The filename is relative to the root directory.
This function assumes that the current working directory is that root
directory.
    The algorithm is fairly crude, meaning that it might return
some false positives. I do not think it will return any false
negatives... We might improve this function to handle more
complex cases later...
"""
# For now, it may have a copyright notice if we find the word
# "Copyright" at the (reasonable) start of the given file, say
# 50 lines...
MAX_LINES = 50
fd = open(filename)
lineno = 1
for line in fd:
if 'Copyright' in line:
return True
lineno += 1
        if lineno > MAX_LINES:
return False
return False
def main ():
"""The main subprogram."""
    if not os.path.isfile("gnulib/extra/update-copyright"):
        print "Error: This script must be called from the gdb directory."
        return
root_dir = os.path.dirname(os.getcwd())
os.chdir(root_dir)
update_list = get_update_list()
update_files (update_list)
# Remind the user that some files need to be updated by HAND...
if BY_HAND:
print
print "\033[31mREMINDER: The following files must be updated by hand." \
"\033[0m"
for filename in BY_HAND:
print " ", filename
############################################################################
#
# Some constants, placed at the end because they take up a lot of room.
# The actual value of these constants is not significant to the understanding
# of the script.
#
############################################################################
# Files which should not be modified, either because they are
# generated, non-FSF, or otherwise special (e.g. license text,
# or test cases which must be sensitive to line numbering).
#
# Filenames are relative to the root directory.
EXCLUDE_LIST = (
'gdb/gdbarch.c', 'gdb/gdbarch.h',
'gdb/gnulib'
)
# Files which should not be modified, either because they are
# generated, non-FSF, or otherwise special (e.g. license text,
# or test cases which must be sensitive to line numbering).
#
# Matches any file or directory name anywhere. Use with caution.
# This is mostly for files that can be found in multiple directories.
# Eg: We want all files named COPYING to be left untouched.
EXCLUDE_ALL_LIST = (
"COPYING", "COPYING.LIB", "CVS", "configure", "copying.c",
"fdl.texi", "gpl.texi", "aclocal.m4",
)
# The list of files to update by hand.
BY_HAND = (
# These files are sensitive to line numbering.
"gdb/testsuite/gdb.base/step-line.inp",
"gdb/testsuite/gdb.base/step-line.c",
)
# The list of files which have a copyright that is not held by the FSF.
# Filenames are relative to the root directory.
NOT_FSF_LIST = (
"gdb/exc_request.defs",
"gdb/osf-share",
"gdb/gdbtk",
"gdb/testsuite/gdb.gdbtk/",
"sim/arm/armemu.h", "sim/arm/armos.c", "sim/arm/gdbhost.c",
"sim/arm/dbg_hif.h", "sim/arm/dbg_conf.h", "sim/arm/communicate.h",
"sim/arm/armos.h", "sim/arm/armcopro.c", "sim/arm/armemu.c",
"sim/arm/kid.c", "sim/arm/thumbemu.c", "sim/arm/armdefs.h",
"sim/arm/armopts.h", "sim/arm/dbg_cp.h", "sim/arm/dbg_rdi.h",
"sim/arm/parent.c", "sim/arm/armsupp.c", "sim/arm/armrdi.c",
"sim/arm/bag.c", "sim/arm/armvirt.c", "sim/arm/main.c", "sim/arm/bag.h",
"sim/arm/communicate.c", "sim/arm/gdbhost.h", "sim/arm/armfpe.h",
"sim/arm/arminit.c",
"sim/common/cgen-fpu.c", "sim/common/cgen-fpu.h", "sim/common/cgen-fpu.h",
"sim/common/cgen-accfp.c", "sim/common/sim-fpu.c",
"sim/erc32/sis.h", "sim/erc32/erc32.c", "sim/erc32/func.c",
"sim/erc32/float.c", "sim/erc32/interf.c", "sim/erc32/sis.c",
"sim/erc32/exec.c",
"sim/mips/m16run.c", "sim/mips/sim-main.c",
"sim/mn10300/sim-main.h",
"sim/moxie/moxie-gdb.dts",
# Not a single file in sim/ppc/ appears to be copyright FSF :-(.
"sim/ppc/filter.h", "sim/ppc/gen-support.h", "sim/ppc/ld-insn.h",
"sim/ppc/hw_sem.c", "sim/ppc/hw_disk.c", "sim/ppc/idecode_branch.h",
"sim/ppc/sim-endian.h", "sim/ppc/table.c", "sim/ppc/hw_core.c",
"sim/ppc/gen-support.c", "sim/ppc/gen-semantics.h", "sim/ppc/cpu.h",
"sim/ppc/sim_callbacks.h", "sim/ppc/RUN", "sim/ppc/Makefile.in",
"sim/ppc/emul_chirp.c", "sim/ppc/hw_nvram.c", "sim/ppc/dc-test.01",
"sim/ppc/hw_phb.c", "sim/ppc/hw_eeprom.c", "sim/ppc/bits.h",
"sim/ppc/hw_vm.c", "sim/ppc/cap.h", "sim/ppc/os_emul.h",
"sim/ppc/options.h", "sim/ppc/gen-idecode.c", "sim/ppc/filter.c",
"sim/ppc/corefile-n.h", "sim/ppc/std-config.h", "sim/ppc/ld-decode.h",
"sim/ppc/filter_filename.h", "sim/ppc/hw_shm.c",
"sim/ppc/pk_disklabel.c", "sim/ppc/dc-simple", "sim/ppc/misc.h",
"sim/ppc/device_table.h", "sim/ppc/ld-insn.c", "sim/ppc/inline.c",
"sim/ppc/emul_bugapi.h", "sim/ppc/hw_cpu.h", "sim/ppc/debug.h",
"sim/ppc/hw_ide.c", "sim/ppc/debug.c", "sim/ppc/gen-itable.h",
"sim/ppc/interrupts.c", "sim/ppc/hw_glue.c", "sim/ppc/emul_unix.c",
"sim/ppc/sim_calls.c", "sim/ppc/dc-complex", "sim/ppc/ld-cache.c",
"sim/ppc/registers.h", "sim/ppc/dc-test.02", "sim/ppc/options.c",
"sim/ppc/igen.h", "sim/ppc/registers.c", "sim/ppc/device.h",
"sim/ppc/emul_chirp.h", "sim/ppc/hw_register.c", "sim/ppc/hw_init.c",
"sim/ppc/sim-endian-n.h", "sim/ppc/filter_filename.c",
"sim/ppc/bits.c", "sim/ppc/idecode_fields.h", "sim/ppc/hw_memory.c",
"sim/ppc/misc.c", "sim/ppc/double.c", "sim/ppc/psim.h",
"sim/ppc/hw_trace.c", "sim/ppc/emul_netbsd.h", "sim/ppc/psim.c",
"sim/ppc/ppc-instructions", "sim/ppc/tree.h", "sim/ppc/README",
"sim/ppc/gen-icache.h", "sim/ppc/gen-model.h", "sim/ppc/ld-cache.h",
"sim/ppc/mon.c", "sim/ppc/corefile.h", "sim/ppc/vm.c",
"sim/ppc/INSTALL", "sim/ppc/gen-model.c", "sim/ppc/hw_cpu.c",
"sim/ppc/corefile.c", "sim/ppc/hw_opic.c", "sim/ppc/gen-icache.c",
"sim/ppc/events.h", "sim/ppc/os_emul.c", "sim/ppc/emul_generic.c",
"sim/ppc/main.c", "sim/ppc/hw_com.c", "sim/ppc/gen-semantics.c",
"sim/ppc/emul_bugapi.c", "sim/ppc/device.c", "sim/ppc/emul_generic.h",
"sim/ppc/tree.c", "sim/ppc/mon.h", "sim/ppc/interrupts.h",
"sim/ppc/cap.c", "sim/ppc/cpu.c", "sim/ppc/hw_phb.h",
"sim/ppc/device_table.c", "sim/ppc/lf.c", "sim/ppc/lf.c",
"sim/ppc/dc-stupid", "sim/ppc/hw_pal.c", "sim/ppc/ppc-spr-table",
"sim/ppc/emul_unix.h", "sim/ppc/words.h", "sim/ppc/basics.h",
"sim/ppc/hw_htab.c", "sim/ppc/lf.h", "sim/ppc/ld-decode.c",
"sim/ppc/sim-endian.c", "sim/ppc/gen-itable.c",
"sim/ppc/idecode_expression.h", "sim/ppc/table.h", "sim/ppc/dgen.c",
"sim/ppc/events.c", "sim/ppc/gen-idecode.h", "sim/ppc/emul_netbsd.c",
"sim/ppc/igen.c", "sim/ppc/vm_n.h", "sim/ppc/vm.h",
"sim/ppc/hw_iobus.c", "sim/ppc/inline.h",
"sim/testsuite/sim/bfin/s21.s", "sim/testsuite/sim/mips/mips32-dsp2.s",
)
if __name__ == "__main__":
main()
|
adapteva/epiphany-gdb
|
gdb/copyright.py
|
Python
|
gpl-2.0
| 11,525 | 0.000954 |
"""
@file
@brief This file loads and saves settings (as JSON)
@author Ted Lazaros <tedlaz@gmail.com>
@section LICENSE
"""
try:
import json
except ImportError:
import simplejson as json
import copy
from classes.logger import log
class JsonDataStore:
"""
This class which allows getting/setting of key/value settings,
and loading and saving to json files.
Internal storage of a dictionary.
Uses json or simplejson packages to serialize and deserialize
from json to dictionary.
Keys are assumed to be strings, but subclasses which override get/set
methods may use different key types.
The write_to_file and read_from_file methods are key type agnostic.
"""
# Create default data storage and default data type for logging messages
def __init__(self):
# Private data store, accessible through the get and set methods
self._data = {}
self.data_type = "json data"
def get(self, key):
""" Get copied value of a given key in data store """
key = key.lower()
# Determine if the root element is a dictionary or list
# (i.e. project data or settings data)
if type(self._data) == list:
            # Settings data, search for matching "setting" attribute (i.e. list)
# Load user setting's values (for easy merging)
user_values = {}
for item in self._data:
if "setting" in item and "value" in item:
user_values[item["setting"].lower()] = item["value"]
# Settings data
return copy.deepcopy(user_values.get(key, None))
else:
# Project data (i.e dictionary)
return copy.deepcopy(self._data.get(key, None))
def set(self, key, value):
""" Store value in key """
key = key.lower()
# Determine if the root element is a dictionary or list
# (i.e. project data or settings data)
if type(self._data) == list:
            # Settings data, search for matching "setting" attribute (i.e. list)
# Load user setting's values (for easy merging)
user_values = {}
for item in self._data:
if "setting" in item and "value" in item:
user_values[item["setting"].lower()] = item
# Settings data
user_values[key]["value"] = value
else:
# Project data (i.e dictionary)
self._data[key] = value
def merge_settings(self, default, user):
"""
Merge settings files, removing invalid settings based on default
settings. This is only called by some sub-classes that use
string keys
"""
# Determine if the root element is a dictionary or list
if type(default) == list:
# Load user setting's values (for easy merging)
user_values = {}
for item in user:
if "setting" in item and "value" in item:
user_values[item["setting"]] = item["value"]
# Update default values to match user values
for item in default:
user_value = user_values.get(item["setting"], None)
if user_value:
item["value"] = user_value
# Return merged list
return default
else:
# Root object is a dictionary (i.e. project data)
for key in default:
if key not in user:
# Add missing key to user dictionary
user[key] = default[key]
# Return merged dictionary
return user
def read_from_file(self, file_path):
""" Load JSON settings from a file """
# log.debug("loading {}".format(file_path))
try:
with open(file_path.encode('UTF-8'), 'r') as f:
contents = f.read()
if contents:
# log.debug("loaded", contents)
return json.loads(contents)
except Exception as ex:
msg = ("Couldn't load {} file: {}".format(self.data_type, ex))
log.error(msg)
raise Exception(msg)
msg = ("Couldn't load {} file, no data.".format(self.data_type))
log.warning(msg)
raise Exception(msg)
def write_to_file(self, file_path, data):
""" Save JSON settings to a file """
# log.debug("saving", file_path, data)
try:
with open(file_path.encode('UTF-8'), 'w') as f:
f.write(json.dumps(data))
except Exception as ex:
msg = ("Couldn't save {} file:\n{}\n{}".format(
self.data_type, file_path, ex))
log.error(msg)
raise Exception(msg)
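# Minimal usage sketch (hypothetical, not part of the original module; assumes
# the classes.logger import above resolves and "settings.json" is writable).
# It exercises the dictionary-rooted path of get/set plus the file round trip.
if __name__ == '__main__':
    store = JsonDataStore()
    store.set('Theme', 'dark')  # keys are lower-cased by set()
    assert store.get('theme') == 'dark'  # get() lower-cases and returns a deep copy
    store.write_to_file('settings.json', {'theme': store.get('theme')})
    print(store.read_from_file('settings.json'))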
|
tedlaz/pyted
|
tests/pysqlqt/src/classes/json_data.py
|
Python
|
gpl-3.0
| 4,956 | 0 |
[m,n] = map(int,input().split())
def find(v, cs):
    for c in cs:
        if v in c:
            return (True, c)
    return (False, set([v]))
cities = []
for _ in range(n):
[a,b] = map(int,input().split())
(ra,fa) = find(a,cities)
(rb,fb) = find(b,cities)
mg = fa | fb
if ra:
cities.remove(fa)
else:
m = m - 1
if rb:
if fa != fb:
cities.remove(fb)
else:
m = m - 1
cities.append(mg)
print(abs(m-len(cities)))
|
a143753/AOJ
|
1576.py
|
Python
|
apache-2.0
| 497 | 0.026157 |
#!/usr/bin/env python
import glob
import os
import platform
import shutil
import stat
import sys
from distutils import log
from distutils import dir_util
from distutils.command.build_clib import build_clib
from distutils.command.sdist import sdist
from distutils.core import setup
from distutils.sysconfig import get_python_lib
# prebuilt libraries for Windows - for sdist
PATH_LIB64 = "prebuilt/win64/capstone.dll"
PATH_LIB32 = "prebuilt/win32/capstone.dll"
# package name can be 'capstone' or 'capstone-windows'
PKG_NAME = 'capstone'
if os.path.exists(PATH_LIB64) and os.path.exists(PATH_LIB32):
PKG_NAME = 'capstone-windows'
VERSION = '3.0.2'
SYSTEM = sys.platform
SITE_PACKAGES = os.path.join(get_python_lib(), "capstone")
SETUP_DATA_FILES = []
# adapted from commit e504b81 of Nguyen Tan Cong
# Reference: https://docs.python.org/2/library/platform.html#cross-platform
is_64bits = sys.maxsize > 2**32
def copy_sources():
"""Copy the C sources into the source directory.
This rearranges the source files under the python distribution
directory.
"""
src = []
try:
dir_util.remove_tree("src/")
except (IOError, OSError):
pass
dir_util.copy_tree("../../arch", "src/arch/")
dir_util.copy_tree("../../include", "src/include/")
dir_util.copy_tree("../../msvc/headers", "src/msvc/headers")
src.extend(glob.glob("../../*.[ch]"))
src.extend(glob.glob("../../*.mk"))
src.extend(glob.glob("../../Makefile"))
src.extend(glob.glob("../../LICENSE*"))
src.extend(glob.glob("../../README"))
src.extend(glob.glob("../../*.TXT"))
src.extend(glob.glob("../../RELEASE_NOTES"))
src.extend(glob.glob("../../make.sh"))
src.extend(glob.glob("../../CMakeLists.txt"))
for filename in src:
outpath = os.path.join("./src/", os.path.basename(filename))
log.info("%s -> %s" % (filename, outpath))
shutil.copy(filename, outpath)
class custom_sdist(sdist):
"""Reshuffle files for distribution."""
def run(self):
        # if prebuilt libraries exist, do not copy the source
if os.path.exists(PATH_LIB64) and os.path.exists(PATH_LIB32):
return sdist.run(self)
copy_sources()
return sdist.run(self)
class custom_build_clib(build_clib):
"""Customized build_clib command."""
def run(self):
log.info('running custom_build_clib')
build_clib.run(self)
def finalize_options(self):
# We want build-clib to default to build-lib as defined by the "build"
# command. This is so the compiled library will be put in the right
# place along side the python code.
self.set_undefined_options('build',
('build_lib', 'build_clib'),
('build_temp', 'build_temp'),
('compiler', 'compiler'),
('debug', 'debug'),
('force', 'force'))
build_clib.finalize_options(self)
def build_libraries(self, libraries):
if SYSTEM in ("win32", "cygwin"):
# if Windows prebuilt library is available, then include it
if is_64bits and os.path.exists(PATH_LIB64):
SETUP_DATA_FILES.append(PATH_LIB64)
return
elif os.path.exists(PATH_LIB32):
SETUP_DATA_FILES.append(PATH_LIB32)
return
        # build the library from source if src/ exists
if not os.path.exists('src'):
return
try:
for (lib_name, build_info) in libraries:
log.info("building '%s' library", lib_name)
os.chdir("src")
# platform description refers at https://docs.python.org/2/library/sys.html#sys.platform
if SYSTEM == "win32":
# Windows build: this process requires few things:
# - CMake + MSVC installed
# - Run this command in an environment setup for MSVC
os.mkdir("build")
os.chdir("build")
# Do not build tests & static library
os.system('cmake -DCMAKE_BUILD_TYPE=RELEASE -DCAPSTONE_BUILD_TESTS=0 -DCAPSTONE_BUILD_STATIC=0 -G "NMake Makefiles" ..')
os.system("nmake")
os.chdir("..")
SETUP_DATA_FILES.append("src/build/capstone.dll")
elif SYSTEM == "cygwin":
os.chmod("make.sh", stat.S_IREAD|stat.S_IEXEC)
if is_64bits:
os.system("CAPSTONE_BUILD_CORE_ONLY=yes ./make.sh cygwin-mingw64")
else:
os.system("CAPSTONE_BUILD_CORE_ONLY=yes ./make.sh cygwin-mingw32")
SETUP_DATA_FILES.append("src/capstone.dll")
else: # Unix
os.chmod("make.sh", stat.S_IREAD|stat.S_IEXEC)
os.system("CAPSTONE_BUILD_CORE_ONLY=yes ./make.sh")
if SYSTEM == "darwin":
SETUP_DATA_FILES.append("src/libcapstone.dylib")
else: # Non-OSX
SETUP_DATA_FILES.append("src/libcapstone.so")
os.chdir("..")
except:
pass
def dummy_src():
return []
setup(
provides=['capstone'],
packages=['capstone'],
name=PKG_NAME,
version=VERSION,
author='Nguyen Anh Quynh',
author_email='aquynh@gmail.com',
description='Capstone disassembly engine',
url='http://www.capstone-engine.org',
classifiers=[
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
requires=['ctypes'],
cmdclass=dict(
build_clib=custom_build_clib,
sdist=custom_sdist,
),
libraries=[(
'capstone', dict(
package='capstone',
sources=dummy_src()
),
)],
data_files=[(SITE_PACKAGES, SETUP_DATA_FILES)],
)
|
zneak/capstone
|
bindings/python/setup.py
|
Python
|
bsd-3-clause
| 6,134 | 0.001304 |
#!/usr/bin/env python3
#
# Create frozen modules structure for MicroPython.
#
# Usage:
#
# Have a directory with modules to be frozen (only modules, not packages
# supported so far):
#
# frozen/foo.py
# frozen/bar.py
#
# Run script, passing path to the directory above:
#
# ./make-frozen.py frozen > frozen.c
#
# Include frozen.c in your build, having defined MICROPY_MODULE_FROZEN in
# config.
#
import sys
import os
def module_name(f):
return f[:-len(".py")]
modules = []
for dirpath, dirnames, filenames in os.walk(sys.argv[1]):
for f in filenames:
st = os.stat(dirpath + "/" + f)
modules.append((f, st))
print("#include <stdint.h>")
print("const uint16_t mp_frozen_sizes[] = {")
for f, st in modules:
print("%d," % st.st_size)
print("0};")
print("const char mp_frozen_content[] = {")
for f, st in modules:
m = module_name(f)
print('"%s\\0"' % m)
data = open(sys.argv[1] + "/" + f).read()
data = repr(data)[1:-1]
data = data.replace('"', '\\"')
print('"%s"' % data)
print("};")
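# Illustrative output shape (assumption: a single frozen module foo.py whose
# content is "x=1\n", 4 bytes; condensed here — the script emits one entry
# per line):
#
#   const uint16_t mp_frozen_sizes[] = { 4, 0};
#   const char mp_frozen_content[] = { "foo\0" "x=1\n" };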
|
jimkmc/micropython
|
tools/make-frozen.py
|
Python
|
mit
| 1,041 | 0.000961 |
import platform
from base64 import b64encode
import re
from ingenico.connect.sdk.data_object import DataObject
from ingenico.connect.sdk.defaultimpl.default_marshaller import \
DefaultMarshaller
from ingenico.connect.sdk.domain.metadata.shopping_cart_extension import ShoppingCartExtension
from request_header import RequestHeader
class IterProperty(object):
def __init__(self, func):
self.func = func
def __get__(self, instance, owner):
return self.func(owner)
class MetaDataProvider:
"""
Provides meta info about the server.
"""
__SDK_VERSION = "3.30.0"
__SERVER_META_INFO_HEADER = "X-GCS-ServerMetaInfo"
__prohibited_headers = [__SERVER_META_INFO_HEADER, "X-GCS-Idempotence-Key",
"Date", "Content-Type", "Authorization"]
__PROHIBITED_HEADERS = tuple(sorted(__prohibited_headers, key=str.lower))
__meta_data_headers = None
class ServerMetaInfo(DataObject):
platform_identifier = None
sdk_identifier = None
sdk_creator = None
integrator = None
shopping_cart_extension = None
def to_dictionary(self):
dictionary = super(MetaDataProvider.ServerMetaInfo, self).to_dictionary()
if self.platform_identifier is not None:
dictionary['platformIdentifier'] = self.platform_identifier
if self.sdk_identifier is not None:
dictionary['sdkIdentifier'] = self.sdk_identifier
if self.sdk_creator is not None:
dictionary['sdkCreator'] = self.sdk_creator
if self.integrator is not None:
dictionary['integrator'] = self.integrator
if self.shopping_cart_extension is not None:
dictionary['shoppingCartExtension'] = self.shopping_cart_extension.to_dictionary()
return dictionary
def from_dictionary(self, dictionary):
super(MetaDataProvider.ServerMetaInfo, self).from_dictionary(dictionary)
if 'platformIdentifier' in dictionary:
self.platform_identifier = dictionary['platformIdentifier']
if 'sdkIdentifier' in dictionary:
self.sdk_identifier = dictionary['sdkIdentifier']
if 'sdkCreator' in dictionary:
self.sdk_creator = dictionary['sdkCreator']
if 'integrator' in dictionary:
self.integrator = dictionary['integrator']
if 'shoppingCartExtension' in dictionary:
if not isinstance(dictionary['shoppingCartExtension'], dict):
raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['shoppingCartExtension']))
self.shopping_cart_extension = ShoppingCartExtension.create_from_dictionary(dictionary['shoppingCartExtension'])
return self
def __init__(self, integrator, shopping_cart_extension=None,
additional_request_headers=()):
MetaDataProvider.__validate_additional_request_headers(
additional_request_headers)
for i in additional_request_headers:
i.name = re.sub(r'\r?\n(?:(?![\r\n])\s)*', " ", i.name)
i.name = i.name.strip()
i.value = re.sub(r'\r?\n(?:(?![\r\n])\s)*', " ", i.value)
i.value = i.value.strip()
server_meta_info = self.ServerMetaInfo()
server_meta_info.platform_identifier = self._platform_identifier
server_meta_info.sdk_identifier = self._sdk_identifier
server_meta_info.sdk_creator = "Ingenico"
server_meta_info.integrator = integrator
server_meta_info.shopping_cart_extension = shopping_cart_extension
server_meta_info_string = DefaultMarshaller.INSTANCE().marshal(
server_meta_info)
server_meta_info_header = RequestHeader(
self.__SERVER_META_INFO_HEADER, b64encode(
server_meta_info_string.encode('utf-8')))
if not additional_request_headers:
self.__meta_data_headers = tuple([server_meta_info_header])
else:
request_headers = [server_meta_info_header]
request_headers.extend(additional_request_headers)
self.__meta_data_headers = tuple(request_headers)
@staticmethod
def __validate_additional_request_headers(additional_request_headers):
if additional_request_headers is not None:
for additional_request_header in additional_request_headers:
MetaDataProvider.__validate_additional_request_header(
additional_request_header)
@staticmethod
def __validate_additional_request_header(additional_request_header):
try:
if additional_request_header.name in MetaDataProvider.__PROHIBITED_HEADERS:
raise ValueError("request header not allowed: ",
str(additional_request_header))
except AttributeError:
raise AttributeError("Each request header should have an attribute 'name' and an attribute 'value'")
@IterProperty
def prohibited_headers(self):
return self.__PROHIBITED_HEADERS
@property
def meta_data_headers(self):
"""
:return: The server related headers containing the META data to be
associated with the request (if any). This will always contain at least
an automatically generated header X-GCS-ServerMetaInfo.
"""
return self.__meta_data_headers
@property
def _platform_identifier(self):
return platform.system() + " " + platform.release() + "/" + \
platform.version() + " Python/" + platform.python_version() + \
" (" + platform.python_implementation() + "; " + \
str(platform.python_compiler()) + ")"
@property
def _sdk_identifier(self):
return "Python2ServerSDK/v" + self.__SDK_VERSION
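# Construction sketch (hypothetical; "Integrator.Name" and "X-Custom" are
# placeholder values, RequestHeader is the class imported above):
#
#     provider = MetaDataProvider(
#         "Integrator.Name",
#         additional_request_headers=(RequestHeader("X-Custom", "value"),))
#     for header in provider.meta_data_headers:
#         print header.name
#
# Any header named in MetaDataProvider.prohibited_headers (e.g. "Authorization")
# is rejected with a ValueError during construction.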
|
Ingenico-ePayments/connect-sdk-python2
|
ingenico/connect/sdk/meta_data_provider.py
|
Python
|
mit
| 5,912 | 0.001522 |
##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2014 Torsten Duwe <duwe@suse.de>
## Copyright (C) 2014 Sebastien Bourdelin <sebastien.bourdelin@savoirfairelinux.com>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
##
import sigrokdecode as srd
from collections import deque
class SamplerateError(Exception):
pass
def normalize_time(t):
if t >= 1.0:
return '%.3f s (%.3f Hz)' % (t, (1/t))
elif t >= 0.001:
if 1/t/1000 < 1:
return '%.3f ms (%.3f Hz)' % (t * 1000.0, (1/t))
else:
return '%.3f ms (%.3f kHz)' % (t * 1000.0, (1/t)/1000)
elif t >= 0.000001:
if 1/t/1000/1000 < 1:
return '%.3f μs (%.3f kHz)' % (t * 1000.0 * 1000.0, (1/t)/1000)
else:
return '%.3f μs (%.3f MHz)' % (t * 1000.0 * 1000.0, (1/t)/1000/1000)
elif t >= 0.000000001:
        if 1/t/1000/1000/1000 < 1:
return '%.3f ns (%.3f MHz)' % (t * 1000.0 * 1000.0 * 1000.0, (1/t)/1000/1000)
else:
return '%.3f ns (%.3f GHz)' % (t * 1000.0 * 1000.0 * 1000.0, (1/t)/1000/1000/1000)
else:
return '%f' % t
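# Sample conversions (illustrative, derived from the branches above):
#   normalize_time(2.0)    -> '2.000 s (0.500 Hz)'
#   normalize_time(0.002)  -> '2.000 ms (500.000 Hz)'
#   normalize_time(0.0005) -> '500.000 μs (2.000 kHz)'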
class Decoder(srd.Decoder):
api_version = 3
id = 'timing'
name = 'Timing'
longname = 'Timing calculation with frequency and averaging'
desc = 'Calculate time between edges.'
license = 'gplv2+'
inputs = ['logic']
outputs = ['timing']
channels = (
{'id': 'data', 'name': 'Data', 'desc': 'Data line'},
)
annotations = (
('time', 'Time'),
('average', 'Average'),
)
annotation_rows = (
('time', 'Time', (0,)),
('average', 'Average', (1,)),
)
options = (
{ 'id': 'avg_period', 'desc': 'Averaging period', 'default': 100 },
)
def __init__(self):
self.samplerate = None
self.oldpin = None
self.last_samplenum = None
self.last_n = deque()
self.chunks = 0
def metadata(self, key, value):
if key == srd.SRD_CONF_SAMPLERATE:
self.samplerate = value
def start(self):
self.out_ann = self.register(srd.OUTPUT_ANN)
self.initial_pins = [0]
def decode(self):
if not self.samplerate:
raise SamplerateError('Cannot decode without samplerate.')
while True:
pin = self.wait({0: 'e'})
if self.oldpin is None:
self.oldpin = pin
self.last_samplenum = self.samplenum
continue
if self.oldpin != pin:
samples = self.samplenum - self.last_samplenum
t = samples / self.samplerate
self.chunks += 1
# Don't insert the first chunk into the averaging as it is
                # probably not complete.
if self.last_samplenum is None or self.chunks < 2:
# Report the timing normalized.
self.put(self.last_samplenum, self.samplenum, self.out_ann,
[0, [normalize_time(t)]])
else:
if t > 0:
self.last_n.append(t)
if len(self.last_n) > self.options['avg_period']:
self.last_n.popleft()
# Report the timing normalized.
self.put(self.last_samplenum, self.samplenum, self.out_ann,
[0, [normalize_time(t)]])
self.put(self.last_samplenum, self.samplenum, self.out_ann,
[1, [normalize_time(sum(self.last_n) / len(self.last_n))]])
# Store data for next round.
self.last_samplenum = self.samplenum
self.oldpin = pin
|
zeldin/libsigrokdecode
|
decoders/timing/pd.py
|
Python
|
gpl-3.0
| 4,356 | 0.005282 |
import sys, os, fabric
class PiServicePolicies:
@staticmethod
def is_local():
return (not fabric.api.env.hosts or fabric.api.env.hosts[0] in ['localhost', '127.0.0.1', '::1'])
@staticmethod
def is_pi():
return os.path.isdir('/home/pi')
@staticmethod
def check_local_or_exit():
if not PiServicePolicies.is_local():
print "...only callable on localhost!!!"
sys.exit(-1)
@staticmethod
def check_remote_or_exit():
if PiServicePolicies.is_local():
print "...only callable on remote host!!!"
sys.exit(-1)
def check_installed_or_exit(self):
if not PiServicePolicies.installed(self):
      print "...first you have to install this service! fab pi %s:install" % self.name
sys.exit(-1)
def installed(self):
ret = self.file_exists('__init__.py')
if not ret: print self.name+' not installed'
return ret
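# Usage sketch (hypothetical; assumes a fabric env and a service subclass that
# defines self.name and self.file_exists, as the methods above expect):
#
#     class MyService(PiServicePolicies):
#         name = 'webui'
#         def file_exists(self, path): ...
#
#     MyService().check_installed_or_exit()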
|
creative-workflow/pi-setup
|
lib/piservices/policies.py
|
Python
|
mit
| 867 | 0.023068 |
"""autogenerated by genpy from geographic_msgs/GetGeographicMapRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import geographic_msgs.msg
class GetGeographicMapRequest(genpy.Message):
_md5sum = "505cc89008cb1745810d2ee4ea646d6e"
_type = "geographic_msgs/GetGeographicMapRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """
string url
BoundingBox bounds
================================================================================
MSG: geographic_msgs/BoundingBox
# Geographic map bounding box.
#
# The two GeoPoints denote diagonally opposite corners of the box.
#
# If min_pt.latitude is NaN, the bounding box is "global", matching
# any valid latitude, longitude and altitude.
#
# If min_pt.altitude is NaN, the bounding box is two-dimensional and
# matches any altitude within the specified latitude and longitude
# range.
GeoPoint min_pt # lowest and most Southwestern corner
GeoPoint max_pt # highest and most Northeastern corner
================================================================================
MSG: geographic_msgs/GeoPoint
# Geographic point, using the WGS 84 reference ellipsoid.
# Latitude [degrees]. Positive is north of equator; negative is south
# (-90 <= latitude <= +90).
float64 latitude
# Longitude [degrees]. Positive is east of prime meridian; negative is
# west (-180 <= longitude <= +180). At the poles, latitude is -90 or
# +90, and longitude is irrelevant, but must be in range.
float64 longitude
# Altitude [m]. Positive is above the WGS 84 ellipsoid (NaN if unspecified).
float64 altitude
"""
__slots__ = ['url','bounds']
_slot_types = ['string','geographic_msgs/BoundingBox']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
url,bounds
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetGeographicMapRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.url is None:
self.url = ''
if self.bounds is None:
self.bounds = geographic_msgs.msg.BoundingBox()
else:
self.url = ''
self.bounds = geographic_msgs.msg.BoundingBox()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.url
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_6d.pack(_x.bounds.min_pt.latitude, _x.bounds.min_pt.longitude, _x.bounds.min_pt.altitude, _x.bounds.max_pt.latitude, _x.bounds.max_pt.longitude, _x.bounds.max_pt.altitude))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.bounds is None:
self.bounds = geographic_msgs.msg.BoundingBox()
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.url = str[start:end].decode('utf-8')
else:
self.url = str[start:end]
_x = self
start = end
end += 48
(_x.bounds.min_pt.latitude, _x.bounds.min_pt.longitude, _x.bounds.min_pt.altitude, _x.bounds.max_pt.latitude, _x.bounds.max_pt.longitude, _x.bounds.max_pt.altitude,) = _struct_6d.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.url
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_6d.pack(_x.bounds.min_pt.latitude, _x.bounds.min_pt.longitude, _x.bounds.min_pt.altitude, _x.bounds.max_pt.latitude, _x.bounds.max_pt.longitude, _x.bounds.max_pt.altitude))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.bounds is None:
self.bounds = geographic_msgs.msg.BoundingBox()
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.url = str[start:end].decode('utf-8')
else:
self.url = str[start:end]
_x = self
start = end
end += 48
(_x.bounds.min_pt.latitude, _x.bounds.min_pt.longitude, _x.bounds.min_pt.altitude, _x.bounds.max_pt.latitude, _x.bounds.max_pt.longitude, _x.bounds.max_pt.altitude,) = _struct_6d.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_6d = struct.Struct("<6d")
"""autogenerated by genpy from geographic_msgs/GetGeographicMapResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import geographic_msgs.msg
import std_msgs.msg
class GetGeographicMapResponse(genpy.Message):
_md5sum = "0910332806c65953a4f4252eb780811a"
_type = "geographic_msgs/GetGeographicMapResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """
bool success
string status
GeographicMap map
================================================================================
MSG: geographic_msgs/GeographicMap
# Geographic map for a specified region.
Header header # stamp specifies time
# frame_id (normally /map)
UniqueID id # identifier for this map
BoundingBox bounds # 2D bounding box containing map
WayPoint[] points # way-points
MapFeature[] features # map features
KeyValue[] props # map properties
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: geographic_msgs/UniqueID
# A universally unique identifier (UUID).
#
# http://en.wikipedia.org/wiki/Universally_unique_identifier
# http://tools.ietf.org/html/rfc4122.html
uint8[16] uuid
================================================================================
MSG: geographic_msgs/BoundingBox
# Geographic map bounding box.
#
# The two GeoPoints denote diagonally opposite corners of the box.
#
# If min_pt.latitude is NaN, the bounding box is "global", matching
# any valid latitude, longitude and altitude.
#
# If min_pt.altitude is NaN, the bounding box is two-dimensional and
# matches any altitude within the specified latitude and longitude
# range.
GeoPoint min_pt # lowest and most Southwestern corner
GeoPoint max_pt # highest and most Northeastern corner
================================================================================
MSG: geographic_msgs/GeoPoint
# Geographic point, using the WGS 84 reference ellipsoid.
# Latitude [degrees]. Positive is north of equator; negative is south
# (-90 <= latitude <= +90).
float64 latitude
# Longitude [degrees]. Positive is east of prime meridian; negative is
# west (-180 <= longitude <= +180). At the poles, latitude is -90 or
# +90, and longitude is irrelevant, but must be in range.
float64 longitude
# Altitude [m]. Positive is above the WGS 84 ellipsoid (NaN if unspecified).
float64 altitude
================================================================================
MSG: geographic_msgs/WayPoint
# Way-point element for a geographic map.
UniqueID id # Unique way-point identifier
GeoPoint position # Position relative to WGS 84 ellipsoid
KeyValue[] props # Key/value properties for this point
================================================================================
MSG: geographic_msgs/KeyValue
# Geographic map tag (key, value) pair
#
# This is equivalent to diagnostic_msgs/KeyValue, repeated here to
# avoid introducing a trivial stack dependency.
string key # tag label
string value # corresponding value
================================================================================
MSG: geographic_msgs/MapFeature
# Geographic map feature.
#
# A list of WayPoint IDs for features like streets, highways, hiking
# trails, the outlines of buildings and parking lots in sequential
# order.
#
# Feature lists may also contain other feature lists as members.
UniqueID id # Unique feature identifier
UniqueID[] components # Sequence of feature components
KeyValue[] props # Key/value properties for this feature
"""
__slots__ = ['success','status','map']
_slot_types = ['bool','string','geographic_msgs/GeographicMap']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
success,status,map
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetGeographicMapResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.success is None:
self.success = False
if self.status is None:
self.status = ''
if self.map is None:
self.map = geographic_msgs.msg.GeographicMap()
else:
self.success = False
self.status = ''
self.map = geographic_msgs.msg.GeographicMap()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
buff.write(_struct_B.pack(self.success))
_x = self.status
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_3I.pack(_x.map.header.seq, _x.map.header.stamp.secs, _x.map.header.stamp.nsecs))
_x = self.map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.map.id.uuid
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(_struct_16B.pack(*_x))
else:
buff.write(_struct_16s.pack(_x))
_x = self
buff.write(_struct_6d.pack(_x.map.bounds.min_pt.latitude, _x.map.bounds.min_pt.longitude, _x.map.bounds.min_pt.altitude, _x.map.bounds.max_pt.latitude, _x.map.bounds.max_pt.longitude, _x.map.bounds.max_pt.altitude))
length = len(self.map.points)
buff.write(_struct_I.pack(length))
for val1 in self.map.points:
_v1 = val1.id
_x = _v1.uuid
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(_struct_16B.pack(*_x))
else:
buff.write(_struct_16s.pack(_x))
_v2 = val1.position
_x = _v2
buff.write(_struct_3d.pack(_x.latitude, _x.longitude, _x.altitude))
length = len(val1.props)
buff.write(_struct_I.pack(length))
for val2 in val1.props:
_x = val2.key
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val2.value
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.map.features)
buff.write(_struct_I.pack(length))
for val1 in self.map.features:
_v3 = val1.id
_x = _v3.uuid
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(_struct_16B.pack(*_x))
else:
buff.write(_struct_16s.pack(_x))
length = len(val1.components)
buff.write(_struct_I.pack(length))
for val2 in val1.components:
_x = val2.uuid
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(_struct_16B.pack(*_x))
else:
buff.write(_struct_16s.pack(_x))
length = len(val1.props)
buff.write(_struct_I.pack(length))
for val2 in val1.props:
_x = val2.key
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val2.value
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.map.props)
buff.write(_struct_I.pack(length))
for val1 in self.map.props:
_x = val1.key
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.value
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.map is None:
self.map = geographic_msgs.msg.GeographicMap()
end = 0
start = end
end += 1
(self.success,) = _struct_B.unpack(str[start:end])
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status = str[start:end].decode('utf-8')
else:
self.status = str[start:end]
_x = self
start = end
end += 12
(_x.map.header.seq, _x.map.header.stamp.secs, _x.map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.map.header.frame_id = str[start:end].decode('utf-8')
else:
self.map.header.frame_id = str[start:end]
start = end
end += 16
self.map.id.uuid = str[start:end]
_x = self
start = end
end += 48
(_x.map.bounds.min_pt.latitude, _x.map.bounds.min_pt.longitude, _x.map.bounds.min_pt.altitude, _x.map.bounds.max_pt.latitude, _x.map.bounds.max_pt.longitude, _x.map.bounds.max_pt.altitude,) = _struct_6d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.map.points = []
for i in range(0, length):
val1 = geographic_msgs.msg.WayPoint()
_v4 = val1.id
start = end
end += 16
_v4.uuid = str[start:end]
_v5 = val1.position
_x = _v5
start = end
end += 24
(_x.latitude, _x.longitude, _x.altitude,) = _struct_3d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.props = []
for i in range(0, length):
val2 = geographic_msgs.msg.KeyValue()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.key = str[start:end].decode('utf-8')
else:
val2.key = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.value = str[start:end].decode('utf-8')
else:
val2.value = str[start:end]
val1.props.append(val2)
self.map.points.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.map.features = []
for i in range(0, length):
val1 = geographic_msgs.msg.MapFeature()
_v6 = val1.id
start = end
end += 16
_v6.uuid = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.components = []
for i in range(0, length):
val2 = geographic_msgs.msg.UniqueID()
start = end
end += 16
val2.uuid = str[start:end]
val1.components.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.props = []
for i in range(0, length):
val2 = geographic_msgs.msg.KeyValue()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.key = str[start:end].decode('utf-8')
else:
val2.key = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.value = str[start:end].decode('utf-8')
else:
val2.value = str[start:end]
val1.props.append(val2)
self.map.features.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.map.props = []
for i in range(0, length):
val1 = geographic_msgs.msg.KeyValue()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.key = str[start:end].decode('utf-8')
else:
val1.key = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.value = str[start:end].decode('utf-8')
else:
val1.value = str[start:end]
self.map.props.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
buff.write(_struct_B.pack(self.success))
_x = self.status
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_3I.pack(_x.map.header.seq, _x.map.header.stamp.secs, _x.map.header.stamp.nsecs))
_x = self.map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.map.id.uuid
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(_struct_16B.pack(*_x))
else:
buff.write(_struct_16s.pack(_x))
_x = self
buff.write(_struct_6d.pack(_x.map.bounds.min_pt.latitude, _x.map.bounds.min_pt.longitude, _x.map.bounds.min_pt.altitude, _x.map.bounds.max_pt.latitude, _x.map.bounds.max_pt.longitude, _x.map.bounds.max_pt.altitude))
length = len(self.map.points)
buff.write(_struct_I.pack(length))
for val1 in self.map.points:
_v7 = val1.id
_x = _v7.uuid
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(_struct_16B.pack(*_x))
else:
buff.write(_struct_16s.pack(_x))
_v8 = val1.position
_x = _v8
buff.write(_struct_3d.pack(_x.latitude, _x.longitude, _x.altitude))
length = len(val1.props)
buff.write(_struct_I.pack(length))
for val2 in val1.props:
_x = val2.key
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val2.value
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.map.features)
buff.write(_struct_I.pack(length))
for val1 in self.map.features:
_v9 = val1.id
_x = _v9.uuid
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(_struct_16B.pack(*_x))
else:
buff.write(_struct_16s.pack(_x))
length = len(val1.components)
buff.write(_struct_I.pack(length))
for val2 in val1.components:
_x = val2.uuid
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(_struct_16B.pack(*_x))
else:
buff.write(_struct_16s.pack(_x))
length = len(val1.props)
buff.write(_struct_I.pack(length))
for val2 in val1.props:
_x = val2.key
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val2.value
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.map.props)
buff.write(_struct_I.pack(length))
for val1 in self.map.props:
_x = val1.key
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.value
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.map is None:
self.map = geographic_msgs.msg.GeographicMap()
end = 0
start = end
end += 1
(self.success,) = _struct_B.unpack(str[start:end])
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status = str[start:end].decode('utf-8')
else:
self.status = str[start:end]
_x = self
start = end
end += 12
(_x.map.header.seq, _x.map.header.stamp.secs, _x.map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.map.header.frame_id = str[start:end].decode('utf-8')
else:
self.map.header.frame_id = str[start:end]
start = end
end += 16
self.map.id.uuid = str[start:end]
_x = self
start = end
end += 48
(_x.map.bounds.min_pt.latitude, _x.map.bounds.min_pt.longitude, _x.map.bounds.min_pt.altitude, _x.map.bounds.max_pt.latitude, _x.map.bounds.max_pt.longitude, _x.map.bounds.max_pt.altitude,) = _struct_6d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.map.points = []
for i in range(0, length):
val1 = geographic_msgs.msg.WayPoint()
_v10 = val1.id
start = end
end += 16
_v10.uuid = str[start:end]
_v11 = val1.position
_x = _v11
start = end
end += 24
(_x.latitude, _x.longitude, _x.altitude,) = _struct_3d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.props = []
for i in range(0, length):
val2 = geographic_msgs.msg.KeyValue()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.key = str[start:end].decode('utf-8')
else:
val2.key = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.value = str[start:end].decode('utf-8')
else:
val2.value = str[start:end]
val1.props.append(val2)
self.map.points.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.map.features = []
for i in range(0, length):
val1 = geographic_msgs.msg.MapFeature()
_v12 = val1.id
start = end
end += 16
_v12.uuid = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.components = []
for i in range(0, length):
val2 = geographic_msgs.msg.UniqueID()
start = end
end += 16
val2.uuid = str[start:end]
val1.components.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.props = []
for i in range(0, length):
val2 = geographic_msgs.msg.KeyValue()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.key = str[start:end].decode('utf-8')
else:
val2.key = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2.value = str[start:end].decode('utf-8')
else:
val2.value = str[start:end]
val1.props.append(val2)
self.map.features.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.map.props = []
for i in range(0, length):
val1 = geographic_msgs.msg.KeyValue()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.key = str[start:end].decode('utf-8')
else:
val1.key = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.value = str[start:end].decode('utf-8')
else:
val1.value = str[start:end]
self.map.props.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_B = struct.Struct("<B")
_struct_6d = struct.Struct("<6d")
_struct_16B = struct.Struct("<16B")
_struct_3I = struct.Struct("<3I")
_struct_16s = struct.Struct("<16s")
_struct_3d = struct.Struct("<3d")
class GetGeographicMap(object):
_type = 'geographic_msgs/GetGeographicMap'
_md5sum = 'c0278e653eee0ad79600510650e7be39'
_request_class = GetGeographicMapRequest
_response_class = GetGeographicMapResponse
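# Editor's sketch (not part of the generated file): every variable-length
# field in the (de)serializers above follows the same genpy wire pattern, a
# little-endian uint32 length prefix followed by the raw payload bytes.
# A minimal standalone decoder for one such field:
#
#   import struct
#
#   def read_prefixed(buf, end):
#       (length,) = struct.unpack('<I', buf[end:end + 4])
#       end += 4
#       return buf[end:end + length], end + length
#
#   # e.g. frame_id_bytes, end = read_prefixed(serialized, end)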
|
superdyzio/PWR-Stuff
|
AIR-ARR/Projekt Zespołowy/catkin_ws/devel/lib/python2.7/dist-packages/geographic_msgs/srv/_GetGeographicMap.py
|
Python
|
mit
| 33,019 | 0.015385 |
import csv
import os
class ArticleMappingReader(object):
    def __init__(self, articlesFilepath, delimiter=','):
        self.__delimiter = delimiter
        self.__articlesFilepath = articlesFilepath
self.__articlesFile = None
self.__articlesReader = None
self.__currentArticleData = None
def open(self):
        # check the mapping file itself (dirname would only verify its folder)
        if not os.path.exists(self.__articlesFilepath):
raise Exception("article mapping file not found!")
self.__articlesFile = open(self.__articlesFilepath, 'r', newline='', encoding="UTF-8")
self.__articlesReader = csv.reader(self.__articlesFile, delimiter=self.__delimiter)
return self
def close(self):
self.__articlesFile.close()
def currentArticleId(self):
return self.__currentArticleData[0]
def currentArticleUrl(self):
return self.__currentArticleData[1]
def __parseIterRow(self, row):
articleId = int(row[0])
articleUrl = row[1]
return (articleId, articleUrl)
def __iter__(self):
        self.__currentArticleData = None
self.__articlesFile.seek(0)
iter(self.__articlesReader)
# skip csv header in iteration mode:
next(self.__articlesReader)
return self
def __next__(self):
self.__currentArticleData = self.__parseIterRow(next(self.__articlesReader))
return self.__currentArticleData
def __enter__(self):
return self.open()
def __exit__(self, type, value, traceback):
self.close()
if __name__ == "__main__":
with ArticleMappingReader(os.path.join("data", "articleMapping.data")) as reader:
for aid, url in reader:
print(aid, url)
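# Design note (editor's sketch): the class is both a context manager and an
# iterator over (articleId, articleUrl) tuples. A csv.DictReader variant would
# avoid the manual header skip, assuming the first row names the columns;
# the column names "id" and "url" below are hypothetical:
#
#   import csv
#   with open("articleMapping.data", newline='', encoding="UTF-8") as f:
#       for row in csv.DictReader(f):
#           print(row["id"], row["url"])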
|
CodeLionX/CommentSearchEngine
|
cse/reader/ArticleMappingReader.py
|
Python
|
mit
| 1,729 | 0.009254 |
import requests
class YGOPricesAPI():
def __init__(self):
self.url = "http://yugiohprices.com/api"
def __make_request(self, url):
"""Request a resource from api"""
request = requests.get(url)
if request.status_code != 200:
status_code = request.status_code
reason = request.reason
raise Exception(f'Status code: {status_code} Reason: {reason}')
return request.json()
def get_price_by_name(self, name):
"""Retrieves price data for every version of a card using its name"""
url = f"{self.url}/get_card_prices/{name}"
return self.__make_request(url)
def get_price_by_tag(self, tag, rarity=None):
"""Retrieve price data for a specific version of a card using its print tag"""
if rarity:
url = f"{self.url}/price_history/{tag}?rarity={rarity}"
else:
url = f"{self.url}/price_for_print_tag/{tag}"
return self.__make_request(url)
def get_set_data(self, set_name):
"""Returns rarities and low/high/average prices for each card in the set."""
url = f"{self.url}/set_data/{set_name}"
return self.__make_request(url)
def get_sets(self):
"""Retrieve list of all set names in Yugioh Prices database"""
url = f"{self.url}/card_sets"
return self.__make_request(url)
def get_rising_and_falling(self):
"""Retrieve rising and falling cards list"""
url = f"{self.url}/rising_and_falling"
return self.__make_request(url)
def get_top_100(self, rarity=None):
"""Retrieve Top 100 most expensive cards"""
url = f"{self.url}/top_100_cards"
if rarity:
url = f"{url}?rarity={rarity}"
return self.__make_request(url)
def get_card_names(self):
"""Retrieve all cards name"""
url = f"{self.url}/card_names"
return self.__make_request(url)
def get_card_data(self, name):
"""Retrieve all information for a card using its name"""
url = f"{self.url}/card_data/{name}"
return self.__make_request(url)
def get_card_versions(self, name):
"""Retrieve a list of all known versions of a card using its name"""
url = f"{self.url}/card_versions/{name}"
return self.__make_request(url)
def get_card_support(self, name):
"""Retrieve a list of support cards for a given card using its name"""
url = f"{self.url}/card_support/{name}"
return self.__make_request(url)
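# Example usage (illustrative only; these calls perform live HTTP requests
# against yugiohprices.com, and the card name and rarity are placeholders):
#
#   api = YGOPricesAPI()
#   sets = api.get_sets()
#   prices = api.get_price_by_name("Dark Magician")
#   top = api.get_top_100(rarity="Ultra Rare")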
|
mneary1/YGOPricesAPI
|
prices.py
|
Python
|
mit
| 2,551 | 0.000784 |
# Copyright 2013 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Image caching and management.
"""
import os
import re
from nova.compute import utils as compute_utils
from nova import exception
from nova import utils
from nova.virt import imagecache
from nova.virt import images
from os_win import utilsfactory
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
from oslo_utils import uuidutils
from compute_hyperv.i18n import _
import compute_hyperv.nova.conf
from compute_hyperv.nova import pathutils
LOG = logging.getLogger(__name__)
CONF = compute_hyperv.nova.conf.CONF
class ImageCache(imagecache.ImageCacheManager):
def __init__(self):
super(ImageCache, self).__init__()
self._pathutils = pathutils.PathUtils()
self._vhdutils = utilsfactory.get_vhdutils()
self.used_images = []
self.unexplained_images = []
self.originals = []
def _get_root_vhd_size_gb(self, instance):
if instance.old_flavor:
return instance.old_flavor.root_gb
else:
return instance.flavor.root_gb
def _resize_and_cache_vhd(self, instance, vhd_path):
vhd_size = self._vhdutils.get_vhd_size(vhd_path)['VirtualSize']
root_vhd_size_gb = self._get_root_vhd_size_gb(instance)
root_vhd_size = root_vhd_size_gb * units.Gi
root_vhd_internal_size = (
self._vhdutils.get_internal_vhd_size_by_file_size(
vhd_path, root_vhd_size))
if root_vhd_internal_size < vhd_size:
raise exception.FlavorDiskSmallerThanImage(
flavor_size=root_vhd_size, image_size=vhd_size)
if root_vhd_internal_size > vhd_size:
path_parts = os.path.splitext(vhd_path)
resized_vhd_path = '%s_%s%s' % (path_parts[0],
root_vhd_size_gb,
path_parts[1])
lock_path = os.path.dirname(resized_vhd_path)
lock_name = "%s-cache.lock" % os.path.basename(resized_vhd_path)
@utils.synchronized(name=lock_name, external=True,
lock_path=lock_path)
def copy_and_resize_vhd():
if not self._pathutils.exists(resized_vhd_path):
try:
LOG.debug("Copying VHD %(vhd_path)s to "
"%(resized_vhd_path)s",
{'vhd_path': vhd_path,
'resized_vhd_path': resized_vhd_path})
self._pathutils.copyfile(vhd_path, resized_vhd_path)
LOG.debug("Resizing VHD %(resized_vhd_path)s to new "
"size %(root_vhd_size)s",
{'resized_vhd_path': resized_vhd_path,
'root_vhd_size': root_vhd_size})
self._vhdutils.resize_vhd(resized_vhd_path,
root_vhd_internal_size,
is_file_max_size=False)
except Exception:
with excutils.save_and_reraise_exception():
if self._pathutils.exists(resized_vhd_path):
self._pathutils.remove(resized_vhd_path)
copy_and_resize_vhd()
return resized_vhd_path
def get_cached_image(self, context, instance, rescue_image_id=None):
image_id = rescue_image_id or instance.image_ref
image_type = self.get_image_format(context, image_id, instance)
trusted_certs = instance.trusted_certs
image_path, already_exists = self.cache_image(
context, image_id, image_type, trusted_certs)
# Note: rescue images are not resized.
is_vhd = image_path.split('.')[-1].lower() == 'vhd'
if (CONF.use_cow_images and is_vhd and not rescue_image_id):
# Resize the base VHD image as it's not possible to resize a
# differencing VHD. This does not apply to VHDX images.
resized_image_path = self._resize_and_cache_vhd(instance,
image_path)
if resized_image_path:
return resized_image_path
if rescue_image_id:
self._verify_rescue_image(instance, rescue_image_id, image_path)
return image_path
def fetch(self, context, image_id, path, trusted_certs=None):
with compute_utils.disk_ops_semaphore:
images.fetch(context, image_id, path, trusted_certs)
def append_image_format(self, path, image_type, do_rename=True):
if image_type == 'iso':
format_ext = 'iso'
else:
# Historically, the Hyper-V driver allowed VHDX images registered
# as VHD. We'll continue to do so for now.
format_ext = self._vhdutils.get_vhd_format(path)
new_path = path + '.' + format_ext.lower()
if do_rename:
self._pathutils.rename(path, new_path)
return new_path
def get_image_format(self, context, image_id, instance=None):
image_format = None
if instance:
image_format = instance.system_metadata['image_disk_format']
if not image_format:
image_info = images.get_info(context, image_id)
image_format = image_info['disk_format']
return image_format
def cache_image(self, context, image_id,
image_type=None, trusted_certs=None):
if not image_type:
image_type = self.get_image_format(context, image_id)
base_image_dir = self._pathutils.get_base_vhd_dir()
base_image_path = os.path.join(base_image_dir, image_id)
lock_name = "%s-cache.lock" % image_id
@utils.synchronized(name=lock_name, external=True,
lock_path=base_image_dir)
def fetch_image_if_not_existing():
fetched = False
image_path = None
for format_ext in ['vhd', 'vhdx', 'iso']:
test_path = base_image_path + '.' + format_ext
if self._pathutils.exists(test_path):
image_path = test_path
self._update_image_timestamp(image_id)
break
if not image_path:
try:
self.fetch(context, image_id, base_image_path,
trusted_certs)
fetched = True
image_path = self.append_image_format(
base_image_path, image_type)
except Exception:
with excutils.save_and_reraise_exception():
if self._pathutils.exists(base_image_path):
self._pathutils.remove(base_image_path)
return image_path, fetched
return fetch_image_if_not_existing()
def _verify_rescue_image(self, instance, rescue_image_id,
rescue_image_path):
rescue_image_info = self._vhdutils.get_vhd_info(rescue_image_path)
rescue_image_size = rescue_image_info['VirtualSize']
flavor_disk_size = instance.flavor.root_gb * units.Gi
if rescue_image_size > flavor_disk_size:
err_msg = _('Using a rescue image bigger than the instance '
'flavor disk size is not allowed. '
'Rescue image size: %(rescue_image_size)s. '
'Flavor disk size:%(flavor_disk_size)s.') % dict(
rescue_image_size=rescue_image_size,
flavor_disk_size=flavor_disk_size)
raise exception.ImageUnacceptable(reason=err_msg,
image_id=rescue_image_id)
def get_image_details(self, context, instance):
image_id = instance.image_ref
return images.get_info(context, image_id)
def _age_and_verify_cached_images(self, context, all_instances, base_dir):
for img in self.originals:
if img in self.used_images:
# change the timestamp on the image so as to reflect the last
# time it was used
self._update_image_timestamp(img)
elif CONF.image_cache.remove_unused_base_images:
self._remove_if_old_image(img)
def _update_image_timestamp(self, image):
backing_files = self._get_image_backing_files(image)
for img in backing_files:
os.utime(img, None)
def _get_image_backing_files(self, image):
base_file = self._pathutils.get_image_path(image)
if not base_file:
# not vhd or vhdx, ignore.
return []
backing_files = [base_file]
resize_re = re.compile('%s_[0-9]+$' % image, re.IGNORECASE)
for img in self.unexplained_images:
match = resize_re.match(img)
if match:
backing_files.append(self._pathutils.get_image_path(img))
return backing_files
def _remove_if_old_image(self, image):
backing_files = self._get_image_backing_files(image)
max_age_seconds = (
CONF.image_cache.remove_unused_original_minimum_age_seconds)
for img in backing_files:
age_seconds = self._pathutils.get_age_of_file(img)
if age_seconds > max_age_seconds:
LOG.info("Removing old, unused image: %s", img)
self._remove_old_image(img)
def _remove_old_image(self, image_path):
lock_path = os.path.dirname(image_path)
lock_name = "%s-cache.lock" % os.path.basename(image_path)
@utils.synchronized(name=lock_name, external=True,
lock_path=lock_path)
def _image_synchronized_remove():
self._pathutils.remove(image_path)
_image_synchronized_remove()
def update(self, context, all_instances):
base_vhd_dir = self._pathutils.get_base_vhd_dir()
running = self._list_running_instances(context, all_instances)
self.used_images = running['used_images'].keys()
all_files = self._list_base_images(base_vhd_dir)
self.originals = all_files['originals']
self.unexplained_images = all_files['unexplained_images']
self._age_and_verify_cached_images(context, all_instances,
base_vhd_dir)
def _list_base_images(self, base_dir):
unexplained_images = []
originals = []
for entry in os.listdir(base_dir):
file_name, extension = os.path.splitext(entry)
# extension has a leading '.'. E.g.: '.vhdx'
if extension.lstrip('.').lower() not in ['vhd', 'vhdx']:
# File is not an image. Ignore it.
# imagecache will not store images of any other formats.
continue
if uuidutils.is_uuid_like(file_name):
originals.append(file_name)
else:
unexplained_images.append(file_name)
return {'unexplained_images': unexplained_images,
'originals': originals}
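# Editor's note (sketch; the paths and sizes below are hypothetical): resized
# copies live next to the base image and encode the flavor's root disk size in
# GB, which is exactly what the `resize_re` pattern '<image>_[0-9]+$' matches
# when aging the cache:
#
#   .../base/<image-uuid>.vhd       # fetched base image
#   .../base/<image-uuid>_20.vhd    # copy resized for a 20 GB root disk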
|
openstack/compute-hyperv
|
compute_hyperv/nova/imagecache.py
|
Python
|
apache-2.0
| 11,925 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class IntegrationAccountMapPaged(Paged):
"""
A paging container for iterating over a list of IntegrationAccountMap object
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[IntegrationAccountMap]'}
}
def __init__(self, *args, **kwargs):
super(IntegrationAccountMapPaged, self).__init__(*args, **kwargs)
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-logic/azure/mgmt/logic/models/integration_account_map_paged.py
|
Python
|
mit
| 926 | 0.00108 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'LoginWidget.ui'
#
# Created: Wed Jul 13 22:46:23 2011
# by: PyQt4 UI code generator 4.8.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_LoginWidget(object):
def setupUi(self, LoginWidget):
LoginWidget.setObjectName(_fromUtf8("LoginWidget"))
LoginWidget.resize(299, 342)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(LoginWidget.sizePolicy().hasHeightForWidth())
LoginWidget.setSizePolicy(sizePolicy)
self.verticalLayout = QtGui.QVBoxLayout(LoginWidget)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.formTabs = QtGui.QTabWidget(LoginWidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.formTabs.sizePolicy().hasHeightForWidth())
self.formTabs.setSizePolicy(sizePolicy)
self.formTabs.setObjectName(_fromUtf8("formTabs"))
self.tab = QtGui.QWidget()
self.tab.setObjectName(_fromUtf8("tab"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.tab)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.formLayout = QtGui.QFormLayout()
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.lblUsername = QtGui.QLabel(self.tab)
self.lblUsername.setObjectName(_fromUtf8("lblUsername"))
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.lblUsername)
self.txtUsername = QtGui.QLineEdit(self.tab)
self.txtUsername.setObjectName(_fromUtf8("txtUsername"))
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.txtUsername)
self.lblPassword = QtGui.QLabel(self.tab)
self.lblPassword.setObjectName(_fromUtf8("lblPassword"))
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.lblPassword)
self.txtPassword = QtGui.QLineEdit(self.tab)
self.txtPassword.setEchoMode(QtGui.QLineEdit.Password)
self.txtPassword.setObjectName(_fromUtf8("txtPassword"))
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.txtPassword)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.btnLogin = QtGui.QPushButton(self.tab)
self.btnLogin.setObjectName(_fromUtf8("btnLogin"))
self.horizontalLayout.addWidget(self.btnLogin)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem1)
self.formLayout.setLayout(2, QtGui.QFormLayout.SpanningRole, self.horizontalLayout)
self.verticalLayout_2.addLayout(self.formLayout)
self.grpServerNews = QtGui.QGroupBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.grpServerNews.sizePolicy().hasHeightForWidth())
self.grpServerNews.setSizePolicy(sizePolicy)
self.grpServerNews.setObjectName(_fromUtf8("grpServerNews"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.grpServerNews)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.scrServerNews = QtGui.QScrollArea(self.grpServerNews)
self.scrServerNews.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
self.scrServerNews.setWidgetResizable(True)
self.scrServerNews.setObjectName(_fromUtf8("scrServerNews"))
self.scrollAreaWidgetContents = QtGui.QWidget()
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 255, 157))
self.scrollAreaWidgetContents.setObjectName(_fromUtf8("scrollAreaWidgetContents"))
self.scrServerNews.setWidget(self.scrollAreaWidgetContents)
self.verticalLayout_4.addWidget(self.scrServerNews)
self.verticalLayout_2.addWidget(self.grpServerNews)
self.formTabs.addTab(self.tab, _fromUtf8(""))
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName(_fromUtf8("tab_2"))
self.verticalLayout_5 = QtGui.QVBoxLayout(self.tab_2)
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.verticalLayout_3 = QtGui.QVBoxLayout()
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.formLayout_2 = QtGui.QFormLayout()
self.formLayout_2.setObjectName(_fromUtf8("formLayout_2"))
self.lblRegUsername = QtGui.QLabel(self.tab_2)
self.lblRegUsername.setObjectName(_fromUtf8("lblRegUsername"))
self.formLayout_2.setWidget(0, QtGui.QFormLayout.LabelRole, self.lblRegUsername)
self.txtRegUsername = QtGui.QLineEdit(self.tab_2)
self.txtRegUsername.setObjectName(_fromUtf8("txtRegUsername"))
self.formLayout_2.setWidget(0, QtGui.QFormLayout.FieldRole, self.txtRegUsername)
self.lblRegPassword = QtGui.QLabel(self.tab_2)
self.lblRegPassword.setObjectName(_fromUtf8("lblRegPassword"))
self.formLayout_2.setWidget(1, QtGui.QFormLayout.LabelRole, self.lblRegPassword)
self.txtRegPassword = QtGui.QLineEdit(self.tab_2)
self.txtRegPassword.setEchoMode(QtGui.QLineEdit.Password)
self.txtRegPassword.setObjectName(_fromUtf8("txtRegPassword"))
self.formLayout_2.setWidget(1, QtGui.QFormLayout.FieldRole, self.txtRegPassword)
self.lblRegConfPwd = QtGui.QLabel(self.tab_2)
self.lblRegConfPwd.setObjectName(_fromUtf8("lblRegConfPwd"))
self.formLayout_2.setWidget(2, QtGui.QFormLayout.LabelRole, self.lblRegConfPwd)
self.txtRegConfPwd = QtGui.QLineEdit(self.tab_2)
self.txtRegConfPwd.setEchoMode(QtGui.QLineEdit.Password)
self.txtRegConfPwd.setObjectName(_fromUtf8("txtRegConfPwd"))
self.formLayout_2.setWidget(2, QtGui.QFormLayout.FieldRole, self.txtRegConfPwd)
self.lblRegEmail = QtGui.QLabel(self.tab_2)
self.lblRegEmail.setObjectName(_fromUtf8("lblRegEmail"))
self.formLayout_2.setWidget(3, QtGui.QFormLayout.LabelRole, self.lblRegEmail)
self.txtRegEmail = QtGui.QLineEdit(self.tab_2)
self.txtRegEmail.setObjectName(_fromUtf8("txtRegEmail"))
self.formLayout_2.setWidget(3, QtGui.QFormLayout.FieldRole, self.txtRegEmail)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem2)
self.btnRegister = QtGui.QPushButton(self.tab_2)
self.btnRegister.setObjectName(_fromUtf8("btnRegister"))
self.horizontalLayout_2.addWidget(self.btnRegister)
spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem3)
self.formLayout_2.setLayout(4, QtGui.QFormLayout.SpanningRole, self.horizontalLayout_2)
self.verticalLayout_3.addLayout(self.formLayout_2)
self.lblRegHint = QtGui.QLabel(self.tab_2)
self.lblRegHint.setFrameShape(QtGui.QFrame.Box)
self.lblRegHint.setFrameShadow(QtGui.QFrame.Raised)
self.lblRegHint.setMidLineWidth(0)
self.lblRegHint.setText(_fromUtf8(""))
self.lblRegHint.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.lblRegHint.setObjectName(_fromUtf8("lblRegHint"))
self.verticalLayout_3.addWidget(self.lblRegHint)
self.verticalLayout_5.addLayout(self.verticalLayout_3)
self.formTabs.addTab(self.tab_2, _fromUtf8(""))
self.verticalLayout.addWidget(self.formTabs)
self.retranslateUi(LoginWidget)
self.formTabs.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(LoginWidget)
def retranslateUi(self, LoginWidget):
LoginWidget.setWindowTitle(QtGui.QApplication.translate("LoginWidget", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.lblUsername.setText(QtGui.QApplication.translate("LoginWidget", "Username:", None, QtGui.QApplication.UnicodeUTF8))
self.lblPassword.setText(QtGui.QApplication.translate("LoginWidget", "Password:", None, QtGui.QApplication.UnicodeUTF8))
self.btnLogin.setText(QtGui.QApplication.translate("LoginWidget", "Login", None, QtGui.QApplication.UnicodeUTF8))
self.grpServerNews.setTitle(QtGui.QApplication.translate("LoginWidget", "Server News", None, QtGui.QApplication.UnicodeUTF8))
self.formTabs.setTabText(self.formTabs.indexOf(self.tab), QtGui.QApplication.translate("LoginWidget", "&Login", None, QtGui.QApplication.UnicodeUTF8))
self.lblRegUsername.setWhatsThis(QtGui.QApplication.translate("LoginWidget", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans\'; font-size:8pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Please choose a username for your account. <span style=\" font-style:italic;\">This is not the name of your character; it is just the name you use to login.</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.lblRegUsername.setText(QtGui.QApplication.translate("LoginWidget", "Username:", None, QtGui.QApplication.UnicodeUTF8))
self.lblRegPassword.setWhatsThis(QtGui.QApplication.translate("LoginWidget", "Please choose a unique password that is at least 6 characters long and contains only letters and/or numbers.", None, QtGui.QApplication.UnicodeUTF8))
self.lblRegPassword.setText(QtGui.QApplication.translate("LoginWidget", "Password:", None, QtGui.QApplication.UnicodeUTF8))
self.lblRegConfPwd.setWhatsThis(QtGui.QApplication.translate("LoginWidget", "Please type your password again to help prevent typos.", None, QtGui.QApplication.UnicodeUTF8))
self.lblRegConfPwd.setText(QtGui.QApplication.translate("LoginWidget", "Confirm Password:", None, QtGui.QApplication.UnicodeUTF8))
self.lblRegEmail.setWhatsThis(QtGui.QApplication.translate("LoginWidget", "Please enter a valid email address so that we can reset your password if you forget it.", None, QtGui.QApplication.UnicodeUTF8))
self.lblRegEmail.setText(QtGui.QApplication.translate("LoginWidget", "Email:", None, QtGui.QApplication.UnicodeUTF8))
self.txtRegEmail.setWhatsThis(QtGui.QApplication.translate("LoginWidget", "An email address to register your account to. Used for verification purposes, and if you forget your password.", None, QtGui.QApplication.UnicodeUTF8))
self.btnRegister.setText(QtGui.QApplication.translate("LoginWidget", "&Register", None, QtGui.QApplication.UnicodeUTF8))
self.formTabs.setTabText(self.formTabs.indexOf(self.tab_2), QtGui.QApplication.translate("LoginWidget", "&Register", None, QtGui.QApplication.UnicodeUTF8))
|
buchwj/xvector
|
client/xVClient/ui/LoginWidgetUI.py
|
Python
|
gpl-3.0
| 11,829 | 0.004734 |
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_replacemsg_traffic_quota
short_description: Replacement messages in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify system_replacemsg feature and traffic_quota category.
      Examples include all parameters; values need to be adjusted to data sources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
system_replacemsg_traffic_quota:
description:
- Replacement messages.
default: null
type: dict
suboptions:
buffer:
description:
- Message string.
type: str
format:
description:
- Format flag.
type: str
choices:
- none
- text
- html
- wml
header:
description:
- Header flag.
type: str
choices:
- none
- http
- 8bit
msg_type:
description:
- Message type.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Replacement messages.
fortios_system_replacemsg_traffic_quota:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
system_replacemsg_traffic_quota:
buffer: "<your_own_value>"
format: "none"
header: "none"
msg_type: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_system_replacemsg_traffic_quota_data(json):
option_list = ['buffer', 'format', 'header',
'msg_type']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
        # rebind each converted element: the recursive call returns a new dict
        # rather than mutating in place, so bare iteration would drop the result
        for i, elem in enumerate(data):
            data[i] = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
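# e.g. (illustrative):
#   underscore_to_hyphen({'msg_type': 'x', 'items': [{'a_b': 1}]})
# returns
#   {'msg-type': 'x', 'items': [{'a-b': 1}]}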
def system_replacemsg_traffic_quota(data, fos):
vdom = data['vdom']
state = data['state']
system_replacemsg_traffic_quota_data = data['system_replacemsg_traffic_quota']
filtered_data = underscore_to_hyphen(filter_system_replacemsg_traffic_quota_data(system_replacemsg_traffic_quota_data))
if state == "present":
return fos.set('system.replacemsg',
'traffic-quota',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('system.replacemsg',
'traffic-quota',
mkey=filtered_data['msg-type'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system_replacemsg(data, fos):
if data['system_replacemsg_traffic_quota']:
resp = system_replacemsg_traffic_quota(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"system_replacemsg_traffic_quota": {
"required": False, "type": "dict", "default": None,
"options": {
"buffer": {"required": False, "type": "str"},
"format": {"required": False, "type": "str",
"choices": ["none", "text", "html",
"wml"]},
"header": {"required": False, "type": "str",
"choices": ["none", "http", "8bit"]},
"msg_type": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_system_replacemsg(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_system_replacemsg(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
|
kvar/ansible
|
lib/ansible/modules/network/fortios/fortios_system_replacemsg_traffic_quota.py
|
Python
|
gpl-3.0
| 10,207 | 0.001568 |
#!/usr/bin/python
import re,os
def compare_file(x,y):
dr = re.compile(r"[\S]+?([0-9]+)[\S]+")
orderx = -1
ordery = -1
m = dr.match(x)
if m:
orderx = int(m.group(1))
m = dr.match(y)
if m:
ordery = int(m.group(1))
if orderx == -1 or ordery== -1:
return 0
if orderx>ordery:
return 1
elif orderx==ordery:
return 0
else:
return -1
file_list = []
dd = re.compile(r"p[\S]+?pddl$")
for f in os.listdir("./"):
if f == "readsoln.py":
		continue
m = dd.match(f)
if m:
file_list.append( f )
file_list.sort(compare_file)
index = 1
for f in file_list:
file = open(f,"r")
for line in file.readlines():
if "-" not in line:
continue
		# str.strip() returns a new string, it does not modify in place,
		# so chain it onto the split result
		t = line.split("-")[1].strip()
		l = line.split("-")[0].strip()
if "truck" in t:
print "truck ", l.count("truck"),
elif "driver" in t:
print "driver", l.count("driver"),
elif "obj" in t:
print "Object ", l.count("package"),
print "--- ",f
|
thierry1985/project-1022
|
domains/driverlogshift/readsetting.py
|
Python
|
mit
| 965 | 0.048705 |
import numpy as np
import random
class NeuralNetwork():
def __init__(self, sizes):
# sizes is an array with the number of units in each layer
        # [2,3,1] means 2 neurons of input, 3 in the hidden layer and 1 as output
self.num_layers = len(sizes)
self.sizes = sizes
# the syntax [1:] gets all elements of sizes array beginning at index 1 (second position)
        # np.random.randn(rows, cols) returns a matrix of random elements
# np.random.randn(2,1) =>
# array([[ 0.68265325],
# [-0.52939261]])
# biases will have one vector per layer
self.biases = [np.random.randn(y,1) for y in sizes[1:]]
        #zip returns tuples in which x is the element of the first array and y the element of the second
        #sizes[:-1] returns all the elements up to and including the second-to-last
        #sizes[1:] returns all the elements from the second one on
# [2,3,1] means:
# * matrix of 3 rows and 2 columns -- will be multiplied by the inputs
# * matrix of 1 row and 3 columns -- will multiply the hidden layer and produce the output
self.weights = [np.random.randn(y,x) for x,y in zip(sizes[:-1],sizes[1:])]
def feedforward(self, a):
for b,w in zip(self.biases, self.weights):
a = sigmoid(np.dot(w, a) + b)
return a
def separate_batches(self, training_data, batch_size):
random.shuffle(training_data)
n = len(training_data)
# extracts chunks of data from the training set
        # the range function will return indices starting at 0 and going up to n, with a step of batch_size
        # batches, then, will hold several chunks of the main set, each of size batch_size
return [training_data[i:i + batch_size] for i in range(0, n, batch_size)]
def update_batches(self, batches, alpha):
for batch in batches:
nabla_b = [np.zeros(b.shape) for b in self.biases]
nabla_w = [np.zeros(w.shape) for w in self.weights]
m = len(batch)
            # x is an array of length 901
# y is a single value indicating the digit represented by the 901 elements
for x, y in batch:
delta_b, delta_w = self.backpropagation(x, y)
nabla_b = [nb + dnb for nb, dnb in zip(nabla_b, delta_b)]
nabla_w = [nw + dnw for nw, dnw in zip(nabla_w, delta_w)]
self.weights = [w - (alpha / m) * nw for w, nw in zip(self.weights, nabla_w)]
self.biases = [b - (alpha / m) * nb for b, nb in zip(self.biases, nabla_b)]
def backpropagation(self, x, y):
nabla_b = [np.zeros(b.shape) for b in self.biases]
nabla_w = [np.zeros(w.shape) for w in self.weights]
activation = x
activations = [x]
zs = []
for b, w in zip(self.biases, self.weights):
# layer-bound b and w
z = np.dot(w, activation)+b
zs.append(z)
activation = sigmoid(z)
activations.append(activation)
# backward pass
delta = self.cost_derivative(activations[-1], y) * \
sigmoid_prime(zs[-1])
nabla_b[-1] = delta
nabla_w[-1] = np.dot(delta, activations[-2].transpose())
for l in range(2, self.num_layers):
z = zs[-l]
sp = sigmoid_prime(z)
delta = np.dot(self.weights[-l+1].transpose(), delta) * sp
nabla_b[-l] = delta
nabla_w[-l] = np.dot(delta, activations[-l-1].transpose())
return (nabla_b, nabla_w)
def sgd(self, training_data, epochs, batch_size, alpha, test_data):
n_test = len(test_data)
for epoch in range(epochs):
batches = self.separate_batches(training_data, batch_size)
self.update_batches(batches, alpha)
print("Epoch {0}: {1} / {2}".format(epoch, self.evaluate(test_data), n_test))
def evaluate(self, test_data):
#r = [self.feedforward(x) for (x, y) in test_data]
#for a in r:
# print("{0}, {1}".format(format(a[0][0], 'f'), format(a[1][0], 'f')))
test_results = [(np.argmax(self.feedforward(x)), y)
for (x, y) in test_data]
return sum(int(x == y) for (x, y) in test_results)
def cost_derivative(self, output_activations, y):
return output_activations - y
def sigmoid(z):
return 1.0 / (1.0 + np.exp(-z))
def sigmoid_prime(z):
return sigmoid(z) * (1-sigmoid(z))
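# Example usage (illustrative sketch; the random toy data below is made up,
# real use would feed (x, y) pairs shaped as column vectors, e.g. digit images):
#
#   net = NeuralNetwork([2, 3, 2])
#   training = [(np.random.randn(2, 1), np.eye(2)[:, [i % 2]]) for i in range(100)]
#   test = [(np.random.randn(2, 1), i % 2) for i in range(20)]
#   net.sgd(training, epochs=5, batch_size=10, alpha=3.0, test_data=test)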
|
guilhermefloriani/signature-recognition
|
network.py
|
Python
|
mit
| 4,512 | 0.005319 |
import enum
class calendar_permissions(enum.IntEnum):
ASCIT = 21
AVERY = 22
BECHTEL = 23
BLACKER = 24
DABNEY = 25
FLEMING = 26
LLOYD = 27
PAGE = 28
RICKETTS = 29
RUDDOCK = 30
OTHER = 31
ATHLETICS = 32
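# e.g. (illustrative): members are IntEnum values, so they compare equal to
# their integer ids and can be stored directly in integer database columns:
#
#   assert calendar_permissions.ASCIT == 21
#   assert calendar_permissions(25) is calendar_permissions.DABNEY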
|
ASCIT/donut-python
|
donut/modules/calendar/permissions.py
|
Python
|
mit
| 251 | 0 |
#!/usr/bin/env python
"""
SyntaxError - There's something wrong with how you wrote the surrounding code.
Check your parentheses, and make sure there are colons where needed.
"""
while True
print "Where's the colon at?"
|
selimnairb/2014-02-25-swctest
|
lessons/thw-python-debugging/basic_exceptions/syntax_error.py
|
Python
|
bsd-2-clause
| 223 | 0.008969 |
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api_sample_tests import test_servers
from nova.tests.functional import api_samples_test_base
class ServerActionsSampleJsonTest(test_servers.ServersSampleBase):
microversion = None
ADMIN_API = True
sample_dir = 'os-instance-actions'
def setUp(self):
super(ServerActionsSampleJsonTest, self).setUp()
# Create and stop a server
self.uuid = self._post_server()
self._get_response('servers/%s/action' % self.uuid, 'POST',
'{"os-stop": null}')
response = self._do_get('servers/%s/os-instance-actions' % self.uuid)
response_data = api_samples_test_base.pretty_data(response.content)
actions = api_samples_test_base.objectify(response_data)
self.action_stop = actions['instanceActions'][0]
self._wait_for_state_change({'id': self.uuid}, 'SHUTOFF')
def _get_subs(self):
return {
'uuid': self.uuid,
'project_id': self.action_stop['project_id']
}
def test_instance_action_get(self):
req_id = self.action_stop['request_id']
response = self._do_get('servers/%s/os-instance-actions/%s' %
(self.uuid, req_id))
# Non-admins can see event details except for the "traceback" field
# starting in the 2.51 microversion.
if self.ADMIN_API:
name = 'instance-action-get-resp'
else:
name = 'instance-action-get-non-admin-resp'
self._verify_response(name, self._get_subs(), response, 200)
def test_instance_actions_list(self):
response = self._do_get('servers/%s/os-instance-actions' % self.uuid)
self._verify_response('instance-actions-list-resp', self._get_subs(),
response, 200)
class ServerActionsV221SampleJsonTest(ServerActionsSampleJsonTest):
microversion = '2.21'
scenarios = [('v2_21', {'api_major_version': 'v2.1'})]
class ServerActionsV251AdminSampleJsonTest(ServerActionsSampleJsonTest):
"""Tests the 2.51 microversion for the os-instance-actions API.
The 2.51 microversion allows non-admins to see instance action event
details *except* for the traceback field.
The tests in this class are run as an admin user so all fields will be
displayed.
"""
microversion = '2.51'
scenarios = [('v2_51', {'api_major_version': 'v2.1'})]
class ServerActionsV251NonAdminSampleJsonTest(ServerActionsSampleJsonTest):
"""Tests the 2.51 microversion for the os-instance-actions API.
The 2.51 microversion allows non-admins to see instance action event
details *except* for the traceback field.
The tests in this class are run as a non-admin user so all fields except
for the ``traceback`` field will be displayed.
"""
ADMIN_API = False
microversion = '2.51'
scenarios = [('v2_51', {'api_major_version': 'v2.1'})]
class ServerActionsV258SampleJsonTest(ServerActionsV251AdminSampleJsonTest):
microversion = '2.58'
scenarios = [('v2_58', {'api_major_version': 'v2.1'})]
def test_instance_actions_list_with_limit(self):
response = self._do_get('servers/%s/os-instance-actions'
'?limit=1' % self.uuid)
self._verify_response('instance-actions-list-with-limit-resp',
self._get_subs(), response, 200)
def test_instance_actions_list_with_marker(self):
marker = self.action_stop['request_id']
response = self._do_get('servers/%s/os-instance-actions'
'?marker=%s' % (self.uuid, marker))
self._verify_response('instance-actions-list-with-marker-resp',
self._get_subs(), response, 200)
def test_instance_actions_with_changes_since(self):
stop_action_time = self.action_stop['start_time']
response = self._do_get(
'servers/%s/os-instance-actions'
'?changes-since=%s' % (self.uuid, stop_action_time))
self._verify_response(
'instance-actions-list-with-changes-since',
self._get_subs(), response, 200)
class ServerActionsV258NonAdminSampleJsonTest(ServerActionsV258SampleJsonTest):
ADMIN_API = False
class ServerActionsV262SampleJsonTest(ServerActionsV258SampleJsonTest):
microversion = '2.62'
scenarios = [('v2_62', {'api_major_version': 'v2.1'})]
def _get_subs(self):
return {
'uuid': self.uuid,
'project_id': self.action_stop['project_id'],
'event_host': r'\w+',
'event_hostId': '[a-f0-9]+'
}
class ServerActionsV262NonAdminSampleJsonTest(ServerActionsV262SampleJsonTest):
ADMIN_API = False
class ServerActionsV266SampleJsonTest(ServerActionsV262SampleJsonTest):
microversion = '2.66'
scenarios = [('v2_66', {'api_major_version': 'v2.1'})]
def test_instance_actions_with_changes_before(self):
stop_action_time = self.action_stop['updated_at']
response = self._do_get(
'servers/%s/os-instance-actions'
'?changes-before=%s' % (self.uuid, stop_action_time))
self._verify_response(
'instance-actions-list-with-changes-before',
self._get_subs(), response, 200)
|
rahulunair/nova
|
nova/tests/functional/api_sample_tests/test_instance_actions.py
|
Python
|
apache-2.0
| 5,930 | 0 |
# Author: Nick Raptis <airscorp@gmail.com>
"""
Module for listing commands and help.
"""
from basemodule import BaseModule, BaseCommandContext
from alternatives import _
class HelpContext(BaseCommandContext):
def cmd_list(self, argument):
"""List commands"""
arg = argument.lower()
index = self.bot.help_index
public = "public commands -- %s" % " ".join(index['public'])
private = "private commands -- %s" % " ".join(index['private'])
if 'all' in arg or 'both' in arg:
output = "\n".join((public, private))
elif 'pub' in arg or self.target.startswith('#'):
output = public
elif 'priv' in arg or not self.target.startswith('#'):
output = private
else:
# we shouldn't be here
self.logger.error("cmd_list")
return
self.send(self.target, output)
def cmd_modules(self, argument):
"""List active modules"""
index = self.bot.help_index
output = "active modules -- %s" % " ".join(index['modules'].keys())
self.send(self.target, output)
def cmd_help(self, argument):
"""Get help on a command or module"""
arg = argument.lower()
index = self.bot.help_index
target = self.target
args = arg.split()
if not args:
s = "usage: help <command> [public|private] / help module <module>"
self.send(target, s)
elif args[0] == 'module':
args.pop(0)
if not args:
self.send(target, "usage: help module <module>")
else:
help_item = index['modules'].get(args[0])
if help_item:
self.send(target, help_item['summary'])
else:
self.send(target, _("No help for %s"), args[0])
else:
args.append("")
cmd = args.pop(0)
cmd_type = args.pop(0)
if 'pu' in cmd_type or self.target.startswith('#'):
cmd_type = 'public'
elif 'pr' in cmd_type or not self.target.startswith('#'):
cmd_type = 'private'
else:
# we shouldn't be here
self.logger.error("cmd_list")
return
help_item = index[cmd_type].get(cmd)
if help_item:
self.send(target, index[cmd_type][cmd]['summary'])
else:
self.send(target, _("No help for %s"), cmd)
class HelpModule(BaseModule):
context_class = HelpContext
module = HelpModule
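# Editor's note (an assumption about the surrounding framework): basemodule is
# expected to dispatch an incoming command word to the matching cmd_<word>
# method on the context class, so extending the module is just adding another
# method, e.g. a hypothetical:
#
#   def cmd_ping(self, argument):
#       """Reply with pong"""
#       self.send(self.target, "pong")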
|
nickraptis/fidibot
|
src/modules/help.py
|
Python
|
bsd-2-clause
| 2,615 | 0.000765 |
# -*- coding: utf-8 -*-
import fcntl
import glob
import json
import os
import random
import shutil
import socket
import subprocess
import sys
import tempfile
import time
TEST_DIR = os.path.normpath(os.path.dirname(os.path.realpath(os.path.join(__file__, ".."))))
def browser_path(headless=True):
"""Return path to CDP browser.
Support the following locations:
- /usr/lib*/chromium-browser/headless_shell (chromium-headless RPM), if
headless is true
- "chromium-browser", "chromium", or "google-chrome" in $PATH (distro package)
- node_modules/chromium/lib/chromium/chrome-linux/chrome (npm install chromium)
Exit with an error if none is found.
"""
if headless:
g = glob.glob("/usr/lib*/chromium-browser/headless_shell")
if g:
return g[0]
p = subprocess.check_output("which chromium-browser || which chromium || which google-chrome || true", shell=True).strip()
if p:
return p
p = os.path.join(os.path.dirname(TEST_DIR), "node_modules/chromium/lib/chromium/chrome-linux/chrome")
if os.access(p, os.X_OK):
return p
return None
def jsquote(str):
return json.dumps(str)
class CDP:
def __init__(self, lang=None, headless=True, verbose=False, trace=False, inject_helpers=[]):
self.lang = lang
self.timeout = 60
self.valid = False
self.headless = headless
self.verbose = verbose
self.trace = trace
self.inject_helpers = inject_helpers
self._driver = None
self._browser = None
self._browser_home = None
self._browser_path = None
self._cdp_port_lockfile = None
def invoke(self, fn, **kwargs):
"""Call a particular CDP method such as Runtime.evaluate
Use command() for arbitrary JS code.
"""
trace = self.trace and not kwargs.get("no_trace", False)
try:
del kwargs["no_trace"]
except KeyError:
pass
cmd = fn + "(" + json.dumps(kwargs) + ")"
# frame support for Runtime.evaluate(): map frame name to
# executionContextId and insert into argument object; this must not be quoted
# see "Frame tracking" in cdp-driver.js for how this works
if fn == 'Runtime.evaluate' and self.cur_frame:
cmd = "%s, contextId: getFrameExecId(%s)%s" % (cmd[:-2], jsquote(self.cur_frame), cmd[-2:])
if trace:
print("-> " + kwargs.get('trace', cmd))
# avoid having to write the "client." prefix everywhere
cmd = "client." + cmd
res = self.command(cmd)
if trace:
if "result" in res:
print("<- " + repr(res["result"]))
else:
print("<- " + repr(res))
return res
def command(self, cmd):
if not self._driver:
self.start()
self._driver.stdin.write(cmd + "\n")
line = self._driver.stdout.readline()
if not line:
self.kill()
raise RuntimeError("CDP broken")
try:
res = json.loads(line)
except ValueError:
print(line.strip())
raise
if "error" in res:
if self.trace:
print("<- raise %s" % str(res["error"]))
raise RuntimeError(res["error"])
return res["result"]
def claim_port(self, port):
f = None
try:
f = open(os.path.join(tempfile.gettempdir(), ".cdp-%i.lock" % port), "w")
fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
self._cdp_port_lockfile = f
return True
except (IOError, OSError):
if f:
f.close()
return False
def find_cdp_port(self):
"""Find an unused port and claim it through lock file"""
for retry in range(100):
# don't use the default CDP port 9222 to avoid interfering with running browsers
            port = random.randint(9223, 10222)
if self.claim_port(port):
return port
else:
raise RuntimeError("unable to find free port")
def get_browser_path(self):
if self._browser_path is None:
self._browser_path = browser_path(self.headless)
return self._browser_path
def start(self):
environ = os.environ.copy()
if self.lang:
environ["LC_ALL"] = self.lang
self.cur_frame = None
# allow attaching to external browser
cdp_port = None
if "TEST_CDP_PORT" in os.environ:
p = int(os.environ["TEST_CDP_PORT"])
if self.claim_port(p):
# can fail when a test starts multiple browsers; only show the first one
cdp_port = p
if not cdp_port:
# start browser on a new port
cdp_port = self.find_cdp_port()
self._browser_home = tempfile.mkdtemp()
environ = os.environ.copy()
environ["HOME"] = self._browser_home
environ["LC_ALL"] = "C.utf8"
# this might be set for the tests themselves, but we must isolate caching between tests
try:
del environ["XDG_CACHE_HOME"]
except KeyError:
pass
exe = self.get_browser_path()
if not exe:
raise SystemError("chromium not installed")
if self.headless:
argv = [exe, "--headless"]
else:
argv = [os.path.join(TEST_DIR, "common/xvfb-wrapper"), exe]
# sandboxing does not work in Docker container
self._browser = subprocess.Popen(
argv + ["--disable-gpu", "--no-sandbox", "--disable-setuid-sandbox",
"--disable-namespace-sandbox", "--disable-seccomp-filter-sandbox",
"--disable-sandbox-denial-logging", "--window-size=1280x1200",
"--remote-debugging-port=%i" % cdp_port, "about:blank"],
env=environ, close_fds=True)
if self.verbose:
sys.stderr.write("Started %s (pid %i) on port %i\n" % (exe, self._browser.pid, cdp_port))
# wait for CDP to be up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
for retry in range(300):
try:
s.connect(('127.0.0.1', cdp_port))
break
except socket.error:
time.sleep(0.1)
else:
raise RuntimeError('timed out waiting for browser to start')
# now start the driver
if self.trace:
# enable frame/execution context debugging if tracing is on
environ["TEST_CDP_DEBUG"] = "1"
self._driver = subprocess.Popen(["%s/cdp-driver.js" % os.path.dirname(__file__), str(cdp_port)],
env=environ,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
close_fds=True)
self.valid = True
for inject in self.inject_helpers:
with open(inject) as f:
src = f.read()
# HACK: injecting sizzle fails on missing `document` in assert()
src = src.replace('function assert( fn ) {', 'function assert( fn ) { return true;')
self.invoke("Page.addScriptToEvaluateOnLoad", scriptSource=src, no_trace=True)
def kill(self):
self.valid = False
self.cur_frame = None
if self._driver:
self._driver.stdin.close()
self._driver.wait()
self._driver = None
if self._browser:
if self.verbose:
sys.stderr.write("Killing browser (pid %i)\n" % self._browser.pid)
try:
self._browser.terminate()
except OSError:
pass # ignore if it crashed for some reason
self._browser.wait()
self._browser = None
shutil.rmtree(self._browser_home, ignore_errors=True)
os.remove(self._cdp_port_lockfile.name)
self._cdp_port_lockfile.close()
def set_frame(self, frame):
self.cur_frame = frame
if self.trace:
print("-> switch to frame %s" % frame)
def get_js_log(self):
"""Return the current javascript console log"""
if self.valid:
# needs to be wrapped in Promise
messages = self.command("Promise.resolve(messages)")
return map(lambda m: "%s: %s" % tuple(m), messages)
return []
def read_log(self):
"""Returns an iterator that produces log messages one by one.
Blocks if there are no new messages right now."""
if not self.valid:
yield []
return
while True:
messages = self.command("waitLog()")
for m in messages:
yield m
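# Example usage (illustrative sketch; assumes a chromium binary is installed,
# cdp-driver.js sits next to this module, and the URL is a placeholder):
#
#   browser = CDP(verbose=True)
#   browser.invoke("Page.enable")
#   browser.invoke("Page.navigate", url="http://localhost:9090")
#   title = browser.invoke("Runtime.evaluate", expression="document.title")
#   browser.kill()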
|
stefwalter/cockpit
|
test/common/cdp.py
|
Python
|
lgpl-2.1
| 9,003 | 0.002777 |
import numpy as np
STR_NOBOND = """AU
3 1 2 1
1 0.00000000 0.00000000 0.00000000 -0.66387672 0.00000000 -0.00000000 0.34509720 3.78326969 -0.00000000 -0.00000000 3.96610412 0.00000000 3.52668267 0.00000000 -0.00000000 -2.98430053 0.00000000 -0.00000000 0.00000000 -0.00000000 1.26744725 -0.00000000 2.16730601
1 1.43043000 0.00000000 1.10716000 0.33193836 -0.16057903 -0.00000000 -0.11299312 1.55235099 -0.00000000 1.15495299 0.60859677 -0.00000000 1.21104235 -4.46820475 0.00000000 -4.55909022 -0.05601735 0.00000000 -3.72029878 -0.00000000 0.46039909 -0.00000000 -2.40410436
1 -1.43043000 0.00000000 1.10716000 0.33193836 0.16057903 -0.00000000 -0.11299312 1.55235099 -0.00000000 -1.15495299 0.60859677 0.00000000 1.21104235 4.46820475 -0.00000000 -4.55909022 0.05601735 0.00000000 3.72029878 -0.00000000 0.46039909 -0.00000000 -2.40410436
Time used in Loprop : 0.45 (cpu) 0.11 (wall)
"""
STR_BOND = """AU
5 1 22 1
1 0.00000000 0.00000000 0.00000000 -0.66387672 0.00000000 -0.00000000 0.41788500 1.19165567 0.00000000 0.00000000 2.74891057 0.00000000 1.33653383 0.00000000 0.00000000 4.18425484 0.00000000 -0.00000000 -0.00000000 -0.00000000 0.19037387 0.00000000 5.96033807
1 0.71521500 0.00000000 0.55358000 0.00000000 -0.06567795 -0.00000000 -0.07278780 2.59161403 -0.00000000 1.21719355 1.98015668 -0.00000000 2.19014883 -7.24839104 0.00000000 -7.16855538 0.59534043 0.00000000 -5.74640170 -0.00000000 1.07707338 -0.00000000 -3.79303206
1 1.43043000 0.00000000 1.10716000 0.33193836 -0.12774005 0.00000000 -0.07659922 0.25654398 0.00000000 0.16487465 -0.00000000 -0.00000000 0.11596794 -0.84400923 0.00000000 -0.97481253 -0.35368757 -0.00000000 -0.84709793 0.00000000 -0.07813759 0.00000000 -0.50758833
1 -0.71521500 0.00000000 0.55358000 0.00000000 0.06567795 -0.00000000 -0.07278780 2.59161403 -0.00000000 1.21719355 -1.98015668 0.00000000 2.19014883 7.24839104 -0.00000000 -7.16855538 -0.59534043 0.00000000 5.74640170 -0.00000000 1.07707338 -0.00000000 -3.79303206
1 -1.43043000 0.00000000 1.10716000 0.33193836 0.12774005 0.00000000 -0.07659922 0.25654398 -0.00000000 -0.16487465 0.00000000 0.00000000 0.11596794 0.84400923 -0.00000000 -0.97481253 0.35368757 0.00000000 0.84709793 -0.00000000 -0.07813759 -0.00000000 -0.50758833
Time used in Loprop : 0.45 (cpu) 0.11 (wall)
"""
class TestBondH2O:
"""H2O tests bonded versus non-bonden results"""
def setup(self):
# Read in string that is for no bonds output
lines = [line for line in STR_BOND.split("\n") if len(line.split()) > 10]
a0 = 1.0
self.n_bond = np.array([8.0, 0.0, 1.0, 0.0, 1.0], dtype=float)
self.r_bond = a0 * np.array([l.split()[1:4] for l in lines], dtype=float)
self.q_bond = np.array([l.split()[4] for l in lines], dtype=float)
self.d_bond = np.array([l.split()[5:8] for l in lines], dtype=float)
self.a_bond = np.array([l.split()[8:15] for l in lines], dtype=float)
self.b_bond = np.array([l.split()[15:26] for l in lines], dtype=float)
self.coc_bond = np.einsum("ij,i", self.r_bond, self.n_bond) / self.n_bond.sum()
# Read in string that is for bonds output -b
lines = [line for line in STR_NOBOND.split("\n") if len(line.split()) > 10]
self.n_nobond = np.array([8.0, 1.0, 1.0], dtype=float)
self.r_nobond = a0 * np.array([l.split()[1:4] for l in lines], dtype=float)
self.q_nobond = np.array([l.split()[4] for l in lines], dtype=float)
self.d_nobond = np.array([l.split()[5:8] for l in lines], dtype=float)
self.a_nobond = np.array([l.split()[8:15] for l in lines], dtype=float)
self.b_nobond = np.array([l.split()[15:26] for l in lines], dtype=float)
self.coc_nobond = (
np.einsum("ij,i", self.r_nobond, self.n_nobond) / self.n_nobond.sum()
)
def test_bond_nobond_properties(self):
"""Center-of-charge equality"""
np.testing.assert_allclose(self.coc_bond, self.coc_nobond)
def test_a(self):
"""Polarizability equality"""
a_tot_bond = np.sum(self.a_bond)
a_tot_nobond = np.sum(self.a_nobond)
np.testing.assert_allclose(a_tot_bond, a_tot_nobond)
def test_b(self):
"""Hyperpolarizability equality"""
b_tot_bond = np.sum(self.b_bond)
b_tot_nobond = np.sum(self.b_nobond)
np.testing.assert_allclose(b_tot_bond, b_tot_nobond)
def test_dip(self):
"""Dipole equality"""
dip_bond = np.einsum(
"ij,i", (self.r_bond - self.coc_bond), self.q_bond
) + self.d_bond.sum(axis=0)
dip_nobond = np.einsum(
"ij,i", (self.r_nobond - self.coc_nobond), self.q_nobond
) + self.d_nobond.sum(axis=0)
np.testing.assert_allclose(dip_bond, dip_nobond)
class TestBondH2S:
"""H2O tests bonded versus non-bonden results"""
def setup(self):
        # Read in the string holding the bond (-b) output
lines = [line for line in STR_BOND.split("\n") if len(line.split()) > 10]
a0 = 1.0
self.n_bond = np.array([16.0, 0.0, 1.0, 0.0, 1.0], dtype=float)
self.r_bond = a0 * np.array([l.split()[1:4] for l in lines], dtype=float)
self.q_bond = np.array([l.split()[4] for l in lines], dtype=float)
self.d_bond = np.array([l.split()[5:8] for l in lines], dtype=float)
self.a_bond = np.array([l.split()[8:15] for l in lines], dtype=float)
self.b_bond = np.array([l.split()[15:26] for l in lines], dtype=float)
self.coc_bond = np.einsum("ij,i", self.r_bond, self.n_bond) / self.n_bond.sum()
        # Read in the string holding the no-bond output
lines = [line for line in STR_NOBOND.split("\n") if len(line.split()) > 10]
self.n_nobond = np.array([16.0, 1.0, 1.0], dtype=float)
self.r_nobond = a0 * np.array([l.split()[1:4] for l in lines], dtype=float)
self.q_nobond = np.array([l.split()[4] for l in lines], dtype=float)
self.d_nobond = np.array([l.split()[5:8] for l in lines], dtype=float)
self.a_nobond = np.array([l.split()[8:15] for l in lines], dtype=float)
self.b_nobond = np.array([l.split()[15:26] for l in lines], dtype=float)
self.coc_nobond = (
np.einsum("ij,i", self.r_nobond, self.n_nobond) / self.n_nobond.sum()
)
def test_bond_nobond_properties(self):
"""Center-of-charge equality"""
np.testing.assert_allclose(self.coc_bond, self.coc_nobond)
def test_a(self):
"""Polarizability equality"""
a_tot_bond = np.sum(self.a_bond)
a_tot_nobond = np.sum(self.a_nobond)
np.testing.assert_allclose(a_tot_bond, a_tot_nobond)
def test_b(self):
"""Hyperpolarizability equality"""
b_tot_bond = np.sum(self.b_bond)
b_tot_nobond = np.sum(self.b_nobond)
np.testing.assert_allclose(b_tot_bond, b_tot_nobond)
def test_dip(self):
"""Dipole equality"""
dip_bond = np.einsum(
"ij,i", (self.r_bond - self.coc_bond), self.q_bond
) + self.d_bond.sum(axis=0)
dip_nobond = np.einsum(
"ij,i", (self.r_nobond - self.coc_nobond), self.q_nobond
) + self.d_nobond.sum(axis=0)
np.testing.assert_allclose(dip_bond, dip_nobond)
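# Hedged sketch of the identity exercised by test_dip above: for a distributed
# multipole model the total dipole is sum_i q_i*(r_i - r_coc) + sum_i mu_i,
# so it must come out the same with or without bond sites. Helper is
# illustrative only.
def _total_dipole(r, q, d, coc):
    """Total dipole from site positions r, charges q and local dipoles d."""
    return np.einsum("ij,i", r - coc, q) + d.sum(axis=0)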
|
vahtras/loprop
|
tests/test_bond.py
|
Python
|
gpl-3.0
| 7,956 | 0.005279 |
from django.test import TestCase
from restclients.mock_http import MockHTTP
from myuw.util.cache_implementation import MyUWCache
from restclients.models import CacheEntryTimed
from datetime import timedelta
CACHE = 'myuw.util.cache_implementation.MyUWCache'
class TestCustomCachePolicy(TestCase):
def test_sws_default_policies(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
cache.processResponse("sws",
"/student/myuwcachetest1",
ok_response)
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertEquals(response["response"].data, 'xx')
cache_entry = CacheEntryTimed.objects.get(
service="sws",
url="/student/myuwcachetest1")
# Cached response is returned after 3 hours and 58 minutes
orig_time_saved = cache_entry.time_saved
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)-2))
cache_entry.save()
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertNotEquals(response, None)
# Cached response is not returned after 4 hours and 1 minute
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)+1))
cache_entry.save()
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
def test_sws_term_policy(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertEquals(response, None)
cache.processResponse(
"sws", "/student/v5/term/1014,summer.json", ok_response)
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertEquals(response["response"].data, 'xx')
cache_entry = CacheEntryTimed.objects.get(
service="sws", url="/student/v5/term/1014,summer.json")
# Cached response is returned after 29 days
orig_time_saved = cache_entry.time_saved
cache_entry.time_saved = orig_time_saved - timedelta(days=29)
cache_entry.save()
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertNotEquals(response, None)
# Cached response is not returned after 31 days
cache_entry.time_saved = orig_time_saved - timedelta(days=31)
cache_entry.save()
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertEquals(response, None)
def test_myplan_default(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache('myplan', '/api/plan/xx', {})
self.assertEquals(response, None)
cache.processResponse("myplan", "/api/plan/xx", ok_response)
response = cache.getCache('myplan', '/api/plan/xx', {})
self.assertEquals(response, None)
def test_default_policies(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
cache.processResponse(
"no_such", "/student/myuwcachetest1", ok_response)
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertEquals(response["response"].data, 'xx')
cache_entry = CacheEntryTimed.objects.get(
service="no_such", url="/student/myuwcachetest1")
# Cached response is returned after 3 hours and 58 minutes
orig_time_saved = cache_entry.time_saved
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)-2))
cache_entry.save()
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertNotEquals(response, None)
# Cached response is not returned after 4 hours and 1 minute
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)+1))
cache_entry.save()
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
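# Hedged summary of the policy implied by the tests above (illustrative only;
# this helper is not part of MyUWCache's actual API): SWS term resources stay
# cached for roughly 30 days, other services for 4 hours, MyPlan never.
def _implied_cache_ttl(service, url):
    if service == "myplan":
        return None  # never cached
    if service == "sws" and "/term/" in url:
        return timedelta(days=30)
    return timedelta(hours=4)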
|
fanglinfang/myuw
|
myuw/test/cache.py
|
Python
|
apache-2.0
| 5,375 | 0 |
# coding: utf-8
# In[2]:
# Import and read the datset
import numpy as np
from sklearn import linear_model
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv("C://Users//Koyel//Desktop/MieRobotAdvert.csv")
dataset.head()
# In[3]:
dataset.describe()
# In[4]:
dataset.columns
# In[5]:
import seaborn as sns
get_ipython().magic('matplotlib inline')
sns.pairplot(dataset)
# In[6]:
sns.heatmap(dataset.corr())
# In[7]:
dataset.columns
# In[8]:
X = dataset[['Facebook', 'Twitter', 'Google']]
y = dataset['Hits']
# In[9]:
from sklearn.model_selection import train_test_split
# In[10]:
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=101)
# In[11]:
from sklearn.linear_model import LinearRegression
# In[12]:
lm = LinearRegression()
# In[13]:
lm.fit(X_train,y_train)
# In[14]:
print(lm.intercept_)
# In[15]:
coeff_df = pd.DataFrame(lm.coef_,X.columns,columns=['Calculated Coefficient'])
coeff_df
# In[17]:
predictions = lm.predict(X_test)
# In[26]:
plt.ylabel("likes predicted")
plt.title("Likes predicated for MieRobot.com blogs",color='r')
plt.scatter(y_test,predictions)
# In[23]:
print(lm.score(X_test, y_test))  # R^2 on the held-out test set
# In[19]:
sns.distplot((y_test-predictions),bins=50);
# In[20]:
from sklearn import metrics
print('MAE:', metrics.mean_absolute_error(y_test, predictions))
print('MSE:', metrics.mean_squared_error(y_test, predictions))
print('RMSE:', np.sqrt(metrics.mean_squared_error(y_test, predictions)))
# In[ ]:
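# Cross-check (hedged addition, arbitrary naming): sklearn's r2_score on the
# held-out predictions should match lm.score(X_test, y_test) printed above.
from sklearn.metrics import r2_score
print('R2:', r2_score(y_test, predictions))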
|
MieRobot/Blogs
|
Blog_LinearRegression.py
|
Python
|
gpl-3.0
| 1,518 | 0.009223 |
"""Utility classes and functions for configuring and setting up the content and the look of a QR code."""
import datetime
import decimal
from collections import namedtuple
from dataclasses import dataclass, asdict
from datetime import date
from typing import Optional, Any, Union, Sequence
from django.utils.html import escape
from qr_code.qrcode.constants import DEFAULT_MODULE_SIZE, SIZE_DICT, DEFAULT_ERROR_CORRECTION, DEFAULT_IMAGE_FORMAT
from segno import helpers
class QRCodeOptions:
"""
Represents the options used to create and draw a QR code.
"""
def __init__(
self,
size: Union[int, str] = DEFAULT_MODULE_SIZE,
border: int = 4,
version: Union[int, str, None] = None,
image_format: str = "svg",
error_correction: str = DEFAULT_ERROR_CORRECTION,
encoding: Optional[str] = "utf-8",
boost_error: bool = True,
micro: bool = False,
eci: bool = False,
dark_color: Union[tuple, str] = "#000",
light_color: Union[tuple, str] = "#fff",
finder_dark_color: bool = False,
finder_light_color: bool = False,
data_dark_color: bool = False,
data_light_color: bool = False,
version_dark_color: bool = False,
version_light_color: bool = False,
format_dark_color: bool = False,
format_light_color: bool = False,
alignment_dark_color: bool = False,
alignment_light_color: bool = False,
timing_dark_color: bool = False,
timing_light_color: bool = False,
separator_color: bool = False,
dark_module_color: bool = False,
quiet_zone_color: bool = False,
) -> None:
"""
:param size: The size of the QR code as an integer or a string. Default is *'m'*.
:type: str or int
:param int border: The size of the border (blank space around the code).
:param version: The version of the QR code gives the size of the matrix.
            Default is *None*, which means automatic, in order to avoid data overflow.
:param version: QR Code version. If the value is ``None`` (default), the
minimal version which fits for the input data will be used.
Valid values: "M1", "M2", "M3", "M4" (for Micro QR codes) or an
integer between 1 and 40 (for QR codes).
The `version` parameter is case insensitive.
:type version: int, str or None
:param str image_format: The graphics format used to render the QR code.
It can be either *'svg'* or *'png'*. Default is *'svg'*.
:param str error_correction: How much error correction that might be required
to read the code. It can be either *'L'*, *'M'*, *'Q'*, or *'H'*. Default is *'M'*.
:param bool boost_error: Tells whether the QR code encoding engine tries to increase the error correction level
if it does not affect the version. Error correction level is not increased when it impacts the version of
the code.
:param bool micro: Indicates if a Micro QR Code should be created. Default: False
:param encoding: Indicates the encoding in mode "byte". By default
`encoding` is ``UTF-8``. When set to ``None``, the implementation tries to use the standard conform
ISO/IEC 8859-1 encoding and if it does not fit, it will use UTF-8. Note that no ECI mode indicator is
inserted by default (see :paramref:`eci`).
The `encoding` parameter is case-insensitive.
:type encoding: str or None
:param bool eci: Indicates if binary data which does not use the default
encoding (ISO/IEC 8859-1) should enforce the ECI mode. Since a lot
of QR code readers do not support the ECI mode, this feature is
disabled by default and the data is encoded in the provided
`encoding` using the usual "byte" mode. Set `eci` to ``True`` if
an ECI header should be inserted into the QR Code. Note that
the implementation may not know the ECI designator for the provided
`encoding` and may raise an exception if the ECI designator cannot
be found.
The ECI mode is not supported by Micro QR Codes.
:param dark_color: Color of the dark modules (default: black). The
color can be provided as ``(R, G, B)`` tuple, as hexadecimal
            format (``#RGB``, ``#RRGGBB``, ``RRGGBBAA``), or web color
            name (e.g. ``red``).
:param light_color: Color of the light modules (default: white).
See `color` for valid values. If light is set to ``None`` the
light modules will be transparent.
:param finder_dark_color: Color of the dark finder modules (default: same as ``dark_color``)
:param finder_light_color: Color of the light finder modules (default: same as ``light_color``)
:param data_dark_color: Color of the dark data modules (default: same as ``dark_color``)
:param data_light_color: Color of the light data modules (default: same as ``light_color``)
:param version_dark_color: Color of the dark version modules (default: same as ``dark_color``)
:param version_light_color: Color of the light version modules (default: same as ``light_color``)
:param format_dark_color: Color of the dark format modules (default: same as ``dark_color``)
:param format_light_color: Color of the light format modules (default: same as ``light_color``)
:param alignment_dark_color: Color of the dark alignment modules (default: same as ``dark_color``)
:param alignment_light_color: Color of the light alignment modules (default: same as ``light_color``)
:param timing_dark_color: Color of the dark timing pattern modules (default: same as ``dark_color``)
:param timing_light_color: Color of the light timing pattern modules (default: same as ``light_color``)
:param separator_color: Color of the separator (default: same as ``light_color``)
:param dark_module_color: Color of the dark module (default: same as ``dark_color``)
:param quiet_zone_color: Color of the quiet zone modules (default: same as ``light_color``)
The *size* parameter gives the size of each module of the QR code matrix. It can be either a positive integer or one of the following letters:
* t or T: tiny (value: 6)
* s or S: small (value: 12)
* m or M: medium (value: 18)
* l or L: large (value: 30)
* h or H: huge (value: 48)
For PNG image format the size unit is in pixels, while the unit is 0.1 mm for SVG format.
The *border* parameter controls how many modules thick the border should be (blank space around the code).
The default is 4, which is the minimum according to the specs.
The *version* parameter is an integer from 1 to 40 that controls the size of the QR code matrix. Set to None to
determine this automatically. The smallest, version 1, is a 21 x 21 matrix. The biggest, version 40, is
177 x 177 matrix.
The size grows by 4 modules/side.
For Micro QR codes, valid values are "M1", "M2", "M3", "M4".
There are 4 error correction levels used for QR codes, with each one adding different amounts of "backup" data
depending on how much damage the QR code is expected to suffer in its intended environment, and hence how much
error correction may be required. The correction level can be configured with the *error_correction* parameter as follow:
* l or L: error correction level L – up to 7% damage
* m or M: error correction level M – up to 15% damage
* q or Q: error correction level Q – up to 25% damage
* h or H: error correction level H – up to 30% damage
You may enforce the creation of a Micro QR Code with `micro=True`. The `micro` option defaults to `False`.
The `encoding` option controls the text encoding used in mode "byte" (used for any general text content). By default `encoding` is ``UTF-8``. When set to ``None``, the implementation (based on Segno) tries to use the standard conform ISO/IEC 8859-1 encoding and if it does not fit, it will use UTF-8. Note that no ECI mode indicator is inserted by default (see `eci` option). The `encoding` parameter is case-insensitive.
The `boost_error` indicates whether the QR code encoding engine (Segno) tries to increase the error correction level if it does not affect the version. Error correction level is not increased when it impacts the version of the code.
The `eci` option indicates if binary data which does not use the default encoding (ISO/IEC 8859-1) should enforce the ECI mode. Since a lot of QR code readers do not support the ECI mode, this feature is disabled by default and the data is encoded in the provided encoding using the usual “byte” mode. Set eci to `True` if an ECI header should be inserted into the QR Code. Note that the implementation may not know the ECI designator for the provided encoding and may raise an exception if the ECI designator cannot be found. The ECI mode is not supported by Micro QR Codes.
:raises: TypeError in case an unknown argument is given.
"""
self._size = size
self._border = int(border)
if _can_be_cast_to_int(version):
version = int(version) # type: ignore
if not 1 <= version <= 40:
version = None
elif version in ("m1", "m2", "m3", "m4", "M1", "M2", "M3", "M4"):
version = version.lower() # type: ignore
# Set / change the micro setting otherwise Segno complains about
# conflicting parameters
micro = True
else:
version = None
self._version = version
# if not isinstance(micro, bool):
# micro = micro == 'True'
self._micro = micro
# if not isinstance(eci, bool):
# eci = eci == 'True'
self._eci = eci
try:
error = error_correction.lower()
self._error_correction = error if error in ("l", "m", "q", "h") else DEFAULT_ERROR_CORRECTION
except AttributeError:
self._error_correction = DEFAULT_ERROR_CORRECTION
self._boost_error = boost_error
# Handle encoding
self._encoding = None if encoding == "" else encoding
try:
image_format = image_format.lower()
self._image_format = image_format if image_format in ("svg", "png") else DEFAULT_IMAGE_FORMAT
except AttributeError:
self._image_format = DEFAULT_IMAGE_FORMAT
self._colors = dict(
dark_color=dark_color,
light_color=light_color,
finder_dark_color=finder_dark_color,
finder_light_color=finder_light_color,
data_dark_color=data_dark_color,
data_light_color=data_light_color,
version_dark_color=version_dark_color,
version_light_color=version_light_color,
format_dark_color=format_dark_color,
format_light_color=format_light_color,
alignment_dark_color=alignment_dark_color,
alignment_light_color=alignment_light_color,
timing_dark_color=timing_dark_color,
timing_light_color=timing_light_color,
separator_color=separator_color,
dark_module_color=dark_module_color,
quiet_zone_color=quiet_zone_color,
)
def kw_make(self):
"""Internal method which returns a dict of parameters to create a QR code.
:rtype: dict
"""
return dict(
version=self._version,
error=self._error_correction,
micro=self._micro,
eci=self._eci,
boost_error=self._boost_error,
encoding=self._encoding,
)
def kw_save(self):
"""Internal method which returns a dict of parameters to save a QR code.
:rtype: dict
"""
image_format = self._image_format
kw = dict(border=self.border, kind=image_format, scale=self._size_as_int())
# Change the color mapping into the keywords Segno expects
# (remove the "_color" suffix from the module names)
kw.update({k[:-6]: v for k, v in self.color_mapping().items()})
if image_format == "svg":
kw["unit"] = "mm"
scale = decimal.Decimal(kw["scale"]) / 10
kw["scale"] = scale
return kw
def color_mapping(self):
"""Internal method which returns the color mapping.
Only non-default values are returned.
:rtype: dict
"""
colors = {k: v for k, v in self._colors.items() if v is not False}
# Remove common default "dark" and "light" values
if colors.get("dark_color") in ("#000", "#000000", "black"):
del colors["dark_color"]
if colors.get("light_color") in ("#fff", "#FFF", "#ffffff", "#FFFFFF", "white"):
del colors["light_color"]
return colors
def _size_as_int(self):
"""Returns the size as integer value.
:rtype: int
"""
size = self._size
if _can_be_cast_to_int(size):
actual_size = int(size)
if actual_size < 1:
actual_size = SIZE_DICT[DEFAULT_MODULE_SIZE]
elif isinstance(size, str):
actual_size = SIZE_DICT.get(size.lower(), DEFAULT_MODULE_SIZE)
else:
actual_size = SIZE_DICT[DEFAULT_MODULE_SIZE]
return actual_size
@property
def size(self):
return self._size
@property
def border(self):
return self._border
@property
def version(self):
return self._version
@property
def image_format(self):
return self._image_format
@property
def error_correction(self):
return self._error_correction
@property
def boost_error(self):
return self._boost_error
@property
def micro(self):
return self._micro
@property
def encoding(self):
return self._encoding
@property
def eci(self):
return self._eci
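# Illustrative usage sketch (values are arbitrary examples): how the options
# above turn into the keyword dicts handed to Segno's make()/save().
def _example_qr_code_options():  # pragma: no cover
    opts = QRCodeOptions(size="m", border=4, error_correction="q", image_format="png")
    make_kw = opts.kw_make()  # e.g. {'version': None, 'error': 'q', 'micro': False, ...}
    save_kw = opts.kw_save()  # e.g. {'border': 4, 'kind': 'png', 'scale': 18}
    return make_kw, save_kw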
def _can_be_cast_to_int(value: Any) -> bool:
return isinstance(value, int) or (isinstance(value, str) and value.isdigit())
@dataclass
class EpcData:
"""
Data for representing an European Payments Council Quick Response Code (EPC QR Code) version 002.
You must always use the error correction level "M" and utilizes max. version 13 to fulfill the constraints of the
EPC QR Code standard.
.. note::
        Either the ``text`` or ``reference`` must be provided, but not both.
.. note::
Neither the IBAN, BIC, nor remittance reference number or any other
information is validated (aside from checks regarding the allowed string
lengths).
Fields meaning:
* name: Name of the recipient.
* iban: International Bank Account Number (IBAN)
* amount: The amount to transfer. The currency is always Euro, no other currencies are supported.
* text: Remittance Information (unstructured)
* reference: Remittance Information (structured)
* bic: Bank Identifier Code (BIC). Optional, only required for non-EEA countries.
* purpose: SEPA purpose code.
"""
name: str
iban: str
amount: Union[int, float, decimal.Decimal]
text: Optional[str] = None
reference: Optional[str] = None
bic: Optional[str] = None
purpose: Optional[str] = None
def make_qr_code_data(self) -> str:
"""
Validates the input and creates the data for an European Payments Council Quick Response Code
(EPC QR Code) version 002.
This is a wrapper for :py:func:`segno.helpers._make_epc_qr_data` with no choice for encoding.
:rtype: str
"""
return helpers._make_epc_qr_data(**asdict(self), encoding=1) # type: ignore
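# Hedged example (fictitious IBAN and payee; like the method above, it relies
# on Segno's private _make_epc_qr_data helper):
def _example_epc_data():  # pragma: no cover
    epc = EpcData(name="Jane Doe", iban="DE71110220330123456789",
                  amount=10.0, text="Invoice 42")
    return epc.make_qr_code_data()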
class ContactDetail:
"""
Represents the detail of a contact for MeCARD encoding.
.. note::
This is a legacy class. Please use :py:class:`MeCard` instead for new projects.
Fields meaning:
* first_name
* last_name
* first_name_reading: the sound of the first name.
* last_name_reading: the sound of the last name.
* tel: the phone number, it can appear multiple times.
* tel_av: the video-phone number, it can appear multiple times.
* email: the email address, it can appear multiple times.
* memo: notes.
* birthday: the birth date (Python date).
* address: the fields divided by commas (,) denote PO box, room number, house number, city, prefecture, zip code and country, in order.
* url: homepage URL.
* nickname: display name.
    * org: organization or company name (non-standard, but often recognized, ORG field).
"""
def __init__(
self,
first_name: Optional[str] = None,
last_name: Optional[str] = None,
first_name_reading: Optional[str] = None,
last_name_reading: Optional[str] = None,
tel: Optional[str] = None,
tel_av: Optional[str] = None,
email: Optional[str] = None,
memo: Optional[str] = None,
birthday: Optional[date] = None,
address: Optional[str] = None,
url: Optional[str] = None,
nickname: Optional[str] = None,
org: Optional[str] = None,
):
self.first_name = first_name
self.last_name = last_name
self.first_name_reading = first_name_reading
self.last_name_reading = last_name_reading
self.tel = tel
self.tel_av = tel_av
self.email = email
self.memo = memo
self.birthday = birthday
self.address = address
self.url = url
self.nickname = nickname
self.org = org
def make_qr_code_data(self) -> str:
"""
Make a text for configuring a contact in a phone book. The MeCARD format is used, with an optional, non-standard (but often recognized) ORG field.
See this archive of the format specifications: https://web.archive.org/web/20160304025131/https://www.nttdocomo.co.jp/english/service/developer/make/content/barcode/function/application/addressbook/index.html
:return: the MeCARD contact description.
"""
# See this for an archive of the format specifications:
# https://web.archive.org/web/20160304025131/https://www.nttdocomo.co.jp/english/service/developer/make/content/barcode/function/application/addressbook/index.html
contact_text = "MECARD:"
for name_components_pair in (
("N:%s;", (_escape_mecard_special_chars(self.last_name), _escape_mecard_special_chars(self.first_name))),
("SOUND:%s;", (_escape_mecard_special_chars(self.last_name_reading), _escape_mecard_special_chars(self.first_name_reading))),
):
if name_components_pair[1][0] and name_components_pair[1][1]:
name = "%s,%s" % name_components_pair[1]
else:
name = name_components_pair[1][0] or name_components_pair[1][1] or ""
if name:
contact_text += name_components_pair[0] % name
if self.tel:
contact_text += "TEL:%s;" % _escape_mecard_special_chars(self.tel)
if self.tel_av:
contact_text += "TEL-AV:%s;" % _escape_mecard_special_chars(self.tel_av)
if self.email:
contact_text += "EMAIL:%s;" % _escape_mecard_special_chars(self.email)
if self.memo:
contact_text += "NOTE:%s;" % _escape_mecard_special_chars(self.memo)
if self.birthday:
            # Format date as YYYYMMDD.
contact_text += "BDAY:%s;" % self.birthday.strftime("%Y%m%d")
if self.address:
contact_text += "ADR:%s;" % self.address
if self.url:
contact_text += "URL:%s;" % _escape_mecard_special_chars(self.url)
if self.nickname:
contact_text += "NICKNAME:%s;" % _escape_mecard_special_chars(self.nickname)
# Not standard, but recognized by several readers.
if self.org:
contact_text += "ORG:%s;" % _escape_mecard_special_chars(self.org)
contact_text += ";"
return contact_text
def escaped_value(self, field_name: str):
return _escape_mecard_special_chars(getattr(self, field_name))
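# Hedged example of the MeCARD payload built above (contact data is made up):
def _example_contact_detail():  # pragma: no cover
    contact = ContactDetail(first_name="Ada", last_name="Lovelace",
                            tel="+441234567890", email="ada@example.com")
    return contact.make_qr_code_data()
    # -> 'MECARD:N:Lovelace,Ada;TEL:+441234567890;EMAIL:ada@example.com;;'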
@dataclass
class MeCard:
"""Represents the detail of a contact for MeCARD encoding.
Fields meaning:
* name: Name. If it contains a comma, the first part is treated as lastname and the second part is treated as forename.
* reading: Designates a text string to be set as the kana name in the phonebook
* email: E-mail address. Multiple values are allowed.
* phone: Phone number. Multiple values are allowed.
* videophone: Phone number for video calls. Multiple values are allowed.
* memo: A notice for the contact.
* nickname: Nickname.
* birthday: Birthday. If a string is provided, it should encode the date as YYYYMMDD value.
* url: Homepage. Multiple values are allowed.
* pobox: P.O. box (address information).
* roomno: Room number (address information).
* houseno: House number (address information).
* city: City (address information).
* prefecture: Prefecture (address information).
* zipcode: Zip code (address information).
* country: Country (address information).
    * org: organization or company name (ORG field, non-standard, but often recognized by readers).
"""
name: str
reading: Optional[str] = None
email: Union[str, Sequence[str], None] = None
phone: Union[str, Sequence[str], None] = None
videophone: Union[str, Sequence[str], None] = None
memo: Optional[str] = None
nickname: Optional[str] = None
birthday: Union[str, datetime.date, None] = None
url: Union[str, Sequence[str], None] = None
pobox: Optional[str] = None
roomno: Optional[str] = None
houseno: Optional[str] = None
city: Optional[str] = None
prefecture: Optional[str] = None
zipcode: Union[int, str, None] = None
country: Optional[str] = None
org: Optional[str] = None
def make_qr_code_data(self) -> str:
"""\
Creates a string encoding the contact information as MeCARD.
:rtype: str
"""
kw = asdict(self)
if self.zipcode is not None and self.zipcode != "":
kw["zipcode"] = str(self.zipcode)
org = kw.pop("org")
contact_text = helpers.make_mecard_data(**kw)
# Not standard, but recognized by several readers.
if org:
contact_text += f"ORG:{_escape_mecard_special_chars(org)};"
return contact_text
@dataclass
class VCard:
"""Represents the detail of a contact for vCard encoding.
    Only a subset of available `vCard 3.0 properties <https://tools.ietf.org/html/rfc2426>` is supported.
Fields meaning:
* name: The name. If it contains a semicolon, the first part is treated as lastname and the second part is treated as forename.
* displayname: Common name. Defaults to `name` without the semicolon if ``None``.
* email: E-mail address. Multiple values are allowed.
* phone: Phone number. Multiple values are allowed.
* fax: Fax number. Multiple values are allowed.
* videophone: Phone number for video calls. Multiple values are allowed.
* memo: A notice for the contact.
* nickname: Nickname.
* birthday: Birthday. If a string is provided, it should encode the date as ``YYYY-MM-DD`` value.
* url: Homepage. Multiple values are allowed.
* pobox: P.O. box (address information).
* street: Street address.
* city: City (address information).
* region: Region (address information).
* zipcode: Zip code (address information).
* country: Country (address information).
* org: Company / organization name.
* lat: Latitude.
* lng: Longitude.
* source: URL where to obtain the vCard.
* rev: Revision of the vCard / last modification date.
* title: Job Title. Multiple values are allowed.
* photo_uri: Photo URI. Multiple values are allowed.
"""
name: str
displayname: Optional[str] = None
email: Union[str, Sequence[str], None] = None
phone: Union[str, Sequence[str], None] = None
fax: Union[str, Sequence[str], None] = None
videophone: Union[str, Sequence[str], None] = None
memo: Optional[str] = None
nickname: Optional[str] = None
birthday: Union[str, datetime.date, None] = None
url: Union[str, Sequence[str], None] = None
pobox: Optional[str] = None
street: Optional[str] = None
city: Optional[str] = None
region: Optional[str] = None
zipcode: Union[int, str, None] = None
country: Optional[str] = None
org: Optional[str] = None
lat: Optional[float] = None
lng: Optional[float] = None
source: Optional[str] = None
rev: Union[str, datetime.date, None] = None
title: Union[str, Sequence[str], None] = None
photo_uri: Union[str, Sequence[str], None] = None
def __post_init__(self):
if self.displayname is None:
self.displayname = self.name.replace(" ; ", " ").replace("; ", " ").replace(";", " ")
def make_qr_code_data(self) -> str:
"""\
Creates a string encoding the contact information as vCard 3.0.
Only a subset of available `vCard 3.0 properties <https://tools.ietf.org/html/rfc2426>`
is supported.
:rtype: str
"""
kw = asdict(self)
kw["zipcode"] = str(self.zipcode)
return helpers.make_vcard_data(**kw)
@dataclass
class WifiConfig:
"""\
Represents a WIFI configuration.
Fields meaning:
* ssid: the name of the SSID
    * authentication: the authentication type for the SSID; can be AUTHENTICATION.WEP or AUTHENTICATION.WPA, or AUTHENTICATION.nopass for no password. Or, omit for no password.
* password: the password, ignored if "authentication" is 'nopass' (in which case it may be omitted).
* hidden: tells whether the SSID is hidden or not; can be True or False.
"""
AUTHENTICATION = namedtuple("AUTHENTICATION", "nopass WEP WPA")._make(range(3)) # type: ignore
AUTHENTICATION_CHOICES = ((AUTHENTICATION.nopass, "nopass"), (AUTHENTICATION.WEP, "WEP"), (AUTHENTICATION.WPA, "WPA"))
def __init__(self, ssid: str = "", authentication: int = AUTHENTICATION.nopass, password: str = "", hidden: bool = False) -> None:
self.ssid = ssid
self.authentication = authentication
self.password = password
self.hidden = hidden
def make_qr_code_data(self) -> str:
"""
        Make a text for configuring a Wi-Fi connection. The syntax is inspired by the MeCARD format used for contacts.
:return: the WIFI configuration text that can be translated to a QR code.
:rtype: str
"""
wifi_config = "WIFI:"
if self.ssid:
wifi_config += "S:%s;" % _escape_mecard_special_chars(self.ssid)
if self.authentication:
wifi_config += "T:%s;" % WifiConfig.AUTHENTICATION_CHOICES[self.authentication][1]
if self.password:
wifi_config += "P:%s;" % _escape_mecard_special_chars(self.password)
if self.hidden:
wifi_config += "H:%s;" % str(self.hidden).lower()
wifi_config += ";"
return wifi_config
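# Hedged example of the WIFI: payload for a WPA network (placeholder values):
def _example_wifi_config():  # pragma: no cover
    config = WifiConfig(ssid="my-network",
                        authentication=WifiConfig.AUTHENTICATION.WPA,
                        password="s3cret")
    return config.make_qr_code_data()  # -> 'WIFI:S:my-network;T:WPA;P:s3cret;;'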
@dataclass
class Coordinates:
"""\
Represents a set of coordinates with an optional altitude.
Fields meaning:
    * latitude: The latitude.
    * longitude: The longitude.
    * altitude: The optional altitude.
"""
latitude: float
longitude: float
altitude: Optional[float] = None
def __str__(self) -> str:
if self.altitude:
return "latitude: %s, longitude: %s, altitude: %s" % (self.latitude, self.longitude, self.altitude)
return "latitude: %s, longitude: %s" % (self.latitude, self.longitude)
def float_to_str(self, f):
return "{0:.8f}".format(f).rstrip("0")
def make_geolocation_text(self) -> str:
geo = f"geo:{self.float_to_str(self.latitude)},{self.float_to_str(self.longitude)}"
if self.altitude:
return f"{geo},{self.float_to_str(self.altitude)}"
return geo
def make_google_maps_text(self) -> str:
geo = f"https://maps.google.com/local?q={self.float_to_str(self.latitude)},{self.float_to_str(self.longitude)}"
if self.altitude:
return f"{geo},{self.float_to_str(self.altitude)}"
return geo
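# Hedged example of the geo URI produced above (arbitrary coordinates):
def _example_coordinates():  # pragma: no cover
    point = Coordinates(latitude=48.8584, longitude=2.2945)
    return point.make_geolocation_text()  # -> 'geo:48.8584,2.2945'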
def make_tel_text(phone_number: Any) -> str:
return "tel:%s" % phone_number
def make_sms_text(phone_number: Any) -> str:
return "sms:%s" % phone_number
def make_youtube_text(video_id: str) -> str:
return f"https://www.youtube.com/watch/?v={escape(video_id)}"
def make_google_play_text(package_id: str) -> str:
return f"https://play.google.com/store/apps/details?id={escape(package_id)}"
@dataclass
class Email:
"""Represents the data of an e-mail.
Fields meaning:
* to: The email address (recipient). Multiple values are allowed.
* cc: The carbon copy recipient. Multiple values are allowed.
* bcc: The blind carbon copy recipient. Multiple values are allowed.
* subject: The subject.
* body: The message body.
"""
to: Union[str, Sequence[str]]
cc: Union[str, Sequence[str], None] = None
bcc: Union[str, Sequence[str], None] = None
subject: Optional[str] = None
body: Optional[str] = None
def make_qr_code_data(self) -> str:
"""\
Creates either a simple "mailto:" URL or complete e-mail message with
(blind) carbon copies and a subject and a body.
:rtype: str
"""
        return helpers.make_email_data(**asdict(self))
def _escape_mecard_special_chars(string_to_escape: Optional[str]) -> Optional[str]:
if not string_to_escape:
return string_to_escape
special_chars = ["\\", '"', ";", ",", ":"]
for sc in special_chars:
string_to_escape = string_to_escape.replace(sc, "\\%s" % sc)
return string_to_escape
|
dprog-philippe-docourt/django-qr-code
|
qr_code/qrcode/utils.py
|
Python
|
bsd-3-clause
| 30,602 | 0.003171 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from glanceclient import exc
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
def str2bool(value):
"""Convert a string value to boolean
"""
return value.lower() in ("yes", "true", "1")
# Mapping of property names to type, used for converting input string value
# before submitting.
PROPERTY_TYPES = {'min_disk': long, 'min_ram': long, 'protected': str2bool}
def convert_value(key, value):
"""Convert the property value to the proper type if necessary.
"""
_type = PROPERTY_TYPES.get(key)
if _type:
return _type(value)
return value
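# Hedged example of the helpers above (hypothetical values; note this module
# targets Python 2, so 'min_ram' converts to a long):
def _example_convert_value():
    return (convert_value('min_ram', '2048'),      # 2048L
            convert_value('protected', 'Yes'),     # True
            convert_value('description', 'demo'))  # unknown key: unchanged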
class CreateProperty(forms.SelfHandlingForm):
key = forms.CharField(max_length="255", label=_("Key"))
value = forms.CharField(label=_("Value"))
def handle(self, request, data):
try:
api.glance.image_update_properties(request,
self.initial['image_id'],
**{data['key']: convert_value(data['key'], data['value'])})
msg = _('Created custom property "%s".') % data['key']
messages.success(request, msg)
return True
except exc.HTTPForbidden:
msg = _('Unable to create image custom property. Property "%s" '
'is read only.') % data['key']
exceptions.handle(request, msg)
except exc.HTTPConflict:
msg = _('Unable to create image custom property. Property "%s" '
'already exists.') % data['key']
exceptions.handle(request, msg)
except Exception:
msg = _('Unable to create image custom '
'property "%s".') % data['key']
exceptions.handle(request, msg)
class EditProperty(forms.SelfHandlingForm):
key = forms.CharField(widget=forms.widgets.HiddenInput)
value = forms.CharField(label=_("Value"))
def handle(self, request, data):
try:
api.glance.image_update_properties(request,
self.initial['image_id'],
**{data['key']: convert_value(data['key'], data['value'])})
msg = _('Saved custom property "%s".') % data['key']
messages.success(request, msg)
return True
except exc.HTTPForbidden:
msg = _('Unable to edit image custom property. Property "%s" '
'is read only.') % data['key']
exceptions.handle(request, msg)
except Exception:
msg = _('Unable to edit image custom '
'property "%s".') % data['key']
exceptions.handle(request, msg)
|
jumpstarter-io/horizon
|
openstack_dashboard/dashboards/admin/images/properties/forms.py
|
Python
|
apache-2.0
| 3,269 | 0.000612 |
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for object schema definitions."""
# pylint: disable=relative-import
import inspect
from core.domain import email_manager
from core.tests import test_utils
import schema_utils
SCHEMA_KEY_ITEMS = schema_utils.SCHEMA_KEY_ITEMS
SCHEMA_KEY_LEN = schema_utils.SCHEMA_KEY_LEN
SCHEMA_KEY_PROPERTIES = schema_utils.SCHEMA_KEY_PROPERTIES
SCHEMA_KEY_TYPE = schema_utils.SCHEMA_KEY_TYPE
SCHEMA_KEY_POST_NORMALIZERS = schema_utils.SCHEMA_KEY_POST_NORMALIZERS
SCHEMA_KEY_CHOICES = schema_utils.SCHEMA_KEY_CHOICES
SCHEMA_KEY_NAME = schema_utils.SCHEMA_KEY_NAME
SCHEMA_KEY_SCHEMA = schema_utils.SCHEMA_KEY_SCHEMA
SCHEMA_KEY_OBJ_TYPE = schema_utils.SCHEMA_KEY_OBJ_TYPE
SCHEMA_KEY_VALIDATORS = schema_utils.SCHEMA_KEY_VALIDATORS
SCHEMA_KEY_DESCRIPTION = 'description'
SCHEMA_KEY_UI_CONFIG = 'ui_config'
# The following keys are always accepted as optional keys in any schema.
OPTIONAL_SCHEMA_KEYS = [
SCHEMA_KEY_CHOICES, SCHEMA_KEY_POST_NORMALIZERS, SCHEMA_KEY_UI_CONFIG,
SCHEMA_KEY_VALIDATORS]
SCHEMA_TYPE_BOOL = schema_utils.SCHEMA_TYPE_BOOL
# 'Custom' objects undergo an entirely separate normalization process, defined
# in the relevant extensions/objects/models/objects.py class.
SCHEMA_TYPE_CUSTOM = schema_utils.SCHEMA_TYPE_CUSTOM
SCHEMA_TYPE_DICT = schema_utils.SCHEMA_TYPE_DICT
SCHEMA_TYPE_FLOAT = schema_utils.SCHEMA_TYPE_FLOAT
SCHEMA_TYPE_HTML = schema_utils.SCHEMA_TYPE_HTML
SCHEMA_TYPE_INT = schema_utils.SCHEMA_TYPE_INT
SCHEMA_TYPE_LIST = schema_utils.SCHEMA_TYPE_LIST
SCHEMA_TYPE_UNICODE = schema_utils.SCHEMA_TYPE_UNICODE
ALLOWED_SCHEMA_TYPES = [
SCHEMA_TYPE_BOOL, SCHEMA_TYPE_CUSTOM, SCHEMA_TYPE_DICT, SCHEMA_TYPE_FLOAT,
SCHEMA_TYPE_HTML, SCHEMA_TYPE_INT, SCHEMA_TYPE_LIST, SCHEMA_TYPE_UNICODE]
ALLOWED_CUSTOM_OBJ_TYPES = [
'Filepath', 'LogicQuestion', 'MathLatexString', 'MusicPhrase',
'ParameterName', 'SanitizedUrl', 'Graph', 'ImageWithRegions',
'ListOfTabs']
# Schemas for the UI config for the various types. All of these configuration
# options are optional additions to the schema, and, if omitted, should not
# result in any errors.
# Note to developers: please keep this in sync with
# https://github.com/oppia/oppia/wiki/Schema-Based-Forms
UI_CONFIG_SPECS = {
SCHEMA_TYPE_BOOL: {},
SCHEMA_TYPE_DICT: {},
SCHEMA_TYPE_FLOAT: {},
SCHEMA_TYPE_HTML: {
'hide_complex_extensions': {
'type': SCHEMA_TYPE_BOOL,
},
'placeholder': {
'type': SCHEMA_TYPE_UNICODE,
}
},
SCHEMA_TYPE_INT: {},
SCHEMA_TYPE_LIST: {
'add_element_text': {
'type': SCHEMA_TYPE_UNICODE
}
},
SCHEMA_TYPE_UNICODE: {
'rows': {
'type': SCHEMA_TYPE_INT,
'validators': [{
'id': 'is_at_least',
'min_value': 1,
}]
},
'coding_mode': {
'type': SCHEMA_TYPE_UNICODE,
'choices': ['none', 'python', 'coffeescript'],
},
'placeholder': {
'type': SCHEMA_TYPE_UNICODE,
},
},
}
# Schemas for validators for the various types.
VALIDATOR_SPECS = {
SCHEMA_TYPE_BOOL: {},
SCHEMA_TYPE_DICT: {},
SCHEMA_TYPE_FLOAT: {
'is_at_least': {
'min_value': {
'type': SCHEMA_TYPE_FLOAT
}
},
'is_at_most': {
'max_value': {
'type': SCHEMA_TYPE_FLOAT
}
},
},
SCHEMA_TYPE_HTML: {},
SCHEMA_TYPE_INT: {
'is_at_least': {
'min_value': {
'type': SCHEMA_TYPE_INT
}
},
'is_at_most': {
'max_value': {
'type': SCHEMA_TYPE_INT
}
},
},
SCHEMA_TYPE_LIST: {
'has_length_at_least': {
'min_value': {
'type': SCHEMA_TYPE_INT,
'validators': [{
'id': 'is_at_least',
'min_value': 1,
}],
}
},
'has_length_at_most': {
'max_value': {
'type': SCHEMA_TYPE_INT,
'validators': [{
'id': 'is_at_least',
'min_value': 1,
}],
}
},
'is_uniquified': {},
},
SCHEMA_TYPE_UNICODE: {
'matches_regex': {
'regex': {
'type': SCHEMA_TYPE_UNICODE,
'validators': [{
'id': 'is_regex',
}]
}
},
'is_nonempty': {},
'is_regex': {},
'is_valid_email': {},
},
}
def _validate_ui_config(obj_type, ui_config):
"""Validates the value of a UI configuration."""
reference_dict = UI_CONFIG_SPECS[obj_type]
assert set(ui_config.keys()) <= set(reference_dict.keys())
for key, value in ui_config.iteritems():
schema_utils.normalize_against_schema(
value, reference_dict[key])
def _validate_validator(obj_type, validator):
"""Validates the value of a 'validator' field."""
reference_dict = VALIDATOR_SPECS[obj_type]
assert 'id' in validator and validator['id'] in reference_dict
customization_keys = validator.keys()
customization_keys.remove('id')
assert (set(customization_keys) ==
set(reference_dict[validator['id']].keys()))
for key in customization_keys:
value = validator[key]
schema = reference_dict[validator['id']][key]
try:
schema_utils.normalize_against_schema(value, schema)
except Exception as e:
raise AssertionError(e)
# Check that the id corresponds to a valid normalizer function.
validator_fn = schema_utils.get_validator(validator['id'])
assert set(inspect.getargspec(validator_fn).args) == set(
customization_keys + ['obj'])
def _validate_dict_keys(dict_to_check, required_keys, optional_keys):
"""Checks that all of the required keys, and possibly some of the optional
keys, are in the given dict.
Raises:
AssertionError: if the validation fails.
"""
assert set(required_keys) <= set(dict_to_check.keys()), (
'Missing keys: %s' % dict_to_check)
assert set(dict_to_check.keys()) <= set(required_keys + optional_keys), (
'Extra keys: %s' % dict_to_check)
def validate_schema(schema):
"""Validates a schema.
This is meant to be a utility function that should be used by tests to
ensure that all schema definitions in the codebase are valid.
Each schema is a dict with at least a key called 'type'. The 'type' can
take one of the SCHEMA_TYPE_* values declared above. In addition, there
may be additional keys for specific types:
- 'list' requires an additional 'items' property, which specifies the type
of the elements in the list. It also allows for an optional 'len'
property which specifies the len of the list.
- 'dict' requires an additional 'properties' property, which specifies the
names of the keys in the dict, and schema definitions for their values.
There may also be an optional 'post_normalizers' key whose value is a list
of normalizers.
Raises:
AssertionError: if the schema is not valid.
"""
assert isinstance(schema, dict)
assert SCHEMA_KEY_TYPE in schema
assert schema[SCHEMA_KEY_TYPE] in ALLOWED_SCHEMA_TYPES
if schema[SCHEMA_KEY_TYPE] == SCHEMA_TYPE_CUSTOM:
_validate_dict_keys(
schema,
[SCHEMA_KEY_TYPE, SCHEMA_KEY_OBJ_TYPE],
[])
assert schema[SCHEMA_KEY_OBJ_TYPE] in ALLOWED_CUSTOM_OBJ_TYPES, schema
elif schema[SCHEMA_KEY_TYPE] == SCHEMA_TYPE_LIST:
_validate_dict_keys(
schema,
[SCHEMA_KEY_ITEMS, SCHEMA_KEY_TYPE],
OPTIONAL_SCHEMA_KEYS + [SCHEMA_KEY_LEN])
validate_schema(schema[SCHEMA_KEY_ITEMS])
if SCHEMA_KEY_LEN in schema:
assert isinstance(schema[SCHEMA_KEY_LEN], int)
assert schema[SCHEMA_KEY_LEN] > 0
elif schema[SCHEMA_KEY_TYPE] == SCHEMA_TYPE_DICT:
_validate_dict_keys(
schema,
[SCHEMA_KEY_PROPERTIES, SCHEMA_KEY_TYPE],
OPTIONAL_SCHEMA_KEYS)
assert isinstance(schema[SCHEMA_KEY_PROPERTIES], list)
for prop in schema[SCHEMA_KEY_PROPERTIES]:
_validate_dict_keys(
prop,
[SCHEMA_KEY_NAME, SCHEMA_KEY_SCHEMA],
[SCHEMA_KEY_DESCRIPTION])
assert isinstance(prop[SCHEMA_KEY_NAME], basestring)
validate_schema(prop[SCHEMA_KEY_SCHEMA])
if SCHEMA_KEY_DESCRIPTION in prop:
assert isinstance(prop[SCHEMA_KEY_DESCRIPTION], basestring)
else:
_validate_dict_keys(schema, [SCHEMA_KEY_TYPE], OPTIONAL_SCHEMA_KEYS)
if SCHEMA_KEY_UI_CONFIG in schema:
_validate_ui_config(
schema[SCHEMA_KEY_TYPE], schema[SCHEMA_KEY_UI_CONFIG])
if SCHEMA_KEY_POST_NORMALIZERS in schema:
assert isinstance(schema[SCHEMA_KEY_POST_NORMALIZERS], list)
for post_normalizer in schema[SCHEMA_KEY_POST_NORMALIZERS]:
assert isinstance(post_normalizer, dict)
assert 'id' in post_normalizer
# Check that the id corresponds to a valid normalizer function.
schema_utils.Normalizers.get(post_normalizer['id'])
# TODO(sll): Check the arguments too.
if SCHEMA_KEY_VALIDATORS in schema:
assert isinstance(schema[SCHEMA_KEY_VALIDATORS], list)
for validator in schema[SCHEMA_KEY_VALIDATORS]:
assert isinstance(validator, dict)
assert 'id' in validator
_validate_validator(schema[SCHEMA_KEY_TYPE], validator)
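# Hedged example of a schema that passes validate_schema (arbitrary content):
# a dict with a single list-of-unicode property capped at three elements.
EXAMPLE_VALID_SCHEMA = {
    'type': 'dict',
    'properties': [{
        'name': 'tags',
        'schema': {
            'type': 'list',
            'items': {'type': 'unicode'},
            'validators': [{'id': 'has_length_at_most', 'max_value': 3}],
        },
    }],
}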
class SchemaValidationUnitTests(test_utils.GenericTestBase):
"""Test validation of schemas."""
def test_schemas_are_correctly_validated(self):
"""Test validation of schemas."""
invalid_schemas = [[
'type'
], {
'type': 'invalid'
}, {
'type': 'dict',
}, {
'type': 'list',
'items': {}
}, {
'type': 'list',
'items': {
'type': 'unicode'
},
'len': -1
}, {
'type': 'list',
'items': {
'type': 'unicode'
},
'len': 0
}, {
'type': 'list',
'items': {
'type': 'unicode'
},
'validators': [{
'id': 'has_length_at_most',
'max_value': 0
}]
}, {
'type': 'dict',
'items': {
'type': 'float'
}
}, {
'type': 'dict',
'properties': {
123: {
'type': 'unicode'
}
}
}, {
'type': 'unicode',
'validators': [{
'id': 'fake_validator',
}]
}, {
'type': 'unicode',
'validators': [{
'id': 'is_nonempty',
'fake_arg': 'unused_value',
}]
}, {
'type': 'unicode',
'validators': [{
'id': 'matches_regex',
}]
}, {
'type': 'float',
'validators': [{
'id': 'is_at_least',
'min_value': 'value_of_wrong_type',
}]
}, {
'type': 'unicode',
'ui_config': {
'rows': -1,
}
}, {
'type': 'unicode',
'ui_config': {
'coding_mode': 'invalid_mode',
}
}]
valid_schemas = [{
'type': 'float'
}, {
'type': 'bool'
}, {
'type': 'dict',
'properties': [{
'name': 'str_property',
'schema': {
'type': 'unicode'
}
}]
}, {
'type': 'list',
'items': {
'type': 'list',
'items': {
'type': 'list',
'items': {
'type': 'bool'
},
'len': 100
}
}
}, {
'type': 'list',
'items': {
'type': 'unicode'
},
'validators': [{
'id': 'has_length_at_most',
'max_value': 3
}]
}, {
'type': 'float',
'validators': [{
'id': 'is_at_least',
'min_value': 3.0,
}]
}, {
'type': 'unicode',
'ui_config': {
'rows': 5,
}
}, {
'type': 'unicode',
'ui_config': {
'coding_mode': 'python',
}
}]
for schema in valid_schemas:
validate_schema(schema)
for schema in invalid_schemas:
with self.assertRaises((AssertionError, KeyError)):
validate_schema(schema)
def test_normalize_against_schema_raises_exception(self):
"""Tests if normalize against schema raises exception
for invalid key.
"""
with self.assertRaises(Exception):
schema = {SCHEMA_KEY_TYPE: 'invalid'}
schema_utils.normalize_against_schema('obj', schema)
def test_is_nonempty_validator(self):
"""Tests if static method is_nonempty returns true iff obj
is not an empty str.
"""
is_nonempty = schema_utils.get_validator('is_nonempty')
self.assertTrue(is_nonempty('non-empty string'))
self.assertTrue(is_nonempty(' '))
self.assertTrue(is_nonempty(' '))
self.assertFalse(is_nonempty(''))
def test_is_at_most_validator(self):
"""Tests if static method is_at_most returns true iff obj
is at most a value.
"""
is_at_most = schema_utils.get_validator('is_at_most')
self.assertTrue(is_at_most(2, 3))
self.assertTrue(is_at_most(2, 2)) # boundary
self.assertFalse(is_at_most(2, 1))
def test_has_length_at_least_validator(self):
"""Tests if static method has_length_at_least returns true iff
given list has length of at least the given value.
"""
has_len_at_least = schema_utils.get_validator('has_length_at_least')
self.assertTrue(has_len_at_least(['elem'], 0))
self.assertTrue(has_len_at_least(['elem'], 1)) # boundary
self.assertFalse(has_len_at_least(['elem'], 2))
def test_get_raises_invalid_validator_id(self):
"""Tests if class method 'get' in _Validator raises exception
for invalid validator id.
"""
with self.assertRaises(Exception):
schema_utils.get_validator('some invalid validator method name')
class SchemaNormalizationUnitTests(test_utils.GenericTestBase):
"""Test schema-based normalization of objects."""
def check_normalization(self, schema, mappings, invalid_items):
"""Validates the schema and tests that values are normalized correctly.
Args:
schema: the schema to normalize the value against.
mappings: a list of 2-element tuples. The first element of
each item is expected to be normalized to the second.
invalid_items: a list of values. Each of these is expected to raise
an AssertionError when normalized.
"""
validate_schema(schema)
for raw_value, expected_value in mappings:
self.assertEqual(
schema_utils.normalize_against_schema(raw_value, schema),
expected_value)
for value in invalid_items:
with self.assertRaises(Exception):
schema_utils.normalize_against_schema(value, schema)
def test_float_schema(self):
schema = {
'type': schema_utils.SCHEMA_TYPE_FLOAT,
}
mappings = [(1.2, 1.2), (3, 3.0), (-1, -1.0), ('1', 1.0)]
invalid_vals = [[13], 'abc', None]
self.check_normalization(schema, mappings, invalid_vals)
def test_list_schema_with_len(self):
schema = {
'type': schema_utils.SCHEMA_TYPE_LIST,
'items': {
'type': schema_utils.SCHEMA_TYPE_UNICODE,
},
'len': 2,
}
mappings = [
(['a', 'b'], ['a', 'b']),
(['abc', ''], ['abc', '']),
(['adaA13', '13'], ['adaA13', '13'])]
invalid_vals = [['1', 13], {'a': 'b'}, {}, None, 123, 'abc', ['c'], []]
self.check_normalization(schema, mappings, invalid_vals)
def test_html_schema(self):
"""Tests for valid html schema, an html string. Note that
html.cleaner() is called in normalize_against_schema.
"""
schema = {
'type': schema_utils.SCHEMA_TYPE_HTML,
}
mappings = [
('<script></script>', ''),
('<a class="webLink" href="https'
'://www.oppia.com/"><img src="images/oppia.png"></a>',
'<a href="https://www.oppia.com/"></a>')]
invalid_vals = [['<script></script>', '<script></script>']]
self.check_normalization(schema, mappings, invalid_vals)
def test_schema_key_post_normalizers(self):
"""Test post normalizers in schema using basic html schema."""
schema_1 = {
'type': schema_utils.SCHEMA_TYPE_HTML,
'post_normalizers': [
{'id': 'normalize_spaces'}, # html strings with no extra spaces
]
}
obj_1 = 'a a'
normalize_obj_1 = schema_utils.normalize_against_schema(obj_1, schema_1)
self.assertEqual(u'a a', normalize_obj_1)
schema_2 = {
'type': schema_utils.SCHEMA_TYPE_HTML,
'post_normalizers': [
{'id': 'sanitize_url'}
]
}
obj_2 = 'http://www.oppia.org/splash/<script>'
normalize_obj_2 = schema_utils.normalize_against_schema(obj_2, schema_2)
self.assertEqual(u'http://www.oppia.org/splash/', normalize_obj_2)
def test_list_schema(self):
schema = {
'type': schema_utils.SCHEMA_TYPE_LIST,
'items': {
'type': schema_utils.SCHEMA_TYPE_UNICODE,
}
}
mappings = [
(['a', 'b'], ['a', 'b']),
(['c'], ['c']),
(['abc', ''], ['abc', '']),
([], []),
(['adaA13', '13'], ['adaA13', '13'])]
invalid_vals = [['1', 13], {'a': 'b'}, {}, None, 123, 'abc']
self.check_normalization(schema, mappings, invalid_vals)
def test_dict_schema(self):
schema = {
'type': schema_utils.SCHEMA_TYPE_DICT,
'properties': [{
'name': 'unicodeListProp',
'schema': {
'type': schema_utils.SCHEMA_TYPE_LIST,
'items': {
'type': schema_utils.SCHEMA_TYPE_UNICODE
}
},
}, {
'name': 'intProp',
'schema': {
'type': schema_utils.SCHEMA_TYPE_INT
},
}, {
'name': 'dictProp',
'schema': {
'type': schema_utils.SCHEMA_TYPE_DICT,
'properties': [{
'name': 'floatProp',
'schema': {
'type': schema_utils.SCHEMA_TYPE_FLOAT
}
}]
}
}]
}
mappings = [({
'unicodeListProp': [],
'intProp': 1,
'dictProp': {
'floatProp': 3
}
}, {
'unicodeListProp': [],
'intProp': 1,
'dictProp': {
'floatProp': 3.0
}
}), ({
'intProp': 10,
'unicodeListProp': ['abc', 'def'],
'dictProp': {
'floatProp': -1.0
}
}, {
'intProp': 10,
'unicodeListProp': ['abc', 'def'],
'dictProp': {
'floatProp': -1.0
}
})]
invalid_vals = [{
'unicodeListProp': [],
'intPROP': 1,
'dictProp': {
'floatProp': 3.0
}
}, {
'unicodeListProp': ['aaa'],
'intProp': 1,
}, {
'unicodeListProp': [],
'intProp': 3,
'dictProp': {},
}, [
'unicodeListProp', 'intProp', 'dictProp'
], None, 123, 'abc']
self.check_normalization(schema, mappings, invalid_vals)
def test_notification_email_list_validator(self):
schema = email_manager.NOTIFICATION_EMAIL_LIST_SCHEMA
valid_email_list = [u'user{}@oppia.com'.format(i) for i in xrange(0, 5)]
big_email_list = [u'user{}@oppia.com'.format(i) for i in xrange(0, 7)]
mappings = [
([u'admin@oppia.com'], [u'admin@oppia.com']),
(valid_email_list, valid_email_list)]
invalid_vals = [[u'admin@oppia'], big_email_list,
[u'admin@oppia.commmm'], [u'a@.com']]
self.check_normalization(schema, mappings, invalid_vals)
def test_normalize_spaces(self):
"""Test static method normalize_spaces; should collapse multiple
spaces.
"""
normalize_spaces = schema_utils.Normalizers.get('normalize_spaces')
self.assertEqual('dog cat', normalize_spaces('dog cat'))
self.assertEqual('dog cat', normalize_spaces(' dog cat'))
self.assertEqual('dog cat', normalize_spaces(' dog cat '))
self.assertNotEqual('dog cat', normalize_spaces('dogcat'))
def test_normalizer_get(self):
"""Tests the class method 'get' of Normalizers, should return the
normalizer method corresponding to the given normalizer id.
"""
normalize_spaces = schema_utils.Normalizers.get('normalize_spaces')
self.assertEqual('normalize_spaces', normalize_spaces.__name__)
def test_normalizer_get_raises_exception_for_invalid_id(self):
"""Tests if class method get of Normalizers raises exception when given
an invalid normalizer id.
"""
with self.assertRaises(Exception):
schema_utils.Normalizers.get('some invalid normalizer method name')
with self.assertRaises(Exception):
# Test substring of an actual id.
schema_utils.Normalizers.get('normalize_space')
def test_normalizer_sanitize_url(self):
"""Tests if static method sanitize_url of Normalizers correctly
sanitizes a URL when given its string representation and raises
error for invalid URLs.
"""
sanitize_url = schema_utils.Normalizers.get('sanitize_url')
self.assertEqual(
'https://www.oppia.org/splash/',
sanitize_url('https://www.oppia.org/splash/'))
self.assertEqual(
'http://www.oppia.org/splash/',
sanitize_url('http://www.oppia.org/splash/'))
self.assertEqual(
sanitize_url('http://example.com/~path;parameters?q=arg#fragment'),
'http://example.com/%7Epath%3Bparameters?q%3Darg#fragment')
self.assertEqual(
'https://www.web.com/%3Cscript%20type%3D%22text/javascript%22%'
'3Ealert%28%27rm%20-rf%27%29%3B%3C/script%3E',
sanitize_url(
'https://www.web.com/<script type="text/javascript">alert(\'rm'
' -rf\');</script>'))
self.assertEqual('', sanitize_url(''))
# Raise AssertionError if string does not start with http:// or
# https://.
with self.assertRaisesRegexp(
AssertionError,
'Invalid URL: Sanitized URL should start with \'http://\' or'
' \'https://\'; received oppia.org'):
sanitize_url('oppia.org')
with self.assertRaisesRegexp(
AssertionError,
'Invalid URL: Sanitized URL should start with \'http://\' or'
' \'https://\'; received www.oppia.org'):
sanitize_url('www.oppia.org')
|
souravbadami/oppia
|
schema_utils_test.py
|
Python
|
apache-2.0
| 25,162 | 0.000318 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# eLyXer -- convert LyX source files to HTML output.
#
# Copyright (C) 2009 Alex Fernández
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# --end--
# Alex 20090411
# LyX layout and derived classes
from elyxer.util.trace import Trace
from elyxer.parse.parser import *
from elyxer.out.output import *
from elyxer.gen.container import *
from elyxer.gen.styles import *
from elyxer.gen.header import *
from elyxer.proc.postprocess import *
from elyxer.ref.label import *
from elyxer.ref.partkey import *
from elyxer.ref.link import *
class Layout(Container):
"A layout (block of text) inside a lyx file"
type = 'none'
def __init__(self):
"Initialize the layout."
self.contents = []
self.parser = BoundedParser()
self.output = TaggedOutput().setbreaklines(True)
def process(self):
"Get the type and numerate if necessary."
self.type = self.header[1]
if self.type in TagConfig.layouts:
self.output.tag = TagConfig.layouts[self.type] + ' class="' + self.type + '"'
elif self.type.replace('*', '') in TagConfig.layouts:
self.output.tag = TagConfig.layouts[self.type.replace('*', '')]
self.output.tag += ' class="' + self.type.replace('*', '-') + '"'
else:
self.output.tag = 'div class="' + self.type + '"'
self.numerate()
def numerate(self):
"Numerate if necessary."
partkey = PartKeyGenerator.forlayout(self)
if partkey:
self.partkey = partkey
self.output.tag = self.output.tag.replace('?', unicode(partkey.level))
def __unicode__(self):
"Return a printable representation."
if self.partkey:
return 'Layout ' + self.type + ' #' + unicode(self.partkey.partkey)
return 'Layout of type ' + self.type
class StandardLayout(Layout):
"A standard layout -- can be a true div or nothing at all"
indentation = False
def process(self):
self.type = 'standard'
self.output = ContentsOutput()
def complete(self, contents):
"Set the contents and return it."
self.process()
self.contents = contents
return self
class Title(Layout):
"The title of the whole document"
def process(self):
self.type = 'title'
self.output.tag = 'h1 class="title"'
title = self.extracttext()
DocumentTitle.title = title
Trace.message('Title: ' + title)
class Author(Layout):
"The document author"
def process(self):
self.type = 'author'
self.output.tag = 'h2 class="author"'
author = self.extracttext()
Trace.debug('Author: ' + author)
DocumentAuthor.appendauthor(author)
class Abstract(Layout):
"A paper abstract"
done = False
def process(self):
self.type = 'abstract'
self.output.tag = 'div class="abstract"'
if Abstract.done:
return
message = Translator.translate('abstract')
tagged = TaggedText().constant(message, 'p class="abstract-message"', True)
self.contents.insert(0, tagged)
Abstract.done = True
class FirstWorder(Layout):
"A layout where the first word is extracted"
def extractfirstword(self):
"Extract the first word as a list"
return self.extractfromcontents(self.contents)
def extractfromcontents(self, contents):
"Extract the first word in contents."
firstcontents = []
while len(contents) > 0:
if self.isfirstword(contents[0]):
firstcontents.append(contents[0])
del contents[0]
return firstcontents
if self.spaceincontainer(contents[0]):
extracted = self.extractfromcontainer(contents[0])
firstcontents.append(extracted)
return firstcontents
firstcontents.append(contents[0])
del contents[0]
return firstcontents
def extractfromcontainer(self, container):
"Extract the first word from a container cloning it including its output."
if isinstance(container, StringContainer):
return self.extractfromstring(container)
result = Cloner.clone(container)
result.output = container.output
result.contents = self.extractfromcontents(container.contents)
return result
def extractfromstring(self, container):
"Extract the first word from elyxer.a string container."
if not ' ' in container.string:
Trace.error('No space in string ' + container.string)
return container
split = container.string.split(' ', 1)
container.string = split[1]
return Constant(split[0])
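  # Illustrative behavior (hypothetical input): calling extractfromstring on a
  # StringContainer holding 'First rest of text' returns Constant('First') and
  # leaves the container holding 'rest of text'.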
def spaceincontainer(self, container):
"Find out if the container contains a space somewhere."
return ' ' in container.extracttext()
def isfirstword(self, container):
"Find out if the container is valid as a first word."
if not isinstance(container, FirstWord):
return False
return not container.isempty()
class FirstWord(Container):
"A container which is in itself a first word, unless it's empty."
"Should be inherited by other containers, e.g. ERT."
def isempty(self):
"Find out if the first word is empty."
Trace.error('Unimplemented isempty()')
return True
class Description(FirstWorder):
"A description layout"
def process(self):
"Set the first word to bold"
self.type = 'Description'
self.output.tag = 'div class="Description"'
firstword = self.extractfirstword()
if not firstword:
return
tag = 'span class="Description-entry"'
self.contents.insert(0, TaggedText().complete(firstword, tag))
self.contents.insert(1, Constant(u' '))
class List(FirstWorder):
"A list layout"
def process(self):
"Set the first word to bold"
self.type = 'List'
self.output.tag = 'div class="List"'
firstword = self.extractfirstword()
if not firstword:
return
first = TaggedText().complete(firstword, 'span class="List-entry"')
second = TaggedText().complete(self.contents, 'span class="List-contents"')
self.contents = [first, second]
class PlainLayout(Layout):
"A plain layout"
def process(self):
"Output just as contents."
self.output = ContentsOutput()
self.type = 'Plain'
def makevisible(self):
"Make the layout visible, output as tagged text."
self.output = TaggedOutput().settag('div class="PlainVisible"', True)
class LyXCode(Layout):
"A bit of LyX-Code."
def process(self):
"Output as pre."
self.output.tag = 'pre class="LyX-Code"'
for newline in self.searchall(Newline):
index = newline.parent.contents.index(newline)
newline.parent.contents[index] = Constant('\n')
class PostLayout(object):
"Numerate an indexed layout"
processedclass = Layout
def postprocess(self, last, layout, next):
"Group layouts and/or number them."
if layout.type in TagConfig.group['layouts']:
return self.group(last, layout)
if layout.partkey:
self.number(layout)
return layout
def group(self, last, layout):
"Group two layouts if they are the same type."
if not self.isgroupable(layout) or not self.isgroupable(last) or last.type != layout.type:
return layout
layout.contents = last.contents + [Constant('<br/>\n')] + layout.contents
last.contents = []
last.output = EmptyOutput()
return layout
def isgroupable(self, container):
"Check that the container can be grouped."
if not isinstance(container, Layout):
return False
for element in container.contents:
if not element.__class__.__name__ in LayoutConfig.groupable['allowed']:
return False
return True
def number(self, layout):
"Generate a number and place it before the text"
layout.partkey.addtoclabel(layout)
class PostStandard(object):
"Convert any standard spans in root to divs"
processedclass = StandardLayout
def postprocess(self, last, standard, next):
"Switch to div, and clear if empty."
type = 'Standard'
if self.isempty(standard):
standard.output = EmptyOutput()
return standard
if DocumentParameters.indentstandard:
if isinstance(last, StandardLayout):
type = 'Indented'
else:
type = 'Unindented'
standard.output = TaggedOutput().settag('div class="' + type + '"', True)
return standard
def isempty(self, standard):
"Find out if the standard layout is empty."
for element in standard.contents:
if not element.output.isempty():
return False
return True
class PostPlainLayout(PostLayout):
"Numerate a plain layout"
processedclass = PlainLayout
def postprocess(self, last, plain, next):
"Group plain layouts."
if not self.istext(last) or not self.istext(plain):
return plain
plain.makevisible()
return self.group(last, plain)
def istext(self, container):
"Find out if the container is only text."
if not isinstance(container, PlainLayout):
return False
extractor = ContainerExtractor(TOCConfig.extractplain)
text = extractor.extract(container)
return (len(text) > 0)
class PostLyXCode(object):
"Coalesce contiguous LyX-Code layouts."
processedclass = LyXCode
def postprocess(self, last, lyxcode, next):
"Coalesce if last was also LyXCode"
if not isinstance(last, LyXCode):
return lyxcode
if hasattr(last, 'first'):
lyxcode.first = last.first
else:
lyxcode.first = last
toappend = lyxcode.first.contents
toappend.append(Constant('\n'))
toappend += lyxcode.contents
lyxcode.output = EmptyOutput()
return lyxcode
Postprocessor.stages += [
PostLayout, PostStandard, PostLyXCode, PostPlainLayout
]
|
hainm/elyxer
|
src/elyxer/gen/layout.py
|
Python
|
gpl-3.0
| 10,103 | 0.010792 |
"""Test cases for JSON lws_logger module, assumes Pytest."""
from jsonutils.lws import lws_logger
class TestDictToTreeHelpers:
"""Test the helper functions for dict_to_tree."""
def test_flatten_list(self):
"""Test flattening of nested lists."""
f = lws_logger.flatten_list
nested = [1, [2, 3, [[4], 5]]]
assert list(f(nested)) == [1, 2, 3, 4, 5]
nested = [[[1]]]
assert list(f(nested)) == [1]
flat = [1, 2]
assert list(f(flat)) == [1, 2]
def test_filter_errors(self):
"""Test error filtering (helper function to filter_keys)."""
f = lws_logger.filter_errors
errors = {'key': 99,
'key_str': 'key error',
'val': -99,
'val_str': 'val error'}
seq = [100, 99, 99, 99]
assert f(seq, errors) == [100]
seq = [99]
assert f(seq, errors) == ['key error']
seq = [-99, -99, 100]
assert f(seq, errors) == [100]
seq = [-99, -99]
assert f(seq, errors) == ['val error']
def test_filter_errors_single(self):
"""Test list error term filtering, single error."""
f = lws_logger.filter_keys
errors = {'key': 99,
'key_str': 'key error',
'val': -99,
'val_str': 'val error'}
pairs = [('a', 'hi'), ('a', 99), ('b', 'hi')]
filtered = [('a', 'hi'), ('b', 'hi')]
assert f(pairs, errors) == filtered
def test_filter_errors_multiple(self):
"""Test list error term filtering, multiple errors."""
f = lws_logger.filter_keys
errors = {'key': 99,
'key_str': 'key error',
'val': -99,
'val_str': 'val error'}
pairs = [('a', 'hi'), ('a', 99), ('a', 99),
('b', 'hi'), ('b', -99)]
filtered = [('a', 'hi'), ('b', 'hi')]
assert f(pairs, errors) == filtered
def test_filter_errors_only(self):
"""Test list error term filtering, only errors."""
f = lws_logger.filter_keys
errors = {'key': 99,
'key_str': 'key error',
'val': -99,
'val_str': 'val error'}
pairs = [('a', 99), ('b', -99)]
filtered = [('a', 'key error'), ('b', 'val error')]
assert f(pairs, errors) == filtered
class TestLoggerHelpers:
"""Test the helper functions for logger."""
def test_dict_to_tree_simple(self):
"""Test dict_to_tree simple dicts."""
f = lws_logger.dict_to_tree
simple_d = {'root': ['a', 'b']}
flat_list = [('root', 0), [('a', 1)], [('b', 1)]]
assert f(simple_d, 'root', [('root', 0)]) == flat_list
nested_d = {'root': ['a', 'b'], 'a': ['one', 'two']}
nested_list = [('root', 0), [('a', 1), [('one', 2)], [('two', 2)]],
[('b', 1)]]
assert f(nested_d, 'root', [('root', 0)]) == nested_list
def test_parse_errors_one(self):
"""Test scenario with one type of error."""
f = lws_logger.parse_errors
errors = {'key_str': 'key error',
'val_str': 'val error'}
nodes = [('one', 'key error'), ('two', 3), ('three', 3)]
output = 'Key Errors:\t1\nValue Errors:\t0'
assert f(nodes, errors) == (1, 0, output)
def test_parse_errors_both(self):
"""Test scenario with two types of errors."""
f = lws_logger.parse_errors
errors = {'key_str': 'key error',
'val_str': 'val error'}
nodes = [('one', 'key error'), ('two', 3), ('three', 3),
('four', 'val error')]
output = 'Key Errors:\t1\nValue Errors:\t1'
assert f(nodes, errors) == (1, 1, output)
def test_format_node(self):
"""Test node to string function."""
f = lws_logger.format_node
assert f('a', '----', 1) == '|----a'
assert f('a', '----', 2) == ' |----a'
|
tkuriyama/jsonutils
|
jsonutils/lws/test/test_lws_logger.py
|
Python
|
mit
| 3,990 | 0 |
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.views.generic import ListView, DetailView, CreateView
from blog.models import Post, Comment
class Posts(ListView):
"""
Display every post paginated or if given a type filter/search for it.
"""
model = Post
template_name = 'posts.html'
paginate_by = 5
def get_queryset(self):
qs = self.model.objects.for_display()
type_of_page = self.kwargs.get('type', False)
search_query = self.request.GET.get('q', False)
if type_of_page == 'user':
qs = qs.filter(posted_by__username=self.kwargs.get('slug', ''))
elif type_of_page == 'category':
qs = qs.filter(categories__slug=self.kwargs.get('slug', ''))
if search_query:
qs = qs.filter(Q(title__icontains=search_query) | Q(content__icontains=search_query))
return qs
class SinglePost(DetailView):
"""
Display a single selected post.
"""
model = Post
template_name = 'post.html'
def get_queryset(self):
return self.model.objects.for_display()
class PostComment(CreateView):
"""
    Saves comments received for a post; GET requests are redirected back to the post's detail page.
"""
model = Comment
fields = ['text']
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
return super(PostComment, self).dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
return HttpResponseRedirect(reverse('blog:detail', kwargs={'slug': self.kwargs['slug']}))
def form_valid(self, form):
comment = form.save(commit=False)
comment.posted_by = self.request.user
comment.post = get_object_or_404(Post, slug=self.kwargs['slug'])
comment.save()
messages.success(self.request, 'Your comment was posted.')
return super(PostComment, self).form_valid(form)
def get_success_url(self):
return reverse('blog:detail', kwargs={'slug': self.kwargs['slug']})
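# Hypothetical URL patterns these views assume (names and kwargs are guesses
# inferred from the reverse() calls and self.kwargs lookups above):
#
#   app_name = 'blog'
#   urlpatterns = [
#       path('', Posts.as_view(), name='list'),
#       path('<str:type>/<slug:slug>/', Posts.as_view(), name='filtered'),
#       path('post/<slug:slug>/', SinglePost.as_view(), name='detail'),
#       path('post/<slug:slug>/comment/', PostComment.as_view(), name='comment'),
#   ]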
|
Tusky/DjangoSample
|
blog/views.py
|
Python
|
mit
| 2,273 | 0.00088 |
from __future__ import with_statement
from sympy.printing.mathml import mathml
import tempfile
import os
def print_gtk(x, start_viewer=True):
"""Print to Gtkmathview, a gtk widget capable of rendering MathML.
Needs libgtkmathview-bin"""
from sympy.utilities.mathml import c2p
tmp = tempfile.mktemp() # create a temp file to store the result
with open(tmp, 'wb') as file:
file.write( c2p(mathml(x), simple=True) )
if start_viewer:
os.system("mathmlviewer " + tmp)
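# Minimal usage sketch (assumes libgtkmathview-bin is installed; pass
# start_viewer=False to only write the temporary MathML file):
#
#   from sympy.abc import x
#   print_gtk(x**2 + 1, start_viewer=False)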
|
lidavidm/mathics-heroku
|
venv/lib/python2.7/site-packages/sympy/printing/gtk.py
|
Python
|
gpl-3.0
| 510 | 0.003922 |
from pahera.models import Person
from django.db import connection, transaction
from pahera.Utilities import DictFetchAll
# Check whether a user with the same email or phone number already exists, before registering the new user..!!!
def VerifyTheUser(data):
cursor = connection.cursor()
email = data['email']
phone = data['phone']
cursor.execute("SELECT * from pahera_person WHERE email = %s OR phone_no = %s", [email, phone])
    person_Data = DictFetchAll.dictfetchall(cursor)
if person_Data:
if person_Data[0]['email'] == email or person_Data[0]['phone_no'] == phone:
return False
else:
return True
else:
return True
# Check whether another user already has the same email or phone number, before updating this user..!!!
def VerifyTheUserUpdate(data, person):
    cursor = connection.cursor()
    email = data['email']
    phone = data['phone']
    cursor.execute("SELECT * from pahera_person WHERE email = %s OR phone_no = %s", [email, phone])
    person_Data = DictFetchAll.dictfetchall(cursor)
    for post in person_Data:
        if (post['email'] == email or post['phone_no'] == phone) and post['id'] != person.id:
            # The email or phone number already belongs to a different user.
            return False
    return True
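# Illustrative call (hypothetical request data):
#
#   VerifyTheUser({'email': 'new.user@example.com', 'phone': '5551234'})
#   # -> True when no existing user shares the email or phone number.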
|
thebachchaoproject/bachchao-server
|
pahera/PythonModules/CheckIfUserExists_mod.py
|
Python
|
mit
| 1,431 | 0.004892 |
import unittest
import pqkmeans
import numpy
import collections
import pickle
class TestPQKMeans(unittest.TestCase):
def data_source(self, n: int):
for i in range(n):
yield [i * 100] * 6
def setUp(self):
# Train PQ encoder
self.encoder = pqkmeans.encoder.PQEncoder(num_subdim=3, Ks=20)
self.encoder.fit(numpy.array(list(self.data_source(200))))
def test_just_construction(self):
pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=5, iteration=10, verbose=False)
def test_fit_and_predict(self):
engine = pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=2, iteration=10, verbose=False)
codes = self.encoder.transform(numpy.array(list(self.data_source(100))))
predicted = engine.fit_predict(codes)
count = collections.defaultdict(int)
for cluster in predicted:
count[cluster] += 1
# roughly balanced clusters
self.assertGreaterEqual(min(count.values()), max(count.values()) * 0.7)
a = engine.predict(codes[0:1, :])
b = engine.predict(codes[0:1, :])
self.assertEqual(a, b)
def test_cluster_centers_are_really_nearest(self):
engine = pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=2, iteration=10, verbose=False)
codes = self.encoder.transform(numpy.array(list(self.data_source(100))))
fit_predicted = engine.fit_predict(codes)
cluster_centers = numpy.array(engine.cluster_centers_, dtype=numpy.uint8)
predicted = engine.predict(codes)
self.assertTrue((fit_predicted == predicted).all())
# Reconstruct the original vectors
codes_decoded = self.encoder.inverse_transform(codes)
cluster_centers_decoded = self.encoder.inverse_transform(cluster_centers)
for cluster, code_decoded in zip(predicted, codes_decoded):
            # Wrap around the full label range so other_cluster differs from cluster.
            other_cluster = (cluster + 1) % (max(predicted) + 1)
self.assertLessEqual(
numpy.linalg.norm(cluster_centers_decoded[cluster] - code_decoded),
numpy.linalg.norm(cluster_centers_decoded[other_cluster] - code_decoded)
)
def test_constructor_with_cluster_center(self):
# Run pqkmeans first.
engine = pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=5, iteration=10, verbose=False)
codes = self.encoder.transform(numpy.array(list(self.data_source(100))))
fit_predicted = engine.fit_predict(codes)
cluster_centers = numpy.array(engine.cluster_centers_, dtype=numpy.uint8)
predicted = engine.predict(codes)
# save current engine and recover from savedata
engine_savedata = pickle.dumps(engine)
engine_recovered = pickle.loads(engine_savedata)
fit_predicted_from_recovered_obj = engine_recovered.predict(codes)
numpy.testing.assert_array_equal(predicted, fit_predicted_from_recovered_obj)
|
kogaki/pqkmeans
|
test/clustering/test_pqkmeans.py
|
Python
|
mit
| 2,918 | 0.004798 |
HTBRootQdisc = """\
tc qdisc add dev {interface!s} root handle 1: \
htb default {default_class!s}\
"""
HTBQdisc = """\
tc qdisc add dev {interface!s} parent {parent!s} handle {handle!s} \
htb default {default_class!s}\
"""
NetemDelayQdisc = """\
tc qdisc add dev {interface!s} parent {parent!s} handle {handle!s} \
netem delay {delay!s}ms\
"""
IngressQdisc = "tc qdisc add dev {interface!s} ingress"
PRIOQdisc = "tc qdisc add dev {interface!s} root handle 1: prio"
pfifoQdisc = "tc qdisc add dev {interface!s} root handle 1: pfifo"
|
praus/shapy
|
shapy/framework/commands/qdisc.py
|
Python
|
mit
| 537 | 0.001862 |
import time as real_time
import unittest
import jwt as jwt_lib
from mock import patch
from twilio.jwt import Jwt, JwtDecodeError
class DummyJwt(Jwt):
"""Jwt implementation that allows setting arbitrary payload and headers for testing."""
ALGORITHM = 'HS256'
def __init__(self, secret_key, issuer, subject=None, algorithm=None,
nbf=Jwt.GENERATE, ttl=3600, valid_until=None, headers=None,
payload=None):
super(DummyJwt, self).__init__(
secret_key=secret_key,
issuer=issuer,
subject=subject,
algorithm=algorithm or self.ALGORITHM,
nbf=nbf,
ttl=ttl,
valid_until=valid_until
)
self._payload = payload or {}
self._headers = headers or {}
def _generate_payload(self):
return self._payload
def _generate_headers(self):
return self._headers
class JwtTest(unittest.TestCase):
def assertIn(self, foo, bar, msg=None):
"""backport for 2.6"""
assert foo in bar, (msg or "%s not found in %s" % (foo, bar))
def now(self):
return int(real_time.time())
def assertJwtsEqual(self, jwt, key, expected_payload=None, expected_headers=None):
expected_headers = expected_headers or {}
expected_payload = expected_payload or {}
decoded_payload = jwt_lib.decode(jwt, key, algorithms=["HS256"], options={"verify_signature": False})
decoded_headers = jwt_lib.get_unverified_header(jwt)
self.assertEqual(expected_headers, decoded_headers)
self.assertEqual(expected_payload, decoded_payload)
@patch('time.time')
def test_basic_encode(self, time_mock):
time_mock.return_value = 0.0
jwt = DummyJwt('secret_key', 'issuer', headers={}, payload={})
self.assertJwtsEqual(
jwt.to_jwt(), 'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256'},
expected_payload={'iss': 'issuer', 'exp': 3600, 'nbf': 0},
)
@patch('time.time')
def test_encode_with_subject(self, time_mock):
time_mock.return_value = 0.0
jwt = DummyJwt('secret_key', 'issuer', subject='subject', headers={}, payload={})
self.assertJwtsEqual(
jwt.to_jwt(), 'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256'},
expected_payload={'iss': 'issuer', 'exp': 3600, 'nbf': 0, 'sub': 'subject'},
)
@patch('time.time')
def test_encode_without_nbf(self, time_mock):
time_mock.return_value = 0.0
jwt = DummyJwt('secret_key', 'issuer', subject='subject', headers={}, payload={}, nbf=None)
self.assertJwtsEqual(
jwt.to_jwt(), 'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256'},
expected_payload={'iss': 'issuer', 'exp': 3600, 'sub': 'subject'},
)
@patch('time.time')
def test_encode_custom_ttl(self, time_mock):
time_mock.return_value = 0.0
jwt = DummyJwt('secret_key', 'issuer', ttl=10, headers={}, payload={})
self.assertJwtsEqual(
jwt.to_jwt(), 'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256'},
expected_payload={'iss': 'issuer', 'exp': 10, 'nbf': 0},
)
@patch('time.time')
def test_encode_ttl_added_to_current_time(self, time_mock):
time_mock.return_value = 50.0
jwt = DummyJwt('secret_key', 'issuer', ttl=10, headers={}, payload={})
self.assertJwtsEqual(
jwt.to_jwt(), 'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256'},
expected_payload={'iss': 'issuer', 'exp': 60, 'nbf': 50},
)
@patch('time.time')
def test_encode_override_ttl(self, time_mock):
time_mock.return_value = 0.0
jwt = DummyJwt('secret_key', 'issuer', ttl=10, headers={}, payload={})
self.assertJwtsEqual(
jwt.to_jwt(ttl=20),
'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256'},
expected_payload={'iss': 'issuer', 'exp': 20, 'nbf': 0},
)
@patch('time.time')
def test_encode_valid_until_overrides_ttl(self, time_mock):
time_mock.return_value = 0.0
jwt = DummyJwt('secret_key', 'issuer', ttl=10, valid_until=70, headers={}, payload={})
self.assertJwtsEqual(
jwt.to_jwt(), 'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256'},
expected_payload={'iss': 'issuer', 'exp': 70, 'nbf': 0},
)
@patch('time.time')
def test_encode_custom_nbf(self, time_mock):
time_mock.return_value = 0.0
jwt = DummyJwt('secret_key', 'issuer', ttl=10, nbf=5, headers={}, payload={})
self.assertJwtsEqual(
jwt.to_jwt(), 'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256'},
expected_payload={'iss': 'issuer', 'exp': 10, 'nbf': 5},
)
@patch('time.time')
def test_encode_with_headers(self, time_mock):
time_mock.return_value = 0.0
jwt = DummyJwt('secret_key', 'issuer', headers={'sooper': 'secret'}, payload={})
self.assertJwtsEqual(
jwt.to_jwt(), 'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256', 'sooper': 'secret'},
expected_payload={'iss': 'issuer', 'exp': 3600, 'nbf': 0},
)
@patch('time.time')
def test_encode_with_payload(self, time_mock):
time_mock.return_value = 0.0
jwt = DummyJwt('secret_key', 'issuer', payload={'root': 'true'})
self.assertJwtsEqual(
jwt.to_jwt(), 'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256'},
expected_payload={'iss': 'issuer', 'exp': 3600, 'nbf': 0, 'root': 'true'},
)
@patch('time.time')
def test_encode_with_payload_and_headers(self, time_mock):
time_mock.return_value = 0.0
jwt = DummyJwt('secret_key', 'issuer', headers={'yes': 'oui'}, payload={'pay': 'me'})
self.assertJwtsEqual(
jwt.to_jwt(), 'secret_key',
expected_headers={'typ': 'JWT', 'alg': 'HS256', 'yes': 'oui'},
expected_payload={'iss': 'issuer', 'exp': 3600, 'nbf': 0, 'pay': 'me'},
)
def test_encode_no_key_fails(self):
jwt = DummyJwt(None, 'issuer')
self.assertRaises(ValueError, jwt.to_jwt)
def test_encode_decode(self):
test_start = self.now()
jwt = DummyJwt('secret_key', 'issuer', subject='hey', payload={'sick': 'sick'})
decoded_jwt = Jwt.from_jwt(jwt.to_jwt(), 'secret_key')
self.assertGreaterEqual(decoded_jwt.valid_until, self.now() + 3600)
self.assertGreaterEqual(decoded_jwt.nbf, test_start)
self.assertEqual(decoded_jwt.issuer, 'issuer')
self.assertEqual(decoded_jwt.secret_key, 'secret_key')
self.assertEqual(decoded_jwt.algorithm, 'HS256')
self.assertEqual(decoded_jwt.subject, 'hey')
self.assertEqual(decoded_jwt.headers, {'typ': 'JWT', 'alg': 'HS256'})
self.assertDictContainsSubset({
'iss': 'issuer',
'sub': 'hey',
'sick': 'sick',
}, decoded_jwt.payload)
def test_encode_decode_mismatched_algorithms(self):
jwt = DummyJwt('secret_key', 'issuer', algorithm='HS512', subject='hey', payload={'sick': 'sick'})
self.assertRaises(JwtDecodeError, Jwt.from_jwt, jwt.to_jwt())
def test_decode_bad_secret(self):
jwt = DummyJwt('secret_key', 'issuer')
self.assertRaises(JwtDecodeError, Jwt.from_jwt, jwt.to_jwt(), 'letmeinplz')
def test_decode_modified_jwt_fails(self):
jwt = DummyJwt('secret_key', 'issuer')
example_jwt = jwt.to_jwt()
example_jwt = 'ABC' + example_jwt[3:]
self.assertRaises(JwtDecodeError, Jwt.from_jwt, example_jwt, 'secret_key')
def test_decode_validates_expiration(self):
expired_jwt = DummyJwt('secret_key', 'issuer', valid_until=self.now())
real_time.sleep(1)
self.assertRaises(JwtDecodeError, Jwt.from_jwt, expired_jwt.to_jwt(), 'secret_key')
def test_decode_validates_nbf(self):
expired_jwt = DummyJwt('secret_key', 'issuer', nbf=self.now() + 3600) # valid 1hr from now
self.assertRaises(JwtDecodeError, Jwt.from_jwt, expired_jwt.to_jwt(), 'secret_key')
def test_decodes_valid_jwt(self):
expiry_time = self.now() + 1000
example_jwt = jwt_lib.encode(
{'hello': 'world', 'iss': 'me', 'sub': 'being awesome', 'exp': expiry_time},
'secret'
)
decoded_jwt = Jwt.from_jwt(example_jwt, 'secret')
self.assertEqual(decoded_jwt.issuer, 'me')
self.assertEqual(decoded_jwt.subject, 'being awesome')
self.assertEqual(decoded_jwt.valid_until, expiry_time)
self.assertIn('hello', decoded_jwt.payload)
self.assertEqual(decoded_jwt.payload['hello'], 'world')
def test_decode_allows_skip_verification(self):
jwt = DummyJwt('secret', 'issuer', payload={'get': 'rekt'})
decoded_jwt = Jwt.from_jwt(jwt.to_jwt(), key=None)
self.assertEqual(decoded_jwt.issuer, 'issuer')
self.assertEqual(decoded_jwt.payload['get'], 'rekt')
self.assertIsNone(decoded_jwt.secret_key)
|
twilio/twilio-python
|
tests/unit/jwt/test_jwt.py
|
Python
|
mit
| 9,331 | 0.002251 |
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, Hewlett-Packard Development Company, L.P.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
# Standard openstack documentation fragment
DOCUMENTATION = r'''
options:
cloud:
description:
- Named cloud or cloud config to operate against.
If I(cloud) is a string, it references a named cloud config as defined
in an OpenStack clouds.yaml file. Provides default values for I(auth)
and I(auth_type). This parameter is not needed if I(auth) is provided
or if OpenStack OS_* environment variables are present.
If I(cloud) is a dict, it contains a complete cloud configuration like
would be in a section of clouds.yaml.
type: raw
auth:
description:
- Dictionary containing auth information as needed by the cloud's auth
plugin strategy. For the default I(password) plugin, this would contain
I(auth_url), I(username), I(password), I(project_name) and any
information about domains (for example, I(os_user_domain_name) or I(os_project_domain_name)) if the cloud supports them.
For other plugins,
this param will need to contain whatever parameters that auth plugin
requires. This parameter is not needed if a named cloud is provided or
OpenStack OS_* environment variables are present.
type: dict
auth_type:
description:
- Name of the auth plugin to use. If the cloud uses something other than
password authentication, the name of the plugin should be indicated here
and the contents of the I(auth) parameter should be updated accordingly.
type: str
region_name:
description:
- Name of the region.
type: str
wait:
description:
- Should ansible wait until the requested resource is complete.
type: bool
default: yes
timeout:
description:
- How long should ansible wait for the requested resource.
type: int
default: 180
api_timeout:
description:
- How long should the socket layer wait before timing out for API calls.
If this is omitted, nothing will be passed to the requests library.
type: int
validate_certs:
description:
- Whether or not SSL API requests should be verified.
- Before Ansible 2.3 this defaulted to C(yes).
type: bool
default: no
aliases: [ verify ]
ca_cert:
description:
- A path to a CA Cert bundle that can be used as part of verifying
SSL API requests.
type: str
aliases: [ cacert ]
client_cert:
description:
- A path to a client certificate to use as part of the SSL transaction.
type: str
aliases: [ cert ]
client_key:
description:
- A path to a client key to use as part of the SSL transaction.
type: str
aliases: [ key ]
interface:
description:
- Endpoint URL type to fetch from the service catalog.
type: str
choices: [ admin, internal, public ]
default: public
aliases: [ endpoint_type ]
version_added: "2.3"
requirements:
- python >= 2.7
- openstacksdk >= 0.12.0
notes:
- The standard OpenStack environment variables, such as C(OS_USERNAME)
may be used instead of providing explicit values.
- Auth information is driven by openstacksdk, which means that values
can come from a yaml config file in /etc/ansible/openstack.yaml,
/etc/openstack/clouds.yaml or ~/.config/openstack/clouds.yaml, then from
standard environment variables, then finally by explicit parameters in
plays. More information can be found at
U(https://docs.openstack.org/openstacksdk/)
'''
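    # Illustrative playbook usage of these shared options (module name and
    # values are placeholders, not part of this fragment):
    #
    #   - os_server:
    #       cloud: mycloud
    #       auth:
    #         auth_url: https://identity.example.com/v3
    #         username: admin
    #         password: secret
    #         project_name: demo
    #       validate_certs: yes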
|
thaim/ansible
|
lib/ansible/plugins/doc_fragments/openstack.py
|
Python
|
mit
| 3,726 | 0.001074 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .oslogin import (
DeletePosixAccountRequest,
DeleteSshPublicKeyRequest,
GetLoginProfileRequest,
GetSshPublicKeyRequest,
ImportSshPublicKeyRequest,
ImportSshPublicKeyResponse,
LoginProfile,
UpdateSshPublicKeyRequest,
)
__all__ = (
"DeletePosixAccountRequest",
"DeleteSshPublicKeyRequest",
"GetLoginProfileRequest",
"GetSshPublicKeyRequest",
"ImportSshPublicKeyRequest",
"ImportSshPublicKeyResponse",
"LoginProfile",
"UpdateSshPublicKeyRequest",
)
|
googleapis/python-oslogin
|
google/cloud/oslogin_v1/types/__init__.py
|
Python
|
apache-2.0
| 1,116 | 0 |
from django.apps import AppConfig
class FilesConfig(AppConfig):
name = 'files'
verbose_name = 'Files'
def ready(self):
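        # Importing the signals module registers its signal handlers.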
from . import signals
|
chaos-soft/chocola
|
files/apps.py
|
Python
|
mit
| 164 | 0 |
try:
from configparser import NoSectionError, NoOptionError
except ImportError:
from ConfigParser import NoSectionError, NoOptionError
from django import forms
from django.utils.translation import ugettext as _
def get_option(settings, section, option):
try:
return settings.get(section, option)
except NoSectionError:
return ""
except NoOptionError:
return ""
class BaseURLForm(forms.Form):
title = _("Base URL")
url = forms.URLField(
help_text=_("The absolute URL this application will be served at."),
initial="https://example.com",
label=_("URL"),
)
def populate_from_settings(self, settings):
self.data['url'] = get_option(settings, "base_url", "url")
def populate_settings(self, settings):
settings.add_section("base_url")
settings.set("base_url", "url", self.cleaned_data['url'])
class DatabaseForm(forms.Form):
title = _("Database")
engine = forms.ChoiceField(
choices=(
('django.db.backends.mysql', _("MySQL")),
('django.db.backends.oracle', _("Oracle")),
('django.db.backends.postgresql_psycopg2', _("Postgres")),
),
initial='django.db.backends.postgresql_psycopg2',
label=_("Engine"),
)
host = forms.CharField(
initial="localhost",
label=_("Hostname"),
max_length=128,
)
name = forms.CharField(
label=_("Database name"),
max_length=128,
)
password = forms.CharField(
label=_("Password"),
max_length=128,
required=False,
widget=forms.PasswordInput,
)
port = forms.IntegerField(
label=_("Port"),
min_value=1,
max_value=65535,
)
user = forms.CharField(
label=_("Username"),
min_length=1,
max_length=128,
)
    def populate_from_settings(self, settings):
        # get_option already swallows missing sections and options, so no
        # extra exception handling is needed here.
        for field in ('engine', 'host', 'name', 'password', 'port', 'user'):
            self.data[field] = get_option(settings, "database", field)
def populate_settings(self, settings):
settings.add_section("database")
for field in ('engine', 'host', 'name', 'password', 'user'):
settings.set("database", field, self.cleaned_data[field])
settings.set("database", "port", str(self.cleaned_data['port']))
|
trehn/django-installer
|
django_installer/installer/forms.py
|
Python
|
isc
| 2,440 | 0.00041 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Taoni.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
LunacyZeus/Tao-ni
|
web/Taoni/manage.py
|
Python
|
gpl-3.0
| 803 | 0 |
from django.test import TestCase
from wagtail.search.tests.test_backends import BackendTests
from wagtail.tests.search import models
from ..utils import BOOSTS_WEIGHTS, WEIGHTS_VALUES, determine_boosts_weights, get_weight
class TestPostgresSearchBackend(BackendTests, TestCase):
backend_path = 'wagtail.contrib.postgres_search.backend'
def test_weights(self):
self.assertListEqual(BOOSTS_WEIGHTS,
[(10, 'A'), (2, 'B'), (0.5, 'C'), (0.25, 'D')])
self.assertListEqual(WEIGHTS_VALUES, [0.025, 0.05, 0.2, 1.0])
self.assertEqual(get_weight(15), 'A')
self.assertEqual(get_weight(10), 'A')
self.assertEqual(get_weight(9.9), 'B')
self.assertEqual(get_weight(2), 'B')
self.assertEqual(get_weight(1.9), 'C')
self.assertEqual(get_weight(0), 'D')
self.assertEqual(get_weight(-1), 'D')
self.assertListEqual(determine_boosts_weights([1]),
[(1, 'A'), (0, 'B'), (0, 'C'), (0, 'D')])
self.assertListEqual(determine_boosts_weights([-1]),
[(-1, 'A'), (-1, 'B'), (-1, 'C'), (-1, 'D')])
self.assertListEqual(determine_boosts_weights([-1, 1, 2]),
[(2, 'A'), (1, 'B'), (-1, 'C'), (-1, 'D')])
self.assertListEqual(determine_boosts_weights([0, 1, 2, 3]),
[(3, 'A'), (2, 'B'), (1, 'C'), (0, 'D')])
self.assertListEqual(determine_boosts_weights([0, 0.25, 0.75, 1, 1.5]),
[(1.5, 'A'), (1, 'B'), (0.5, 'C'), (0, 'D')])
self.assertListEqual(determine_boosts_weights([0, 1, 2, 3, 4, 5, 6]),
[(6, 'A'), (4, 'B'), (2, 'C'), (0, 'D')])
self.assertListEqual(determine_boosts_weights([-2, -1, 0, 1, 2, 3, 4]),
[(4, 'A'), (2, 'B'), (0, 'C'), (-2, 'D')])
def test_search_tsquery_chars(self):
"""
Checks that tsquery characters are correctly escaped
and do not generate a PostgreSQL syntax error.
"""
# Simple quote should be escaped inside each tsquery term.
results = self.backend.search("L'amour piqué par une abeille",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.search("'starting quote",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.search("ending quote'",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.search("double quo''te",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.search("triple quo'''te",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Now suffixes.
results = self.backend.search("Something:B", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.search("Something:*", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.search("Something:A*BCD", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Now the AND operator.
results = self.backend.search("first & second", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Now the OR operator.
results = self.backend.search("first | second", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Now the NOT operator.
results = self.backend.search("first & !second", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Now the phrase operator.
results = self.backend.search("first <-> second", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
def test_autocomplete_tsquery_chars(self):
"""
Checks that tsquery characters are correctly escaped
and do not generate a PostgreSQL syntax error.
"""
# Simple quote should be escaped inside each tsquery term.
results = self.backend.autocomplete("L'amour piqué par une abeille",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.autocomplete("'starting quote",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.autocomplete("ending quote'",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.autocomplete("double quo''te",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.autocomplete("triple quo'''te",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Backslashes should be escaped inside each tsquery term.
results = self.backend.autocomplete("backslash\\",
models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Now suffixes.
results = self.backend.autocomplete("Something:B", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.autocomplete("Something:*", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
results = self.backend.autocomplete("Something:A*BCD", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Now the AND operator.
results = self.backend.autocomplete("first & second", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Now the OR operator.
results = self.backend.autocomplete("first | second", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Now the NOT operator.
results = self.backend.autocomplete("first & !second", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
# Now the phrase operator.
results = self.backend.autocomplete("first <-> second", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [])
def test_index_without_upsert(self):
# Test the add_items code path for Postgres 9.4, where upsert is not available
self.backend.reset_index()
index = self.backend.get_index_for_model(models.Book)
index._enable_upsert = False
index.add_items(models.Book, models.Book.objects.all())
results = self.backend.search("JavaScript", models.Book)
self.assertUnsortedListEqual([r.title for r in results], [
"JavaScript: The good parts",
"JavaScript: The Definitive Guide"
])
|
kaedroho/wagtail
|
wagtail/contrib/postgres_search/tests/test_backend.py
|
Python
|
bsd-3-clause
| 7,353 | 0.000272 |
#!/usr/bin/env python
# Copyright 2014 Rafael Dantas Justo. All rights reserved.
# Use of this source code is governed by a GPL
# license that can be found in the LICENSE file.
import getopt
import sys
import subprocess
import urllib.request
class NS:
def __init__(self):
self.name = ""
self.type = "NS"
self.namserver = ""
def __str__(self):
return "{} {} {}".format(self.name, self.type, self.namserver)
class DS:
def __init__(self):
self.name = ""
self.type = "DS"
self.keytag = 0
self.algorithm = 0
self.digestType = 0
self.digest = ""
def __str__(self):
return "{} {} {} {} {} {}".format(self.name, self.type, self.keytag,
self.algorithm, self.digestType, self.digest)
class A:
def __init__(self):
self.name = ""
self.type = "A"
self.address = ""
def __str__(self):
return "{} {} {}".format(self.name, self.type, self.address)
class AAAA:
def __init__(self):
self.name = ""
self.type = "AAAA"
self.address = ""
def __str__(self):
return "{} {} {}".format(self.name, self.type, self.address)
def retrieveData(url):
response = urllib.request.urlopen(url)
data = response.read()
response.close()
return data.decode()
def buildZone(data):
zone = []
for line in data.split("\n"):
lineParts = line.split()
if len(lineParts) < 4:
print(line)
continue
if lineParts[3] == "NS" and len(lineParts) == 5:
ns = NS()
ns.name = lineParts[0]
            ns.nameserver = lineParts[4]
zone.append(ns)
elif lineParts[3] == "A" and len(lineParts) == 5:
a = A()
a.name = lineParts[0]
a.address = lineParts[4]
zone.append(a)
elif lineParts[3] == "AAAA" and len(lineParts) == 5:
aaaa = AAAA()
aaaa.name = lineParts[0]
aaaa.address = lineParts[4]
zone.append(aaaa)
elif lineParts[3] == "DS" and len(lineParts) == 8:
ds = DS()
ds.name = lineParts[0]
ds.keytag = int(lineParts[4])
ds.algorithm = int(lineParts[5])
ds.digestType = int(lineParts[6])
ds.digest = lineParts[7]
zone.append(ds)
return zone
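# Expected shape of a root.zone line that buildZone parses (illustrative):
#
#   com.  172800  IN  NS  a.gtld-servers.net.
#
# which splits into ['com.', '172800', 'IN', 'NS', 'a.gtld-servers.net.'],
# so lineParts[3] is the record type and lineParts[4:] holds the rdata.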
def writeZone(zone, outputPath):
output = open(outputPath, "w")
for rr in zone:
print(str(rr), file=output)
output.close()
###################################################################
defaultURL = "http://www.internic.net/domain/root.zone"
defaultOutput = "scan_querier.input"
def usage():
print("")
print("Usage: " + sys.argv[0] + " [-h|--help] [-u|--url] [-o|--output]")
print(" Where -h or --help is for showing this usage")
print(" -u or --url is the URL of the source file")
print(" -o or --output is the path where the Go code will written")
def main(argv):
try:
opts, args = getopt.getopt(argv, "u:o:", ["url", "output"])
except getopt.GetoptError as err:
print(str(err))
usage()
sys.exit(1)
url = ""
outputPath = ""
for key, value in opts:
if key in ("-u", "--url"):
url = value
elif key in ("-o", "--output"):
outputPath = value
elif key in ("-h", "--help"):
usage()
sys.exit(0)
if len(url) == 0:
url = defaultURL
if len(outputPath) == 0:
outputPath = defaultOutput
try:
data = retrieveData(url)
rootZone = buildZone(data)
writeZone(rootZone, outputPath)
except KeyboardInterrupt:
sys.exit(1)
if __name__ == "__main__":
main(sys.argv[1:])
|
rafaeljusto/shelter
|
testing/scan_querier/scan_querier.input.py
|
Python
|
gpl-2.0
| 3,451 | 0.019994 |
__author__ = 'Christof Pieloth'
import logging
from packbacker.errors import ParameterError
from packbacker.installers import installer_prototypes
from packbacker.utils import UtilsUI
class Job(object):
log = logging.getLogger(__name__)
def __init__(self):
self._installers = []
def add_installer(self, installer):
self._installers.append(installer)
def execute(self):
errors = 0
for i in self._installers:
if not UtilsUI.ask_for_execute('Install ' + i.label):
continue
try:
if i.install():
Job.log.info(i.name + ' executed.')
else:
errors += 1
Job.log.error('Error on executing ' + i.name + '!')
except Exception as ex:
errors += 1
Job.log.error('Unknown error:\n' + str(ex))
return errors
@staticmethod
def read_job(fname):
prototypes = []
prototypes.extend(installer_prototypes())
job = None
try:
job_file = open(fname, 'r')
except IOError as err:
Job.log.critical('Error on reading job file:\n' + str(err))
else:
with job_file:
job = Job()
for line in job_file:
if line[0] == '#':
continue
for p in prototypes:
if p.matches(line):
try:
params = Job.read_parameter(line)
cmd = p.instance(params)
job.add_installer(cmd)
except ParameterError as err:
Job.log.error("Installer '" + p.name + "' is skipped: " + str(err))
except Exception as ex:
Job.log.critical('Unknown error: \n' + str(ex))
continue
return job
@staticmethod
def read_parameter(line):
params = {}
i = line.find(': ') + 2
line = line[i:]
pairs = line.split(';')
for pair in pairs:
pair = pair.strip()
par = pair.split('=')
if len(par) == 2:
params[par[0]] = par[1]
return params
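# Illustrative job-file line that read_parameter understands (installer name
# and keys are hypothetical):
#
#   installer_x: dest_dir=/opt/x;version=1.2
#
# yields {'dest_dir': '/opt/x', 'version': '1.2'}.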
|
cpieloth/CppMath
|
tools/PackBacker/packbacker/job.py
|
Python
|
apache-2.0
| 2,389 | 0.000837 |
import json
import pickle
import numpy as np
import pandas as pd
import datatables.traveltime
def write_model(baserate, model_file):
"""
Write model to file
baserate -- average travel time
output_file -- file
"""
model_params = {
'baserate': baserate
}
model_str = json.dumps(model_params)
with open(model_file, 'w') as out_f:
out_f.write(model_str)
def load_model(model_file):
"""
Load linear model from file
model_file -- file
returns -- baserate
"""
with open(model_file, 'r') as model_f:
model_str = model_f.read()
model_params = json.loads(model_str)
return model_params['baserate']
def train(train_data_file, model_file):
data = datatables.traveltime.read_xs(train_data_file)
y = data['y'].values # travel times
# use mean value as baserate prediction
baserate = np.mean(y)
write_model(baserate, model_file)
def predict(model_file, test_xs_file, output_file):
baserate = load_model(model_file)
data = datatables.traveltime.read_xs(test_xs_file)
num_rows = data.shape[0]
# predict constant baserate for every row
y_pred = np.full(num_rows, baserate)
data['pred'] = y_pred
datatables.traveltime.write_pred(data, output_file)
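# Illustrative end-to-end flow (file paths are assumptions):
#
#   train('data/train.csv', 'model.json')                  # stores the mean travel time
#   predict('model.json', 'data/test_xs.csv', 'pred.csv')  # constant baserate prediction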
|
anjsimmo/simple-ml-pipeline
|
learners/traveltime_baserate.py
|
Python
|
mit
| 1,292 | 0.00387 |
import logging
import tmlib.models as tm
class SubmissionManager(object):
'''Mixin class for submission and monitoring of computational tasks.'''
def __init__(self, experiment_id, program_name):
'''
Parameters
----------
experiment_id: int
ID of the processed experiment
program_name: str
name of the submitting program
'''
self.experiment_id = experiment_id
self.program_name = program_name
def register_submission(self, user_id=None):
'''Creates a database entry in the "submissions" table.
Parameters
----------
user_id: int, optional
ID of submitting user (if not the user who owns the experiment)
Returns
-------
Tuple[int, str]
ID of the submission and the name of the submitting user
Warning
-------
Ensure that the "submissions" table get updated once the jobs
were submitted, i.e. added to a running `GC3Pie` engine.
To this end, use the ::meth:`tmlib.workflow.api.update_submission`
method.
See also
--------
:class:`tmlib.models.submission.Submission`
'''
with tm.utils.MainSession() as session:
if user_id is None:
experiment = session.query(tm.ExperimentReference).\
get(self.experiment_id)
user_id = experiment.user_id
submission = tm.Submission(
experiment_id=self.experiment_id, program=self.program_name,
user_id=user_id
)
session.add(submission)
session.commit()
return (submission.id, submission.user.name)
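# Hypothetical usage sketch (the program name is a placeholder):
#
#   manager = SubmissionManager(experiment_id=1, program_name='workflow')
#   submission_id, username = manager.register_submission()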
|
TissueMAPS/TmLibrary
|
tmlib/submission.py
|
Python
|
agpl-3.0
| 1,761 | 0.000568 |
#!/usr/bin/env python3
#
# Copyright (c) 2015-2017 Nest Labs, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##
# @file
# Implements ReturnMsg class.
#
# ReturnMsg is used to return not only a numerical success/failure
# status, but also an arbitrary accompanying data structure.
#
class ReturnMsg:
def __init__(self, value=None, data=None):
self.value = value
if data is None:
self.data = None
elif isinstance(data, dict):
self.data = data.copy()
elif isinstance(data, list):
self.data = data[:]
else:
self.data = data
def Value(self, value=None):
if value is None:
return self.value
else:
            self.value = value
def Data(self, data=None):
if data is None:
return self.data
elif isinstance(data, dict):
self.data = data.copy()
elif isinstance(data, list):
self.data = data[:]
else:
self.data = data
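# Minimal usage sketch (illustrative values):
#
#   ret = ReturnMsg(True, data={'node': 'node01'})
#   if ret.Value():
#       info = ret.Data()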
|
openweave/happy
|
happy/ReturnMsg.py
|
Python
|
apache-2.0
| 1,604 | 0 |