| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
---|---|---|---|---|---|---|---|---|
roninek/python101 | docs/pygame/life/code1a.py | Python | mit | 5,039 | 0.001628 |
# coding=utf-8
import pygame
import pygame.locals
class Board(object):
"""
Plansza do gry. Odpowiada za rysowanie okna gry.
"""
def __init__(self, width, height):
"""
Konstruktor planszy do gry. Przygotowuje okienko gry.
:param width: szerokość w pikselach
:param height: wysokość w pikselach
"""
self.surface = pygame.display.set_mode((width, height), 0, 32)
pygame.display.set_caption('Game of life')
def draw(self, *args):
"""
Rysuje okno gry
:param args: lista obiektów do narysowania
"""
background = (0, 0, 0)
self.surface.fill(background)
for drawable in args:
drawable.draw_on(self.surface)
# dopiero w tym miejscu następuje fatyczne rysowanie
# w oknie gry, wcześniej tylko ustalaliśmy co i jak ma zostać narysowane
pygame.display.update()
class GameOfLife(object):
"""
Łączy wszystkie elementy gry w całość.
"""
def __init__(self, width, height, cell_size=10):
"""
Przygotowanie ustawień gry
:param width: szerokość planszy mierzona liczbą komórek
:param height: wysokość planszy mierzona liczbą komórek
:param cell_size: bok komórki w pikselach
"""
pygame.init()
self.board = Board(width * cell_size, height * cell_size)
# zegar którego użyjemy do kontrolowania szybkości rysowania
# kolejnych klatek gry
self.fps_clock = pygame.time.Clock()
def run(self):
"""
Główna pętla gry
"""
while not self.handle_events():
# działaj w pętli do momentu otrzymania sygnału do wyjścia
self.board.draw()
self.fps_clock.tick(15)
def handle_events(self):
"""
Obsługa zdarzeń systemowych, tutaj zinterpretujemy np. ruchy myszką
:return True jeżeli pygame przekazał zdarzenie wyjścia z gry
"""
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
pygame.quit()
return True
# magic numbers used to tell whether a cell is alive
DEAD = 0
ALIVE = 1
class Population(object):
"""
Populacja komórek
"""
def __init__(self, width, height, cell_size=10):
"""
Przygotowuje ustawienia populacji
:param width: szerokość planszy mierzona liczbą komórek
:param height: wysokość planszy mierzona liczbą komórek
:param cell_size: bok komórki w pikselach
"""
self.box_size = cell_size
self.height = height
self.width = width
self.generation = self.reset_generation()
def reset_generation(self):
"""
        Creates and returns an empty population matrix.
        """
        # fill the list with columns in a loop; each column is in turn
        # filled with the value 0 (DEAD)
        return [[DEAD for y in range(self.height)] for x in range(self.width)]
def handle_mouse(self):
        # read the mouse button state using a pygame helper
        buttons = pygame.mouse.get_pressed()
        if not any(buttons):
            # ignore the event if no button is pressed
            return
        # add a live cell if the first mouse button is pressed; this way we can
        # not only add live cells but also remove them
        alive = bool(buttons[0])
        # get the cursor position on the board, measured in pixels
        x, y = pygame.mouse.get_pos()
        # convert pixel coordinates into cell coordinates in the matrix; the player
        # can click anywhere within a box_size-wide square to pick a cell
        x //= self.box_size
        y //= self.box_size
        # set the cell state in the matrix
        self.generation[x][y] = ALIVE if alive else DEAD
    def draw_on(self, surface):
        """
        Draws the cells on the board.
        """
        for x, y in self.alive_cells():
            size = (self.box_size, self.box_size)
            position = (x * self.box_size, y * self.box_size)
            color = (255, 255, 255)
            thickness = 1
            pygame.draw.rect(surface, color, pygame.locals.Rect(position, size), thickness)
def alive_cells(self):
"""
Generator zwracający współrzędne żywych komórek.
"""
for x in range(len(self.generation)):
column = self.generation[x]
for y in range(len(column)):
if column[y] == ALIVE:
# jeśli komórka jest żywa zwrócimy jej współrzędne
yield x, y
# This part should always sit at the end of the module (this file is a module):
# we want to start the game only after all the classes have been declared.
if __name__ == "__main__":
game = GameOfLife(80, 40)
game.run()
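Note that Population is defined above but never wired into GameOfLife at this step of the tutorial (board.draw() is called with no arguments). Below is a minimal sketch of how the pieces could plug together; the subclass name and the wiring are assumptions for illustration, not part of the original file:

# Illustrative only: wiring Population into the game loop (assumed, not original code).
class GameOfLifeWithCells(GameOfLife):
    def __init__(self, width, height, cell_size=10):
        super(GameOfLifeWithCells, self).__init__(width, height, cell_size)
        self.population = Population(width, height, cell_size)

    def run(self):
        while not self.handle_events():
            self.board.draw(self.population)  # Board.draw() forwards to draw_on()
            self.population.handle_mouse()    # toggle cells with the mouse
            self.fps_clock.tick(15)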
getsentry/zeus | zeus/migrations/340d5cc7e806_index_artifacts.py | Python | apache-2.0 | 705 | 0.001418 |
"""index_artifacts
Revision ID: 340d5cc7e806
Revises: af3f4bdc27d1
Create Date: 2019-08-09 12:37:50.706914
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "340d5cc7e806"
down_revision = "af3f4bdc27d1"
branch_labels = ()
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_index(
"idx_artifact_job", "artifact", ["repository_id", "job_id"], unique=False
)
# ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("idx_artifact_job", table_name="artifact")
# ### end Alembic commands ###
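A migration like this is normally applied with the Alembic CLI; as a sketch, the same upgrade can also be driven from Python (the "alembic.ini" path is an assumption about the project layout):

# Illustrative: applying this revision programmatically; config path is assumed.
from alembic.config import Config
from alembic import command

cfg = Config("alembic.ini")           # project's Alembic configuration (assumed path)
command.upgrade(cfg, "340d5cc7e806")  # runs upgrade() up to this revision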
myt00seven/svrg | para_gpu/tools.py | Python | mit | 2,391 | 0.000836 |
import os
import numpy as np
def save_weights(layers, weights_dir, epoch):
for idx in range(len(layers)):
if hasattr(layers[idx], 'W'):
layers[idx].W.save_weight(
weights_dir, 'W' + '_' + str(idx) + '_' + str(epoch))
if hasattr(layers[idx], 'W0'):
layers[idx].W0.save_weight(
weights_dir, 'W0' + '_' + str(idx) + '_' + str(epoch))
if hasattr(layers[idx], 'W1'):
layers[idx].W1.save_weight(
weights_dir, 'W1' + '_' + str(idx) + '_' + str(epoch))
if hasattr(layers[idx], 'b'):
layers[idx].b.save_weight(
                weights_dir, 'b' + '_' + str(idx) + '_' + str(epoch))
if hasattr(layers[idx], 'b0'):
layers[idx].b0.save_weight(
weights_dir, 'b0' + '_' + str(idx) + '_' + str(epoch))
if hasattr(layers[idx], 'b1'):
layers[idx].b1.save_weight(
weights_dir, 'b1' + '_' + str(idx) + '_' + str(epoch))
def load_weights(layers, weights_dir, epoch):
for idx in range(len(layers)):
if hasattr(layers[idx], 'W'):
layers[idx].W.load_weight(
weights_dir, 'W' + '_' + str(idx) + '_' + str(epoch))
if hasattr(layers[idx], 'W0'):
layers[idx].W0.load_weight(
weights_dir, 'W0' + '_' + str(idx) + '_' + str(epoch))
if hasattr(layers[idx], 'W1'):
layers[idx].W1.load_weight(
weights_dir, 'W1' + '_' + str(idx) + '_' + str(epoch))
if hasattr(layers[idx], 'b'):
layers[idx].b.load_weight(
weights_dir, 'b' + '_' + str(idx) + '_' + str(epoch))
if hasattr(layers[idx], 'b0'):
layers[idx].b0.load_weight(
weights_dir, 'b0' + '_' + str(idx) + '_' + str(epoch))
if hasattr(layers[idx], 'b1'):
layers[idx].b1.load_weight(
weights_dir, 'b1' + '_' + str(idx) + '_' + str(epoch))
def save_momentums(vels, weights_dir, epoch):
for ind in range(len(vels)):
np.save(os.path.join(weights_dir, 'mom_' + str(ind) + '_' + str(epoch)),
vels[ind].get_value())
def load_momentums(vels, weights_dir, epoch):
for ind in range(len(vels)):
vels[ind].set_value(np.load(os.path.join(
weights_dir, 'mom_' + str(ind) + '_' + str(epoch) + '.npy')))
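save_weights()/load_weights() only assume that each layer attribute exposes save_weight()/load_weight(); a minimal stand-in satisfying that protocol (NumpyWeight is hypothetical, not from this repo):

# Hypothetical weight wrapper implementing the save_weight/load_weight protocol.
import os
import numpy as np

class NumpyWeight(object):
    def __init__(self, array):
        self.val = np.asarray(array)

    def save_weight(self, weights_dir, name):
        np.save(os.path.join(weights_dir, name + '.npy'), self.val)

    def load_weight(self, weights_dir, name):
        self.val = np.load(os.path.join(weights_dir, name + '.npy'))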
gusDuarte/software-center-5.2 | softwarecenter/ui/gtk3/review_gui_helper.py | Python | lgpl-3.0 | 55,851 | 0.00222 |
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Canonical
#
# Authors:
# Michael Vogt
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import GObject, Gtk, Gdk
import datetime
import gettext
import logging
import os
import json
import sys
import tempfile
import time
import threading
# py3
try:
from urllib.request import urlopen
urlopen # pyflakes
from queue import Queue
Queue # pyflakes
except ImportError:
# py2 fallbacks
from urllib import urlopen
from Queue import Queue
from gettext import gettext as _
from softwarecenter.backend.ubuntusso import get_ubuntu_sso_backend
import piston_mini_client
from softwarecenter.paths import SOFTWARE_CENTER_CONFIG_DIR
from softwarecenter.enums import Icons, SOFTWARE_CENTER_NAME_KEYRING
from softwarecenter.config import get_config
from softwarecenter.distro import get_distro, get_current_arch
from softwarecenter.backend.login_sso import get_sso_backend
from softwarecenter.backend.reviews import Review
from softwarecenter.db.database import Application
from softwarecenter.gwibber_helper import GwibberHelper, GwibberHelperMock
from softwarecenter.i18n import get_language
from softwarecenter.ui.gtk3.SimpleGtkbuilderApp import SimpleGtkbuilderApp
from softwarecenter.ui.gtk3.dialogs import SimpleGtkbuilderDialog
from softwarecenter.ui.gtk3.widgets.stars import ReactiveStar
from softwarecenter.utils import make_string_from_list, utf8
from softwarecenter.backend.piston.rnrclient import RatingsAndReviewsAPI
from softwarecenter.backend.piston.rnrclient_pristine import ReviewRequest
# get current distro and set default server root
distro = get_distro()
SERVER_ROOT = distro.REVIEWS_SERVER
# server status URL
SERVER_STATUS_URL = SERVER_ROOT + "/server-status/"
class UserCancelException(Exception):
""" user pressed cancel """
pass
TRANSMIT_STATE_NONE = "transmit-state-none"
TRANSMIT_STATE_INPROGRESS = "transmit-state-inprogress"
TRANSMIT_STATE_DONE = "transmit-state-done"
TRANSMIT_STATE_ERROR = "transmit-state-error"
class GRatingsAndReviews(GObject.GObject):
""" Access ratings&reviews API as a gobject """
__gsignals__ = {
# send when a transmit is started
"transmit-start": (GObject.SIGNAL_RUN_LAST,
GObject.TYPE_NONE,
(GObject.TYPE_PYOBJECT, ),
),
# send when a transmit was successful
"transmit-success": (GObject.SIGNAL_RUN_L
|
AST,
GObject.TYPE_NONE,
(GObject.TYPE_PYOBJECT, ),
),
# send when a transmit failed
"transmit-failure": (GObject.SIGNAL_RUN_LAST,
GObject.TYPE_NONE,
(GObject.TYPE_PYOBJECT, str),
),
}
def __init__(self, token):
super(GRatingsAndReviews, self).__init__()
# piston worker thread
self.worker_thread = Worker(token)
self.worker_thread.start()
GObject.timeout_add(500,
self._check_thread_status,
None)
def submit_review(self, review):
self.emit("transmit-start", review)
self.worker_thread.pending_reviews.put(review)
def report_abuse(self, review_id, summary, text):
self.emit("transmit-start", review_id)
self.worker_thread.pending_reports.put((int(review_id), summary, text))
def submit_usefulness(self, review_id, is_useful):
self.emit("transmit-start", review_id)
self.worker_thread.pending_usefulness.put((int(review_id), is_useful))
def modify_review(self, review_id, review):
self.emit("transmit-start", review_id)
self.worker_thread.pending_modify.put((int(review_id), review))
def delete_review(self, review_id):
self.emit("transmit-start", review_id)
self.worker_thread.pending_delete.put(int(review_id))
def server_status(self):
self.worker_thread.pending_server_status()
def shutdown(self):
self.worker_thread.shutdown()
# internal
def _check_thread_status(self, data):
if self.worker_thread._transmit_state == TRANSMIT_STATE_DONE:
self.emit("transmit-success", "")
self.worker_thread._transmit_state = TRANSMIT_STATE_NONE
elif self.worker_thread._transmit_state == TRANSMIT_STATE_ERROR:
self.emit("transmit-failure", "",
self.worker_thread._transmit_error_str)
self.worker_thread._transmit_state = TRANSMIT_STATE_NONE
return True
class Worker(threading.Thread):
def __init__(self, token):
# init parent
threading.Thread.__init__(self)
self.pending_reviews = Queue()
self.pending_reports = Queue()
self.pending_usefulness = Queue()
self.pending_modify = Queue()
self.pending_delete = Queue()
self.pending_server_status = Queue()
self._shutdown = False
# FIXME: instead of a binary value we need the state associated
# with each request from the queue
self._transmit_state = TRANSMIT_STATE_NONE
self._transmit_error_str = ""
self.display_name = "No display name"
auth = piston_mini_client.auth.OAuthAuthorizer(token["token"],
token["token_secret"], token["consumer_key"],
token["consumer_secret"])
# change default server to the SSL one
distro = get_distro()
service_root = distro.REVIEWS_SERVER
self.rnrclient = RatingsAndReviewsAPI(service_root=service_root,
auth=auth)
def run(self):
"""Main thread run interface, logs into launchpad and waits
for commands
"""
logging.debug("worker thread run")
# loop
self._wait_for_commands()
def shutdown(self):
"""Request shutdown"""
self._shutdown = True
def _wait_for_commands(self):
"""internal helper that waits for commands"""
while True:
#logging.debug("worker: _wait_for_commands")
self._submit_reviews_if_pending()
self._submit_reports_if_pending()
self._submit_usefulness_if_pending()
self._submit_modify_if_pending()
self._submit_delete_if_pending()
time.sleep(0.2)
if (self._shutdown and
self.pending_reviews.empty() and
self.pending_usefulness.empty() and
self.pending_reports.empty() and
self.pending_modify.empty() and
self.pending_delete.empty()):
return
def _submit_usefulness_if_pending(self):
""" the actual usefulness function """
while not self.pending_usefulness.empty():
logging.debug("POST usefulness")
self._transmit_state = TRANSMIT_STATE_INPROGRESS
(review_id, is_useful) = self.pending_usefulness.get()
try:
res = self.rnrclient.submit_usefulness(
review_id=review_id, useful=str(is_useful))
self._transmit_state = TRANSMIT_STATE_DONE
sys.stdout.write(json.dumps(res))
except Exception as e:
logging.exception("submit_usefulness failed")
err_str = self._get_error_messages(e)
self._transmit_error_str = err_str
self._write_exception_html_log_if_needed(e)
lgritz/OpenShadingLanguage | testsuite/regex-reg/run.py | Python | bsd-3-clause | 2,682 | 0.011186 |
#!/usr/bin/env python
# Copyright Contributors to the Open Shading Language project.
# SPDX-License-Identifier: BSD-3-Clause
# https://github.com/AcademySoftwareFoundation/OpenShadingLanguage
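# Note: run.py scripts in the OSL testsuite are executed by the test harness,
# which (as far as this excerpt shows) supplies the 'command' string being
# appended to and the testshade() helper returning a shell command line, and
# consumes the 'outputs'/'failthresh'/'failpercent' variables set at the bottom.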
##################
#Uniform result
##################
#Uniform subject, uniform pattern#
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_u_pattern -o cout uu_out.tif")
#Uniform subject, varying pattern#
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_v_pattern -o cout uv_out.tif")
#Varying subject, uniform pattern#
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_u_pattern -o cout vu_out.tif")
#Varying subject, varying pattern#
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_v_pattern -o cout vv_out.tif")
##################
#Varying result
##################
#Uniform subject, uniform pattern#
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_u_pattern_vr -o cout uu_vr_out.tif")
#Uniform subject, varying pattern#
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_v_pattern_vr -o cout uv_vr_out.tif")
#Varying subject, uniform pattern#
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_u_pattern_vr -o cout vu_vr_out.tif")
#Varying subject, varying pattern#
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_v_pattern_vr -o cout vv_vr_out.tif")
##########################################
#Uniform result array
##########################################
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_u_pattern_ura -o cout uu_ura_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_v_pattern_ura -o cout uv_ura_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_u_pattern_ura -o cout vu_ura_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_v_pattern_ura -o cout vv_ura_out.tif")
##########################################
#Varying result array
##########################################
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_u_pattern_vra -o cout uu_vra_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_v_pattern_vra -o cout uv_vra_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_u_pattern_vra -o cout vu_vra_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_v_pattern_vra -o cout vv_vra_out.tif")
outputs = [
"uu_out.tif",
"uv_out.tif",
"vu_out.tif",
"vv_out.tif",
"uu_vr_out.tif",
"uv_vr_out.tif",
"vu_vr_out.tif",
"vv_vr_out.tif",
"uu_ura_out.tif",
"uv_ura_out.tif",
"vu_ura_out.tif",
"vv_ura_out.tif",
"uu_vra_out.tif",
"uv_vra_out.tif",
"vu_vra_out.tif",
"vv_vra_out.tif",
]
# expect a few LSB failures
failthresh = 0.008
failpercent = 3
piksels-and-lines-orchestra/inkscape | share/extensions/grid_cartesian.py | Python | gpl-2.0 | 14,264 | 0.01998 |
#!/usr/bin/env python
'''
Copyright (C) 2007 John Beard john.j.beard@gmail.com
##This extension allows you to draw a Cartesian grid in Inkscape.
##There is a wide range of options including subdivision, subsubdivions
## and logarithmic scales. Custom line widths are also possible.
##All elements are grouped with similar elements (eg all x-subdivs)
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
'''
import inkex
import simplestyle, sys
from math import *
def draw_SVG_line(x1, y1, x2, y2, width, name, parent):
    style = { 'stroke': '#000000', 'stroke-width':str(width), 'fill': 'none' }
line_attribs = {'style':simplestyle.formatStyle(style),
inkex.addNS('label','inkscape'):name,
'd':'M '+str(x1)+','+str(y1)+' L '+str(x2)+','+str(y2)}
inkex.etree.SubElement(parent, inkex.addNS('path','svg'), line_attribs )
def draw_SVG_rect(x,y,w,h, width, fill, name, parent):
style = { 'stroke': '#000000', 'stroke-width':str(width), 'fill':fill}
rect_attribs = {'style':simplestyle.formatStyle(style),
inkex.addNS('label','inkscape'):name,
'x':str(x), 'y':str(y), 'width':str(w), 'height':str(h)}
inkex.etree.SubElement(parent, inkex.addNS('rect','svg'), rect_attribs )
class Grid_Polar(inkex.Effect):
def __init__(self):
inkex.Effect.__init__(self)
self.OptionParser.add_option("--x_divs",
action="store", type="int",
dest="x_divs", default=5,
help="Major X Divisions")
self.OptionParser.add_option("--dx",
action="store", type="float",
dest="dx", default=100.0,
help="Major X divison Spacing")
self.OptionParser.add_option("--x_subdivs",
action="store", type="int",
dest="x_subdivs", default=2,
help="Subdivisions per Major X division")
self.OptionParser.add_option("--x_log",
action="store", type="inkbool",
dest="x_log", default=False,
help="Logarithmic x subdivisions if true")
self.OptionParser.add_option("--x_subsubdivs",
action="store", type="int",
dest="x_subsubdivs", default=5,
help="Subsubdivisions per Minor X division")
self.OptionParser.add_option("--x_half_freq",
action="store", type="int",
dest="x_half_freq", default=4,
help="Halve Subsubdiv. Frequency after 'n' Subdivs. (log only)")
self.OptionParser.add_option("--x_divs_th",
action="store", type="float",
dest="x_divs_th", default=2,
help="Major X Division Line thickness")
self.OptionParser.add_option("--x_subdivs_th",
action="store", type="float",
dest="x_subdivs_th", default=1,
help="Minor X Division Line thickness")
self.OptionParser.add_option("--x_subsubdivs_th",
action="store", type="float",
dest="x_subsubdivs_th", default=1,
help="Subminor X Division Line thickness")
self.OptionParser.add_option("--y_divs",
action="store", type="int",
dest="y_divs", default=6,
help="Major Y Divisions")
self.OptionParser.add_option("--dy",
action="store", type="float",
dest="dy", default=100.0,
help="Major Gridline Increment")
self.OptionParser.add_option("--y_subdivs",
action="store", type="int",
dest="y_subdivs", default=2,
help="Minor Divisions per Major Y division")
self.OptionParser.add_option("--y_log",
action="store", type="inkbool",
dest="y_log", default=False,
help="Logarithmic y subdivisions if true")
self.OptionParser.add_option("--y_subsubdivs",
action="store", type="int",
dest="y_subsubdivs", default=5,
help="Subsubdivisions per Minor Y division")
self.OptionParser.add_option("--y_half_freq",
action="store", type="int",
dest="y_half_freq", default=4,
help="Halve Y Subsubdiv. Frequency after 'n' Subdivs. (log only)")
self.OptionParser.add_option("--y_divs_th",
action="store", type="float",
dest="y_divs_th", default=2,
help="Major Y Division Line thickness")
self.OptionParser.add_option("--y_subdivs_th",
action="store", type="float",
dest="y_subdivs_th", default=1,
help="Minor Y Division Line thickness")
self.OptionParser.add_option("--y_subsubdivs_th",
action="store", type="float",
dest="y_subsubdivs_th", default=1,
help="Subminor Y Division Line thickness")
self.OptionParser.add_option("--border_th",
action="store", type="float",
dest="border_th", default=3,
help="Border Line thickness")
def effect(self):
#find the pixel dimensions of the overall grid
ymax = self.options.dy * self.options.y_divs
xmax = self.options.dx * self.options.x_divs
# Embed grid in group
#Put in in the centre of the current view
t = 'translate(' + str( self.view_center[0]- xmax/2.0) + ',' + \
str( self.view_center[1]- ymax/2.0) + ')'
g_attribs = {inkex.addNS('label','inkscape'):'Grid_Polar:X' + \
str( self.options.x_divs )+':Y'+str( self.options.y_divs ),
'transform':t }
grid = inkex.etree.SubElement(self.current_layer, 'g', g_attribs)
#Group for major x gridlines
g_attribs = {inkex.addNS('label','inkscape'):'MajorXGridlines'}
majglx = inkex.etree.SubElement(grid, 'g', g_attribs)
#Group for major y gridlines
g_attribs = {inkex.addNS('label','inkscape'):'MajorYGridlines'}
majgly = inkex.etree.SubElement(grid, 'g', g_attribs)
#Group for minor x gridlines
if self.options.x_subdivs > 1:#if there are any minor x gridlines
g_attribs = {inkex.addNS('label','inkscape'):'MinorXGridlines'}
minglx = inkex.etree.SubElement(grid, 'g', g_attribs)
#Group for subminor x gridlines
if self.options.x_subsubdivs > 1:#if there are any minor minor x gridlines
g_attribs = {inkex.addNS('label','inkscape'):'SubMinorXGridlines'}
mminglx = inkex.etree.SubElement(grid, 'g', g_attribs)
#Group for minor y gridlines
if self.options.y_subdivs > 1:#if there are any minor y gridlines
g_attribs = {inkex.addNS('label','inkscape'):'MinorYGridlines'}
mingly = inkex.etree.SubElement(grid, 'g', g_attribs)
#Group for subminor y gridlines
slashdd/sos | sos/report/plugins/docker_distribution.py | Python | gpl-2.0 | 1,334 | 0 |
# Copyright (C) 2017 Red Hat, Inc. Jake Hunsaker <jhunsake@redhat.com>
# This file is part of the sos project: https://github.com/sosreport/sos
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# version 2 of the GNU General Public License.
#
# See the LICENSE file in the source distribution for further information.
from sos.report.plugins import Plugin, RedHatPlugin
class DockerDistribution(Plugin):
short_desc = 'Docker Distribution'
plugin_name = "docker_distribution"
profiles = ('container',)
def setup(self):
self.add_copy_spec('/etc/docker-distribution/')
self.add_journal('docker-distribution')
conf = self.path_join('/etc/docker-distribution/registry/config.yml')
if self.path_exists(conf):
with open(conf) as f:
for line in f:
if 'rootdirectory' in line:
loc = line.split()[1]
self.add_cmd_output('tree ' + loc)
class RedHatDockerDistribution(DockerDistribution, RedHatPlugin):
packages = ('docker-distribution',)
def setup(self):
self.add_forbidden_path('/etc/docker-distribution/registry/*passwd')
super(RedHatDockerDistribution, self).setup()
KingSpork/sporklib | algorithms/binarySearch.py | Python | unlicense | 670 | 0.055224 |
def binarySearch(someList, target):
lo = 0
hi = len(someList)
while lo+1 < hi:
        test = (lo + hi) // 2
if someList[test] > target:
hi = test
else:
lo = test
    if someList and someList[lo] == target:
return lo
else:
return -1
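A quick illustrative check of the search above (values chosen arbitrarily):

# Returns the index of a present value, -1 for a missing one.
assert binarySearch([1, 3, 5, 7], 5) == 2
assert binarySearch([1, 3, 5, 7], 4) == -1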
import random
def quickSort(someList):
listSize = len(someList)
if len(someList) == 0:
return []
less = []
greater = []
    pivot = someList.pop(random.randint(0, listSize-1))
for element in someList:
if element <= pivot:
less.append(element)
else:
greater.append(element)
retList = quickSort(less) + [pivot] + quickSort(greater)
#print("Return list:");print(retList)
    return retList
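Worth noting: quickSort() pops the pivot out of its argument, so the caller's list is mutated. An illustrative call:

# The returned list is sorted; the input list loses its pivot element via pop().
data = [3, 1, 2]
assert quickSort(data) == [1, 2, 3]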
danieka/churchplanner | planner/functional_tests.py | Python | gpl-3.0 | 1,831 | 0.03337 |
from selenium import webdriver
from django.test import LiveServerTestCase, TestCase
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
import datetime
from planner.models import Participation, Event, Occurrence, EventType, Role
from django.contrib.auth.models import User
import pytz
import time
import unittest
tz = pytz.timezone("Europe/Stockholm")
def event(date):
e = Event.objects.create(title="TestEvent", event_type=EventType.objects.get(name="Gudstjänst"))
e.event = Occurrence.objects.create(start_time = tz.localize(date))
Participation.objects.create(user = User.objects.get(pk=2), event = e, attending = "true", role = Role.objects.get(name = "Mötesledare"))
Participation.objects.create(user = User.objects.get(pk=3), event = e, attending = "null", role = Role.objects.get(name = "Textläsare"))
e.save()
def login(browser):
browser.find_element_by_id('id_username').send_keys("admin")
browser.find_element_by_id('id_password').send_keys("1234")
browser.find_element_by_id('id_submit').click()
class BasicTest(StaticLiveServerTestCase):
fixtures = ["fixture1.json"]
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_login(self):
self.browser.get(self.live_server_url)
assert "Planering" in self.browser.title
login(self.browser)
menu = self.browser.find_element_by_id('main-menu').text
assert 'Nytt evenemang' in menu
assert 'Tabellvy' in menu
def test_event_is_displayed(self):
event(datetime.datetime.now() + datetime.timedelta(days = 17))
self.browser.get(self.live_server_url)
login(self.browser)
t = self.browser.find_element_by_id("table-scroll").text
time.sleep(10)
print(t)
        assert 'TestEvent' in t
if __name__ == '__main__':
    unittest.main()
Falkonry/falkonry-python-client | test/TestDatastream.py | Python | mit | 35,686 | 0.00737 |
import os
import unittest
import random
import xmlrunner
# Assumed imports (not visible in this excerpt): the names FClient, Schemas
# and exception_handler used below presumably come from the falkonryclient SDK;
# the exact module paths are an assumption.
from falkonryclient import client as FClient
from falkonryclient import schemas as Schemas
from falkonryclient.helper.utils import exception_handler
host = os.environ['FALKONRY_HOST_URL'] # host url
token = os.environ['FALKONRY_TOKEN'] # auth token
class TestDatastream(unittest.TestCase):
def setUp(self):
self.created_datastreams = []
self.fclient = FClient(host=host, token=token, options=None)
pass
# Create datastream without any signals
def test_create_standalone_datastream(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")
time.set_identifier("time")
time.set_format("iso_8601")
field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for narrow/historian style data from a single entity
def test_create_datastream_narrow_style_single_entity(self):
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
signal.set_valueIdentifier("value")
signal.set_signalIdentifier("signal")
field.set_signal(signal) # set signal in field
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
signalResponse = fieldResponse.get_signal()
self.assertEqual(signalResponse.get_valueIdentifier(),signal.get_valueIdentifier(), 'Invalid value identifier after object creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for narrow/historian style data from a multiple entities
def test_create_datastream_narrow_style_multiple_entity(self):
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
signal.set_signalIdentifier("signal") # set signal identifier
signal.set_valueIdentifier("value") # set value identifier
field.set_entityIdentifier("entity") # set entity identifier
field.set_signal(signal) # set signal in field
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityName(),None,'Invalid entity name object after creation')
signalResponse = fieldResponse.get_signal()
            self.assertEqual(signalResponse.get_valueIdentifier(),signal.get_valueIdentifier(), 'Invalid value identifier after object creation')
self.assertEqual(signalResponse.get_signalIdentifier(), signal.get_signalIdentifier(), 'Invalid signal identifier after object creation')
timeResponse = fieldResponse.get_time()
            self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
pelgoros/kwyjibo | kwyjibo/views.py | Python | gpl-3.0 | 4,918 | 0.005696 |
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.shortcuts import redirect, render
from django.views.generic import View
from django.views.generic.base import TemplateView
from django.utils.translation import ugettext_lazy as _
from mailing.models import Mail
from .forms import *
LENGTHPASSWORD = 8
REDIRECTADMIN = "/admin"
SUBJECTMAIL = _("New SignUp at Kwyjibo")
BODYMAIL = _("Welcome to Kwyjibo! You have signed up successfully. You can now acces to the platform with the user: {username}.")
SUBJECTMAILRECOVERY = _("Recupero de password en Jarvis")
BODYMAILRECOVERY = _("Has pedido un recupero de password en Jarvis. Aquí están tus nuevos datos de acceso: '%s'/'%s'.")
SUBJECTMAILCHANGE = _("Cambio de password en Jarvis")
BODYMAILCHANGE = _("Has cambiado el password en Jarvis. Ya puedes acceder con el usuario '%s'. El password ingresado no se envia por cuestiones de seguridad.")
class IndexView(LoginRequiredMixin, View):
def get(self, request, course_id = None):
user = request.user
course = None
if(Teacher.objects.filter(user_id=user.id)):
if course_id:
course = Course.objects.get(pk = course_id)
request.session["current_course_id"] = course.pk
request.session["current_course_name"] = course.name
return redirect('teachers:dashboard', course_id = course_id)
else:
if Course.objects.all().exists():
course = Course.objects.all().order_by('-name')[:1][0]
if course:
request.session["current_course_id"] = course.pk
request.session["current_course_name"] = course.name
return redirect('teachers:dashboard', course_id = course.pk)
return redirect('teachers:index')
elif(user.is_superuser):
return HttpResponseRedirect(REDIRECTADMIN)
elif(Student.objects.filter(user_id=user.id).exists()):
student = Student.objects.get(user_id=user.id)
if (student.shifts.all().count() == 1):
return redirect('students:assignments', course_id = student.shifts.all()[0].course.pk)
else:
return redirect('students:index')
else:
return render(request, 'index.html')
class SignUpView(View):
def get(self, request):
form = RegistrationForm()
return render(request, 'registration/register.html', {'form': form, })
def post(self, request):
form = RegistrationForm(request.POST)
if (form.is_valid()):
user = User()
user.username = form.data['username']
user.first_name = form.data['first_name']
user.last_name = form.data['last_name']
user.set_password(form.data['password'])
user.email = form.data['email']
user.save()
student = Student()
student.user = user
student.uid = form.data['username']
student.save()
if (Shift.objects.all().count() > 0):
shift = Shift.objects.get(pk=form.data['shifts']);
student.shifts.add(shift)
student.save()
mail = Mail()
mail.save_mail(SUBJECTMAIL, BODYMAIL.format(username = user.username), user.email)
return render(request, 'registration/registration-success.html')
return render(request, 'registration/register.html', {'form': form,})
class ChangePasswordView(View):
def get(self, request):
if not request.user.is_authenticated():
            return redirect('index')
form = ChangePasswordForm()
return render(request, 'registration/change_pass.html', {'form': form, })
def post(self, request):
if not request.user.is_authenticated():
            return redirect('index')
form = ChangePasswordForm(request.POST)
if form.is_valid():
data = form.cleaned_data
user = User.objects.get(pk = request.user.pk)
if user.check_password(data['current_password']):
                user.set_password(data['password'])
user.save()
else:
bad_password = True
return render(request, 'registration/change_password.html', {
'form': form,
'bad_password': bad_password
})
login(request, user)
return redirect('index')
        return render(request, 'registration/change_password.html', {'form': form, })
def logout_page(request):
"""
Log users out and re-direct them to the main page.
"""
logout(request)
return redirect('index')
acsone/Arelle | arelle/plugin/validate/ESEF/Const.py | Python | apache-2.0 | 19,152 | 0.00282 |
'''
Created on June 6, 2018
Filer Guidelines: esma32-60-254_esef_reporting_manual.pdf
@author: Workiva
(c) Copyright 2022 Workiva, All rights reserved.
'''
try:
import regex as re
except ImportError:
import re
from arelle.ModelValue import qname
from arelle.XbrlConst import all, notAll, hypercubeDimension, dimensionDomain, domainMember, dimensionDefault, widerNarrower
browserMaxBase64ImageLength = 5242880 # 5MB
esefTaxonomyNamespaceURIs = {
"http://xbrl.ifrs.org/taxonomy/20",
"http://xbrl.ifrs.org/taxonomy/20",
}
disallowedURIsPattern = re.compile(
"http://xbrl.ifrs.org/taxonomy/[0-9-]{10}/full_ifrs/
|
full_ifrs-cor_[0-9-]{10}[.]xsd|"
"http://www.esma.europa.eu/taxonomy/[0-9-]
|
{10}/esef_all.xsd"
)
DefaultDimensionLinkroles = ("http://www.esma.europa.eu/xbrl/role/cor/ifrs-dim_role-990000",)
LineItemsNotQualifiedLinkrole = "http://www.esma.europa.eu/xbrl/role/cor/esef_role-999999"
qnDomainItemTypes = {qname("{http://www.xbrl.org/dtr/type/non-numeric}nonnum:domainItemType"),
qname("{http://www.xbrl.org/dtr/type/2020-01-21}nonnum:domainItemType")}
linkbaseRefTypes = {
"http://www.xbrl.org/2003/role/calculationLinkbaseRef": "cal",
"http://www.xbrl.org/2003/role/definitionLinkbaseRef": "def",
"http://www.xbrl.org/2003/role/labelLinkbaseRef": "lab",
"http://www.xbrl.org/2003/role/presentationLinkbaseRef": "pre",
"http://www.xbrl.org/2003/role/referenceLinkbaseRef": "ref"
}
filenamePatterns = {
"cal": "{base}-{date}_cal.xml",
"def": "{base}-{date}_def.xml",
"lab": "{base}-{date}_lab-{lang}.xml",
"pre": "{base}-{date}_pre.xml",
"ref": "{base}-{date}_ref.xml"
}
filenameRegexes = {
"cal": r"(.{1,})-[0-9]{4}-[0-9]{2}-[0-9]{2}_cal[.]xml$",
"def": r"(.{1,})-[0-9]{4}-[0-9]{2}-[0-9]{2}_def[.]xml$",
"lab": r"(.{1,})-[0-9]{4}-[0-9]{2}-[0-9]{2}_lab-[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*[.]xml$",
"pre": r"(.{1,})-[0-9]{4}-[0-9]{2}-[0-9]{2}_pre[.]xml$",
"ref": r"(.{1,})-[0-9]{4}-[0-9]{2}-[0-9]{2}_ref[.]xml$"
}
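A quick sanity check of the patterns above (the filename is made up for illustration):

# Hypothetical label-linkbase filename; group(1) captures the {base} part.
m = re.match(filenameRegexes["lab"], "entity-2022-12-31_lab-en.xml")
assert m is not None and m.group(1) == "entity"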
mandatory = set() # mandatory element qnames
# hidden references
untransformableTypes = {"anyURI", "base64Binary", "hexBinary", "NOTATION", "QName", "time",
"token", "language"}
esefDefinitionArcroles = {
all, notAll, hypercubeDimension, dimensionDomain, domainMember, dimensionDefault,
widerNarrower
}
esefPrimaryStatementPlaceholderNames = (
# to be augmented with future IFRS releases as they come known, as well as further PFS placeholders
"StatementOfFinancialPositionAbstract",
"IncomeStatementAbstract",
"StatementOfComprehensiveIncomeAbstract",
"StatementOfCashFlowsAbstract",
"StatementOfChangesInEquityAbstract",
"StatementOfChangesInNetAssetsAvailableForBenefitsAbstract",
"StatementOfProfitOrLossAndOtherComprehensiveIncomeAbstract"
)
esefStatementsOfMonetaryDeclarationNames = {
# from Annex II para 1
"StatementOfFinancialPositionAbstract",
"StatementOfProfitOrLossAndOtherComprehensiveIncomeAbstract"
"StatementOfChangesInEquityAbstract",
"StatementOfCashFlowsAbstract",
}
esefMandatoryElementNames2020 = (
"NameOfReportingEntityOrOtherMeansOfIdentification",
"ExplanationOfChangeInNameOfReportingEntityOrOtherMeansOfIdentificationFromEndOfPrecedingReportingPeriod",
"DomicileOfEntity",
"LegalFormOfEntity",
"CountryOfIncorporation",
"AddressOfRegisteredOfficeOfEntity",
"PrincipalPlaceOfBusiness",
"DescriptionOfNatureOfEntitysOperationsAndPrincipalActivities",
"NameOfParentEntity",
"NameOfUltimateParentOfGroup"
)
esefMandatoryElementNames2022 = (
"AddressOfRegisteredOfficeOfEntity",
"CountryOfIncorporation",
"DescriptionOfAccountingPolicyForAvailableforsaleFinancialAssetsExplanatory",
"DescriptionOfAccountingPolicyForBiologicalAssetsExplanatory",
"DescriptionOfAccountingPolicyForBorrowingCostsExplanatory",
"DescriptionOfAccountingPolicyForBorrowingsExplanatory",
"DescriptionOfAccountingPolicyForBusinessCombinationsExplanatory",
"DescriptionOfAccountingPolicyForBusinessCombinationsAndGoodwillExplanatory",
"DescriptionOfAccountingPolicyForCashFlowsExplanatory",
"DescriptionOfAccountingPolicyForCollateralExplanatory",
"DescriptionOfAccountingPolicyForConstructionInProgressExplanatory",
"DescriptionOfAccountingPolicyForContingentLiabilitiesAndContingentAssetsExplanatory",
"DescriptionOfAccountingPolicyForCustomerAcquisitionCostsExplanatory",
"DescriptionOfAccountingPolicyForCustomerLoyaltyProgrammesExplanatory",
"DescriptionOfAccountingPolicyForDecommissioningRestorationAndRehabilitationProvisionsExplanatory",
"DescriptionOfAccountingPolicyForDeferredAcquisitionCostsArisingFromInsuranceContractsExplanatory",
"DescriptionOfAccountingPolicyForDeferredIncomeTaxExplanatory",
"DescriptionOfAccountingPolicyForDepreciationExpenseExplanatory",
"DescriptionOfAccountingPolicyForDerecognitionOfFinancialInstrumentsExplanatory",
"DescriptionOfAccountingPolicyForDerivativeFinancialInstrumentsExplanatory",
"DescriptionOfAccountingPolicyForDerivativeFinancialInstrumentsAndHedgingExplanatory",
"DescriptionOfAccountingPolicyToDetermineComponentsOfCashAndCashEquivalents",
"DescriptionOfAccountingPolicyForDiscontinuedOperationsExplanatory",
"DescriptionOfAccountingPolicyForDiscountsAndRebatesExplanatory",
"DescriptionOfAccountingPolicyForDividendsExplanatory",
"DescriptionOfAccountingPolicyForEarningsPerShareExplanatory",
"DescriptionOfAccountingPolicyForEmissionRightsExplanatory",
"DescriptionOfAccountingPolicyForEmployeeBenefitsExplanatory",
"DescriptionOfAccountingPolicyForEnvironmentRelatedExpenseExplanatory",
"DescriptionOfAccountingPolicyForExceptionalItemsExplanatory",
"DescriptionOfAccountingPolicyForExpensesExplanatory",
"DescriptionOfAccountingPolicyForExplorationAndEvaluationExpenditures",
"DescriptionOfAccountingPolicyForFairValueMeasurementExplanatory",
"DescriptionOfAccountingPolicyForFeeAndCommissionIncomeAndExpenseExplanatory",
"DescriptionOfAccountingPolicyForFinanceCostsExplanatory",
"DescriptionOfAccountingPolicyForFinanceIncomeAndCostsExplanatory",
"DescriptionOfAccountingPolicyForFinancialAssetsExplanatory",
"DescriptionOfAccountingPolicyForFinancialGuaranteesExplanatory",
"DescriptionOfAccountingPolicyForFinancialInstrumentsExplanatory",
"DescriptionOfAccountingPolicyForFinancialInstrumentsAtFairValueThroughProfitOrLossExplanatory",
"DescriptionOfAccountingPolicyForFinancialLiabilitiesExplanatory",
"DescriptionOfAccountingPolicyForForeignCurrencyTranslationExplanatory",
"DescriptionOfAccountingPolicyForFranchiseFeesExplanatory",
"DescriptionOfAccountingPolicyForFunctionalCurrencyExplanatory",
"DescriptionOfAccountingPolicyForGoodwillExplanatory",
"DescriptionOfAccountingPolicyForGovernmentGrants",
"DescriptionOfAccountingPolicyForHedgingExplanatory",
"DescriptionOfAccountingPolicyForHeldtomaturityInvestmentsExplanatory",
"DescriptionOfAccountingPolicyForImpairmentOfAssetsExplanatory",
"DescriptionOfAccountingPolicyForImpairmentOfFinancialAssetsExplanatory",
"DescriptionOfAccountingPolicyForImpairmentOfNonfinancialAssetsExplanatory",
"DescriptionOfAccountingPolicyForIncomeTaxExplanatory",
"DescriptionOfAccountingPolicyForInsuranceContracts",
"DescriptionOfAccountingPolicyForIntangibleAssetsAndGoodwillExplanatory",
"DescriptionOfAccountingPolicyForIntangibleAssetsOtherThanGoodwillExplanatory",
"DescriptionOfAccountingPolicyForInterestIncomeAndExpenseExplanatory",
"DescriptionOfAccountingPolicyForInvestmentInAssociates",
"DescriptionOfAccountingPolicyForInvestmentInAssociatesAndJointVenturesExplanatory",
"DescriptionOfAccountingPolicyForInvestmentPropertyExplanatory",
"DescriptionOfAccountingPolicyForInvestmentsInJointVentures",
"DescriptionOfAccountingPolicyForInvestmentsOtherThanInvestmentsAccountedForUsingEquityMethodExplanatory",
"DescriptionOfAccountingPolicyForIssuedCapitalExplanatory",
"DescriptionOfAccountingPolicyFo
RUB-NDS/PRET | helper.py | Python | gpl-2.0 | 21,528 | 0.019595 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import print_function
# python standard library
from socket import socket
import sys, os, re, stat, math, time, datetime
import importlib
# third party modules
try: # unicode monkeypatch for windoze
import win_unicode_console
win_unicode_console.enable()
except:
msg = "Please install the 'win_unicode_console' module."
if os.name == 'nt': print(msg)
try: # os independent color support
from colorama import init, Fore, Back, Style
init() # required to get colors on windoze
except ImportError:
msg = "Please install the 'colorama' module for color support."
# poor man's colored output (ANSI)
class Back():
BLUE = '\x1b[44m' if os.name == 'posix' else ''
CYAN = '\x1b[46m' if os.name == 'posix' else ''
GREEN = '\x1b[42m' if os.name == 'posix' else ''
MAGENTA = '\x1b[45m' if os.name == 'posix' else ''
RED = '\x1b[41m' if os.name == 'posix' else ''
class Fore():
BLUE = '\x1b[34m' if os.name == 'posix' else ''
CYAN = '\x1b[36m' if os.name == 'posix' else ''
MAGENTA = '\x1b[35m' if os.name == 'posix' else ''
YELLOW = '\x1b[33m' if os.name == 'posix' else ''
class Style():
DIM = '\x1b[2m' if os.name == 'posix' else ''
BRIGHT = '\x1b[1m' if os.name == 'posix' else ''
RESET_ALL = '\x1b[0m' if os.name == 'posix' else ''
NORMAL = '\x1b[22m' if os.name == 'posix' else ''
print(Back.RED + msg + Style.RESET_ALL)
# ----------------------------------------------------------------------
# return first item of list or alternative
def item(mylist, alternative=""):
return next(iter(mylist), alternative)
# split list into chunks of equal size
def chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i+n]
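Illustrative calls for the two helpers above:

# item() falls back on an empty list; chunks() yields fixed-size slices.
assert item([], "n/a") == "n/a"
assert item(["first", "second"]) == "first"
assert list(chunks([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]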
# ----------------------------------------------------------------------
class log():
# open logfile
def open(self, filename):
try:
return open(filename, mode='wb')
except IOError as e:
output().errmsg("Cannot open logfile", e)
return None
# write raw data to logfile
def write(self, logfile, data):
# logfile open and data non-empty
if logfile and data:
try:
logfile.write(data)
except IOError as e:
output().errmsg("Cannot log", e)
# write comment to logfile
def comment(self, logfile, line):
comment = "%" + ("[ " + line + " ]").center(72, '-')
self.write(logfile, os.linesep + comment + os.linesep)
# close logfile
def close(self, logfile):
try:
logfile.close()
except IOError as e:
output().errmsg("Cannot close logfile", e)
# ----------------------------------------------------------------------
class output():
# show send commands (debug mode)
def send(self, str, mode):
if str: print(Back.CYAN + str + Style.RESET_ALL)
if str and mode == 'hex':
print(Fore.CYAN + conv().hex(str, ':') + Style.RESET_ALL)
# show recv commands (debug mode)
def recv(self, str, mode):
if str: print(Back.MAGENTA + str + Style.RESET_ALL)
if str and mode == 'hex':
print(Fore.MAGENTA + conv().hex(str, ':') + Style.RESET_ALL)
# show information
def info(self, msg, eol=None):
if msg: print(Back.BLUE + msg + Style.RESET_ALL, end=eol)
sys.stdout.flush()
# show raw data
def raw(self, msg, eol=None):
if msg: print(Fore.YELLOW + msg + Style.RESET_ALL, end=eol)
sys.stdout.flush()
# show chit-chat
def chitchat(self, msg, eol=None):
if msg: print(Style.DIM + msg + Style.RESET_ALL, end=eol)
sys.stdout.flush()
# show warning message
def warning(self, msg):
if msg: print(Back.RED + msg + Style.RESET_ALL)
# show green message
def green(self, msg):
if msg: print(Back.GREEN + msg + Style.RESET_ALL)
# show error message
def errmsg(self, msg, info=""):
info = str(info).strip()
if info: # monkeypatch to make python error message less ugly
info = item(re.findall('Errno -?\d+\] (.*)', info), '') or info.splitlines()[-1]
info = Style.RESET_ALL + Style.DIM + " (" + info.strip('<>') + ")" + Style.RESET_ALL
if msg: print(Back.RED + msg + info)
# show printer and status
def discover(self, xxx_todo_changeme):
(ipaddr, (device, uptime, status, prstat)) = xxx_todo_changeme
ipaddr = output().strfit(ipaddr, 15)
device = output().strfit(device, 27)
uptime = output().strfit(uptime, 8)
status = output().strfit(status, 23)
if device.strip() != 'device': device = Style.BRIGHT + device + Style.NORMAL
if prstat == '1': status = Back.GREEN + status + Back.BLUE # unknown
if prstat == '2': status = Back.GREEN + status + Back.BLUE # running
if prstat == '3': status = Back.YELLOW + status + Back.BLUE # warning
if prstat == '4': status = Back.GREEN + status + Back.BLUE # testing
if prstat == '5': status = Back.RED + status + Back.BLUE # down
line = (ipaddr, device, uptime, status)
output().info('%-15s %-27s %-8s %-23s' % line)
# recursively list files
def psfind(self, name):
vol = Style.DIM + Fore.YELLOW + item(re.findall("^(%.*%)", name)) + Style.RESET_ALL
name = Fore.YELLOW + const.SEP + re.sub("^(%.*%)", '', name) + Style.RESET_ALL
print("%s %s" % (vol, name))
# show directory listing
def psdir(self, isdir, size, mtime, name, otime):
otime = Style.DIM + "(created " + otime + ")" + Style.RESET_ALL
vol = Style.DIM + Fore.YELLOW + item(re.findall("^(%.*%)", name)) + Style.RESET_ALL
name = re.sub("^(%.*%)", '', name) # remove volume information from filename
name = Style.BRIGHT + Fore.BLUE + name + Style.RESET_ALL if isdir else name
if isdir: print("d %8s %s %s %s %s" % (size, mtime, otime, vol, name))
else: print("- %8s %s %s %s %s" % (size, mtime, otime, vol, name))
# show directory listing
def pjldir(self, name, size):
name = name if size else Style.BRIGHT + Fore.BLUE + name + Style.RESET_ALL
if size: print("- %8s %s" % (size, name))
else: print("d %8s %s" % ("-", name))
# show directory listing
def pcldir(self, size, mtime, id, name):
id = Style.DIM + "(macro id: " + id + ")" + Style.RESET_ALL
print("- %8s %s %s %s" % (size, mtime, id, name))
# show output from df
def df(self, args):
self.info("%-16s %-11s %-11s %-9s %-10s %-8s %-9s %-10s %-10s" % args)
# show fuzzing results
def fuzzed(self, path, cmd, opt):
opt1, opt2, opt3 = opt
if isinstance(opt1, bool): opt1 = (Back.GREEN + str(opt1) + Back.BLUE + " ")\
if opt1 else (Back.RED + str(opt1) + Back.BLUE + " ")
if isinstance(opt2, bool): opt2 = (Back.GREEN + str(opt2) + Back.BLUE + " ")\
if opt2 else (Back.RED + str(opt2) + Back.BLUE + " ")
if isinstance(opt3, bool): opt3 = (Back.GREEN + str(opt3) + Back.BLUE + " ")\
if opt3 else (Back.RED + str(opt3) + Back.BLUE + " ")
opt = opt1, opt2, opt3
self.info("%-35s %-12s %-7s %-7s %-7s" % ((path, cmd) + opt))
# show captured jobs
def joblist(self, xxx_todo_changeme1):
(date, size, user, name, soft) = xxx_todo_changeme1
user = output().strfit(user, 13)
name = output().strfit(name, 22)
soft = output().strfit(soft, 20)
line = (date, size, user, name, soft)
output().info('%-12s %5s %-13s %-22s %-20s' % line)
# show ascii only
def ascii(self, data):
data = re.sub(r"(\x00){10}", "\x00", data) # shorten nullbyte streams
data = re.sub(r"([^ -~])", ".", data) # replace non-printable chars
self.raw(data, "")
# show binary dump
def dump(self, data):
# experimental regex to match sensitive strings like passwords
data = re.sub(r"[\x00-\x06,\x1e]([!-~]{6,}?(?!\\0A))\x00{16}", "START" + r"\1" + "STOP", data)
data = re.sub(r"\00+", "\x00", data) # ignore nullbyte streams
data = re.sub(r"(\x00){10}", "\x00", data) # ignore nullbyte streams
data = re.sub(r"([\x00-\x1f,\x7f-\xff])", ".", data)
data = re.sub(r"START([!-~]{6,}?)STOP", Style.RESET_ALL + Back.BLUE + r"\1" + Style.RESET_ALL + Fore.YELLOW, data)
self.raw(data,
Markcial/salty | salty/api.py | Python | mit | 1,918 | 0.000521 |
import nacl.exceptions
import nacl.utils
import nacl.secret
from salty.config import encoder, Store
from salty.exceptions import NoValidKeyFound, DefaultKeyNotSet
__all__ = ['new', 'current', 'select', 'add_secret', 'get_secret', 'encrypt', 'decrypt']
def _new():
return encoder.encode(nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE))
def _box(key):
assert type(key) is bytes
return nacl.secret.SecretBox(key, encoder=encoder)
def _encrypt(message, key=None):
assert type(message) is bytes
if key is None:
if store.current is None:
raise DefaultKeyNotSet
key = bytes(store.current, 'utf8')
return _box(key).encrypt(message, encoder=encoder)
return _box(key).encrypt(message, encoder=encoder)
def _decrypt(name, key=None):
assert type(name) is bytes
if key is None:
for k in store.keys:
dk = bytes(k, 'utf8')
try:
return _box(dk).decrypt(name, encoder=encoder)
except nacl.exceptions.CryptoError:
continue
raise NoValidKeyFound
    return _box(key).decrypt(name, encoder=encoder)
store = Store(default_key=_new().decode())
# public api
def new():
return _new()
def current(key=None):
if key is None:
return store.current
store.add_key(bytes(key, 'utf8'), current=True)
    return True
def select(pos):
store.set_current(pos)
return True
def add_secret(name, raw):
msg = _encrypt(bytes(raw, 'utf8'), bytes(store.current, 'utf8'))
store.add_secret(name, msg)
return True
def get_secret(name):
msg = store.get_secret(name)
return _decrypt(bytes(msg, 'utf8'))
def secrets():
return store.secrets
def decrypt(name, key=None):
key = key or current()
return _decrypt(name, key)
def encrypt(message, key=None):
key = key or current()
return _encrypt(message, key)
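A hypothetical round trip through the public API above (assumes the module-level Store accepts these calls as defined):

# Illustrative usage; get_secret() returns the decrypted plaintext as bytes.
from salty import api

api.add_secret("db-password", "hunter2")
assert api.get_secret("db-password") == b"hunter2"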
Yelp/pootle | pootle/apps/pootle_store/migrations/0001_initial.py | Python | gpl-3.0 | 7,129 | 0.005611 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
import translate.storage.base
import pootle_store.fields
import pootle.core.mixins.treeitem
import pootle.core.storage
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('pootle_translationproject', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('pootle_app', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Store',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('file', pootle_store.fields.TranslationStoreField(storage=pootle.core.storage.PootleFileSystemStorage(), upload_to=b'', max_length=255, editable=False, db_index=True)),
('pootle_path', models.CharField(max_length=255, verbose_name='Path', db_index=True)),
('name', models.CharField(max_length=128, editable=False)),
('file_mtime', models.DateTimeField(default=datetime.datetime(1, 1, 1, 0, 0, tzinfo=utc))),
('state', models.IntegerField(default=0, editable=False, db_index=True)),
('creation_time', models.DateTimeField(db_index=True, auto_now_add=True, null=True)),
('last_sync_revision', models.IntegerField(null=True, db_index=True)),
('obsolete', models.BooleanField(default=False)),
('parent', models.ForeignKey(related_name='child_stores', editable=False, to='pootle_app.Directory')),
                ('translation_project', models.ForeignKey(related_name='stores', editable=False, to='pootle_translationproject.TranslationProject')),
],
options={
'ordering': ['pootle_path'],
},
bases=(models.Model, pootle.core.mixins.treeitem.CachedTreeItem, translate.storage.base.TranslationStore),
),
migrations.RunSQL('ALTER TABLE `pootle_store_store` ROW_FORMAT=DYNAMIC'),
migrations.AlterField(
            model_name='store',
name='pootle_path',
field=models.CharField(unique=True, max_length=255, verbose_name='Path', db_index=True)
),
migrations.CreateModel(
name='Unit',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('index', models.IntegerField(db_index=True)),
('unitid', models.TextField(editable=False)),
('unitid_hash', models.CharField(max_length=32, editable=False, db_index=True)),
('source_f', pootle_store.fields.MultiStringField(null=True)),
('source_hash', models.CharField(max_length=32, editable=False, db_index=True)),
('source_wordcount', models.SmallIntegerField(default=0, editable=False)),
('source_length', models.SmallIntegerField(default=0, editable=False, db_index=True)),
('target_f', pootle_store.fields.MultiStringField(null=True, blank=True)),
('target_wordcount', models.SmallIntegerField(default=0, editable=False)),
('target_length', models.SmallIntegerField(default=0, editable=False, db_index=True)),
('developer_comment', models.TextField(null=True, blank=True)),
('translator_comment', models.TextField(null=True, blank=True)),
('locations', models.TextField(null=True, editable=False)),
('context', models.TextField(null=True, editable=False)),
('state', models.IntegerField(default=0, db_index=True)),
('revision', models.IntegerField(default=0, db_index=True, blank=True)),
('creation_time', models.DateTimeField(db_index=True, auto_now_add=True, null=True)),
('mtime', models.DateTimeField(auto_now=True, auto_now_add=True, db_index=True)),
('submitted_on', models.DateTimeField(null=True, db_index=True)),
('commented_on', models.DateTimeField(null=True, db_index=True)),
('reviewed_on', models.DateTimeField(null=True, db_index=True)),
('commented_by', models.ForeignKey(related_name='commented', to=settings.AUTH_USER_MODEL, null=True)),
('reviewed_by', models.ForeignKey(related_name='reviewed', to=settings.AUTH_USER_MODEL, null=True)),
('store', models.ForeignKey(to='pootle_store.Store')),
('submitted_by', models.ForeignKey(related_name='submitted', to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ['store', 'index'],
'get_latest_by': 'mtime',
},
bases=(models.Model, translate.storage.base.TranslationUnit),
),
migrations.CreateModel(
name='Suggestion',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('target_f', pootle_store.fields.MultiStringField()),
('target_hash', models.CharField(max_length=32, db_index=True)),
('translator_comment_f', models.TextField(null=True, blank=True)),
('state', models.CharField(default=b'pending', max_length=16, db_index=True, choices=[(b'pending', 'Pending'), (b'accepted', 'Accepted'), (b'rejected', 'Rejected')])),
('creation_time', models.DateTimeField(null=True, db_index=True)),
('review_time', models.DateTimeField(null=True, db_index=True)),
('unit', models.ForeignKey(to='pootle_store.Unit')),
('reviewer', models.ForeignKey(related_name='reviews', to=settings.AUTH_USER_MODEL, null=True)),
('user', models.ForeignKey(related_name='suggestions', to=settings.AUTH_USER_MODEL, null=True)),
],
options={
},
bases=(models.Model, translate.storage.base.TranslationUnit),
),
migrations.CreateModel(
name='QualityCheck',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=64, db_index=True)),
('category', models.IntegerField(default=0)),
('message', models.TextField()),
('false_positive', models.BooleanField(default=False, db_index=True)),
('unit', models.ForeignKey(to='pootle_store.Unit')),
],
options={
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='unit',
unique_together=set([('store', 'unitid_hash')]),
),
migrations.AlterUniqueTogether(
name='store',
unique_together=set([('parent', 'name')]),
),
]
|
iLoop2/ResInsight
|
ThirdParty/Ert/devel/python/python/ert/enkf/summary_key_matcher.py
|
Python
|
gpl-3.0
| 1,882 | 0.007439 |
from ert.cwrap import CWrapper, BaseCClass
from ert.enkf import ENKF_LIB
from ert.util import StringList
class SummaryKeyMatcher(BaseCClass):
def __init__(self):
c_ptr = SummaryKeyMatcher.cNamespace().alloc()
super(SummaryKeyMatcher, self).__init__(c_ptr)
def addSummaryKey(self, key):
assert isinstance(key, str)
return SummaryKeyMatcher.cNamespace().add_key(self, key)
def __len__(self):
return SummaryKeyMatcher.cNamespace().size(self)
def __contains__(self, key):
return SummaryKeyMatcher.cNamespace().match_key(self, key)
def isRequired(self, key):
""" @rtype: bool """
return SummaryKeyMatcher.cNamespace().is_required(self, key)
def keys(self):
""" @rtype: StringList """
return SummaryKeyMatcher.cNamespace().keys(self)
def free(self):
SummaryKeyMatcher.cNamespace().free(self)
cwrapper = CWrapper(ENKF_LIB)
cwrapper.registerObjectType("summary_key_matcher", SummaryKeyMatcher)
SummaryKeyMatcher.cNamespace().alloc = cwrapper.prototype("c_void_p summary_key_matcher_alloc()")
SummaryKeyMatcher.cNamespace().free = cwrapper.prototype("void summary_key_matcher_free(summary_key_matcher)")
SummaryKeyMatcher.cNamespace().size = cwrapper.prototype("int summary_key_matcher_get_size(summary_key_matcher)")
SummaryKeyMatcher.cNamespace().add_key = cwrapper.prototype("void summary_key_matcher_add_summary_key(summary_key_matcher, char*)")
SummaryKeyMatcher.cNamespace().match_key = cwrapper.prototype("bool summary_key_matcher_match_summary_key(summary_key_matcher, char*)")
SummaryKeyMatcher.cNamespace().keys = cwrapper.prototype("stringlist_obj summary_key_matcher_get_keys(summary_key_matcher)")
SummaryKeyMatcher.cNamespace().is_required = cwrapper.prototype("bool summary_key_matcher_summary_key_is_required(summary_key_matcher, char*)")
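# Illustrative usage sketch; only meaningful where the native library behind
# ENKF_LIB can actually be loaded, and the summary key name is a placeholder.
if __name__ == '__main__':
    matcher = SummaryKeyMatcher()
    matcher.addSummaryKey("FOPR")
    print("size=%d contains=%s required=%s" % (
        len(matcher), "FOPR" in matcher, matcher.isRequired("FOPR")))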
|
ledtvavs/repository.ledtv
|
script.tvguide.Vader/resources/lib/dateutil/rrule.py
|
Python
|
gpl-3.0
| 64,642 | 0.000155 |
# -*- coding: utf-8 -*-
"""
The rrule module offers a small, complete, and very fast, implementation of
the recurrence rules documented in the
`iCalendar RFC <https://tools.ietf.org/html/rfc5545>`_,
including support for caching of results.
"""
import itertools
import datetime
import calendar
import re
import sys
try:
from math import gcd
except ImportError:
from fractions import gcd
from six import advance_iterator, integer_types
from six.moves import _thread, range
import heapq
from ._common import weekday as weekdaybase
from .tz import tzutc, tzlocal
# For warning about deprecation of until and count
from warnings import warn
__all__ = ["rrule", "rruleset", "rrulestr",
"YEARLY", "MONTHLY", "WEEKLY", "DAILY",
"HOURLY", "MINUTELY", "SECONDLY",
"MO", "TU", "WE", "TH", "FR", "SA", "SU"]
# Every mask is 7 days longer to handle cross-year weekly periods.
M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 +
[7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7)
M365MASK = list(M366MASK)
M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32))
MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
MDAY365MASK = list(MDAY366MASK)
M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0))
NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
NMDAY365MASK = list(NMDAY366MASK)
M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366)
M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365)
WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55
del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31]
MDAY365MASK = tuple(MDAY365MASK)
M365MASK = tuple(M365MASK)
FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY']
(YEARLY,
MONTHLY,
WEEKLY,
DAILY,
HOURLY,
MINUTELY,
SECONDLY) = list(range(7))
# Imported on demand.
easter = None
parser = None
class weekday(weekdaybase):
"""
This version of weekday does not allow n = 0.
"""
    def __init__(self, wkday, n=None):
if n == 0:
raise ValueError("Can't create weekday with n==0")
super(weekday, self).__init__(wkday, n)
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7))
def _invalidates_cache(f):
"""
Decorator for rruleset methods which may invalidate the
cached length.
"""
def inner_func(self, *args, **kwargs):
rv = f(self, *args, **kwargs)
self._invalidate_cache()
return rv
return inner_func
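# Illustrative sketch (not part of dateutil): a minimal consumer of the
# decorator above. Any mutating method wrapped with @_invalidates_cache
# drops previously cached iteration state via _invalidate_cache().
class _CachedBag(object):
    def __init__(self):
        self._items = []
        self._cache = None
    def _invalidate_cache(self):
        self._cache = None
    @_invalidates_cache
    def add(self, item):
        self._items.append(item)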
class rrulebase(object):
def __init__(self, cache=False):
if cache:
self._cache = []
self._cache_lock = _thread.allocate_lock()
self._invalidate_cache()
else:
self._cache = None
self._cache_complete = False
self._len = None
def __iter__(self):
if self._cache_complete:
return iter(self._cache)
elif self._cache is None:
return self._iter()
else:
return self._iter_cached()
def _invalidate_cache(self):
if self._cache is not None:
self._cache = []
self._cache_complete = False
self._cache_gen = self._iter()
if self._cache_lock.locked():
self._cache_lock.release()
self._len = None
def _iter_cached(self):
i = 0
gen = self._cache_gen
cache = self._cache
acquire = self._cache_lock.acquire
release = self._cache_lock.release
while gen:
if i == len(cache):
acquire()
if self._cache_complete:
break
try:
for j in range(10):
cache.append(advance_iterator(gen))
except StopIteration:
self._cache_gen = gen = None
self._cache_complete = True
break
release()
yield cache[i]
i += 1
while i < self._len:
yield cache[i]
i += 1
def __getitem__(self, item):
if self._cache_complete:
return self._cache[item]
elif isinstance(item, slice):
if item.step and item.step < 0:
return list(iter(self))[item]
else:
return list(itertools.islice(self,
item.start or 0,
item.stop or sys.maxsize,
item.step or 1))
elif item >= 0:
gen = iter(self)
try:
for i in range(item+1):
res = advance_iterator(gen)
except StopIteration:
raise IndexError
return res
else:
return list(iter(self))[item]
def __contains__(self, item):
if self._cache_complete:
return item in self._cache
else:
for i in self:
if i == item:
return True
elif i > item:
return False
return False
    # __len__() introduces a large performance penalty.
def count(self):
""" Returns the number of recurrences in this set. It will have go
trough the whole recurrence, if this hasn't been done before. """
if self._len is None:
for x in self:
pass
return self._len
def before(self, dt, inc=False):
""" Returns the last recurrence before the given datetime instance. The
inc keyword defines what happens if dt is an occurrence. With
inc=True, if dt itself is an occurrence, it will be returned. """
if self._cache_complete:
gen = self._cache
else:
gen = self
last = None
if inc:
for i in gen:
if i > dt:
break
last = i
else:
for i in gen:
if i >= dt:
break
last = i
return last
def after(self, dt, inc=False):
""" Returns the first recurrence after the given datetime instance. The
inc keyword defines what happens if dt is an occurrence. With
inc=True, if dt itself is an occurrence, it will be returned. """
if self._cache_complete:
gen = self._cache
else:
gen = self
if inc:
for i in gen:
if i >= dt:
return i
else:
for i in gen:
if i > dt:
return i
return None
def xafter(self, dt, count=None, inc=False):
"""
Generator which yields up to `count` recurrences after the given
datetime instance, equivalent to `after`.
:param dt:
The datetime at which to start generating recurrences.
:param count:
The maximum number of recurrences to generate. If `None` (default),
dates are generated until the recurrence rule is exhausted.
:param inc:
If `dt` is an instance of the rule and `inc` is `True`, it is
included in the output.
:yields: Yields a sequence of `datetime` objects.
"""
if self._cache_complete:
gen = self._cache
else:
gen = self
# Select the comparison function
if inc:
comp = lambda dc, dtc: dc >= dtc
else:
comp = lambda dc, dtc: dc > dtc
# Generate dates
n = 0
for d in gen:
if comp(d, dt):
if count is not None:
n += 1
if n > count:
break
yield d
def between(self, after, before, inc=False, count=1):
""" Returns all the occurrences of the rrule between after and before.
The inc keyword defines what ha
|
angr/angr
|
angr/analyses/vtable.py
|
Python
|
bsd-2-clause
| 4,142 | 0.002897 |
import logging
from ..analyses import AnalysesHub
from . import Analysis, CFGFast
l = logging.getLogger(name=__name__)
class Vtable:
"""
This contains the addr, size and function addresses of a Vtable
"""
def __init__(self, vaddr, size, func_addrs=None):
self.vaddr = vaddr
self.size = size
self.func_addrs = func_addrs if func_addrs else []
class VtableFinder(Analysis):
"""
This analysis locates Vtables in a binary based on heuristics taken from - "Reconstruction of Class Hierarchies
for Decompilation of C++ Programs"
"""
def __init__(self):
if "CFGFast" not in self.project.kb.cfgs:
# populate knowledge base
self.project.analyses[CFGFast].prep()(cross_references=True)
skip_analysis = True
# check if the sections exist
for sec in self.project.loader.main_object.sections:
if sec.name in [".data.rel.ro", ".rodata", ".data.rel.ro.local"]:
skip_analysis = False
if not skip_analysis:
self.vtables_list = self.analyze()
else:
l.warning("VtableFinder analysis is skipped")
def is_cross_referenced(self, addr):
return addr in self.project.kb.xrefs.xrefs_by_dst
def is_function(self, addr):
return addr in self.project.kb.functions
def analyze(self):
# finding candidate starting vtable addresses
# "current location is referenced from a code segment and its value is a pointer to a function,
# then it is marked as a start of vtable"
# taken from - Reconstruction of Class Hierarchies for Decompilation of C++ Programs
list_vtables = []
for sec in self.project.loader.main_object.sections:
if sec.name in [".data.rel.ro", ".rodata", ".data.rel.ro.local"]:
for offset in range(0, sec.memsize, self.project.arch.bytes):
cur_addr = sec.vaddr + offset
possible_func_addr = self.project.loader.memory.unpack_word(
cur_addr
)
# check if this address is referenced in the code segment
if self.is_cross_referenced(cur_addr):
                        # check if it is also a function, if so then it is possibly a vtable start
                        if self.is_function(possible_func_addr):
new_vtable = self.create_extract_vtable(
cur_addr, sec.memsize
)
if new_vtable is not None:
list_vtables.append(new_vtable)
return list_vtables
def create_extract_vtable(self, start_addr, sec_size):
# using the starting address extracting the vtable
# "Other elements of vtable must be unreferenced pointers to function"
# "Vtable ends with the first location that is either referenced from the program code,
# or is not a pointer to a function"
# taken from - Reconstruction of Class Hierarchies for Decompilation of C++ Programs
first_func_addr = self.project.loader.memory.unpack_word(start_addr)
cur_vtable = Vtable(start_addr, self.project.arch.bytes, [first_func_addr])
for cur_addr in range(
start_addr + self.project.arch.bytes,
start_addr + sec_size,
self.project.arch.bytes,
):
possible_func_addr = self.project.loader.memory.unpack_word(cur_addr)
if self.is_function(possible_func_addr) and not self.is_cross_referenced(
cur_addr
):
cur_vtable.func_addrs.append(possible_func_addr)
cur_vtable.size += self.project.arch.bytes
elif not self.is_function(possible_func_addr) or self.is_cross_referenced(
cur_addr
):
return cur_vtable
return None
AnalysesHub.register_default("VtableFinder", VtableFinder)
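# Illustrative usage sketch: the registration above exposes the analysis on
# any angr project. The binary path is a placeholder, and vtables_list is
# only populated when one of the scanned data sections exists.
if __name__ == "__main__":
    import angr
    proj = angr.Project("/path/to/binary", auto_load_libs=False)
    finder = proj.analyses.VtableFinder()
    for vtable in finder.vtables_list:
        print(hex(vtable.vaddr), vtable.size, [hex(a) for a in vtable.func_addrs])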
|
cloudify-cosmo/cloudify-manager
|
tests/integration_tests/resources/dsl/plugin_tests/plugins/mock-plugin/mock_plugin/ops.py
|
Python
|
apache-2.0
| 940 | 0 |
# ***************************************************************************
# * Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# *       http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# **************************************************************************/
from cloudify import ctx
from cloudify.decorators import operation
@operation
def op(**_):
ctx.logger.info('Performing OP')
|
ajbc/lda-svi
|
generalrandom.py
|
Python
|
gpl-3.0
| 8,782 | 0.00353 |
# wikirandom.py: Functions for downloading random articles from Wikipedia
#
# Copyright (C) 2010 Matthew D. Hoffman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, os, urllib2, re, string, time, threading, fnmatch
from random import randint
from bs4 import BeautifulSoup
class LiveparseDocGen():
def __init__(self, path):
print "initializing live parse doc gen with path " + path
self.doclist = []
for (root,dirnames,filenames) in os.walk(path):
for filename in fnmatch.filter(filenames, '*'):
#print os.path.join(root,filename)
self.doclist.append(os.path.join(root,filename))
def get_article(self, id):
docfile = self.doclist[id]
xml = open(docfile, 'r')
soup = BeautifulSoup(xml)
xml.close()
# find all the text
if soup.find("block", {"class":"full_text"}) is None:
return self.get_random_article()
fulltext = soup.find("block", {"class":"full_text"})
paras = fulltext.findAll("p")
alltxt = ' '.join([p.contents[0] for p in paras])
alltxt = alltxt.lower()
alltxt = re.sub(r'-', ' ', alltxt)
alltxt = re.sub(r'[^a-z ]', '', alltxt)
alltxt = re.sub(r' +', ' ', alltxt)
title = soup.find("title")
title = title.contents[0] if title else ""
byline = soup.find("byline")
subtitle = byline.contents[0] if byline and len(byline.contents) != 0 \
else ""
return (alltxt, title, subtitle, docfile)
def get_random_article(self):
id = randint(0, len(self.doclist) - 1)
return self.get_article(id)
def get_random_articles(self, n):
docs = []
for i in range(n):
(doc, title, subtitle, link) = self.get_random_article()
docs.append(doc)
return docs
def getDocCount(self):
return len(self.doclist)
def __iter__(self):
self.current = 0
return self
def next(self):
if self.current >= len(self.doclist):
raise StopIteration
else:
(all, title, subtitle, docfile) = self.get_article(self.current)
link = self.doclist[self.current]
self.current += 1
return (link, all, title, subtitle)
class PreparseDocGen():
def __init__(self, filename):
print "initializing preparsed doc gen with file " + filename
lines = open(filename).readlines()
self.docs = []
self.terms = set()
for line in lines:
wordids = []
wordcts = []
for token in line.split(' ')[1:]:
tokens = token.split(':')
wordids.append(int(tokens[0])-1)
                wordcts.append(int(tokens[1]))
                self.terms.add(int(tokens[0]))
            self.docs.append((wordids, wordcts))
self.D = len(self.docs)
        # The first, wordids, says what vocabulary tokens are present in
        # each document. wordids[i][j] gives the jth unique token present in
        # document i. (Don't count on these tokens being in any particular
        # order.)
        # The second, wordcts, says how many times each vocabulary token is
        # present. wordcts[i][j] is the number of times that the token given
        # by wordids[i][j] appears in document i.
def get_random_articles(self, n):
wordids = []
wordcts = []
i = 0
while (i < n):
doc = self.docs[randint(0, self.D - 1)]
# omit short docs from training (to speed things up)
if len(doc[0]) < 5:
continue
wordids.append(doc[0])
wordcts.append(doc[1])
i += 1
return((wordids, wordcts))
def getDocCount(self):
return len(self.docs)
def getTermCount(self):
return len(self.terms)
def __iter__(self):
self.current = 0
return self
def next(self):
if self.current >= len(self.docs):
raise StopIteration
else:
doc = self.docs[self.current]
self.current += 1
return(([doc[0]], [doc[1]]))
def get_random_wikipedia_article():
"""
Downloads a randomly selected Wikipedia article (via
http://en.wikipedia.org/wiki/Special:Random) and strips out (most
of) the formatting, links, etc.
This function is a bit simpler and less robust than the code that
was used for the experiments in "Online VB for LDA."
"""
failed = True
while failed:
articletitle = None
failed = False
try:
req = urllib2.Request('http://en.wikipedia.org/wiki/Special:Random',
None, { 'User-Agent' : 'x'})
f = urllib2.urlopen(req)
while not articletitle:
line = f.readline()
result = re.search(r'title="Edit this page" href="/w/index.php\?title=(.*)\&action=edit" /\>', line)
if (result):
articletitle = result.group(1)
break
elif (len(line) < 1):
sys.exit(1)
req = urllib2.Request('http://en.wikipedia.org/w/index.php?title=Special:Export/%s&action=submit' \
% (articletitle),
None, { 'User-Agent' : 'x'})
f = urllib2.urlopen(req)
all = f.read()
except (urllib2.HTTPError, urllib2.URLError):
print 'oops. there was a failure downloading %s. retrying...' \
% articletitle
failed = True
continue
print 'downloaded %s. parsing...' % articletitle
try:
all = re.search(r'<text.*?>(.*)</text', all, flags=re.DOTALL).group(1)
all = re.sub(r'\n', ' ', all)
all = re.sub(r'\{\{.*?\}\}', r'', all)
all = re.sub(r'\[\[Category:.*', '', all)
all = re.sub(r'==\s*[Ss]ource\s*==.*', '', all)
all = re.sub(r'==\s*[Rr]eferences\s*==.*', '', all)
all = re.sub(r'==\s*[Ee]xternal [Ll]inks\s*==.*', '', all)
all = re.sub(r'==\s*[Ee]xternal [Ll]inks and [Rr]eferences==\s*', '', all)
all = re.sub(r'==\s*[Ss]ee [Aa]lso\s*==.*', '', all)
all = re.sub(r'http://[^\s]*', '', all)
all = re.sub(r'\[\[Image:.*?\]\]', '', all)
all = re.sub(r'Image:.*?\|', '', all)
all = re.sub(r'\[\[.*?\|*([^\|]*?)\]\]', r'\1', all)
all = re.sub(r'\<.*?>', '', all)
except:
# Something went wrong, try again. (This is bad coding practice.)
print 'oops. there was a failure parsing %s. retrying...' \
% articletitle
failed = True
continue
return(all, articletitle)
class WikiThread(threading.Thread):
articles = list()
articlenames = list()
lock = threading.Lock()
def run(self):
(article, articlename) = get_random_wikipedia_article()
WikiThread.lock.acquire()
WikiThread.articles.append(article)
WikiThread.articlenames.append(articlename)
WikiThread.lock.release()
def get_random_wikipedia_articles(n):
"""
Downloads n articles in parallel from Wikipedia and returns lists
of their names and contents. Much faster than calling
get_random_wikipedia_article() serially.
"""
maxthreads = 8
WikiThread.articles = list()
WikiThread.articlenames = list()
wtlist =
|
skylines-project/skylines
|
tests/api/views/clubs/read_test.py
|
Python
|
agpl-3.0
| 1,643 | 0 |
from tests.api import auth_for
from tests.data import add_fixtures, clubs, users
def test_lva(db_session, client):
lva = clubs.lva(owner=users.john())
add_fixtures(db_session, lva)
res = client.get("/clubs/{id}".format(id=lva.id))
assert res.status_code == 200
assert res.json == {
"id": lva.id,
"name": "LV Aachen",
"timeCreated": "2015-12-24T12:34:56+00:00",
"website": "http://www.lv-aachen.de",
"isWritable": False,
"owner": {"id
|
": lva.owner.id, "name": lva.owner.name},
}
def test_sfn(db_session, client):
sfn = clubs.sfn()
add_fixtures(db_session, sfn)
res = client.get("/clubs/{id}".format(id=sfn.id))
assert res.status_code == 200
assert res.json == {
u"id": sfn.id,
u"name": u"Sportflug Niederberg",
u"timeCreated": "2017-01-01T12:34:56+00:00",
u"web
|
site": None,
u"isWritable": False,
u"owner": None,
}
def test_writable(db_session, client):
lva = clubs.lva()
john = users.john(club=lva)
add_fixtures(db_session, lva, john)
res = client.get("/clubs/{id}".format(id=lva.id), headers=auth_for(john))
assert res.status_code == 200
assert res.json == {
"id": lva.id,
"name": "LV Aachen",
"timeCreated": "2015-12-24T12:34:56+00:00",
"website": "http://www.lv-aachen.de",
"isWritable": True,
"owner": None,
}
def test_missing(client):
res = client.get("/clubs/10000000")
assert res.status_code == 404
def test_invalid_id(client):
res = client.get("/clubs/abc")
assert res.status_code == 404
|
diofeher/django-nfa
|
django/contrib/localflavor/pl/forms.py
|
Python
|
bsd-3-clause
| 5,591 | 0.004114 |
"""
Polish-specific form helpers
"""
import re
from django.newforms import ValidationError
from django.newforms.fields import Select, RegexField
from django.utils.translation import ugettext_lazy as _
class PLVoivodeshipSelect(Select):
"""
A select widget with list of Polish voivodeships (administrative provinces)
as choices.
"""
def __init__(self, attrs=None):
from pl_voivodeships import VOIVODESHIP_CHOICES
super(PLVoivodeshipSelect, self).__init__(attrs, choices=VOIVODESHIP_CHOICES)
class PLAdministrativeUnitSelect(Select):
"""
A select widget with list of Polish administrative units as choices.
"""
def __init__(self, attrs=None):
from pl_administrativeunits import ADMINISTRATIVE_UNIT_CHOICES
super(PLAdministrativeUnitSelect, self).__init__(attrs, choices=ADMINISTRATIVE_UNIT_CHOICES)
class PLNationalIdentificationNumberField(RegexField):
"""
A form field that validates as Polish Identification Number (PESEL).
Checks the following rules:
* the length consist of 11 digits
* has a valid checksum
The algorithm is documented at http://en.wikipedia.org/wiki/PESEL.
"""
default_error_messages = {
'invalid': _(u'National Identification Number consists of 11 digits.'),
'checksum': _(u'Wrong checksum for the National Identification Number.'),
}
def __init__(self, *args, **kwargs):
super(PLNationalIdentificationNumberField, self).__init__(r'^\d{11}$',
max_length=None, min_length=None, *args, **kwargs)
def clean(self,value):
super(PLNationalIdentificationNumberField, self).clean(value)
if not self.has_valid_checksum(value):
raise ValidationError(self.error_messages['checksum'])
return u'%s' % value
def has_valid_checksum(self, number):
"""
Calculates a checksum with the provided algorithm.
"""
multiple_table = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3, 1)
result = 0
for i in range(len(number)):
result += int(number[i]) * multiple_table[i]
return result % 10 == 0
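# Worked example of the weighting above; 44051401359 is a commonly cited
# synthetic PESEL test value, not a real person's number:
#   >>> weights = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3, 1)
#   >>> sum(int(d) * w for d, w in zip("44051401359", weights)) % 10
#   0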
class PLTaxNumberField(RegexField):
"""
A form field that validates as Polish Tax Number (NIP).
Valid forms are: XXX-XXX-YY-YY or XX-XX-YYY-YYY.
Checksum algorithm based on documentation at
http://wipos.p.lodz.pl/zylla/ut/nip-rego.html
"""
default_error_messages = {
'invalid': _(u'Enter a tax number field (NIP) in the format XXX-XXX-XX-XX or XX-XX-XXX-XXX.'),
'checksum': _(u'Wrong checksum for the Tax Number (NIP).'),
}
def __init__(self, *args, **kwargs):
super(PLTaxNumberField, self).__init__(r'^\d{3}-\d{3}-\d{2}-\d{2}$|^\d{2}-\d{2}-\d{3}-\d{3}$',
max_length=None, min_length=None, *args, **kwargs)
def clean(self,value):
super(PLTaxNumberField, self).clean(value)
value = re.sub("[-]", "", value)
if not self.has_valid_checksum(value):
raise ValidationError(self.error_messages['checksum'])
return u'%s' % value
def has_valid_checksum(self, number):
"""
Calculates a checksum with the provided algorithm.
"""
multiple_table = (6, 5, 7, 2, 3, 4, 5, 6, 7)
result = 0
for i in range(len(number)-1):
result += int(number[i]) * multiple_table[i]
result %= 11
if result == int(number[-1]):
return True
else:
return False
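# Worked example of the NIP scheme above with a made-up number 7251801126:
# the first nine digits weighted by (6, 5, 7, 2, 3, 4, 5, 6, 7) sum to 138,
# and 138 % 11 == 6 matches the final digit, so the checksum passes.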
class PLNationalBusinessRegisterField(RegexField):
"""
A form field that validated as Polish National Official Business Register Number (REGON)
Valid forms are: 7 or 9 digits number
More on the field: http://www.stat.gov.pl/bip/regon_ENG_HTML.htm
The checksum algorithm is documented at http://wipos.p.lodz.pl/zylla/ut/nip-rego.html
"""
default_error_messages = {
'invalid': _(u'National Business Register Number (REGON) consists of 7 or 9 digits.'),
'checksum': _(u'Wrong checksum for the National Business Register Number (REGON).'),
}
def __init__(self, *args, **kwargs):
super(PLNationalBusinessRegisterField, self).__init__(r'^\d{7,9}$',
max_length=None, min_length=None, *args, **kwargs)
    def clean(self,value):
        super(PLNationalBusinessRegisterField, self).clean(value)
        if not self.has_valid_checksum(value):
            raise ValidationError(self.error_messages['checksum'])
        return u'%s' % value
def has_valid_checksum(self, number):
"""
Calculates a checksum with the provided algorithm.
"""
multiple_table_7 = (2, 3, 4, 5, 6, 7)
multiple_table_9 = (8, 9, 2, 3, 4, 5, 6, 7)
result = 0
if len(number) == 7:
multiple_table = multiple_table_7
else:
multiple_table = multiple_table_9
for i in range(len(number)-1):
result += int(number[i]) * multiple_table[i]
result %= 11
if result == 10:
result = 0
if result == int(number[-1]):
return True
else:
return False
class PLPostalCodeField(RegexField):
"""
A form field that validates as Polish postal code.
Valid code is XX-XXX where X is digit.
"""
default_error_messages = {
'invalid': _(u'Enter a postal code in the format XX-XXX.'),
}
def __init__(self, *args, **kwargs):
super(PLPostalCodeField, self).__init__(r'^\d{2}-\d{3}$',
max_length=None, min_length=None, *args, **kwargs)
|
Willempie/Artificial_Intelligence_Cube
|
logic/handling/panel_action.py
|
Python
|
apache-2.0
| 5,455 | 0.005683 |
from gui_items import *
from objects.xml.xml_step import Step
class ActionPanel:
def __init__(self, display):
self.steps = []
self.display = display
        # new ACTION panel (textbox with the turn sequence, button to execute the turns)
self.panel = self.display.gui_items.add_panel(450, 390, (300, 300))
cube_action_gui_items = GuiItems(display, self.display.cube_gui, self.panel)
main_sizer = cube_action_gui_items.gen_box_sizer(wx.HORIZONTAL)
axes_sizer = cube_action_gui_items.gen_box_sizer(wx.VERTICAL)
output_sizer = cube_action_gui_items.gen_box_sizer(wx.VERTICAL)
        # execute-turns button
cube_action_button = cube_action_gui_items.gen_button("Run actions.", 20, 20)
cube_action_button.btn_id = 'run'
cube_action_button.Bind(wx.EVT_BUTTON, lambda event: self._button_run())
# reset textbox button
cube_reset_textbox_button = cube_action_gui_items.gen_button("Reset actions.", 30, 30)
cube_reset_textbox_button.Bind(wx.EVT_BUTTON, lambda event: self._button_reset())
        # textbox holding the turns
self.cube_action_textbox = cube_action_gui_items.gen_textbox(10, 10, (200, -1), (wx.TE_MULTILINE))
# dropdown for selecting cube row
combo_box_items = []
for size in range(self.display._storage.cube_size):
combo_box_items.append(str(size+1))
self.action_combo_box = cube_action_gui_items.gen_combobox((150, 10), (150, -1), combo_box_items)
self.action_combo_box.SetSelection(0)
# turnable checkbox(clockwise, counterclockwise)
cube_turnable_checkbox = cube_action_gui_items.gen_radiobox(20, 20, (100, 100), wx.RA_SPECIFY_ROWS,
['CounterClockwise', 'Clockwise'])
        # buttons for the turns (WITH BIND)
x_button = cube_action_gui_items.gen_button("Voer X in", 0, 0)
x_button.btn_id = 'x'
y_button = cube_action_gui_items.gen_button("Voer Y in", 0, 0)
y_button.btn_id = 'y'
z_button = cube_action_gui_items.gen_button("Voer Z in", 0, 0)
z_button.btn_id = 'z'
x_button.Bind(wx.EVT_BUTTON, lambda event: self._button_x_y_z('x', self.action_combo_box.GetValue(),
cube_turnable_checkbox.GetSelection()))
y_button.Bind(wx.EVT_BUTTON, lambda event: self._button_x_y_z('y', self.action_combo_box.GetValue(),
cube_turnable_checkbox.GetSelection()))
        z_button.Bind(wx.EVT_BUTTON, lambda event: self._button_x_y_z('z', self.action_combo_box.GetValue(),
                                                                      cube_turnable_checkbox.GetSelection()))
# undo button
undo_button = cube_action_gui_items.gen_button("Undo last input", 0,0)
undo_button.Bind(wx.EVT_BUTTON, self.__undo)
# add elements to box_sizers
        output_sizer.Add(self.cube_action_textbox, 0, wx.ALL, 5)
output_sizer.Add(cube_action_button, 0, wx.ALL, 5)
output_sizer.Add(cube_reset_textbox_button, 0, wx.ALL, 5)
output_sizer.Add(undo_button, 0, wx.ALL, 5)
axes_sizer.Add(x_button, 0, wx.ALL, 1)
axes_sizer.Add(y_button, 0, wx.ALL, 1)
axes_sizer.Add(z_button, 0, wx.ALL, 1)
axes_sizer.Add(self.action_combo_box, 0, wx.ALL, 1)
axes_sizer.Add(cube_turnable_checkbox, 0, wx.ALL, 1)
main_sizer.Add(output_sizer)
main_sizer.Add(axes_sizer)
# set sizer to panel
self.panel.SetSizer(main_sizer)
self.panel.Layout()
# hide panel
self.panel.Hide()
def __undo(self, event):
counter = 1
textbox_items = ""
splitted_inserted_text = self.cube_action_textbox.GetValue().split(';')
for current_split in splitted_inserted_text:
if counter < len(splitted_inserted_text):
textbox_items += ";" + current_split
counter += 1
# change textbox value
self.cube_action_textbox.Clear()
self.cube_action_textbox.AppendText(textbox_items[1:]) # minus first ; char
def _button_run(self):
self.read_steps()
self.display._storage.current_cube.execute_steps(self.steps)
def read_steps(self):
self.steps = []
text = str(self.cube_action_textbox.GetValue())
if not text == "":
for current_split in text.split(';'):
var_split = current_split.split(',')
self.steps.append(Step(var_split[0], int(var_split[1])-1, int(var_split[2])))
print var_split
def _button_reset(self):
self.steps = []
self.cube_action_textbox.Clear()
def _reset_textbox(self):
self.cube_action_textbox.Clear()
for step in self.steps:
self.cube_action_textbox.AppendText(";" + str(step.axis) + "," + str(step.rows) + "," + str(step.direction))
def _button_x_y_z(self, axis, row, direction):
if direction == 0:
direction = -1
if len(self.cube_action_textbox.GetValue()) == 0:
self.cube_action_textbox.AppendText(str(axis) + "," + str(row) + "," + str(direction))
else:
self.cube_action_textbox.AppendText(";" + str(axis) + "," + str(row) + "," + str(direction))
|
ellisonbg/pyzmq
|
zmq/tests/test_version.py
|
Python
|
lgpl-3.0
| 1,970 | 0.003553 |
#-----------------------------------------------------------------------------
# Copyright (c) 2010-2012 Brian Granger, Min Ragan-Kelley
#
# This file is part of pyzmq
#
# Distributed under the terms of the New BSD License. The full license is in
# the file COPYING.BSD, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from unittest import TestCase
import zmq
from zmq.sugar import version
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
class TestVersion(TestCase):
def test_pyzmq_version(self):
vs = zmq.pyzmq_version()
vs2 = zmq.__version__
self.assertTrue(isinstance(vs, str))
if zmq.__revision__:
            self.assertEqual(vs, '@'.join([vs2, zmq.__revision__]))
else:
self.assertEqual(vs, vs2)
if version.VERSION_EXTRA:
self.assertTrue(version.VERSION_EXTRA in vs)
            self.assertTrue(version.VERSION_EXTRA in vs2)
def test_pyzmq_version_info(self):
info = zmq.pyzmq_version_info()
self.assertTrue(isinstance(info, tuple))
for n in info[:3]:
self.assertTrue(isinstance(n, int))
if version.VERSION_EXTRA:
self.assertEqual(len(info), 4)
self.assertEqual(info[-1], float('inf'))
else:
self.assertEqual(len(info), 3)
def test_zmq_version_info(self):
info = zmq.zmq_version_info()
        self.assertTrue(isinstance(info, tuple))
for n in info[:3]:
self.assertTrue(isinstance(n, int))
def test_zmq_version(self):
v = zmq.zmq_version()
self.assertTrue(isinstance(v, str))
|
RIEI/tongue
|
TongueD/StreamThread.py
|
Python
|
gpl-2.0
| 1,400 | 0.013571 |
__author__ = 'sysferland'
import argparse, subprocess, os
parser = argparse.ArgumentParser()
parser.add_argument('-feed', help='feed name')
parser.add_argument('-ffserver', help='ffserver IP and PORT')
parser.add_argument('-source', help='video source path if DVD Raw the path to the VIDEO_TS folder')
parser.add_argument('-seek', help='time to seek to in for the feed')
parser.add_argument('-binpath', help='ffmpeg bin path')
args = parser.parse_args()
videofile = os.path.normpath(args.source)
#dt_to = datetime.time(00,20,00)
#dt_delta = datetime.time(00,00,30)
#seek_to = datetime.timedelta(hours=dt_to.hour,minutes=dt_to.minute,seconds=dt_to.second)
#seek_delta = datetime.timedelta(hours=dt_delta.hour,minutes=dt_delta.minute,seconds=dt_delta.second)
#seek_to_fast = seek_to - seek_delta
seek_delta = "00:00:00"
seek_to_fast = "00
|
:00:00"
other_options = "-ss " + str(seek_to_fast)
options = "-ss "+ str(seek_delta) # +" -trellis 1 -lmax 42000 "
ffm_output = " http://"+args.ffserver+"/"+args.feed
command = args.binpath + "ffmpeg -threads 2 "+ other_options +" -i " + videofile.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&") + " " + options + ffm_output
command = command.replace("&", "\&")
print command
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
#print process.stdout
out, err = process.communicate()
|
MackZxh/OCA-Choice
|
server-tools/auth_supplier/tests/test_auth_supplier.py
|
Python
|
lgpl-3.0
| 852 | 0 |
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp.tests.common import TransactionCase
class TestSAuthSupplier(TransactionCase):
def setUp(self):
super(TestSAuthSupplier, self).setUp()
ir_config_parameter = self.env['ir.config_parameter']
        ir_config_parameter.set_param('auth_signup.allow_uninvited', 'True')
def test_user_signup(self):
values = {
'login': 'test@test.com',
'name': 'test',
'password': '1234',
'account_type': 'supplier'
}
        user_obj = self.env['res.users']
user = user_obj.browse(user_obj._signup_create_user(values))
self.assertTrue(user.partner_id.supplier)
|
hzj123/56th
|
pombola/votematch/admin.py
|
Python
|
agpl-3.0
| 754 | 0.01061 |
from django.contrib import admin
import models
from pombola.slug_helpers.admin import StricterSlugFieldMixin
class QuizAdmin(StricterSlugFieldMixin, admin.ModelAdmin):
prepopulated_fields = {"slug": ["name"]}
class StatementAdmin(admin.ModelAdmin):
pass
class PartyAdmin(admin.ModelAdmin):
pass
class StanceAdmin(admin.ModelAdmin):
pass
class SubmissionAdmin(admin.ModelAdmin):
pass
class AnswerAdmin(admin.ModelAdmin):
pass
admin.site.register(models.Quiz, QuizAdmin)
admin.site.register(models.Statement, StatementAdmin)
admin.site.register(models.Party, PartyAdmin)
admin.site.register(models.Stance, StanceAdmin)
admin.site.register(models.Submission, SubmissionAdmin)
admin.site.register(models.Answer, AnswerAdmin)
|
rholy/dnf-plugins-core
|
plugins/generate_completion_cache.py
|
Python
|
gpl-2.0
| 2,729 | 0 |
# coding=utf-8
# generate_completion_cache.py - generate cache for dnf bash completion
# Copyright © 2013 Elad Alfassa <elad@fedoraproject.org>
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
from dnfpluginscore import logger
import dnf
import os.path
class BashCompletionCache(dnf.Plugin):
name = 'generate_completion_cache'
def __init__(self, base, cli):
self.base = base
self.available_cache_file = '/var/cache/dnf/available.cache'
self.installed_cache_file = '/var/cache/dnf/installed.cache'
def _out(self, msg):
logger.debug('Completion plugin: %s', msg)
def sack(self):
        ''' Generate cache of available packages '''
        # We generate this cache only if the repos were just refreshed or if the
# cache file doesn't exist
fresh = False
for repo in self.base.repos.iter_enabled():
if repo.metadata is not None and repo.metadata.fresh:
# One fresh repo is enough to cause a regen of the cache
fresh = True
break
if not os.path.exists(self.available_cache_file) or fresh:
try:
with open(self.available_cache_file, 'w') as cache_file:
self._out('Generating completion cache...')
available_packages = self.base.sack.query().available()
for package in available_packages:
cache_file.write(package.name + '\n')
except Exception as e:
self._out('Can\'t write completion cache: %s' % e)
def transaction(self):
''' Generate cache of installed packages '''
try:
with open(self.installed_cache_file, 'w') as cache_file:
installed_packages = self.base.sack.query().installed()
self._out('Generating completion cache...')
for package in installed_packages:
cache_file.write(package.name + '\n')
except Exception as e:
self._out('Can\'t write completion cache: %s' % e)
|
nansencenter/nansat
|
nansat/mappers/mapper_opendap_sentinel1.py
|
Python
|
gpl-3.0
| 4,824 | 0.004561 |
# Name: mapper_opendap_sentinel1.py
# Purpose: Nansat mapping for ESA Sentinel-1 data from the Norwegian ground segment
# Author: Morten W. Hansen
# Licence: This file is part of NANSAT. You can redistribute it or modify
# under the terms of GNU General Public License, v.3
# http://www.gnu.org/licenses/gpl-3.0.html
import os
from datetime import datetime
import json
import warnings
import numpy as np
from netCDF4 import Dataset
from nansat.utils import gdal
try:
import scipy
except:
IMPORT_SCIPY = False
else:
IMPORT_SCIPY = True
import pythesint as pti
from nansat.mappers.sentinel1 import Sentinel1
from nansat.mappers.opendap import Opendap
from nansat.vrt import VRT
from nansat.nsr import NSR
from nansat.utils import initial_bearing
class Mapper(Opendap, Sentinel1):
baseURLs = [
'http://nbstds.met.no/thredds/dodsC/NBS/S1A',
'http://nbstds.met.no/thredds/dodsC/NBS/S1B',
]
timeVarName = 'time'
xName = 'x'
yName = 'y'
timeCalendarStart = '1981-01-01'
srcDSProjection = NSR().wkt
def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
ds=None, bands=None, cachedir=None, *args, **kwargs):
self.test_mapper(filename)
if not IMPORT_SCIPY:
raise NansatReadError('Sentinel-1 data cannot be read because scipy is not installed')
timestamp = date if date else self.get_date(filename)
self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
Sentinel1.__init__(self, filename)
self.add_calibrated_nrcs(filename)
self.add_nrcs_VV_from_HH(filename)
def add_calibrated_nrcs(self, filename):
layer_time_id, layer_date = Opendap.get_layer_datetime(None,
self.convert_dstime_datetimes(self.get_dataset_time()))
polarizations = [self.ds.polarisation[i:i+2] for i in range(0,len(self.ds.polarisation),2)]
for pol in polarizations:
dims = list(self.ds.variables['dn_%s' %pol].dimensions)
dims[dims.index(self.timeVarName)] = layer_time_id
src = [
self.get_metaitem(filename, 'Amplitude_%s' %pol, dims)['src'],
self.get_metaitem(filename, 'sigmaNought_%s' %pol, dims)['src']
]
dst = {
'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
'PixelFunctionType': 'Sentinel1Calibration',
'polarization': pol,
'suffix': pol,
}
self.create_band(src, dst)
self.dataset.FlushCache()
def add_nrcs_VV_from_HH(self, filename):
if not 'Amplitude_HH' in self.ds.variables.keys():
return
layer_time_id, layer_date = Opendap.get_layer_datetime(None,
self.convert_dstime_datetimes(self.get_dataset_time()))
dims = list(self.ds.variables['dn_HH'].dimensions)
dims[dims.index(self.timeVarName)] = layer_time_id
src = [
self.get_metaitem(filename, 'Amplitude_HH', dims)['src'],
self.get_metaitem(filename, 'sigmaNought_HH', dims)['src'],
{'SourceFilename': self.band_vrts['inciVRT'].filename, 'SourceBand': 1}
]
dst = {
'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
'polarization': 'VV',
'suffix': 'VV'}
self.create_band(src, dst)
self.dataset.FlushCache()
@staticmethod
def get_date(filename):
"""Extract date and time parameters from filename and return
it as a formatted (isoformat) string
Parameters
----------
filename: str
nn
Returns
-------
str, YYYY-mm-ddThh:MMZ
"""
_, filename = os.path.split(filename)
t = datetime.strptime(filename.split('_')[4], '%Y%m%dT%H%M%S')
return datetime.strftime(t, '%Y-%m-%dT%H:%M:%SZ')
def convert_dstime_datetimes(self, ds_time):
"""Convert time variable to np.datetime64"""
ds_datetimes = np.array(
[(np.datetime64(self.timeCalendarStart).astype('M8[s]')
              + np.timedelta64(int(sec), 's').astype('m8[s]')) for sec in ds_time]).astype('M8[s]')
return ds_datetimes
def get_geotransform(self):
""" Return fake and temporary geotransform. This will be replaced by gcps in
Sentinel1.__init__
"""
xx = self.ds.variables['lon'][0:100:50, 0].data
yy = self.ds.variables['lat'][0, 0:100:50].data
        return xx[0], xx[1]-xx[0], 0, yy[0], 0, yy[1]-yy[0]
|
dhalperi/beam
|
sdks/python/apache_beam/utils/urns.py
|
Python
|
apache-2.0
| 4,265 | 0.00422 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""For internal use only; no backwards-compatibility guarantees."""
import abc
import inspect
from google.protobuf import wrappers_pb2
from apache_beam.internal import pickler
from apache_beam.utils import proto_utils
PICKLED_WINDOW_FN = "beam:window_fn:pickled_python:v0.1"
GLOBAL_WINDOWS_FN = "beam:window_fn:global_windows:v0.1"
FIXED_WINDOWS_FN = "beam:window_fn:fixed_windows:v0.1"
SLIDING_WINDOWS_FN = "beam:window_fn:sliding_windows:v0.1"
SESSION_WINDOWS_FN = "beam:window_fn:session_windows:v0.1"
PICKLED_CODER = "beam:coder:pickled_python:v0.1"
PICKLED_TRANSFORM = "beam:ptransform:pickled_python:v0.1"
FLATTEN_TRANSFORM = "beam:ptransform:flatten:v0.1"
WINDOW_INTO_TRANSFORM = "beam:ptransform:window_into:v0.1"
class RunnerApiFn(object):
"""Abstract base class that provides urn registration utilities.
A class that inherits from this class will get a registration-based
from_runner_api and to_runner_api method that convert to and from
beam_runner_api_pb2.SdkFunctionSpec.
Additionally, register_pickle_urn can be called from the body of a class
to register serialization via pickling.
"""
__metaclass__ = abc.ABCMeta
_known_urns = {}
@abc.abstractmethod
def to_runner_api_parameter(self, unused_context):
"""Returns the urn and payload for this Fn.
The returned urn(s) should be registered with `register_urn`.
"""
pass
@classmethod
def register_urn(cls, urn, parameter_type, fn=None):
"""Registeres a urn with a constructor.
For example, if 'beam:fn:foo' had paramter type FooPayload, one could
write `RunnerApiFn.register_urn('bean:fn:foo', FooPayload, foo_from_proto)`
where foo_from_proto took as arguments a FooPayload and a PipelineContext.
This function can also be used as a decorator rather than passing the
callable in as the final parameter.
A corresponding to_runner_api_parameter method would be expected that
returns the tuple ('beam:fn:foo', FooPayload)
"""
def register(fn):
cls._known_urns[urn] = parameter_type, fn
return staticmethod(fn)
if fn:
# Used as a statement.
register(fn)
else:
# Used as a decorator.
return register
@classmethod
def register_pickle_urn(cls, pickle_urn):
"""Registers and implements the given urn
|
via pickling.
"""
    # Called from within a class body: the caller's frame locals are the
    # namespace of the class being defined, so this injects a
    # to_runner_api_parameter method into that class.
    inspect.currentframe().f_back.f_locals['to_runner_api_parameter'] = (
        lambda self, context: (
            pickle_urn, wrappers_pb2.BytesValue(value=pickler.dumps(self))))
cls.register_urn(
pickle_urn,
wrappers_pb2.BytesValue,
lambda proto, unused_context: pickler.loads(proto.value))
def to_runner_api(self, context):
"""Returns an SdkFunctionSpec encoding this Fn.
Prefer overriding self.to_runner_api_parameter.
"""
from apache_beam.runners.api import beam_runner_api_pb2
urn, typed_param = self.to_runner_api_parameter(context)
return beam_runner_api_pb2.SdkFunctionSpec(
spec=beam_runner_api_pb2.FunctionSpec(
urn=urn,
parameter=proto_utils.pack_Any(typed_param)))
@classmethod
def from_runner_api(cls, fn_proto, context):
"""Converts from an SdkFunctionSpec to a Fn object.
Prefer registering a urn with its parameter type and constructor.
"""
parameter_type, constructor = cls._known_urns[fn_proto.spec.urn]
return constructor(
proto_utils.unpack_Any(fn_proto.spec.parameter, parameter_type),
context)
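# Illustrative sketch of the registration pattern described in register_urn's
# docstring; the urn, payload type and _MyFn class are not part of Beam.
class _MyFn(RunnerApiFn):
  def to_runner_api_parameter(self, unused_context):
    return 'beam:fn:my_fn:v0.1', wrappers_pb2.BytesValue(value=b'cfg')
_MyFn.register_urn(
    'beam:fn:my_fn:v0.1',
    wrappers_pb2.BytesValue,
    lambda proto, unused_context: _MyFn())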
|
hiteshagrawal/python
|
info/bkcom/problem8.py
|
Python
|
gpl-2.0
| 635 | 0.034646 |
#!/usr/bin/python
import sys
""" My input is 2234,2234,765,2,3,44,44,55,33,33,2,33,33,33
my o/p
2234:2,765,2,3,44:2,55,33:2,2,33:3"""
my_input = sys.argv[1]
#my_input = "1,7,2234,2234,765,2,3,44,44,55,33,33,2,33,33,33,33,1"
my_list = my_input.split(",")
my_str = ""
#print my_list
init = my_list[0]
count = 0
final_list = []
for i in my_list:
if i == init:
count += 1
else:
#print init, count
my_str = my_str + "," + "%s:%s" %(init,count)
count = 1
init = i
#print init, count
my_str = my_str + "," + "%s:%s" %(init,c
|
ount)
print my_str.replace(":1","")[1:]
#final_list = zip(my_numbers,my_count)
#print final_list
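# Equivalent sketch using itertools.groupby (same input/output convention):
#   from itertools import groupby
#   print ",".join(k if n == 1 else "%s:%d" % (k, n)
#                  for k, n in ((k, len(list(g))) for k, g in groupby(my_list)))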
|
miyataken999/weblate
|
weblate/trans/views/reports.py
|
Python
|
gpl-3.0
| 6,538 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from weblate.trans.models.changes import Change
from weblate.trans.forms import ReportsForm
from weblate.trans.views.helper import get_subproject
from weblate.trans.permissions import can_view_reports
from django.http import HttpResponse, JsonResponse
from django.views.decorators.http import require_POST
from django.shortcuts import redirect
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
def generate_credits(component, start_date, end_date):
"""Generates credits data for given component."""
result = []
for translation in component.translation_set.all():
authors = Change.objects.content().filter(
translation=translation,
timestamp__range=(start_date, end_date),
).values_list(
'author__email', 'author__first_name'
)
if not authors:
continue
result.append({translation.language.name: sorted(set(authors))})
return result
@login_required
@require_POST
def get_credits(request, project, subproject):
"""View for credits"""
obj = get_subproject(request, project, subproject)
if not can_view_reports(request.user, obj.project):
raise PermissionDenied()
form = ReportsForm(request.POST)
if not form.is_valid():
return redirect(obj)
data = generate_credits(
obj,
form.cleaned_data['start_date'],
form.cleaned_data['end_date'],
)
if form.cleaned_data['style'] == 'json':
return JsonResponse(data=data, safe=False)
if form.cleaned_data['style'] == 'html':
start = '<table>'
row_start = '<tr>'
language_format = u'<th>{0}</th>'
translator_start = '<td><ul>'
translator_format = u'<li><a href="mailto:{0}">{1}</a></li>'
translator_end = '</ul></td>'
row_end = '</tr>'
mime = 'text/html'
end = '</table>'
else:
start = ''
row_start = ''
language_format = u'* {0}\n'
translator_start = ''
translator_format = u' * {1} <{0}>'
translator_end = ''
row_end = ''
mime = 'text/plain'
end = ''
result = []
result.append(start)
for language in data:
name, translators = language.items()[0]
result.append(row_start)
result.append(language_format.format(name))
result.append(
u'{0}{1}{2}'.format(
translator_start,
'\n'.join(
[translator_format.format(*t) for t in translators]
),
translator_end,
)
)
result.append(row_end)
result.append(end)
return HttpResponse(
'\n'.join(result),
content_type='{0}; charset=utf-8'.format(mime),
)
def generate_counts(component, start_date, end_date):
"""Generates credits data for given component."""
result = {}
for translation in component.translation_set.all():
authors = Change.objects.content().filter(
translation=translation,
timestamp__range=(start_date, end_date),
).values_list(
'author__email', 'author__first_name', 'unit__num_words',
)
for email, name, words in authors:
if words is None:
continue
if email not in result:
result[email] = {
'name': name,
'email': email,
'words': words,
'count': 1,
}
else:
result[email]['words'] += words
result[email]['count'] += 1
return result.values()
@login_required
@require_POST
def get_counts(request, project, subproject):
"""View for work counts"""
obj = get_subproject(request, project, subproject)
if not can_view_reports(request.user, obj.project):
raise PermissionDenied()
form = ReportsForm(request.POST)
if not form.is_valid():
return redirect(obj)
data = generate_counts(
obj,
form.cleaned_data['start_date'],
form.cleaned_data['end_date'],
)
if form.cleaned_data['style'] == 'json':
        return JsonResponse(data=data, safe=False)
if form.cleaned_data['style'] == 'html':
start = (
'<table>\n<tr><th>Name</th><th>Email</th>'
'<th>Words</th><th>Count</th></tr>'
        )
row_start = '<tr>'
cell_name = cell_email = cell_words = cell_count = u'<td>{0}</td>\n'
row_end = '</tr>'
mime = 'text/html'
end = '</table>'
else:
heading = ' '.join([
'=' * 40,
'=' * 40,
'=' * 10,
'=' * 10,
])
start = '{0}\n{1:40} {2:40} {3:10} {4:10}\n{0}'.format(
heading,
'Name',
'Email',
'Words',
'Count'
)
row_start = ''
cell_name = cell_email = u'{0:40} '
cell_words = cell_count = u'{0:10} '
row_end = ''
mime = 'text/plain'
end = heading
result = []
result.append(start)
for item in data:
if row_start:
result.append(row_start)
result.append(
u'{0}{1}{2}{3}'.format(
cell_name.format(item['name']),
cell_email.format(item['email']),
cell_words.format(item['words']),
cell_count.format(item['count']),
)
)
if row_end:
result.append(row_end)
result.append(end)
return HttpResponse(
'\n'.join(result),
content_type='{0}; charset=utf-8'.format(mime),
)
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/django-0.96/django/core/handler.py
|
Python
|
bsd-3-clause
| 289 | 0.00346 |
# This module is DEPRECATED!
#
# You should no longer be pointing your mod_python configuration
# at "django.core.handler".
#
# Use "django.core.handlers.modpython" instead.
from django.core.handlers.modpython import ModPythonHandler
def handler(req):
return ModPythonHandler()(req)
|
aio-libs/aiomysql
|
aiomysql/sa/engine.py
|
Python
|
mit
| 6,916 | 0 |
# ported from:
# https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/engine.py
import asyncio
import aiomysql
from .connection import SAConnection
from .exc import InvalidRequestError, ArgumentError
from ..utils import _PoolContextManager, _PoolAcquireContextManager
from ..cursors import (
Cursor, DeserializationCursor, DictCursor, SSCursor, SSDictCursor)
try:
from sqlalchemy.dialects.mysql.pymysql import MySQLDialect_pymysql
from sqlalchemy.dialects.mysql.mysqldb import MySQLCompiler_mysqldb
except ImportError: # pragma: no cover
raise ImportError('aiomysql.sa requires sqlalchemy')
class MySQLCompiler_pymysql(MySQLCompiler_mysqldb):
def construct_params(self, params=None, _group_number=None, _check=True):
pd = super().construct_params(params, _group_number, _check)
for column in self.prefetch:
pd[column.key] = self._exec_default(column.default)
return pd
def _exec_default(self, default):
if default.is_callable:
return default.arg(self.dialect)
else:
return default.arg
_dialect = MySQLDialect_pymysql(paramstyle='pyformat')
_dialect.statement_compiler = MySQLCompiler_pymysql
_dialect.default_paramstyle = 'pyformat'
def create_engine(minsize=1, maxsize=10, loop=None,
dialect=_dialect, pool_recycle=-1, compiled_cache=None,
**kwargs):
"""A coroutine for Engine creation.
Returns Engine instance with embedded connection pool.
The pool has *minsize* opened connections to MySQL server.
"""
deprecated_cursor_classes = [
DeserializationCursor, DictCursor, SSCursor, SSDictCursor,
]
cursorclass = kwargs.get('cursorclass', Cursor)
if not issubclass(cursorclass, Cursor) or any(
            issubclass(cursorclass, cursor_class)
for cursor_class in deprecated_cursor_classes
):
raise ArgumentError('SQLAlchemy engine does not support '
'this cursor class')
    coro = _create_engine(minsize=minsize, maxsize=maxsize, loop=loop,
dialect=dialect, pool_recycle=pool_recycle,
compiled_cache=compiled_cache, **kwargs)
return _EngineContextManager(coro)
async def _create_engine(minsize=1, maxsize=10, loop=None,
dialect=_dialect, pool_recycle=-1,
compiled_cache=None, **kwargs):
if loop is None:
loop = asyncio.get_event_loop()
pool = await aiomysql.create_pool(minsize=minsize, maxsize=maxsize,
loop=loop,
pool_recycle=pool_recycle, **kwargs)
conn = await pool.acquire()
try:
return Engine(dialect, pool, compiled_cache=compiled_cache, **kwargs)
finally:
pool.release(conn)
class Engine:
"""Connects a aiomysql.Pool and
sqlalchemy.engine.interfaces.Dialect together to provide a
source of database connectivity and behavior.
An Engine object is instantiated publicly using the
create_engine coroutine.
"""
def __init__(self, dialect, pool, compiled_cache=None, **kwargs):
self._dialect = dialect
self._pool = pool
self._compiled_cache = compiled_cache
self._conn_kw = kwargs
@property
def dialect(self):
"""An dialect for engine."""
return self._dialect
@property
def name(self):
"""A name of the dialect."""
return self._dialect.name
@property
def driver(self):
"""A driver of the dialect."""
return self._dialect.driver
@property
def minsize(self):
return self._pool.minsize
@property
def maxsize(self):
return self._pool.maxsize
@property
def size(self):
return self._pool.size
@property
def freesize(self):
return self._pool.freesize
def close(self):
"""Close engine.
        Mark all engine connections to be closed when they are returned to the pool.
        A closed engine does not allow acquiring new connections.
"""
self._pool.close()
def terminate(self):
"""Terminate engine.
        Terminate the engine pool, instantly closing all acquired
        connections as well.
"""
self._pool.terminate()
async def wait_closed(self):
"""Wait for closing all engine's connections."""
await self._pool.wait_closed()
def acquire(self):
"""Get a connection from pool."""
coro = self._acquire()
return _EngineAcquireContextManager(coro, self)
async def _acquire(self):
raw = await self._pool.acquire()
conn = SAConnection(raw, self, compiled_cache=self._compiled_cache)
return conn
def release(self, conn):
"""Revert back connection to pool."""
if conn.in_transaction:
            raise InvalidRequestError("Cannot release a connection with "
                                      "an unfinished transaction")
raw = conn.connection
return self._pool.release(raw)
def __enter__(self):
raise RuntimeError(
'"yield from" should be used as context manager expression')
def __exit__(self, *args):
# This must exist because __enter__ exists, even though that
# always raises; that's how the with-statement works.
pass # pragma: nocover
def __iter__(self):
# This is not a coroutine. It is meant to enable the idiom:
#
# with (yield from engine) as conn:
# <block>
#
# as an alternative to:
#
# conn = yield from engine.acquire()
# try:
# <block>
# finally:
# engine.release(conn)
conn = yield from self.acquire()
return _ConnectionContextManager(self, conn)
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
self.close()
await self.wait_closed()
_EngineContextManager = _PoolContextManager
_EngineAcquireContextManager = _PoolAcquireContextManager
class _ConnectionContextManager:
"""Context manager.
This enables the following idiom for acquiring and releasing a
connection around a block:
with (yield from engine) as conn:
cur = yield from conn.cursor()
while failing loudly when accidentally using:
with engine:
<block>
"""
__slots__ = ('_engine', '_conn')
def __init__(self, engine, conn):
self._engine = engine
self._conn = conn
def __enter__(self):
assert self._conn is not None
return self._conn
def __exit__(self, *args):
try:
self._engine.release(self._conn)
finally:
self._engine = None
self._conn = None
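# Hypothetical usage sketch of the engine defined above; the connection
# parameters are fabricated and a reachable MySQL server is assumed.
async def _example():
    engine = await create_engine(user='root', db='test',
                                 host='127.0.0.1', password='secret')
    async with engine.acquire() as conn:
        await conn.execute('SELECT 42')
    engine.close()
    await engine.wait_closed()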
|
StuntsPT/pyRona
|
helper_scripts/geste2lfmm.py
|
Python
|
gpl-3.0
| 2,430 | 0 |
#!/usr/bin/python3
# Copyright 2018 Francisco Pina Martins <f.pinamartins@gmail.com>
# This file is part of geste2lfmm.
# geste2lfmm is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# geste2lfmm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with geste2lfmm. If not, see <http://www.gnu.org/licenses/>.
# Usage: python3 geste2lfmm.py file.geste file.lfmm
from collections import OrderedDict
def parse_geste(infile_name):
"""
    Parses a GESTE file and returns an OrderedDict with:
{"Population_name":[Freq_ref_allele_on SNP_1,Freq_ref_allele_on SNP_2,...]}
"""
infile = open(infile_name, "r")
pop_freqs = OrderedDict()
pop_starter = "[pop]="
popname = ""
for line in infile:
# Neat trick to ignore data that is not SNP info
# This code section should be very performant since it replaces most
# if - else tests with try -> except statements
line = line.split()
try:
int(line[0])
except ValueError: # In case it's a new section
if line[0].startswith(pop_starter):
popname = "Pop %s" % line[0].strip().replace(pop_starter, "")
pop_freqs[popname] = []
continue
except IndexError: # In case it's an empty line
continue
try:
ref_frequency = round(int(line[3]) / int(line[1]), 3)
except ZeroDivisionError:
ref_frequency = 9
pop_freqs[popname].append(ref_frequency)
infile.close()
return pop_freqs
def write_lfmm(pop_freqs, lfmm_filename):
"""
    Write an LFMM input file based on the OrderedDict extracted from the GESTE
file.
"""
outfile = open(lfmm_filename, 'w')
for name, freq in pop_freqs.items():
outfile.write(name + "\t")
outfile.write("\t".join(map(str, freq)) + "\n")
outfile.close()
if __name__ == "__main__":
from sys import argv
POP_FREQS = parse_geste(argv[1])
write_lfmm(POP_FREQS, argv[2])
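# Worked example (fabricated GESTE-style row, added for illustration):
# line[1] holds the total gene count and line[3] the reference-allele count,
# so the frequency rule in parse_geste gives:
_row = "1 20 2 14 6".split()
assert round(int(_row[3]) / int(_row[1]), 3) == 0.7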
|
thebjorn/fixedrec
|
fixedrec/utils.py
|
Python
|
mit
| 2,032 | 0.001476 |
# -*- coding: utf-8 -*-
"""Utility functions.
"""
from collections import OrderedDict
from .bsd_checksum import bsd_checksum # make name available from this module
def n_(s, replacement='_'):
"""Make binary fields more readable.
"""
if isinstance(s, (str, unicode, bytearray)):
return s.replace('\0', replacement)
return s
def split_string(s, *ndxs):
"""String sub-class with a split() method that splits a given indexes.
Usage:
>>> print split_string('D2008022002', 1, 5, 7, 9)
['D', '2008', '02', '20', '02']
"""
if len(ndxs) == 0:
return [s]
if len(ndxs) == 1:
i = ndxs[0]
return [s[:i], s[i:]]
res = []
b = 0
while ndxs:
a, b, ndxs = b, ndxs[0], ndxs[1:]
res.append(s[a:b])
res.append(s[b:])
return res
def split_fields(s, sizes):
"""Split a string into fields based on field `sizes`.
"""
slen = len(s)
    if None in sizes:
nonesize = slen - sum(v for v in sizes if v is not None)
sizes = [v or nonesize for v in sizes]
ndxs = [sizes[0]]
cur = 1
while cur < len(sizes) - 1:
ndxs.append(ndxs[-1] + sizes[cur])
cur += 1
return split_string(s, *ndxs)
class pset(OrderedDict):
"""A property set is an OrderedDict with prettier string display
    (useful when working with record lengths that are wider than your
terminal).
"""
def __repr__(self):
return '{%s}' % ', '.join('%s: %r' % (str(k), str(v))
for k,v in self.items())
def __str__(self):
return "{\n%s\n}" % ',\n'.join(' %s: %r' % (str(k), str(v))
for k,v in self.items())
def pad(data, size, padchar=' '):
"""Pad the `data` to exactly length = `size`.
"""
if len(data) > size:
raise ValueError("Data is longer than size, cannot pad.")
if len(data) == size:
return data
return data + padchar * (size - len(data))
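# Quick checks (values fabricated) of the splitting helpers above; a None
# entry in `sizes` absorbs whatever width the other fields leave over.
assert split_string('D2008022002', 1, 5, 7, 9) == ['D', '2008', '02', '20', '02']
assert split_fields('D2008022002', [1, 4, 2, 2, 2]) == ['D', '2008', '02', '20', '02']
assert split_fields('ABCDEF', [2, None]) == ['AB', 'CDEF']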
|
reo7sp/vk-text-likeness
|
vk_text_likeness/logs.py
|
Python
|
apache-2.0
| 889 | 0.003375 |
import inspect
import time
from collections import defaultdict
_method_time_logs = defaultdict(list)
def log_method_begin():
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
caller_name = "{}: {}".format(calframe[1].filename.split('/')[-1], calframe[1].function)
_method_time_logs[caller_name].append(time.time())
print("{}: begin".format(caller_name))
def log_method_end():
curframe = inspect.currentframe()
    calframe = inspect.getouterframes(curframe, 2)
caller_name = "{}: {}".format(calframe[1].filename.split('/')[-1], calframe[1].function)
if caller_name in _method_time_logs:
logs = _method_time_logs[caller_name]
if len(logs) > 0:
print("{}: end ({:.3}s)".format(caller_name, time.time() - logs[-1]))
logs.pop()
else:
print("{}: end".format(caller_name))
|
arcward/ticketpy
|
ticketpy/query.py
|
Python
|
mit
| 13,726 | 0.003643 |
"""Classes to handle API queries/searches"""
import requests
from ticketpy.model import Venue, Event, Attraction, Classification
class BaseQuery:
"""Base query/parent class for specific serach types."""
#: Maps parameter names to parameters expected by the API
#: (ex: *market_id* maps to *marketId*)
attr_map = {
'start_date_time': 'startDateTime',
'end_date_time': 'endDateTime',
'onsale_start_date_time': 'onsaleStartDateTime',
'onsale_end_date_time': 'onsaleEndDateTime',
'country_code': 'countryCode',
'state_code': 'stateCode',
'venue_id': 'venueId',
'attraction_id': 'attractionId',
'segment_id': 'segmentId',
'segment_name': 'segmentName',
'classification_name': 'classificationName',
'classification_id': 'classificationId',
'market_id': 'marketId',
'promoter_id': 'promoterId',
'dma_id': 'dmaId',
'include_tba': 'includeTBA',
'include_tbd': 'includeTBD',
'client_visibility': 'clientVisibility',
'include_test': 'includeTest',
'keyword': 'keyword',
'id': 'id',
'sort': 'sort',
'page': 'page',
'size': 'size',
'locale': 'locale',
'latlong': 'latlong',
'radius': 'radius'
}
def __init__(self, api_client, method, model):
"""
:param api_client: Instance of ``ticketpy.client.ApiClient``
:param method: API method (ex: *events*, *venues*...)
:param model: Model from ``ticketpy.model``. Either
``Event``, ``Venue``, ``Attraction`` or ``Classification``
"""
self.api_client = api_client
self.method = method
self.model = model
def __get(self, **kwargs):
"""Sends final request to ``ApiClient``"""
response = self.api_client.search(self.method, **kwargs)
return response
def _get(self, keyword=None, entity_id=None, sort=None, include_test=None,
page=None, size=None, locale=None, **kwargs):
"""Basic API search request, with only the parameters common to all
search functions. Specific searches pass theirs through **kwargs.
:param keyword: Keyword to search on
:param entity_id: ID of the object type (such as an event ID...)
:param sort: Sort method
:param include_test: ['yes', 'no', 'only'] to include test objects in
results. Default: *no*
:param page: Page to return (default: 0)
:param size: Page size (default: 20)
:param locale: Locale (default: *en*)
:param kwargs: Additional search parameters
:return:
"""
# Combine universal parameters and supplied kwargs into single dict,
# then map our parameter names to the ones expected by the API and
# make the final request
search_args = dict(kwargs)
search_args.update({
'keyword': keyword,
'id': entity_id,
'sort': sort,
'include_test': include_test,
'page': page,
'size': size,
'locale': locale
})
params = self._search_params(**search_args)
return self.__get(**params)
def by_id(self, entity_id):
"""Get a specific object by its ID"""
get_tmpl = "{}/{}/{}"
get_url = get_tmpl.format(self.api_client.url, self.method, entity_id)
r = requests.get(get_url, params=self.api_client.api_key)
r_json = self.api_client._handle_response(r)
return self.model.from_json(r_json)
def _search_params(self, **kwargs):
"""Returns API-friendly search parameters from kwargs
Maps parameter names to ``self.attr_map`` and removes
        parameters equal to ``None``
:param kwargs: Keyword arguments
:return: API-friendly parameters
"""
# Update search parameters with kwargs
kw_map = {}
for k, v in kwargs.items():
# If arg is API-friendly (ex: stateCode='GA')
if k in self.attr_map.keys():
kw_map[self.attr_map[k]] = v
            else:
                # Already API-friendly or unrecognized; pass through unchanged
                kw_map[k] = v
return {k: v for (k, v) in kw_map.items() if v is not None}
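# Illustrative check (not in the original file) of what _search_params does:
# snake_case kwargs become the camelCase names the API expects, and
# None-valued parameters are dropped. No request is made here.
# >>> q = BaseQuery(api_client=None, method='events', model=Event)
# >>> q._search_params(state_code='GA', page=None)
# {'stateCode': 'GA'}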
class AttractionQuery(BaseQuery):
"""Query class for Attractions"""
def __init__(self, api_client):
self.api_client = api_client
super().__init__(api_client, 'attractions', Attraction)
def find(self, sort=None, keyword=None, attraction_id=None,
source=None, include_test=None, page=None, size=None,
locale=None, **kwargs):
"""
:param sort: Response sort type (API default: *name,asc*)
:param keyword:
:param attraction_id:
:param source:
:param include_test: Include test attractions (['yes', 'no', 'only'])
:param page:
:param size:
:param locale: API default: *en*
:param kwargs:
:return:
"""
return self._get(keyword, attraction_id, sort, include_test,
page, size, locale, source=source, **kwargs)
class ClassificationQuery(BaseQuery):
"""Classification search/query class"""
def __init__(self, api_client):
super().__init__(api_client, 'classifications', Classification)
def find(self, sort=None, keyword=None, classification_id=None,
source=None, include_test=None, page=None, size=None,
locale=None, **kwargs):
"""Search classifications
:param sort: Response sort type (API default: *name,asc*)
:param keyword:
:param classification_id:
:param source:
:param include_test: Include test classifications
(['yes', 'no', 'only'])
:param page:
:param size:
:param locale: API default: *en*
:param kwargs:
:return:
"""
return self._get(keyword, classification_id, sort, include_test,
page, size, locale, source=source, **kwargs)
def segment_by_id(self, segment_id):
"""Return a ``Segment`` matching this ID"""
return self.by_id(segment_id).segment
def genre_by_id(self, genre_id):
"""Return a ``Genre`` matching this ID"""
        genre = None
        resp = self.by_id(genre_id)
        if resp.segment:
            for candidate in resp.segment.genres:
                if candidate.id == genre_id:
                    genre = candidate
        return genre
def subgenre_by_id(self, subgenre_id):
"""Return a ``SubGenre`` matching this ID"""
subgenre = None
segment = self.by_id(subgenre_id).segment
if segment:
subgenres = [
subg for genre in segment.genres
for subg in genre.subgenres
]
for subg in subgenres:
if subg.id == subgenre_id:
subgenre = subg
return subgenre
class EventQuery(BaseQuery):
"""Abstraction to search API for events"""
def __init__(self, api_client):
super().__init__(api_client, 'events', Event)
def find(self, sort='date,asc', latlong=None, radius=None, unit=None,
start_date_time=None, end_date_time=None,
onsale_start_date_time=None, onsale_end_date_time=None,
country_code=None, state_code=None, venue_id=None,
attraction_id=None, segment_id=None, segment_name=None,
classification_name=None, classification_id=None,
market_id=None, promoter_id=None, dma_id=None,
include_tba=None, include_tbd=None, client_visibility=None,
keyword=None, event_id=None, source=None, include_test=None,
page=None, size=None, locale=None, **kwargs):
"""Search for events matching given criteria.
:param sort: Sorting order of search result
            (default: *'date,asc'*)
:param latlong: Latitude/longitude filter
:param radius: Radius of area to search
|
warwick-one-metre/opsd
|
warwick/observatory/operations/actions/superwasp/park_telescope.py
|
Python
|
gpl-3.0
| 1,410 | 0.001418 |
#
# This file is part of opsd.
#
# opsd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# opsd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with opsd. If not, see <http://www.gnu.org/licenses/>.
"""Telescope action to park the telescope"""
from warwick.observatory.operations import TelescopeAction, TelescopeActionStatus
from warwick.observatory.operations.actions.superwasp.telescope_helpers import tel_stop, tel_park
class ParkTelescope(TelescopeAction):
"""Telescope action to park the telescope"""
def __init__(self, log_name):
super().__init__('Park Telescope', log_name, {})
def run_thread(self):
"""Thread that runs t
|
he hardware actions"""
if not tel_stop(self.log_name):
self.status = TelescopeActionStatus.Error
return
if not tel_park(self.log_name):
self.status = TelescopeActionStatus.Error
return
self.status = TelescopeActionStatus.Complete
|
dgholz/dung
|
dung/command/wait.py
|
Python
|
mit
| 226 | 0.00885 |
import argparse
class Wait(object):
    @staticmethod
def add_parser(parser):
parser.add_parser('wait')
def __init__(self, dung):
self.dung = dung
def run(self):
self.dung.wait_for_it()
|
vyacheslav-bezborodov/skt
|
stockviewer/stockviewer/db/main.py
|
Python
|
mit
| 142 | 0.028169 |
import dbmanager
def main(args, config):
    db = dbmanager.dbmanager(config.find('dbmanager'))
if args.make_migration:
db.make_migration()
|
unix4you2/practico
|
mod/pam/pam_nativo.py
|
Python
|
gpl-3.0
| 891 | 0.030303 |
#!/usr/bin/env python
import sys
import PAM
from getpass import getpass
def pam_conv(auth, query_list, userData):
resp = []
for i in range(len(query_list)):
query, type = query_list[i]
if type == PAM.PAM_PROMPT_ECHO_ON:
val = raw_input(query)
resp.append((val, 0))
        elif type == PAM.PAM_PROMPT_ECHO_OFF:
val = getpass(query)
resp.append((val, 0))
elif type == PAM.PAM_PROMPT_ERROR_MSG or type == PAM.PAM_PROMPT_TEXT_INFO:
print query
resp.append(('', 0))
else:
return None
return resp
service = 'passwd'
if len(sys.argv) == 2:
user = sys.argv[1]
else:
user = None
auth = PAM.pam()
auth.start(service)
if user != None:
auth.set_item(PAM.PAM_USER, user)
auth.set_item(PAM.PAM_CONV, pam_conv)
try:
auth.authenticate()
auth.acct_mgmt()
except PAM.error, resp:
print 'Go away! (%s)' % resp
except:
print 'Internal error'
else:
print 'Good to go!'
|
mheap/ansible
|
lib/ansible/modules/network/nxos/nxos_interface.py
|
Python
|
gpl-3.0
| 26,385 | 0.001819 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: nxos_interface
extends_documentation_fragment: nxos
version_added: "2.1"
short_description: Manages physical attributes of interfaces.
description:
- Manages physical attributes of interfaces of NX-OS switches.
author:
- Jason Edelman (@jedelman8)
- Trishna Guha (@trishnaguha)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- This module is also used to create logical interfaces such as
svis and loopbacks.
- Be cautious of platform specific idiosyncrasies. For example,
when you default a loopback interface, the admin state toggles
on certain versions of NX-OS.
- The M(nxos_overlay_global) C(anycast_gateway_mac) attribute must be
set before setting the C(fabric_forwarding_anycast_gateway) property.
options:
name:
description:
- Full name of interface, i.e. Ethernet1/1, port-channel10.
required: true
aliases: [interface]
interface_type:
description:
- Interface type to be unconfigured from the device.
choices: ['loopback', 'portchannel', 'svi', 'nve']
version_added: 2.2
speed:
description:
- Interface link speed. Applicable for ethernet interface only.
version_added: 2.5
admin_state:
description:
- Administrative state of the interface.
default: up
choices: ['up','down']
description:
description:
- Interface description.
mode:
description:
- Manage Layer 2 or Layer 3 state of the interface.
This option is supported for ethernet and portchannel interface.
Applicable for ethernet and portchannel interface only.
choices: ['layer2','layer3']
mtu:
description:
- MTU for a specific interface. Must be an even number between 576 and 9216.
Applicable for ethernet interface only.
version_added: 2.5
ip_forward:
description:
- Enable/Disable ip forward feature on SVIs.
choices: ['enable','disable']
version_added: 2.2
fabric_forwarding_anycast_gateway:
description:
- Associate SVI with anycast gateway under VLAN configuration mode.
Applicable for SVI interface only.
type: bool
version_added: 2.2
duplex:
description:
- Interface link status. Applicable for ethernet interface only.
default: auto
choices: ['full', 'half', 'auto']
version_added: 2.5
tx_rate:
description:
- Transmit rate in bits per second (bps).
- This is state check parameter only.
- Supports conditionals, see L(Conditionals in Networking Modules,../network/user_guide/network_working_with_command_output.html)
version_added: 2.5
rx_rate:
description:
- Receiver rate in bits per second (bps).
- This is state check parameter only.
- Supports conditionals, see L(Conditionals in Networking Modules,../network/user_guide/network_working_with_command_output.html)
version_added: 2.5
neighbors:
description:
- Check the operational state of given interface C(name) for LLDP neighbor.
- The following suboptions are available. This is state check parameter only.
suboptions:
host:
description:
- "LLDP neighbor host for given interface C(name)."
port:
description:
- "LLDP neighbor port to which given interface C(name) is connected."
version_added: 2.5
aggregate:
description: List of Interfaces definitions.
version_added: 2.5
state:
description:
- Specify desired state of the resource.
default: present
choices: ['present','absent','default']
delay:
description:
- Time in seconds to wait before checking for the operational state on remote
        device. This wait is applicable for operational state arguments.
default: 10
"""
EXAMPLES = """
- name: Ensure an interface is a Layer 3 port and that it has the proper description
nxos_interface:
name: Ethernet1/1
description: 'Configured by Ansible'
mode: layer3
- name: Admin down an interface
nxos_interface:
name: Ethernet2/1
admin_state: down
- name: Remove all loopback interfaces
nxos_interface:
name: loopback
state: absent
- name: Remove all logical interfaces
nxos_interface:
    interface_type: "{{ item }}"
state: absent
loop:
- loopback
- portchannel
- svi
- nve
- name: Admin up all loopback interfaces
nxos_interface:
name: loopback 0-1023
admin_state: up
- name: Admin down all loopback interfaces
nxos_interface:
    name: loopback 0-1023
admin_state: down
- name: Check neighbors intent arguments
nxos_interface:
name: Ethernet2/3
neighbors:
- port: Ethernet2/3
host: abc.mycompany.com
- name: Add interface using aggregate
nxos_interface:
aggregate:
- { name: Ethernet0/1, mtu: 256, description: test-interface-1 }
- { name: Ethernet0/2, mtu: 516, description: test-interface-2 }
duplex: full
speed: 100
state: present
- name: Delete interface using aggregate
nxos_interface:
aggregate:
- name: Loopback9
- name: Loopback10
state: absent
- name: Check intent arguments
nxos_interface:
name: Ethernet0/2
state: up
tx_rate: ge(0)
rx_rate: le(0)
"""
RETURN = """
commands:
description: command list sent to the device
returned: always
type: list
sample:
- interface Ethernet2/3
- mtu 1500
- speed 10
"""
import re
import time
from copy import deepcopy
from ansible.module_utils.network.nxos.nxos import load_config, run_commands
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, normalize_interface
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import conditional, remove_default_spec
def execute_show_command(command, module):
if 'show run' not in command:
output = 'json'
else:
output = 'text'
cmds = [{
'command': command,
'output': output,
}]
body = run_commands(module, cmds, check_rc=False)
if body and "Invalid" in body[0]:
return []
else:
return body
def search_obj_in_list(name, lst):
for o in lst:
if o['name'] == name:
return o
return None
def get_interface_type(interface):
"""Gets the type of interface
"""
if interface.upper().startswith('ET'):
return 'ethernet'
elif interface.upper().startswith('VL'):
return 'svi'
elif interface.upper().startswith('LO'):
return 'loopback'
elif interface.upper().startswith('MG'):
return 'management'
elif interface.upper().startswith('MA'):
return 'management'
elif interface.upper().startswith('PO'):
return 'portchannel'
elif interface.upper().startswith('NV'):
return 'nve'
else:
return 'unknown'
def get_interfaces_dict(module):
"""Gets all active interfaces on a given switch
"""
try:
body = execute_show_command('show interface', module)[0]
except IndexError:
return {}
interfaces = {
'ethernet': [],
'svi': [],
'loopback': [],
'management': [],
'portchannel': [],
'nve': [],
'unknown': []
}
if body:
interface_list = body['TABLE_interface']['ROW_interface']
for index in interface_list:
intf = index['interface']
intf_type = get_interface_type(intf)
interfaces[intf_type].append(intf)
return interfaces
def get_vlan_interface_attributes(name, intf_type, module):
""" Returns dictionary that has two k/v pairs:
admin_state & description if not an svi, returns None
"""
command = 'show run interface {0} all'.format(name)
try:
body = execute_show_command(command, module)[0]
except IndexError:
return None
if body:
|
ciudadanointeligente/deldichoalhecho
|
ddah_web/tests/instance_template_tests.py
|
Python
|
gpl-3.0
| 3,123 | 0.004483 |
from django.test import TestCase, RequestFactory
from ddah_web.models import DDAHTemplate, DDAHInstanceWeb
from ddah_web import read_template_as_string
from ddah_web.views import MoustacheTemplateResponse
class InstanceTemplateTestCase(TestCase):
'''There is a template which contains the html to be represented using {{moustache}}'''
def setUp(self):
        self.default_template = read_template_as_string('instance_templates/default.html')
self.default_template_flat_page = read_template_as_string('instance_templates/default_flat_page.html')
self.default_template_footer = read_template_as_string('instance_templates/partials/footer.html')
self.default_template_head = read_template_as_string('instance_templates/partials/head.html')
        self.default_template_header = read_template_as_string('instance_templates/partials/header.html')
self.default_template_style = read_template_as_string('instance_templates/partials/style.html')
def test_create_a_template(self):
template = DDAHTemplate.objects.create()
self.assertEquals(template.content, self.default_template)
self.assertEquals(template.flat_page_content, self.default_template_flat_page)
self.assertEquals(template.head, self.default_template_head)
self.assertEquals(template.header, self.default_template_header)
self.assertEquals(template.style, self.default_template_style)
self.assertEquals(template.footer, self.default_template_footer)
def test_when_creating_an_instance_it_automatically_creates_a_template(self):
instance = DDAHInstanceWeb.objects.create(label="bici", title="Bicicletas")
self.assertTrue(instance.template)
self.assertEquals(instance.template.content, self.default_template)
self.assertEquals(instance.template.head, self.default_template_head)
self.assertEquals(instance.template.header, self.default_template_header)
self.assertEquals(instance.template.style, self.default_template_style)
self.assertEquals(instance.template.footer, self.default_template_footer)
class MustacheTemplateResponseTestCase(TestCase):
def setUp(self):
self.template = DDAHTemplate.objects.create(content="content {{> head }} {{> header }} {{> style }} {{> footer }}",
head="head",
header="header",
style="style",
footer="footer")
self.instance = DDAHInstanceWeb.objects.create(label="bici", title="Bicicletas")
self.instance.template = self.template
self.factory = RequestFactory()
def test_renderes_correctly(self):
request = self.factory.get('/')
response = MoustacheTemplateResponse(request, 'unused.html')
response.context_data = {
'instance': self.instance
}
rendered_text = "content head header style footer"
self.assertEquals(rendered_text, response.rendered_content)
|
anchore/anchore-engine
|
anchore_engine/analyzers/modules/31_file_package_verify.py
|
Python
|
apache-2.0
| 2,520 | 0.001587 |
#!/usr/bin/env python3
import json
import os
import sys
import anchore_engine.analyzers.utils
analyzer_name = "file_package_verify"
try:
config = anchore_engine.analyzers.utils.init_analyzer_cmdline(
sys.argv, analyzer_name
)
except Exception as err:
print(str(err))
sys.exit(1)
imgname = config["imgid"]
imgid = config["imgid_full"]
outputdir = config["dirs"]["outputdir"]
unpackdir = config["dirs"]["unpackdir"]
squashtar = os.path.join(unpackdir, "squashed.tar")
meta = anchore_engine.analyzers.utils.get_distro_from_squashtar(
squashtar, unpackdir=unpackdir
)
distrodict = anchore_engine.analyzers.utils.get_distro_flavor(
meta["DISTRO"], meta["DISTROVERS"], likedistro=meta["LIKEDISTRO"]
)
flavor = distrodict["flavor"]
# gather file metadata from installed packages
result = {}
resultlist = {}
try:
if flavor == "RHEL":
try:
            # result = rpm_get_file_package_metadata(unpackdir, record)
result = anchore_engine.analyzers.utils.rpm_get_file_package_metadata_from_squashtar(
unpackdir, squashtar
)
except Exception as err:
raise Exception("ERROR: " +
|
str(err))
elif flavor == "DEB":
try:
# result = deb_get_file_package_metadata(unpackdir, record)
result = anchore_engine.analyzers.utils.dpkg_get_file_package_metadata_from_squashtar(
unpackdir, squashtar
)
except Exception as err:
raise Exception("ERROR: " + str(err))
elif flavor == "ALPINE":
try:
# result = apk_get_file_package_metadata(unpackdir, record)
result = anchore_engine.analyzers.utils.apk_get_file_package_metadata_from_squashtar(
unpackdir, squashtar
)
except Exception as err:
raise Exception("ERROR: " + str(err))
else:
# do nothing, flavor not supported for getting metadata about files from pkg manager
pass
except Exception as err:
print("WARN: analyzer unable to complete - exception: " + str(err))
result = {}
resultline = {}
if result:
for f in list(result.keys()):
try:
resultlist[f] = json.dumps(result[f], sort_keys=True)
except Exception as err:
print("WARN: " + str(err))
resultlist[f] = ""
if resultlist:
ofile = os.path.join(outputdir, "distro.pkgfilemeta")
anchore_engine.analyzers.utils.write_kvfile_fromdict(ofile, resultlist)
sys.exit(0)
|
vollov/py-lab
|
src/mongodb/__init__.py
|
Python
|
mit
| 1,550 | 0.007097 |
# -*- coding: utf-8 -*-
import sys,pymongo
from pymongo import MongoClient
from pymongo.errors import ConnectionFailure
from bson.code import Code
class MongoDBConfig:
def __init__(self, db_name, host):
self._db_name= db_name
self._host = host
class MongoDB:
'''Use mongo_client for a pooled mongodb'''
def __init__(self, config):
self.db_name = config._db_name
try:
self.connection = pymongo.MongoClient(host=config._host,auto_start_request=False)
except ConnectionFailure, e:
sys.stderr.write("Could not connect to MongoDB: %s" % e)
sys.exit(1)
def insert(self, doc, collection_name):
'insert a document into a collection'
db_handler = self.connection[self.db_name]
assert db_handler.connection == self.connection
with self.connection.start_request():
object_id = db_handler[collection_name].insert(doc, safe=True)
return object_id
def findOne(self, query, collection_name):
db_handler = self.connection[self.db_name]
assert db_handler.connection == self.connection
with self.connection.start_request():
result = db_handler[collection_name].find_one(query)
return result
def removeAll(self, collection_name):
db_handler = self.connection[self.db_name]
assert db_handler.connection == self.connection
with self.connection.start_request():
            db_handler[collection_name].remove()
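# Hypothetical usage of the wrapper above (names fabricated); note the file
# targets the legacy Python 2 / pymongo 2.x API (start_request, safe=True).
# config = MongoDBConfig('testdb', 'localhost')
# db = MongoDB(config)
# oid = db.insert({'name': 'widget'}, 'things')
# doc = db.findOne({'name': 'widget'}, 'things')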
|
ericbean/RecordSheet
|
test/test_jsonapp.py
|
Python
|
gpl-3.0
| 5,052 | 0.005542 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Eric Beanland <eric.beanland@gmail.com>
# This file is part of RecordSheet
#
# RecordSheet is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RecordSheet is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from nose.tools import with_setup
from webtest import TestApp
from sqlalchemy import event
from sqlalchemy.engine import create_engine
from sqlalchemy.orm.session import Session
from RecordSheet.dbapi import Base
from RecordSheet import jsonapp, dbapi, dbmodel, plugins
app = TestApp(jsonapp.app, extra_environ={'beaker.session':{'user_id':1}})
from test import dbhelper
###############################################################################
def setup_module():
jsonapp.app.uninstall(plugins.CsrfPlugin)
jsonapp.app.uninstall(plugins.AuthPlugin)
dbhelper.setup_module()
def teardown_module():
dbhelper.teardown_module()
###############################################################################
def test_generic_collection():
url = '/accounts?sort=name.asc&sort=id.desc&limit=5&offset=1'
response = app.get(url)
assert response.status_int == 200
assert response.content_type == 'application/json'
def test_generic_collection_404():
response = app.get('/doesnotexist', status=404)
assert response.status_int == 404
assert response.content_type == 'application/json'
###############################################################################
def test_generic_item():
response = app.get('/accounts/2')
assert response.status_int == 200
assert response.content_type == 'application/json'
assert 'id' in response.json
def test_generic_item_invalid_kind():
response = app.get('/doesnotexist/2', status=404)
assert response.status_int == 404
assert response.content_type == 'application/json'
def test_generic_item_invalid_id():
response = app.get('/accounts/0', status=404)
assert response.status_int == 404
assert response.content_type == 'application/json'
###############################################################################
data = {'name':'TEST145', 'desc':'test_145'}
def test_generic_put():
response = app.put_json('/accounts', data)
assert response.status_int == 200
assert response.content_type == 'application/json'
assert 'id' in response.json
def test_generic_put_duplicate():
response = app.put_json('/accounts', data, status=400)
assert response.status_int == 400
assert response.content_type == 'application/json'
def test_generic_put_invalid_attr():
data['__table__'] = 'fubar'
response = app.put_json('/accounts', data, status=400)
assert response.content_type == 'application/json'
###############################################################################
def test_generic_post():
response = app.post_json('/accounts/1', {'desc':'hello'}, status='*')
assert response.status_int == 200
assert response.content_type == 'application/json'
assert 'id' in response.json
def test_generic_post_invalid_id():
response = app.post_json('/accounts/0', {'desc':'hello'}, status=404)
assert response.content_type == 'application/json'
def test_generic_post_invalid_attr():
data = {'desc':'test', 'nxattr':1234}
response = app.post_json('/accounts/1', data, status=400)
assert response.content_type == 'application/json'
# FIXME: needs to generate an Integrity exception serverside
#def test_generic_post_invalid_attr():
# response = app.post_json('/accounts/1', {'desc':1}, status=404)
# assert response.content_type == 'application/json'
###############################################################################
def test_journal_put():
posts = [{'amount':100, 'account_id':'TEST01'},
{'amount':-100, 'account_id':'TEST02', 'memo':'testing'}]
data = {'memo':'test journal entry',
'datetime':'2016-06-05 14:09:00-05',
'posts':posts}
response = app.put_json('/journal', data, status='*')
assert response.status_int == 200
assert response.content_type == 'application/json'
assert 'id' in response.json
###############################################################################
def test_imported_transactions_get():
response = app.get('/imported_transactions?limit=10&offset=0')
assert response.content_type == 'application/json'
assert 'imported_transactions' in response.json
###############################################################################
|
d120/kifplan
|
oplan/migrations/0008_auto_20160503_1910.py
|
Python
|
agpl-3.0
| 2,321 | 0.002155 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-05-03 17:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('oplan', '0007_auto_20160503_1638'),
]
operations = [
migrations.RemoveField(
model_name='aktermin',
name='constraintAfterEvents',
),
migrations.AddField(
model_name='aktermin',
name='constraintAfterEvents',
            field=models.ManyToManyField(related_name='constraint_must_be_after_this', to='oplan.AKTermin', verbose_name='Nach Veranstaltung(en)'),
),
        migrations.RemoveField(
model_name='aktermin',
name='constraintBeforeEvents',
),
migrations.AddField(
model_name='aktermin',
name='constraintBeforeEvents',
field=models.ManyToManyField(related_name='constraint_must_be_before_this', to='oplan.AKTermin', verbose_name='Vor Veranstaltung(en)'),
),
migrations.RemoveField(
model_name='aktermin',
name='constraintForceParallelWithEvents',
),
migrations.AddField(
model_name='aktermin',
name='constraintForceParallelWithEvents',
field=models.ManyToManyField(related_name='constraint_force_parallel_with_this', to='oplan.AKTermin', verbose_name='Gleichzeitig mit Veranstaltung(en)'),
),
migrations.RemoveField(
model_name='aktermin',
name='constraintNotParallelWithEvents',
),
migrations.AddField(
model_name='aktermin',
name='constraintNotParallelWithEvents',
field=models.ManyToManyField(related_name='constraint_not_parallel_with_this', to='oplan.AKTermin', verbose_name='Nicht gleichzeitig mit Veranstaltung(en)'),
),
migrations.RemoveField(
model_name='aktermin',
name='constraintRooms',
),
migrations.AddField(
model_name='aktermin',
name='constraintRooms',
field=models.ManyToManyField(blank=True, max_length=255, null=True, related_name='aktermin_constraint_room', to='oplan.Room', verbose_name='In einem der Räume'),
),
]
|
OCA/stock-logistics-warehouse
|
stock_vertical_lift_server_env/models/__init__.py
|
Python
|
agpl-3.0
| 36 | 0 |
from . import vertical_lift_shuttle
|
acutesoftware/worldbuild
|
worldbuild/world_builder.py
|
Python
|
gpl-2.0
| 2,333 | 0.014145 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# world_builder.py
class BuildMap(object):
"""
Base class to actually build a map which is defined
by the 'World' object or Grid (to be decided).
    This can mean printing the grid, displaying it as an image,
    creating the map in Minecraft, or building it in other virtual
    environments.
"""
def __init__(self, struct_data, style):
""
|
"
struct_data = details of coords to fill, make
style = details on colours, textures, if applicable
The assert checks that both are NOT strings, but should
        be iterable lists / dicts or subclasses of these.
"""
assert not isinstance(struct_data, str)
assert not isinstance(style, str)
self.struct_data = struct_data
self.style = style
def __str__(self):
res = ''
res += 'BuildMap ' + '\n'
if type(self.struct_data) is list:
for l in self.struct_data:
res += 'data:' + str(l) + '\n'
else: # assume dictionary
for k,v in self.struct_data.items():
res += 'data:' + str(k) + ' = ' + str(v) + '\n'
if type(self.style) is list:
for l in self.style:
res += 'style:' + str(l) + '\n'
else: # assume dictionary
for k,v in self.style.items():
res += 'style:' + str(k) + ' = ' + str(v) + '\n'
return res
class BuildMapMineCraft(BuildMap):
"""
Interface with Minecraft (currently via sendkeys to server)
to create objects in the Minecraft world.
"""
def build(self, ip_file):
import minecraft_builder
minecraft_builder.make_structure(ip_file)
class BuildMapGrid(BuildMap):
"""
    Not much to do here; simply returns a standard grid or
    CSV-formatted file of the world.
"""
pass
class BuildMapImage(BuildMap):
"""
Generates an image of the world, which is a 2d grid, or
multiple images if multiple floors (not much point calling this
    for a proper 3d world (Unity), but useful for castle/dungeon
game maps with multiple floors.
"""
pass
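# Small demonstration (fabricated coordinates and styles) of the dict
# branches in BuildMap.__str__ above:
if __name__ == '__main__':
    print(BuildMap({(0, 0): 'stone', (0, 1): 'grass'}, {'stone': 'grey'}))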
|
ahmadpriatama/Flask-Simple-Ecommerce
|
appname/__init__.py
|
Python
|
bsd-2-clause
| 1,969 | 0.001016 |
#! ../env/bin/python
from flask import Flask
from webassets.loaders import PythonLoader as PythonAssetsLoader
from appname import assets
from appname.models import db
from appname.controllers.main import main
from appname.controllers.categories import categories
from appname.controllers.products import products
from appname.controllers.catalogs import catalogs
from flask_bootstrap import Bootstrap
from flask import send_from_directory
import os
from appname.extensions import (
cache,
assets_env,
debug_toolbar,
login_manager
)
def create_app(object_name):
"""
    A Flask application factory, as explained here:
http://flask.pocoo.org/docs/patterns/appfactories/
Arguments:
object_name: the python path of the config object,
e.g. appname.settings.ProdConfig
"""
app = Flask(__name__)
@app.route('/uploads/<filename>')
def uploaded_file(filename):
return send_from_directory('/home/ahmad/workspace/python/Flask-CRUD/uploads/', filename)
Bootstrap(app)
app.config.from_object(object_name)
# initialize the cache
cache.init_app(app)
# initialize the debug tool bar
debug_toolbar.init_app(app)
# initialize SQLAlchemy
db.init_app(app)
db.app = app
login_manager.init_app(app)
    # Import and register the different asset bundles
assets_env.init_app(app)
with app.app_context():
assets_env.load_path = [
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), 'node_modules'),
os.path.join(os.path.dirname(__file__), 'static'),
]
assets_loader = PythonAssetsLoader(assets)
for name, bundle in assets_loader.load_bundles().items():
            assets_env.register(name, bundle)
# register our blueprints
app.register_blueprint(main)
app.register_blueprint(categories)
app.register_blueprint(products)
app.register_blueprint(catalogs)
return app
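# Hypothetical bootstrap using the factory above; the settings path follows
# the pattern from the docstring and is not defined in this file.
# app = create_app('appname.settings.DevConfig')
# app.run(debug=True)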
|
tochikuji/chainer-libDNN
|
libdnn/autoencoder.py
|
Python
|
mit
| 4,929 | 0.000406 |
# coding: utf-8
# Simple chainer interfaces for Deep learning researching
# For autoencoder
# Author: Aiga SUZUKI <ai-suzuki@aist.go.jp>
import chainer
import chainer.functions as F
import chainer.optimizers as Opt
import numpy
from libdnn.nnbase import NNBase
from types import MethodType
from abc import abstractmethod
class AutoEncoder(NNBase):
def __init__(self, model, gpu=-1):
NNBase.__init__(self, model, gpu)
self.optimizer = Opt.Adam()
self.optimizer.setup(self.model)
self.loss_function = F.mean_squared_error
self.loss_param = {}
def validate(self, x_data, train=False):
y = self.forward(x_data, train=train)
if self.gpu >= 0:
x_data = chainer.cuda.to_gpu(x_data)
x = chainer.Variable(x_data)
return self.loss_function(x, y, **self.loss_param)
def train(self, x_data, batchsize=100, action=(lambda: None)):
N = len(x_data)
        perm = numpy.random.permutation(N)
        sum_error = 0.
for i in range(0, N, batchsize):
x_batch = x_data[perm[i:i + batchsize]]
self.optimizer.zero_grads()
err = self.validate(x_batch, train=True)
err.backward()
self.optimizer.update()
sum_error += float(chainer.cuda.to_cpu(err.data)) * len(x_batch)
action()
return sum_error / N
def test(self, x_data, batchsize=100, action=(lambda: None)):
N = len(x_data)
perm = numpy.random.permutation(N)
sum_error = 0.
for i in range(0, N, batchsize):
x_batch = x_data[perm[i:i + batchsize]]
err = self.validate(x_batch, train=False)
sum_error += float(chainer.cuda.to_cpu(err.data)) * batchsize
action()
return sum_error / N
class StackedAutoEncoder(AutoEncoder):
def __init__(self, model, gpu=-1):
self.sublayer = []
AutoEncoder.__init__(self, model, gpu)
def set_order(self, encl, decl):
if len(encl) != len(decl):
raise TypeError('Encode/Decode layers mismatch')
self.depth = len(encl)
for (el, dl) in zip(encl, reversed(decl)):
self.sublayer.append(chainer.FunctionSet(
enc=self.model[el],
dec=self.model[dl]
))
@abstractmethod
def __encode(self, x, layer, train):
pass
def set_encode(self, func):
self.__encode = MethodType(func, self, StackedAutoEncoder)
def encode(self, x_data, layer=None, train=False):
if self.gpu >= 0:
x_data = chainer.cuda.to_gpu(x_data)
x = chainer.Variable(x_data)
return self.__encode(x, layer, train)
@abstractmethod
def __decode(self, x, layer, train):
pass
def set_decode(self, func):
self.__decode = MethodType(func, self, StackedAutoEncoder)
def decode(self, x_data, layer=None, train=False):
if self.gpu >= 0:
x_data = chainer.cuda.to_gpu(x_data)
x = chainer.Variable(x_data)
return self.__decode(x, layer, train)
def forward(self, x_data, train=False):
code = self.encode(x_data, train=train)
y = self.__decode(code, train=train)
return y
def validate(self, x_data, layer=None, train=False):
targ = self.encode(x_data, layer - 1, train=False)
code = self.encode(x_data, layer, train=train)
y = self.__decode(code, layer, train=train)
return self.loss_function(targ, y, **self.loss_param)
def train(self, x_data, batchsize=100, action=(lambda: None)):
errs = []
N = len(x_data)
perm = numpy.random.permutation(N)
for l in range(1, self.depth + 1):
self.optimizer.setup(self.sublayer[l - 1])
sum_error = 0.
for i in range(0, N, batchsize):
x_batch = x_data[perm[i:i + batchsize]]
self.optimizer.zero_grads()
err = self.validate(x_batch, layer=l, train=True)
err.backward()
self.optimizer.update()
sum_error += float(chainer.cuda.to_cpu(err.data)) * len(x_batch)
action()
errs.append(sum_error / N)
return tuple(errs)
def test(self, x_data, batchsize=100, action=(lambda: None)):
N = len(x_data)
perm = numpy.random.permutation(N)
sum_error = 0.
for i in range(0, N, batchsize):
x_batch = x_data[perm[i:i + batchsize]]
y = self.forward(x_batch, train=False)
if self.gpu >= 0:
x_batch = chainer.cuda.to_gpu(x_batch)
x = chainer.Variable(x_batch)
err = self.loss_function(x, y, **self.loss_param)
sum_error += float(chainer.cuda.to_cpu(err.data)) * len(x_batch)
action()
return sum_error / N
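# Hypothetical training-loop sketch (model and data fabricated); assumes a
# chainer.FunctionSet-style model as expected by the constructors above.
# ae = AutoEncoder(model, gpu=-1)
# for epoch in range(20):
#     train_err = ae.train(x_train, batchsize=100)
#     test_err = ae.test(x_test, batchsize=100)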
|
c17r/catalyst
|
src/mountains/__init__.py
|
Python
|
mit
| 465 | 0 |
"""
mountains
~~~~~~~~~
Takes a CSV file either via local or HTTP retrieval and outputs information about the mountains according to spec.
Originally a programming skills check for a particular position. I've since updated it to current Python versions
as well as packaging and testing methodologies.
:copyright: 2016-2017 Christian Erick Sauer.
:license: MIT, see LICENSE for more details.
""" # noqa
from .core import * # noqa
from .errors import *  # noqa
|
fegonda/icon_demo
|
code/web/server.py
|
Python
|
mit
| 3,926 | 0.018849 |
import tornado.ioloop
import tornado.web
import socket
import os
import sys
import time
import signal
# import datetime
import h5py
from datetime import datetime, date
import tornado.httpserver
from browserhandler import BrowseHandler
from annotationhandler import AnnotationHandler
from projecthandler import ProjectHandler
from helphandler import HelpHandler
from defaulthandler import DefaultHandler
base_path = os.path.dirname(__file__)
sys.path.insert(1,os.path.join(base_path, '../common'))
from utility import Utility
from database import Database
from paths import Paths
MAX_WAIT_SECONDS_BEFORE_SHUTDOWN = 0.5
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", DefaultHandler),
(r"/browse.*", BrowseHandler),
(r"/project.*", ProjectHandler),
(r"/annotate.*", AnnotationHandler),
(r'/help*', HelpHandler),
(r'/settings/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/settings/'}),
(r'/js/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/js/'}),
(r'/js/vendors/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/js/vendors/'}),
            (r'/css/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/css/'}),
(r'/uikit/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/uikit/'}),
(r'/images/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/images/'}),
            (r'/open-iconic/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/open-iconic/'}),
(r'/input/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/input/'}),
(r'/train/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/input/'}),
(r'/validate/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/input/'}),
#(r"/annotate/(.*)", AnnotationHandler, dict(logic=self)),
]
settings = {
"template_path": 'resources',
"static_path": 'resources',
}
tornado.web.Application.__init__(self, handlers, **settings)
import numpy as np
class Server():
def __init__(self, name, port):
self.name = name
self.port = port
application = Application()
self.http_server = tornado.httpserver.HTTPServer( application )
hostname = socket.gethostname()
print 'hostname:', hostname
self.ip = hostname #socket.gethostbyname( hostname )
def print_status(self):
Utility.print_msg ('.')
Utility.print_msg ('\033[93m'+ self.name + ' running/' + '\033[0m', True)
Utility.print_msg ('.')
Utility.print_msg ('open ' + '\033[92m'+'http://' + self.ip + ':' + str(self.port) + '/' + '\033[0m', True)
Utility.print_msg ('.')
def start(self):
self.print_status()
self.http_server.listen( self.port )
tornado.ioloop.IOLoop.instance().start()
def stop(self):
msg = 'shutting down %s in %s seconds'%(self.name, MAX_WAIT_SECONDS_BEFORE_SHUTDOWN)
Utility.print_msg ('\033[93m'+ msg + '\033[0m', True)
io_loop = tornado.ioloop.IOLoop.instance()
deadline = time.time() + MAX_WAIT_SECONDS_BEFORE_SHUTDOWN
def stop_loop():
now = time.time()
if now < deadline and (io_loop._callbacks or io_loop._timeouts):
io_loop.add_timeout(now + 1, stop_loop)
else:
io_loop.stop()
Utility.print_msg ('\033[93m'+ 'shutdown' + '\033[0m', True, 'done')
stop_loop()
def sig_handler(sig, frame):
msg = 'caught interrupt signal: %s'%sig
Utility.print_msg ('\033[93m'+ msg + '\033[0m', True)
tornado.ioloop.IOLoop.instance().add_callback(shutdown)
def shutdown():
server.stop()
def main():
global server
signal.signal(signal.SIGTERM, sig_handler)
signal.signal(signal.SIGINT, sig_handler)
port = 8888
name = 'icon webserver'
server = Server(name, port)
server.start()
if __name__ == "__main__":
main()
|
aronsky/home-assistant
|
homeassistant/components/nx584/binary_sensor.py
|
Python
|
apache-2.0
| 4,476 | 0 |
"""Support for exposing NX584 elements as sensors."""
import logging
import threading
import time
from nx584 import client as nx584_client
import requests
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_OPENING,
DEVICE_CLASSES,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_EXCLUDE_ZONES = "exclude_zones"
CONF_ZONE_TYPES = "zone_types"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = "5007"
DEFAULT_SSL = False
ZONE_TYPES_SCHEMA = vol.Schema({cv.positive_int: vol.In(DEVICE_CLASSES)})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_EXCLUDE_ZONES, default=[]): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
        vol.Optional(CONF_ZONE_TYPES, default={}): ZONE_TYPES_SCHEMA,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NX584 binary sensor platform."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
exclude = config.get(CONF_EXCLUDE_ZONES)
zone_types = config.get(CONF_ZONE_TYPES)
try:
client = nx584_client.Client(f"http://{host}:{port}")
zones = client.list_zones()
except requests.exceptions.ConnectionError as ex:
_LOGGER.error("Unable to connect to NX584: %s", str(ex))
return False
version = [int(v) for v in client.get_version().split(".")]
if version < [1, 1]:
_LOGGER.error("NX584 is too old to use for sensors (>=0.2 required)")
return False
zone_sensors = {
zone["number"]: NX584ZoneSensor(
zone, zone_types.get(zone["number"], DEVICE_CLASS_OPENING)
)
for zone in zones
if zone["number"] not in exclude
}
if zone_sensors:
add_entities(zone_sensors.values())
watcher = NX584Watcher(client, zone_sensors)
watcher.start()
else:
_LOGGER.warning("No zones found on NX584")
return True
class NX584ZoneSensor(BinarySensorEntity):
"""Representation of a NX584 zone as a sensor."""
def __init__(self, zone, zone_type):
"""Initialize the nx594 binary sensor."""
self._zone = zone
self._zone_type = zone_type
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._zone_type
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the binary sensor."""
return self._zone["name"]
@property
def is_on(self):
"""Return true if the binary sensor is on."""
# True means "faulted" or "open" or "abnormal state"
return self._zone["state"]
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return {"zone_number": self._zone["number"]}
class NX584Watcher(threading.Thread):
"""Event listener thread to process NX584 events."""
def __init__(self, client, zone_sensors):
"""Initialize NX584 watcher thread."""
super().__init__()
self.daemon = True
self._client = client
self._zone_sensors = zone_sensors
def _process_zone_event(self, event):
zone = event["zone"]
# pylint: disable=protected-access
if not (zone_sensor := self._zone_sensors.get(zone)):
return
zone_sensor._zone["state"] = event["zone_state"]
zone_sensor.schedule_update_ha_state()
def _process_events(self, events):
for event in events:
if event.get("type") == "zone_status":
self._process_zone_event(event)
def _run(self):
"""Throw away any existing events so we don't replay history."""
self._client.get_events()
while True:
events = self._client.get_events()
if events:
self._process_events(events)
def run(self):
"""Run the watcher."""
while True:
try:
self._run()
except requests.exceptions.ConnectionError:
_LOGGER.error("Failed to reach NX584 server")
time.sleep(10)
|
nandhp/youtube-dl
|
youtube_dl/extractor/yandexmusic.py
|
Python
|
unlicense
| 8,328 | 0.001817 |
# coding: utf-8
from __future__ import unicode_literals
import re
import hashlib
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
ExtractorError,
int_or_none,
float_or_none,
sanitized_Request,
urlencode_postdata,
)
class YandexMusicBaseIE(InfoExtractor):
@staticmethod
def _handle_error(response):
error = response.get('error')
if error:
raise ExtractorError(error, expected=True)
def _download_json(self, *args, **kwargs):
response = super(YandexMusicBaseIE, self)._download_json(*args, **kwargs)
self._handle_error(response)
return response
class YandexMusicTrackIE(YandexMusicBaseIE):
IE_NAME = 'yandexmusic:track'
IE_DESC = 'Яндекс.Музыка - Трек'
_VALID_URL = r'https?://music\.yandex\.(?:ru|kz|ua|by)/album/(?P<album_id>\d+)/track/(?P<id>\d+)'
_TEST = {
'url': 'http://music.yandex.ru/album/540508/track/4878838',
'md5': 'f496818aa2f60b6c0062980d2e00dc20',
'info_dict': {
'id': '4878838',
'ext': 'mp3',
'title': 'Carlo Ambrosio & Fabio Di Bari, Carlo Ambrosio - Gypsy Eyes 1',
'filesize': 4628061,
'duration': 193.04,
'track': 'Gypsy Eyes 1',
'album': 'Gypsy Soul',
'album_artist': 'Carlo Ambrosio',
'artist': 'Carlo Ambrosio & Fabio Di Bari, Carlo Ambrosio',
'release_year': '2009',
}
}
def _get_track_url(self, storage_dir, track_id):
data = self._download_json(
'http://music.yandex.ru/api/v1.5/handlers/api-jsonp.jsx?action=getTrackSrc&p=download-info/%s'
% storage_dir,
track_id, 'Downloading track location JSON')
key = hashlib.md5(('XGRlBW9FXlekgbPrRHuSiA' + data['path'][1:] + data['s']).encode('utf-8')).hexdigest()
storage = storage_dir.split('.')
return ('http://%s/get-mp3/%s/%s?track-id=%s&from=service-10-track&similarities-experiment=default'
% (data['host'], key, data['ts'] + data['path'], storage[1]))
def _get_track_info(self, track):
thumbnail = None
cover_uri = track.get('albums', [{}])[0].get('coverUri')
if cover_uri:
thumbnail = cover_uri.replace('%%', 'orig')
if not thumbnail.startswith('http'):
thumbnail = 'http://' + thumbnail
track_title = track['title']
track_info = {
'id': track['id'],
'ext': 'mp3',
'url': self._get_track_url(track['storageDir'], track['id']),
'filesize': int_or_none(track.get('fileSize')),
'duration': float_or_none(track.get('durationMs'), 1000),
'thumbnail': thumbnail,
'track': track_title,
}
def extract_artist(artist_list):
if artist_list and isinstance(artist_list, list):
artists_names = [a['name'] for a in artist_list if a.get('name')]
if artists_names:
return ', '.join(artists_names)
albums = track.get('albums')
if albums and isinstance(albums, list):
album = albums[0]
if isinstance(album, dict):
year = album.get('year')
track_info.update({
'album': album.get('title'),
'album_artist': extract_artist(album.get('artists')),
'release_year': compat_str(year) if year else None,
})
track_artist = extract_artist(track.get('artists'))
if track_artist:
track_info.update({
'artist': track_artist,
'title': '%s - %s' % (track_artist, track_title),
})
else:
track_info['title'] = track_title
return track_info
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
album_id, track_id = mobj.group('album_id'), mobj.group('id')
track = self._download_json(
'http://music.yandex.ru/handlers/track.jsx?track=%s:%s' % (track_id, album_id),
track_id, 'Downloading track JSON')['track']
return self._get_track_info(track)
class YandexMusicPlaylistBaseIE(YandexMusicBaseIE):
def _build_playlist(self, tracks):
return [
self.url_result(
'http://music.yandex.ru/album/%s/track/%s' % (track['albums'][0]['id'], track['id']))
for track in tracks if track.get('albums') and isinstance(track.get('albums'), list)]
class YandexMusicAlbumIE(YandexMusicPlaylistBaseIE):
IE_NAME = 'yandexmusic:album'
IE_DESC = 'Яндекс.Музыка - Альбом'
_VALID_URL = r'https?://music\.yandex\.(?:ru|kz|ua|by)/album/(?P<id>\d+)/?(\?|$)'
_TEST = {
'url': 'http://music.yandex.ru/album/540508',
'info_dict': {
'id': '540508',
'title': 'Carlo Ambrosio - Gypsy Soul (2009)',
},
'playlist_count': 50,
}
def _real_extract(self, url):
album_id = self._match_id(url)
album = self._download_json(
'http://music.yandex.ru/handlers/album.jsx?album=%s' % album_id,
album_id, 'Downloading album JSON')
entries = self._build_playlist(album['volumes'][0])
title = '%s - %s' % (album['artists'][0]['name'], album['title'])
year = album.get('year')
if year:
title += ' (%s)' % year
return self.playlist_result(entries, compat_str(album['id']), title)
class YandexMusicPlaylistIE(YandexMusicPlaylistBaseIE):
IE_NAME = 'yandexmusic:playlist'
IE_DESC = 'Яндекс.Музыка - Плейлист'
_VALID_URL = r'https?://music\.yandex\.(?:ru|kz|ua|by)/users/[^/]+/playlists/(?P<id>\d+)'
_TESTS = [{
'url': 'http://music.yandex.ru/users/music.partners/playlists/1245',
'info_dict': {
'id': '1245',
'title': 'Что слушают Enter Shikari',
'description': 'md5:3b9f27b0efbe53f2ee1e844d07155cc9',
},
'playlist_count': 6,
}, {
# playlist exceeding the limit of 150 tracks shipped with webpage (see
# https://github.com/rg3/youtube-dl/issues/6666)
'url': 'https://music.yandex.ru/users/ya.playlist/playlists/1036',
'info_dict': {
'id': '1036',
'title': 'Музыка 90-х',
},
'playlist_count': 310,
}]
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
mu = self._parse_json(
self._search_regex(
r'var\s+Mu\s*=\s*({.+?});\s*</script>', webpage, 'player'),
playlist_id)
playlist = mu['pageData']['playlist']
tracks, track_ids = playlist['tracks'], playlist['trackIds']
# tracks dictionary shipped with webpage is limited to 150 tracks,
# missing tracks should be retrieved manually.
if len(tracks) < len(track_ids):
present_track_ids = set([compat_str(track['id']) for track in tracks if track.get('id')])
missing_track_ids = set(map(compat_str, track_ids)) - set(present_track_ids)
request = sanitized_Request(
'https://music.yandex.ru/handlers/track-entries.jsx',
urlencode_postdata({
'entries': ','.join(missing_track_ids),
'lang': mu.get('settings', {}).get('lang', 'en'),
                'external-domain': 'music.yandex.ru',
'overembed': 'false',
'sign': mu.get('authData', {}).get('user', {}).get('sign'),
'strict': 'true',
}))
request.add_header('Referer', url)
request.add_header('X-Requested-With', 'XMLHttpRequest')
missing_tracks = self._download_json(
                request, playlist_id, 'Downloading missing tracks JSON', fatal=False)
if missing_tracks:
tracks.extend(missing_tracks)
return self.playlist_result(
self._build_playlist(tracks),
            compat_str(playlist_id), playlist.get('title'), playlist.get('description'))
|
mbayon/TFG-MachineLearning
|
venv/lib/python3.6/site-packages/sklearn/isotonic.py
|
Python
|
mit
| 14,061 | 0 |
# Authors: Fabian Pedregosa <fabian@fseoane.net>
# Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Nelle Varoquaux <nelle.varoquaux@gmail.com>
# License: BSD 3 clause
import numpy as np
from scipy import interpolate
from scipy.stats import spearmanr
from .base import BaseEstimator, TransformerMixin, RegressorMixin
from .utils import as_float_array, check_array, check_consistent_length
from .utils import deprecated
from ._isotonic import _inplace_contiguous_isotonic_regression, _make_unique
import warnings
import math
__all__ = ['check_increasing', 'isotonic_regression',
'IsotonicRegression']
def check_increasing(x, y):
"""Determine whether y is monotonically correlated with x.
y is found increasing or decreasing with respect to x based on a Spearman
correlation test.
Parameters
----------
x : array-like, shape=(n_samples,)
Training data.
y : array-like, shape=(n_samples,)
Training target.
Returns
-------
increasing_bool : boolean
Whether the relationship is increasing or decreasing.
Notes
-----
The Spearman correlation coefficient is estimated from the data, and the
sign of the resulting estimate is used as the result.
In the event that the 95% confidence interval based on Fisher transform
spans zero, a warning is raised.
References
----------
Fisher transformation. Wikipedia.
https://en.wikipedia.org/wiki/Fisher_transformation
"""
# Calculate Spearman rho estimate and set return accordingly.
rho, _ = spearmanr(x, y)
increasing_bool = rho >= 0
# Run Fisher transform to get the rho CI, but handle rho=+/-1
if rho not in [-1.0, 1.0] and len(x) > 3:
F = 0.5 * math.log((1. + rho) / (1. - rho))
F_se = 1 / math.sqrt(len(x) - 3)
# Use a 95% CI, i.e., +/-1.96 S.E.
# https://en.wikipedia.org/wiki/Fisher_transformation
rho_0 = math.tanh(F - 1.96 * F_se)
rho_1 = math.tanh(F + 1.96 * F_se)
# Warn if the CI spans zero.
if np.sign(rho_0) != np.sign(rho_1):
warnings.warn("Confidence interval of the Spearman "
"correlation coefficient spans zero. "
"Determination of ``increasing`` may be "
"suspect.")
return increasing_bool
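# A minimal usage sketch (input values invented, not from the library's tests):
#     check_increasing([1, 2, 3, 4, 5], [2, 4, 6, 8, 10])   # -> True  (rho = 1)
#     check_increasing([1, 2, 3, 4, 5], [10, 8, 6, 4, 2])   # -> False (rho = -1)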
def isotonic_regression(y, sample_weight=None, y_min=None, y_max=None,
increasing=True):
"""Solve the isotonic regression model::
min sum w[i] (y[i] - y_[i]) ** 2
subject to y_min = y_[1] <= y_[2] ... <= y_[n] = y_max
where:
- y[i] are inputs (real numbers)
- y_[i] are fitted
- w[i] are optional strictly positive weights (default to 1.0)
Read more in the :ref:`User Guide <isotonic>`.
Parameters
----------
y : iterable of floating-point values
The data.
sample_weight : iterable of floating-point values, optional, default: None
Weights on each point of the regression.
If None, weight is set to 1 (equal weights).
y_min : optional, default: None
If not None, set the lowest value of the fit to y_min.
y_max : optional, default: None
If not None, set the highest value of the fit to y_max.
increasing : boolean, optional, default: True
        Whether the computed ``y_`` should be increasing (if set to True) or
        decreasing (if set to False).
Returns
-------
y_ : list of floating-point values
Isotonic fit of y.
References
----------
"Active set algorithms for isotonic regression; A unifying framework"
by Michael J. Best and Nilotpal Chakravarti, section 3.
"""
order = np.s_[:] if increasing else np.s_[::-1]
y = np.array(y[order], dtype=np.float64)
if sample_weight is None:
sample_weight = np.ones(len(y), dtype=np.float64)
else:
sample_weight = np.array(sample_weight[order], dtype=np.float64)
_inplace_contiguous_isotonic_regression(y, sample_weight)
if y_min is not None or y_max is not None:
# Older versions of np.clip don't accept None as a bound, so use np.inf
if y_min is None:
y_min = -np.inf
if y_max is None:
y_max = np.inf
np.clip(y, y_min, y_max, y)
return y[order]
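# Illustrative call (inputs invented; output shown rounded):
#     isotonic_regression([4., 5., 1., 6., 8.])
#     # -> array([3.33, 3.33, 3.33, 6., 8.]) -- the violating run 4, 5, 1 is
#     # pooled to its mean 10/3, and the already-sorted tail 6, 8 is kept as-is.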
class IsotonicRegression(BaseEstimator, TransformerMixin, RegressorMixin):
"""Isotonic regression model.
The isotonic regression optimization problem is defined by::
min sum w_i (y[i] - y_[i]) ** 2
subject to y_[i] <= y_[j] whenever X[i] <= X[j]
and min(y_) = y_min, max(y_) = y_max
where:
- ``y[i]`` are inputs (real numbers)
- ``y_[i]`` are fitted
- ``X`` specifies the order.
If ``X`` is non-decreasing then ``y_`` is non-decreasing.
- ``w[i]`` are optional strictly positive weights (default to 1.0)
Read more in the :ref:`User Guide <isotonic>`.
Parameters
----------
y_min : optional, default: None
If not None, set the lowest value of the fit to y_min.
y_max : optional, default: None
If not None, set the highest value of the fit to y_max.
increasing : boolean or string, optional, default: True
If boolean, whether or not to fit the isotonic regression with y
increasing or decreasing.
The string value "auto" determines whether y should
increase or decrease based on the Spearman correlation estimate's
sign.
out_of_bounds : string, optional, default: "nan"
The ``out_of_bounds`` parameter handles how x-values outside of the
training domain are handled. When set to "nan", predicted y-values
will be NaN. When set to "clip", predicted y-values will be
set to the value corresponding to the nearest train interval endpoint.
When set to "raise", allow ``interp1d`` to throw ValueError.
Attributes
----------
X_min_ : float
Minimum value of input array `X_` for left bound.
X_max_ : float
        Maximum value of input array `X_` for right bound.
f_ : function
The stepwise interpolating function that covers the domain `X_`.
Notes
-----
Ties are broken using the secondary method from Leeuw, 1977.
    References
----------
Isotonic Median Regression: A Linear Programming Approach
Nilotpal Chakravarti
Mathematics of Operations Research
Vol. 14, No. 2 (May, 1989), pp. 303-308
Isotone Optimization in R : Pool-Adjacent-Violators
Algorithm (PAVA) and Active Set Methods
Leeuw, Hornik, Mair
Journal of Statistical Software 2009
Correctness of Kruskal's algorithms for monotone regression with ties
Leeuw, Psychometrica, 1977
"""
def __init__(self, y_min=None, y_max=None, increasing=True,
out_of_bounds='nan'):
self.y_min = y_min
self.y_max = y_max
self.increasing = increasing
self.out_of_bounds = out_of_bounds
@property
@deprecated("Attribute ``X_`` is deprecated in version 0.18 and will be"
" removed in version 0.20.")
def X_(self):
return self._X_
@X_.setter
def X_(self, value):
self._X_ = value
@X_.deleter
def X_(self):
del self._X_
@property
@deprecated("Attribute ``y_`` is deprecated in version 0.18 and will"
" be removed in version 0.20.")
def y_(self):
return self._y_
@y_.setter
def y_(self, value):
self._y_ = value
@y_.deleter
def y_(self):
del self._y_
def _check_fit_data(self, X, y, sample_weight=None):
if len(X.shape) != 1:
raise ValueError("X should be a 1d array")
def _build_f(self, X, y):
"""Build the f_ interp1d function."""
# Handle the out_of_bounds argument by setting bounds_error
if self.out_of_bounds not in ["raise", "nan", "clip"]:
raise ValueError("The argument ``out_of_bounds`` must be in "
"'nan', 'clip', 'raise'; got {0}"
|
noironetworks/neutron
|
neutron/extensions/l3_ext_gw_mode.py
|
Python
|
apache-2.0
| 820 | 0 |
# Copyright 2013 VMware, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import l3_ext_gw_mode as apidef
from neutron_lib.api import extensions
class L3_ext_gw_mode(extensions.APIExtensionDescriptor):
api_definition = apidef
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247971765/PyKDE4/kdeui/KColorChooserMode.py
|
Python
|
gpl-2.0
| 508 | 0.009843 |
# encoding: utf-8
# module PyKDE4.kdeui
# from /usr/lib/python3/dist-packages/PyKDE4/kdeui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdecore as __PyKDE4_kdecore
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
import PyQt4.QtSvg as __PyQt4_QtSvg
from .int import int
class KColorChooserMode(int):
# no doc
    def __init__(self, *args, **kwargs): # real signature unknown
pass
__dict__ = None # (!) real value is ''
|
florentbr/SeleniumBasic
|
FirefoxAddons/build-implicit-wait.py
|
Python
|
bsd-3-clause
| 3,005 | 0.009318 |
"""Script to build the xpi add-in for firefox
Usage : python build-implicit-wait.py "x.x.x.x"
"""
import os, re, sys, shutil, datetime, zipfile, glob
CD = os.path.dirname(os.path.abspath(__file__))
SRC_DIR = CD + r'\implicit-wait'
OUT_DIR = CD + r'\bin'
RDF_PATH = CD + r'\implicit-wait\install.rdf'
def main(args):
arg_version = args and args[0]
set_working_dir(CD)
last_modified_time = get_file_mtime(RDF_PATH, '%Y-%m-%d %H:%M:%S')
current_version = find_in_file(RDF_PATH, r'version>([.\d]+)<');
print __doc__
print 'Last compilation : ' + (last_modified_time or 'none')
print 'Current Version : ' + current_version
new_version = arg_version or get_input_version(current_version)
print 'New version : ' + new_version + '\n'
print 'Update version number ...'
replace_in_file(RDF_PATH, r'(?<=version>)[.\d]+(?=<)', new_version)
print 'Build formater xpi ...'
make_dir(OUT_DIR)
set_working_dir(SRC_DIR)
with ZipFile(OUT_DIR + r'\implicit-wait.xpi', 'w') as zip:
zip.add(r'*')
print '\nDone'
def set_working_dir(directory):
make_dir(directory)
os.chdir(directory)
def make_dir(directory):
if not os.path.isdir(directory):
os.makedirs(directory)
def clear_dir(directory):
if os.path.isdir(directory):
shutil.rmtree(directory)
os.makedirs(directory)
def get_file_mtime(filepath, format=None):
if(not os.path.isfile(filepath)):
return None
dt = datetime.datetime.fromtimestamp(os.path.getmtime(filepath))
if format:
return dt.strftime(format)
return dt
def delete_file(filepath):
if(os.path.isfile(filepath)):
os.remove(filepath)
def find_in_file(filepath, pattern):
with open(filepath, 'r') as f:
result = re.search(pattern, f.read())
return result.group(result.re.groups)
def replace_in_file(filepath, pattern, replacement):
with open(filepath, 'r') as f:
text = re.sub(pattern, replacement, f.read())
with open(filepath, 'w') as f:
f.write(text)
def get_input(message):
try: return raw_input(message)
except NameError: return input(message)
def get_input_version(version):
while True:
input = get_input('Digit to increment [w.x.y.z] or version [0.0.0.0] or skip [s] ? ').strip()
if re.match(r's|w|x|y|z', input) :
idx = {'s': 99, 'w': 0, 'x': 1, 'y': 2, 'z': 3}[input]
return '.'.join([str((int(v)+(i == idx))*(i <= idx)) for i, v in enumerate(version.split('.'))])
        elif re.match(r'\d+\.\d+\.\d+\.\d+', input):
return input
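# How the digit-increment branch behaves (example versions are made up):
#     'z' on '1.2.3.4' -> '1.2.3.5'   (bump the last digit)
#     'x' on '1.2.3.4' -> '1.3.0.0'   (bump that digit, zero everything after it)
#     's' on '1.2.3.4' -> '1.2.3.4'   (skip: every digit is kept unchanged)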
class ZipFile(zipfile.ZipFile):
def __init__(cls, file, mode):
zipfile.ZipFile.__init__(cls, file, mode)
def add(self, path):
for item in glob.glob(path):
if os.path.isdir(item):
self.add(item + r'\*');
else:
self.write(item)
if __name__ == '__main__':
main(sys.argv[1:])
|
friendly-of-python/flask-online-store
|
flask_online_store/views/admin/order.py
|
Python
|
mit
| 396 | 0.005051 |
from flask import Blueprint, render_template, session, redirect, url_for, request, flash, g, jsonify, abort
#from flask_login import requires_login
admin_order = Blueprint('admin_order', __name__)
@admin_order.route('/')
def index():
pass
@admin_order.route('/new', methods=['GET', 'POST'])
def new():
pass
@admin_order.route('/edit', methods=['GET', 'POST'])
def edit():
pass
|
guardicore/monkey
|
monkey/infection_monkey/telemetry/scan_telem.py
|
Python
|
gpl-3.0
| 537 | 0.001862 |
from common.common_consts.telem_categories import TelemCategoryEnum
from infection_monkey.telemetry.base_telem import BaseTelem
class ScanTelem(BaseTelem):
def __init__(self, machine):
"""
Default scan telemetry constructor
:param machine: Scanned machine
"""
super(ScanTelem, self).__init__()
self.machine = machine
telem_category = TelemCategoryEnum.SCAN
def get_data(self):
return {"machine": self.machine.as_
|
dict(), "service_count": len(self.machine.services)}
|
browseinfo/odoo_saas3_nicolas
|
addons/website_sale/tests/test_ui.py
|
Python
|
agpl-3.0
| 963 | 0.015576 |
import openerp.addons.website.tests.test_ui as test_ui
def load_tests(loader, base, _):
base.addTest(test_ui.WebsiteUiSuite(test_ui.full_path(__file__,'website_sale-add_product-test.js'),
{'redirect': '/page/website.homepage'}))
base.addTest(test_ui.WebsiteUiSuite(test_ui.full_path(__file__,'website_sale-sale_process-test.js'),
{'redirect': '/page/website.homepage'}))
base.addTest(test_ui.WebsiteUiSuite(test_ui.full_path(__file__,'website_sale-sale_process-test.js'),
{'redirect': '/page/website.homepage', 'user': 'demo', 'password': 'demo'}))
# Test has been commented in SAAS-3 ONLY, it must be activated in trunk.
    # Log for test JS has been improved in trunk, so we stop losing time
    # in saas-3 and debug it directly in trunk.
# Tech Saas & AL agreement
# base.addTest(test_ui.WebsiteUiSuite(test_ui.full_path(__file__,'website_sale-sale_process-test.js'), {'path': '/', 'user': None}))
return base
|
fidals/refarm-site
|
pages/migrations/0003_auto_20160909_0747.py
|
Python
|
mit
| 467 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-09-09 07:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pages', '0002_auto_20160829_1730'),
]
operations = [
migrations.AlterField(
model_name='page',
            name='content',
field=models.TextField(blank=True, default='', null=True),
),
]
|
rrwick/Unicycler
|
unicycler/blast_func.py
|
Python
|
gpl-3.0
| 5,533 | 0.004518 |
"""
Copyright 2017 Ryan Wick (rrwick@gmail.com)
https://github.com/rrwick/Unicycler
This module contains functions relating to BLAST, which Unicycler uses to rotate completed circular
replicons to a standard starting point.
This file is part of Unicycler. Unicycler is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version. Unicycler is distributed in
the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details. You should have received a copy of the GNU General Public License along with Unicycler. If
not, see <http://www.gnu.org/licenses/>.
"""
import os
import subprocess
from .misc import load_fasta
from . import log
class CannotFindStart(Exception):
pass
def find_start_gene(sequence, start_genes_fasta, identity_threshold, coverage_threshold, blast_dir,
makeblastdb_path, tblastn_path):
"""
    This function uses tblastn to look for start genes in the sequence. It returns the first gene
(using the order in the file) which meets the identity and coverage thresholds, as well as
the position of that gene (including which strand it is on).
This function assumes that the sequence is circular with no overlap.
"""
# Prepare the replicon sequence. In order to get a solid, single BLAST hit in cases where the
# gene overlaps from the end to the start, we have to duplicate some of the replicon sequence
# for the BLAST database.
seq_len = len(sequence)
start_genes_fasta = os.path.abspath(start_genes_fasta)
queries = load_fasta(start_genes_fasta)
if not queries:
raise CannotFindStart
longest_query = max(len(x[1]) for x in queries)
longest_query *= 3 # amino acids to nucleotides
dup_length = min(seq_len, longest_query)
sequence = sequence + sequence[:dup_length]
# BLAST has serious issues with paths that contain spaces. This page explains some of it:
# https://www.ncbi.nlm.nih.gov/books/NBK279669/
# But I couldn't make it all work for makeblastdb (spaces made it require -out, and it never
# accepted spaces in the -out path, no matter how I used quotes). So we will just move into the
# temporary directory to run the BLAST commands.
starting_dir = os.getcwd()
os.chdir(blast_dir)
# Create a FASTA file of the replicon sequence.
replicon_fasta_filename = 'replicon.fasta'
replicon_fasta = open(replicon_fasta_filename, 'w')
replicon_fasta.write('>replicon\n')
replicon_fasta.write(sequence)
replicon_fasta.write('\n')
replicon_fasta.close()
# Build the BLAST database.
command = [makeblastdb_path, '-dbtype', 'nucl', '-in', replicon_fasta_filename]
log.log(' ' + ' '.join(command), 2)
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, err = process.communicate()
if err:
log.log('\nmakeblastdb encountered an error:\n' + err.decode())
os.chdir(starting_dir)
raise CannotFindStart
# Run the tblastn search.
command = [tblastn_path, '-db', replicon_fasta_filename, '-query', start_genes_fasta, '-outfmt',
'6 qseqid sstart send pident qlen qseq qstart bitscore', '-num_threads', '1']
log.log(' ' + ' '.join(command), 2)
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
blast_out, blast_err = process.communicate()
process.wait()
if blast_err:
log.log('\nBLAST encountered an error:\n' + blast_err.decode())
# Find the best hit in the results.
best_hit, best_bitscore = None, 0
for line in blast_out.decode().splitlines():
hit = BlastHit(line, seq_len)
if hit.pident >= identity_threshold and hit.query_cov >= coverage_threshold and \
hit.qstart == 0 and hit.bitscore > best_bitscore:
best_hit = hit
best_bitscore = hit.bitscore
os.chdir(starting_dir)
if best_bitscore:
return best_hit
else:
raise CannotFindStart
class BlastHit(object):
def __init__(self, blast_line, seq_len):
self.qseqid = ''
self.pident, self.qstart, self.bitscore, self.query_cov, self.start_pos = 0, 0, 0, 0, 0
self.flip = False
parts = blast_line.strip().split('\t')
if len(parts) > 7:
self.qseqid = parts[0]
self.pident = float(parts[3])
self.qstart = int(parts[6]) - 1
self.bitscore = float(parts[7])
sstart = int(parts[1]) - 1
send = int(parts[2])
qlen = float(parts[4])
qseq = parts[5]
self.query_cov = 100.0 * len(qseq) / qlen
if sstart <= send:
self.start_pos = sstart
self.flip = False
else:
self.start_pos = sstart + 1
self.flip = True
if self.start_pos >= seq_len:
self.start_pos -= seq_len
def __repr__(self):
return 'BLAST hit: query=' + self.qseqid + ', subject start=' + str(self.start_pos) + \
', strand=' + ('reverse' if self.flip else 'forward') + ', ID=' + \
str(self.pident) + ', cov=' + str(self.query_cov) + ', bitscore=' + \
str(self.bitscore)
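# Sketch of how a hit line is parsed (the tab-separated values below are made
# up, following the '-outfmt 6' column order requested in find_start_gene):
#     line = 'dnaA\t101\t200\t98.5\t50\t' + 'M' * 40 + '\t1\t75.0'
#     hit = BlastHit(line, seq_len=5000)
#     # hit.start_pos -> 100 (0-based), hit.flip -> False, hit.query_cov -> 80.0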
|
mapzen/vector-datasource
|
integration-test/421-zoos-z13.py
|
Python
|
mit
| 609 | 0 |
# -*- encoding: utf-8 -*-
from shapely.wkt import loads as wkt_loads
import dsl
from . import FixtureTest
class ZoosZ13(FixtureTest):
def test_zoo_appears_at_z13(self):
# Zoo Montana, Billings, MT
        self.generate_fixtures(dsl.way(2274329294, wkt_loads('POINT (-108.620965329915 45.7322965681428)'), {u'addr:housenumber': u'2100', u'name': u'Zoo Montana', u'addr:city': u'Billings, MT 59106', u'source': u'openstreetmap.org', u'tourism': u'zoo', u'addr:street': u'S. Shiloh Road'}))  # noqa
self.assert_has_feature(
13, 1624, 2923, 'pois',
{'kind': 'zoo'})
|
kins912/giantcellsim
|
giantcellsim_motifoutput.py
|
Python
|
mit
| 3,105 | 0.02963 |
import csv
from giantcellsim_trial import giantcellsim_trial
import itertools
import numpy
def flatten(items, seqtypes=(list, tuple)): # used for flattening lists
for i, x in enumerate(items):
while isinstance(items[i], seqtypes):
items[i:i+1] = items[i]
return items
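# e.g. flatten([1, [2, [3, 4]], (5,)]) -> [1, 2, 3, 4, 5]; note the list is
# mutated in place via the slice assignment above (example values invented).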
def giantcellsim_motifoutput(parameterlist,masterprefix,testprefix,trials,growthIterations,max_strand_nr,maxStrandLength,numCells,numRounds,motif,elong,bias):
pop_tracker = []
with open(masterprefix+ testprefix +'_MotifData_motif{motif}_len{maxStrandLength}_bias{bias}_elong{elong}_{trials}trials_numRound{numRounds}.csv'.format(motif = motif, maxStrandLength = maxStrandLength, bias=bias, elong=elong, trials=trials, numRounds=numRounds), 'wb') as f:
writer = csv.writer(f)
writer.writerow(parameterlist)
for trial in range(trials):
pop_tracker.append([])
nr_motifs, nr_strands, nr_cells_with_motif, pop_tracker[trial] = giantcellsim_trial(motif,growthIterations,max_strand_nr,maxStrandLength,numCells,numRounds,elong,bias)
            motif_freq = [motifs / float(total) for motifs,total in itertools.izip(nr_motifs,nr_strands)]
strands_freq = [strands / float(max_strand_nr*numCells) for strands in nr_strands]
cells_with_freq = [cells / float(numCells) for cells in nr_cells_with_motif]
writer.writerow(motif_freq)
writer.writerow(strands_freq)
            writer.writerow(cells_with_freq)
if trial == 0:
motif_freq_aggregate = motif_freq
strands_freq_aggregate = strands_freq
cells_with_freq_aggregate = cells_with_freq
nr_strands_per_time = nr_strands
else:
motif_freq_aggregate = [list(round_data) for round_data in zip(motif_freq_aggregate,motif_freq)]
strands_freq_aggregate = [list(round_data) for round_data in zip(strands_freq_aggregate,strands_freq)]
cells_with_freq_aggregate = [list(round_data) for round_data in zip(cells_with_freq_aggregate,cells_with_freq)]
nr_strands_per_time = [list(round_data) for round_data in zip(nr_strands_per_time,nr_strands)]
for time_point in range(numRounds):
motif_freq_aggregate[time_point] = flatten(motif_freq_aggregate[time_point])
strands_freq_aggregate[time_point] = flatten(strands_freq_aggregate[time_point])
cells_with_freq_aggregate[time_point] = flatten(cells_with_freq_aggregate[time_point])
nr_strands_per_time[time_point] = flatten(nr_strands_per_time[time_point])
means = []
stdevs = []
for iterator in range(3):
means.append([])
stdevs.append([])
for time_point in range(numRounds):
means[0].append(numpy.mean(motif_freq_aggregate[time_point]))
stdevs[0].append(numpy.std(motif_freq_aggregate[time_point]))
means[1].append(numpy.mean(strands_freq_aggregate[time_point]))
stdevs[1].append(numpy.std(strands_freq_aggregate[time_point]))
means[2].append(numpy.mean(cells_with_freq_aggregate[time_point]))
stdevs[2].append(numpy.std(cells_with_freq_aggregate[time_point]))
for mean_data in means:
writer.writerow(mean_data)
for stdev_data in stdevs:
writer.writerow(stdev_data)
f.close()
return pop_tracker, nr_strands_per_time
|
GovReady/readthedocs.org
|
readthedocs/rtd_tests/base.py
|
Python
|
mit
| 3,856 | 0.001037 |
import os
import shutil
import logging
from collections import OrderedDict
from mock import patch
from django.conf import settings
from django.test import TestCase
log = logging.getLogger(__name__)
class RTDTestCase(TestCase):
def setUp(self):
self.cwd = os.path.dirname(__file__)
self.build_dir = os.path.join(self.cwd, 'builds')
log.info("build dir: %s" % self.build_dir)
if not os.path.exists(self.build_dir):
os.makedirs(self.build_dir)
settings.DOCROOT = self.build_dir
def tearDown(self):
shutil.rmtree(self.build_dir)
@patch('readthedocs.projects.views.private.trigger_build', lambda x, basic: None)
@patch('readthedocs.projects.views.private.trigger_build', lambda x, basic: None)
class MockBuildTestCase(TestCase):
'''Mock build triggers for test cases'''
pass
class WizardTestCase(TestCase):
'''Test case for testing wizard forms'''
step_data = OrderedDict({})
url = None
wizard_class_slug = None
@patch('readthedocs.projects.views.private.trigger_build', lambda x, basic: None)
@patch('readthedocs.projects.views.private.trigger_build', lambda x, basic: None)
def post_step(self, step, **data):
        '''Post step form data to `url`, using supplementary `kwargs`
Use data from kwargs to build dict to pass into form
'''
if not self.url:
raise Exception('Missing wizard URL')
try:
data = {}
for key in self.step_data:
data.update({('{0}-{1}'.format(key, k), v)
for (k, v) in self.step_data[key].items()})
if key == step:
break
except KeyError:
pass
# Update with prefixed step data
data['{0}-current_step'.format(self.wizard_class_slug)] = step
resp = self.client.post(self.url, data)
self.assertIsNotNone(resp)
return resp
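    # Sketch of the expected shape of step_data (step and field names invented):
    #     step_data = OrderedDict([
    #         ('basics', {'name': 'proj', 'repo': 'https://example.com/r.git'}),
    #         ('extra', {'description': 'demo'}),
    #     ])
    #     self.post_step('basics')  # posts 'basics-name', 'basics-repo' plus
    #     # the '<wizard_class_slug>-current_step' field handled above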
# We use camelCase on purpose here to conform with unittest's naming
# conventions.
def assertWizardResponse(self, response, step=None): # noqa
'''Assert successful wizard response'''
        # Is this the last form?
if step is None:
try:
wizard = response.context['wizard']
self.assertEqual(wizard['form'].errors, {})
except (TypeError, KeyError):
pass
self.assertEqual(response.status_code, 302)
else:
self.assertIn('wizard', response.context)
wizard = response.context['wizard']
try:
self.assertEqual(wizard['form'].errors, {})
except AssertionError:
self.assertIsNone(wizard['form'].errors)
self.assertEqual(response.status_code, 200)
self.assertIsNotNone(response.context['wizard'])
self.assertEqual(wizard['steps'].current, step)
        self.assertIn('{0}-current_step'.format(self.wizard_class_slug),
response.content)
# We use camelCase on purpose here to conform with unittest's naming
# conventions.
def assertWizardFailure(self, response, field, match=None): # noqa
'''Assert field threw a validation error
response
Client response object
field
Field name to test for validation error
match
            Regex match for field validation error
'''
self.assertEqual(response.status_code, 200)
self.assertIn('wizard', response.context)
self.assertIn('form', response.context['wizard'])
self.assertIn(field, response.context['wizard']['form'].errors)
if match is not None:
error = response.context['wizard']['form'].errors[field]
self.assertRegexpMatches(unicode(error), match)
|
petervanderdoes/wger
|
wger/gym/migrations/0002_auto_20151003_1944.py
|
Python
|
agpl-3.0
| 1,061 | 0.002828 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import datetime
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('gym', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='gym',
name='email',
field=models.EmailField(max_length=254, verbose_name='Email', blank=True, null=True),
),
migrations.AlterField(
model_name='gym',
name='zip_code',
            field=models.CharField(max_length=10, verbose_name='ZIP code', blank=True, null=True),
),
migrations.AlterField(
model_name='gymconfig',
name='weeks_inactive',
            field=models.PositiveIntegerField(help_text='Number of weeks since the last time a user logged his presence to be considered inactive', default=4, verbose_name='Reminder inactive members'),
),
]
|
regisf/geocluster
|
geocluster/geoconvertion.py
|
Python
|
mit
| 1,701 | 0 |
# -*- coding: utf-8 -*-
# Geocluster - A simple and naive geo cluster
# (c) Régis FLORET 2014 and later
#
def convert_lat_from_gps(value):
"""
    Convert a latitude from GPS coordinates to decimal degrees
    :param value: The latitude as a float between -90 and 90
    :return: The latitude in decimal degrees
"""
assert (isinstance(value, (int, float)))
return value if value > 0 else 90 + abs(value)
def convert_lng_from_gps(value):
"""
Convert a longitude from GPS coordinate to decimal degrees
    :param value: The longitude as a float
:return: The longitude in decimal degrees
"""
assert (isinstance(value, (int, float)))
return value if value > 0 else 180 + abs(value)
def convert_lat_from_degrees(value):
"""
    Convert a latitude from decimal degrees to GPS coordinates
    :param value: The latitude as a float
    :return: The latitude in GPS coordinates
"""
    assert (isinstance(value, (int, float)))
if value > 180:
raise ValueError("Lattitude in degrees can't be greater than 180")
elif value < 0:
raise ValueError("Lattitude in degrees can't be lesser than 0")
return value if value < 90 else 90 - value
def convert_lng_from_degrees(value):
"""
Convert a longitude from decimal degrees to GPS coordinate
:param value: The longitude as a float
:return: The longitude in GPS coordinate
"""
assert (isinstance(value, (int, float)))
if value > 180:
raise ValueError("Lattitude in degrees can't be greater than 180")
elif value < 0:
raise ValueError("Lattitude in degrees can't be lesser than 0")
return value if value < 180 else 180 - value
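# Quick sanity checks (coordinates invented):
#     convert_lat_from_gps(-45.0)    # -> 135.0 (90 + 45)
#     convert_lng_from_gps(-10.0)    # -> 190.0 (180 + 10)
#     convert_lat_from_degrees(120)  # -> -30 (90 - 120)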
|
letsencrypt/letsencrypt
|
tools/pip_install_editable.py
|
Python
|
apache-2.0
| 629 | 0.00159 |
#!/usr/bin/env python
# pip installs packages in editable mode using pip_install.py
#
# cryptography is currently using this script in their CI at
# https://github.com/pyca/cryptography/blob/a02fdd60d98273ca34427235c4ca96687a12b239/.travis/downstream.d/certbot.sh#L8-L9.
# We should try to remember to keep their repo updated if we make any changes
# to this script which may break things for them.
import sys
import pip_install
def main(args):
new_args = []
for arg in args:
new_args.append('-e')
        new_args.append(arg)
pip_install.main(new_args)
if __name__ == '__main__':
main(sys.argv[1:])
|
dimagi/commcare-hq
|
corehq/apps/smsbillables/migrations/0022_pinpoint_gateway_fee_amount_null.py
|
Python
|
bsd-3-clause
| 485 | 0.002062 |
from django.db import migrations
from corehq.apps.smsbillables.management.commands.bootstrap_gateway_fees import (
bootstrap_pinpoint_gateway,
)
def add_pinpoint_gateway_fee_for_migration(apps, schema_editor):
bootstrap_pinpoint_gateway(apps)
class Migration(migrations.Migration):
dependencies = [
('smsbillables', '0021_infobip_gateway_fee_amount_null'),
]
operations = [
        migrations.RunPython(add_pinpoint_gateway_fee_for_migration),
]
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/gtk/_gtk/Arrow.py
|
Python
|
gpl-2.0
| 6,743 | 0.002818 |
# encoding: utf-8
# module gtk._gtk
# from /usr/lib/python2.7/dist-packages/gtk-2.0/gtk/_gtk.so
# by generator 1.135
# no doc
# imports
import atk as __atk
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
from Misc import Misc
class Arrow(Misc):
"""
Object GtkArrow
Properties from GtkArrow:
arrow-type -> GtkArrowType: Arrow direction
The direction the arrow should point
shadow-type -> GtkShadowType: Arrow shadow
Appearance of the shadow surrounding the arrow
Properties from GtkMisc:
xalign -> gfloat: X align
The horizontal alignment, from 0 (left) to 1 (right). Reversed for RTL layouts.
yalign -> gfloat: Y align
The vertical alignment, from 0 (top) to 1 (bottom)
xpad -> gint: X pad
The amount of space to add on the left and right of the widget, in pixels
ypad -> gint: Y pad
The amount of space to add on the top and bottom of the widget, in pixels
Signals from GtkWidget:
composited-changed ()
show ()
hide ()
map ()
unmap ()
realize ()
unrealize ()
size-request (GtkRequisition)
size-allocate (GdkRectangle)
state-changed (GtkStateType)
parent-set (GtkWidget)
hierarchy-changed (GtkWidget)
style-set (GtkStyle)
direction-changed (GtkTextDirection)
grab-notify (gboolean)
child-notify (GParam)
mnemonic-activate (gboolean) -> gboolean
grab-focus ()
focus (GtkDirectionType) -> gboolean
move-focus (GtkDirectionType)
event (GdkEvent) -> gboolean
event-after (GdkEvent)
button-press-event (GdkEvent) -> gboolean
button-release-event (GdkEvent) -> gboolean
scroll-event (GdkEvent) -> gboolean
motion-notify-event (GdkEvent) -> gboolean
keynav-failed (GtkDirectionType) -> gboolean
delete-event (GdkEvent) -> gboolean
destroy-event (GdkEvent) -> gboolean
expose-event (GdkEvent) -> gboolean
key-press-event (GdkEvent) -> gboolean
key-release-event (GdkEvent) -> gboolean
enter-notify-event (GdkEvent) -> gboolean
leave-notify-event (GdkEvent) -> gboolean
configure-event (GdkEvent) -> gboolean
focus-in-event (GdkEvent) -> gboolean
focus-out-event (GdkEvent) -> gboolean
map-event (GdkEvent) -> gboolean
unmap-event (GdkEvent) -> gboolean
property-notify-event (GdkEvent) -> gboolean
selection-clear-event (GdkEvent) -> gboolean
selection-request-event (GdkEvent) -> gboolean
selection-notify-event (GdkEvent) -> gboolean
selection-received (GtkSelectionData, guint)
selection-get (GtkSelectionData, guint, guint)
proximity-in-event (GdkEvent) -> gboolean
proximity-out-event (GdkEvent) -> gboolean
drag-leave (GdkDragContext, guint)
drag-begin (GdkDragContext)
drag-end (GdkDragContext)
drag-data-delete (GdkDragContext)
drag-failed (GdkDragContext, GtkDragResult) -> gboolean
drag-motion (GdkDragContext, gint, gint, guint) -> gboolean
drag-drop (GdkDragContext, gint, gint, guint) -> gboolean
drag-data-get (GdkDragContext, GtkSelectionData, guint, guint)
drag-data-received (GdkDragContext, gint, gint, GtkSelectionData, guint, guint)
visibility-notify-event (GdkEvent) -> gboolean
client-event (GdkEvent) -> gboolean
no-expose-event (GdkEvent) -> gboolean
window-state-event (GdkEvent) -> gboolean
damage-event (GdkEvent) -> gboolean
grab-broken-event (GdkEvent) -> gboolean
query-tooltip (gint, gint, gboolean, GtkTooltip) -> gboolean
popup-menu () -> gboolean
show-help (GtkWidgetHelpType) -> gboolean
accel-closures-changed ()
screen-changed (GdkScreen)
can-activate-accel (guint) -> gboolean
Properties from GtkWidget:
name -> gchararray: Widget name
The name of the widget
parent -> GtkContainer: Parent widget
The parent widget of this widget. Must be a Container widget
width-request -> gint: Width request
    Override for width request of the widget, or -1 if natural request should be used
height-request -> gint: Height request
Override for height request of the widget, or -1 if natural request should be used
visible -> gboolean: Visible
Whether the widget is visible
sensitive -> gboolean: Sensitive
Whether the widget responds to input
    app-paintable -> gboolean: Application paintable
Whether the application will paint directly on the widget
can-focus -> gboolean: Can focus
Whether the widget can accept the input focus
has-focus -> gboolean: Has focus
Whether the widget has the input focus
is-focus -> gboolean: Is focus
Whether the widget is the focus widget within the toplevel
can-default -> gboolean: Can default
Whether the widget can be the default widget
has-default -> gboolean: Has default
Whether the widget is the default widget
receives-default -> gboolean: Receives default
If TRUE, the widget will receive the default action when it is focused
composite-child -> gboolean: Composite child
Whether the widget is part of a composite widget
style -> GtkStyle: Style
The style of the widget, which contains information about how it will look (colors etc)
events -> GdkEventMask: Events
The event mask that decides what kind of GdkEvents this widget gets
extension-events -> GdkExtensionMode: Extension events
The mask that decides what kind of extension events this widget gets
no-show-all -> gboolean: No show all
Whether gtk_widget_show_all() should not affect this widget
has-tooltip -> gboolean: Has tooltip
Whether this widget has a tooltip
tooltip-markup -> gchararray: Tooltip markup
The contents of the tooltip for this widget
tooltip-text -> gchararray: Tooltip Text
The contents of the tooltip for this widget
window -> GdkWindow: Window
The widget's window if it is realized
double-buffered -> gboolean: Double Buffered
Whether or not the widget is double buffered
Signals from GtkObject:
destroy ()
Properties from GtkObject:
user-data -> gpointer: User Data
Anonymous User Data Pointer
Signals from GObject:
notify (GParam)
"""
def set(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__gtype__ = None # (!) real value is ''
|
JeffHeard/sondra
|
sondra/auth/collections.py
|
Python
|
apache-2.0
| 11,035 | 0.007431 |
import datetime
import bcrypt
import rethinkdb as r
from sondra.api.expose import expose_method, expose_method_explicit
from sondra.auth.decorators import authorized_method, authorization_required, authentication_required, anonymous_method
from sondra.collection import Collection
from .documents import Credentials, Role, User, LoggedInUser, IssuedToken
@authorization_required('write')
@authentication_required('read')
class Roles(Collection):
primary_key = 'slug'
document_class = Role
autocomplete_props = ('title', 'description')
template = '${title}'
class UserCredentials(Collection):
primary_key = 'user'
document_class = Credentials
private = True
@authorization_required('write')
class Users(Collection):
document_class = User
primary_key = 'username'
indexes = ['email']
order_by = ('family_name', 'given_name', 'username')
def __init__(self, application):
super(Users, self).__init__(application)
# if '__anonymous__' not in self:
# self.create_user('__anonymous__', '', '', active=False)
#
# self._anonymous_user = self['__anonymous__']
#
@property
def anonymous(self):
return None # self._anonymous_user
def validate_password(self, password):
"""Validate that the desired password is strong enough to use.
Override this in a subclass if you want stronger controls on the password. This version
        of the function only makes sure that the password has a minimum length of 6.
Args:
password (str): The password to use
Returns:
None
Raises:
ValueError if the password doesn't pass muster.
"""
        if len(password) < 6:
raise ValueError("Password too short")
def user_data(self,
username: str,
email: str,
                  locale: str='en-US',
password: str=None,
family_name: str=None,
given_name: str=None,
names: list=None,
active: bool=True,
roles: list=None,
confirmed_email: bool=False
) -> str:
"""Create a new user
Args:
username (str): The username to use. Can be blank. If blank the username is the email.
password (str): The password to use.
email (str): The email address for the user. Should be unique
locale (str="en-US"): The name of the locale for the user
family_name (str): The user's family name
given_name (str): The user's given name
names (str): The user's middle name
active (bool): Default true. Whether or not the user is allowed to log in.
roles (list[roles]): List of role objects or urls. The list of roles a user is granted.
confirmed_email (bool): Default False. The user has confirmed their email address already.
Returns:
            tuple: The new user record dict and its credentials dict (or None).
        Raises:
            PermissionError if the user already exists.
ValueError if the user's password does not pass muster.
"""
email = email.lower()
if not password:
active=False
password=''
if not username:
username = email
if username in self:
raise PermissionError("Attempt to create duplicate user " + username)
user = {
"username": username,
"email": email,
"email_verified": False,
"active": active if active is not None else self.document_class.active_by_default,
"locale": locale,
"created": datetime.datetime.now(),
"roles": roles or [],
"confirmed_email": confirmed_email
}
if family_name:
user['family_name'] = family_name
if given_name:
user['given_name'] = given_name
if names:
user['names'] = names
credentials = None
if active and password:
self.validate_password(password)
salt = bcrypt.gensalt()
secret = bcrypt.gensalt(16)
hashed_password = bcrypt.hashpw(password.encode('utf-8'), salt)
credentials = {
'password': hashed_password.decode('utf-8'),
'salt': salt.decode('utf-8'),
'secret': secret.decode('utf-8')
}
return user, credentials
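    # Rough illustration of the return value (names and values invented; a real
    # call needs a configured Users collection instance):
    #     user, creds = users.user_data('alice', 'alice@example.com', password='s3cretpw')
    #     user['username']  -> 'alice'; user['active'] -> True
    #     creds['password'] -> bcrypt hash; creds['salt'], creds['secret'] -> generated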
@authorized_method
@expose_method_explicit(
title="Create User",
side_effects=True,
request_schema={
"type": "object",
"required": ['username', 'email'],
"description": "Create a new user in the system",
"properties": {
"username": {"type": "string", "title": "Username", "description": "The new username"},
"email": {"type": "string", "title": "email", "description": "The user's email"},
"locale": {"type": "string", "title": "Locale", "description": "The user's default language setting", "default": "en-US"}, #, "format": "locale"},
"password": {"type": "string", "title": "Password", "description": "The user's password. Leave blank to have it auto-generated."},
"family_name": {"type": "string", "title": "Family Name", "description": "The user's password"},
"given_name": {"type": "string", "title": "Given Name", "description": "The user's password"},
"names": {"type": "string", "title": "Middle Name(s)", "description": "The user's middle names"},
"active": {"type": "boolean", "title": "Can Login", "description": "The user can login", "default": True},
"roles": {"type": "array", "title": "Roles", "items": {"type": "string", "fk": "/auth/roles"}, "description": "The roles to assign to the new user.", "default": []},
"confirmed_email": {"type": "boolean", "default": False, "title": "Confirmed", "description": "Whether or not the user has confirmed their email address."}
}
},
response_schema={
"type": "object",
"properties": {"_": {"type": "string", "description": "The new user's URL."}}
},
)
def create_user(
self,
username: str,
email: str,
locale: str='en-US',
password: str=None,
family_name: str=None,
given_name: str=None,
names: list=None,
active: bool=True,
roles: list=None,
confirmed_email: bool=False,
_user=None
) -> str:
"""Create a new user
Args:
username (str): The username to use. Can be blank. If blank the username is the email.
password (str): The password to use.
email (str): The email address for the user. Should be unique
locale (str="en-US"): The name of the locale for the user
family_name (str): The user's family name
given_name (str): The user's given name
names (str): The user's middle name
active (bool): Default true. Whether or not the user is allowed to log in.
roles (list[roles]): List of role objects or urls. The list of roles a user is granted.
confirmed_email (bool): Default False. The user has confirmed their email address already.
Returns:
str: The url of the new user object.
Raises:
            PermissionError if the user already exists.
ValueError if the user's password does not pass muster.
"""
user_record, credentials = self.user_data(
username=username,
email=email,
locale=locale,
password=password,
family_name=family_name,
given_name=given_name,
names=names,
active=active,
roles=roles,
confirmed_email=confirmed_email,
)
user = self.create(user_record)
if credentials:
credentials['user'] = username
|
krisys/SpojBot
|
src/spojbot/bot/models.py
|
Python
|
mit
| 6,922 | 0.002023 |
from django.db import models
from django.contrib.auth.models import User
import requests
from datetime import datetime
from BeautifulSoup import BeautifulSoup
import re
SPOJ_ENDPOINT = "http://www.spoj.com/status/%s/signedlist/"
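# The signedlist response is parsed line by line below; a row looks roughly like
# (values invented, column order inferred from the field indices used in
# fetch_spoj_data): "|  1234567 | 2014-01-02 10:11:12 | PRIME1 | AC | ... |"
# so fields[2] is the timestamp, fields[3] the problem code, fields[4] the verdict.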
class SpojUser(models.Model):
user = models.OneToOneField(User)
spoj_handle = models.CharField(max_length=50)
points = models.FloatField(default=0)
rank = models.IntegerField(default=10000000)
problems_solved = models.IntegerField(default=0)
notify_via_email = models.BooleanField(default=True)
last_notified = models.DateTimeField(null=True, blank=True)
FREQUENCY_CHOICES = (
(1, 'Daily'),
(2, 'Once in 2 days'),
(3, 'Once in 3 days'),
(7, 'Weekly'),
(15, 'Fortnightly'),
(30, 'Monthly')
)
frequency = models.IntegerField(default=2,
verbose_name='Problem Suggestion Frequency',
choices=FREQUENCY_CHOICES)
def __unicode__(self):
return '%s (%s)' % (self.spoj_handle, self.user.email)
def ppp(self):
if self.problems_solved == 0:
return 'NA'
return str((self.points * 1.0) / self.problems_solved)[0:6]
def fetch_spoj_data(self):
if not self.spoj_handle:
return
response = requests.get(SPOJ_ENDPOINT % (self.spoj_handle))
for line in response.text.split('\n'):
if line and line[0] == '|':
fields = line.split('|')
if fields[4].strip() == 'AC':
problem, created = SpojProblem.objects.get_or_create(
problem=fields[3].strip())
dt = datetime.strptime(fields[2].strip(),
"%Y-%m-%d %H:%M:%S")
try:
Submission.objects.get(user=self.user,
problem=problem)
except:
Submission.objects.create(user=self.user,
problem=problem, timestamp=dt)
self.fetch_spoj_stats()
def fetch_spoj_stats(self):
response = requests.get('http://www.spoj.com/users/%s/' % (
self.spoj_handle))
rank = re.search('>#\d+', response.text)
points = re.search('(.* points)', response.text)
if not rank:
return
self.rank = rank.group(0)[2:]
if not points:
return
        points = points.group()
try:
            self.points = float(re.search("\d+.\d+", points).group())
except:
self.points = float(re.search("\d+", points).group())
soup = BeautifulSoup(response.text)
stats = soup.find("table", {"class": "problems"})
for index, row in enumerate(stats.findAll('tr')):
if index == 0:
continue
cols = []
for col in row.findAll('td'):
cols.append(int(col.text))
self.problems_solved = cols[0]
self.save()
class CodeGroup(models.Model):
name = models.CharField(max_length=100)
notifications = models.IntegerField(default=2,
verbose_name='Problem Notification Frequncy')
last_notified = models.DateField(null=True, blank=True)
def __unicode__(self):
return '%s' % (self.name)
class GroupMember(models.Model):
group = models.ForeignKey(CodeGroup)
user = models.ForeignKey(User, null=True, blank=True)
user_email = models.EmailField(verbose_name='Email')
invite_accepted = models.BooleanField(default=False)
receive_emails = models.BooleanField(default=False,
verbose_name='Send email notifications')
is_owner = models.BooleanField(default=False)
class Meta:
unique_together = ('group', 'user_email',)
def __unicode__(self):
return '%s - %s' % (self.group, self.user_email)
class SpojProblem(models.Model):
problem = models.CharField(max_length=40, unique=True)
solved_by = models.IntegerField(default=0)
category = models.CharField(max_length=100, null=True, blank=True)
is_tutorial = models.BooleanField(default=False)
SOURCE_CHOICES = (
('problem_classifier', 'Problem Classifier'),
('curated', 'Curated'),
)
source = models.CharField(max_length=50, null=True, blank=True,
choices=SOURCE_CHOICES)
difficulty = models.IntegerField(default=0, null=True, blank=True)
def __unicode__(self):
return self.problem
def fetch_stats(self):
if self.is_tutorial:
return
response = requests.get('http://www.spoj.com/ranks/%s/' % (
self.problem))
soup = BeautifulSoup(response.text)
stats = soup.find("table", {"class": "problems"})
for index, row in enumerate(stats.findAll('tr')):
if index == 0:
continue
cols = []
for col in row.findAll('td'):
cols.append(int(col.text))
self.solved_by = int(cols[0])
self.save()
self.categorize_tutorial_problems()
def categorize_tutorial_problems(self):
if self.is_tutorial:
return
response = requests.get('http://www.spoj.com/problems/%s/' % (
self.problem))
if '(tutorial)' in response.text:
self.is_tutorial = True
self.save()
class Submission(models.Model):
problem = models.ForeignKey(SpojProblem)
user = models.ForeignKey(User)
timestamp = models.DateTimeField()
def __unicode__(self):
return '%s - %s' % (self.problem, self.user.email)
class ProblemSuggestion(models.Model):
user = models.ForeignKey(User)
problem = models.ForeignKey(SpojProblem)
timestamp = models.DateTimeField(auto_now_add=True)
class Meta:
unique_together = ('user', 'problem',)
def __unicode__(self):
return '%s - %s' % (self.group, self.problem)
class UserSuggestion(models.Model):
group = models.ForeignKey(CodeGroup)
problem = models.ForeignKey(SpojProblem)
user = models.ForeignKey(User)
timestamp = models.DateTimeField(auto_now_add=True)
class Meta:
unique_together = ('group', 'problem',)
def __unicode__(self):
return '%s' % (self.problem)
class Discussion(models.Model):
title = models.CharField(max_length=200)
content = models.TextField(null=True, blank=True)
group = models.ForeignKey(CodeGroup)
owner = models.ForeignKey(User)
timestamp = models.DateTimeField(auto_now_add=True)
last_updated = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Reply(models.Model):
discussion = models.ForeignKey(Discussion)
content = models.TextField()
user = models.ForeignKey(User)
timestamp = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.content[:200]
|
RCoon/CodingBat
|
Python/String_1/make_tags.py
|
Python
|
mit
| 576 | 0.005208 |
# The web is built with HTML strings like "<i>Yay</i>" which draws Yay as
# italic text. In this example, the "i" tag makes <i> and </i> which surround
# the word "Yay". Given tag and word strings, create the HTML string with tags
# around the word, e.g. "<i>Yay</i>".
# make_tags('i', 'Yay') --> '<i>Yay</i>'
# make_tags('i', 'Hello') --> '<i>Hello</i>'
# make_tags('cite', 'Yay') --> '<cite>Yay</cite>'
def make_tags(tag, word):
return "<{0}>{1}</{2}>".format(tag, word, tag)
print(make_tags('i', 'Yay'))
print(make_tags('i', 'Hello'))
print(make_tags('cite', 'Yay'))
|
markflyhigh/incubator-beam
|
sdks/python/apache_beam/testing/benchmarks/chicago_taxi/trainer/task.py
|
Python
|
apache-2.0
| 5,417 | 0.009599 |
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Trainer for the chicago_taxi demo."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import tensorflow as tf
import tensorflow_model_analysis as tfma
import tensorflow_transform as tft
from trainer import model
from trainer import taxi
SERVING_MODEL_DIR = 'serving_model_dir'
EVAL_MODEL_DIR = 'eval_model_dir'
TRAIN_BATCH_SIZE = 40
EVAL_BATCH_SIZE = 40
# Number of nodes in the first layer of the DNN
FIRST_DNN_LAYER_SIZE = 100
NUM_DNN_LAYERS = 4
DNN_DECAY_FACTOR = 0.7
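# With the defaults above, the DNN layer widths built in train_and_maybe_evaluate
# come out as [100, 70, 49, 34]: max(2, int(100 * 0.7 ** i)) for i in 0..3
# (arithmetic shown for orientation only).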
def train_and_maybe_evaluate(hparams):
"""Run the training and evaluate using the high level API.
Args:
hparams: Holds hyperparameters used to train the model as name/value pairs.
Returns:
The estimator that was used for training (and maybe eval)
"""
schema = taxi.read_schema(hparams.schema_file)
tf_transform_output = tft.TFTransformOutput(hparams.tf_transform_dir)
train_input = lambda: model.input_fn(
hparams.train_files,
tf_transform_output,
batch_size=TRAIN_BATCH_SIZE
)
eval_input = lambda: model.input_fn(
hparams.eval_files,
tf_transform_output,
batch_size=EVAL_BATCH_SIZE
)
train_spec = tf.estimator.TrainSpec(
train_input, max_steps=hparams.train_steps)
serving_receiver_fn = lambda: model.example_serving_receiver_fn(
tf_transform_output, schema)
exporter = tf.estimator.FinalExporter('chicago-taxi', serving_receiver_fn)
eval_spec = tf.estimator.EvalSpec(
eval_input,
steps=hparams.eval_steps,
exporters=[exporter],
name='chicago-taxi-eval')
run_config = tf.estimator.RunConfig(
save_checkpoints_steps=999, keep_checkpoint_max=1)
serving_model_dir = os.path.join(hparams.output_dir, SERVING_MODEL_DIR)
run_config = run_config.replace(model_dir=serving_model_dir)
estimator = model.build_estimator(
tf_transform_output,
      # Construct layer sizes with exponential decay
hidden_units=[
max(2, int(FIRST_DNN_LAYER_SIZE * DNN_DECAY_FACTOR**i))
for i in range(NUM_DNN_LAYERS)
],
config=run_config)
tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
return estimator
def run_experiment(hparams):
"""Train the model then export it for tf.model_analysis evaluation.
Args:
hparams: Holds hyperparameters used to train the model as name/value pairs.
"""
estimator = train_and_maybe_evaluate(hparams)
schema = taxi.read_schema(hparams.schema_file)
tf_transform_output = tft.TFTransformOutput(hparams.tf_transform_dir)
# Save a model for tfma eval
eval_model_dir = os.path.join(hparams.output_dir, EVAL_MODEL_DIR)
receiver_fn = lambda: model.eval_input_receiver_fn(
tf_transform_output, schema)
tfma.export.export_eval_savedmodel(
estimator=estimator,
export_dir_base=eval_model_dir,
eval_input_receiver_fn=receiver_fn)
def main():
parser = argparse.ArgumentParser()
# Input Arguments
parser.add_argument(
'--train-files',
help='GCS or local paths to training data',
nargs='+',
required=True)
parser.add_argument(
'--tf-transform-dir',
help='Tf-transform directory with model from preprocessing step',
required=True)
parser.add_argument(
      '--output-dir',
help="""\
Directory under which which the serving model (under /serving_model_dir)\
and the tf-mode-analysis model (under /eval_model_dir) will be written\
""",
required=True)
parser.add_argument(
      '--eval-files',
help='GCS or local paths to evaluation data',
nargs='+',
required=True)
# Training arguments
parser.add_argument(
'--job-dir',
help='GCS location to write checkpoints and export models',
required=True)
# Argument to turn on all logging
parser.add_argument(
'--verbosity',
choices=['DEBUG', 'ERROR', 'FATAL', 'INFO', 'WARN'],
default='INFO',
)
# Experiment arguments
parser.add_argument(
'--train-steps',
help='Count of steps to run the training job for',
required=True,
type=int)
parser.add_argument(
'--eval-steps',
help='Number of steps to run evalution for at each checkpoint',
default=100,
type=int)
parser.add_argument(
'--schema-file',
help='File holding the schema for the input data')
args = parser.parse_args()
# Set python level verbosity
tf.logging.set_verbosity(args.verbosity)
# Set C++ Graph Execution level verbosity
os.environ['TF_CPP_MIN_LOG_LEVEL'] = str(
tf.logging.__dict__[args.verbosity] / 10)
# Run the training job
hparams = tf.contrib.training.HParams(**args.__dict__)
run_experiment(hparams)
if __name__ == '__main__':
main()
|
echohenry2006/tvb-library
|
tvb/basic/traits/util.py
|
Python
|
gpl-2.0
| 6,354 | 0.003777 |
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Scientific Package. This package holds all simulators, and
# analysers necessary to run brain-simulations. You can use it stand alone or
# in conjunction with TheVirtualBrain-Framework Package. See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
All the little functions that make life nicer in the Traits package.
.. moduleauthor:: Mihai Andrei <mihai.andrei@codemart.ro>
.. moduleauthor:: Lia Domide <lia.domide@codemart.ro>
.. moduleauthor:: marmaduke <duke@eml.cc>
"""
import numpy
import collections
import inspect
from tvb.basic.profile import TvbProfile
# returns true if key is, by convention, public
ispublic = lambda key: key[0] != '_'
def str_class_name(thing, short_form=False):
"""
A helper function that tries to generate an informative name for its
argument: when passed a class, return its name, when passed an object
return a string representation of that value.
"""
# if thing is a class, it has attribute __name__
if hasattr(thing, '__name__'):
cls = thing
if short_form:
return cls.__name__
return cls.__module__ + '.' + cls.__name__
else:
# otherwise, it's an object and we return its __str__
return str(thing)
def get(obj, key, default=None):
"""
get() is a general function allowing us to ignore whether we are
getting from a dictionary or object. If obj is a dictionary, we
return the value corresponding to key, otherwise we return the
attribute on obj corresponding to key. In both cases, if key
does not exist, default is returned.
"""
if type(obj) is dict:
return obj.get(key, default)
else:
return getattr(obj, key) if hasattr(obj, key) else default
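# Illustrative behaviour of get() (values are hypothetical):
#   get({"a": 1}, "a")        # -> 1
#   get({"a": 1}, "b", 42)    # -> 42 (missing key falls back to default)
#   get(object(), "missing")  # -> None (missing attribute falls back too)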
def log_debug_array(log, array, array_name, owner=""):
"""
Simple access to debugging info on an array.
"""
if TvbProfile.current.TRAITS_CONFIGURATION.use_storage:
return
# Hide this logs in web-mode, with storage, because we have multiple storage exceptions
if owner != "":
name = ".".join((owner, array_name))
else:
name = array_name
if array is not None and hasattr(array, 'shape'):
shape = str(array.shape)
dtype = str(array.dtype)
has_nan = str(numpy.isnan(array).any())
array_max = str(array.max())
array_min = str(array.min())
log.debug("%s shape: %s" % (name, shape))
log.debug("%s dtype: %s" % (name, dtype))
log.debug("%s has NaN: %s" % (name, has_nan))
log.debug("%s maximum: %s" % (name, array_max))
log.debug("%s minimum: %s" % (name, array_min))
else:
log.debug("%s is None or not Array" % name)
Args = collections.namedtuple('Args', 'pos kwd')
class TypeRegister(list):
"""
TypeRegister is a smart list that can be queried to obtain selections of the
classes inheriting from Traits classes.
"""
def subclasses(self, obj, avoid_subclasses=False):
"""
The subclasses method takes a class (or given instance object, will use
the class of the instance), and returns a list of all options known to
this TypeRegister that are direct subclasses of the class or have the
class in their base class list.
:param obj: Class or instance
:param avoid_subclasses: When specified, subclasses are not retrieved, only current class.
"""
cls = obj if inspect.isclass(obj) else obj.__class__
if avoid_subclasses:
return [cls]
        if hasattr(cls, '_base_classes'):
bases = cls._base_classes
else:
bases = []
        subclasses = [opt for opt in self if ((issubclass(opt, cls) or cls in opt.__bases__)
                                              and not inspect.isabstract(opt) and opt.__name__ not in bases)]
        return subclasses
def multiline_math_directives_to_matjax(doc):
"""
    Looks for multi-line sphinx math directives in the given rst string
It converts them in html text that will be interpreted by mathjax
The parsing is simplistic, not a rst parser.
Wraps .. math :: body in \[\begin{split}\end{split}\]
"""
# doc = text | math
BEGIN = r'\[\begin{split}'
END = r'\end{split}\]'
in_math = False # 2 state parser
out_lines = []
indent = ''
for line in doc.splitlines():
if not in_math:
# math = indent directive math_body
indent, sep, _ = line.partition('.. math::')
if sep:
out_lines.append(BEGIN)
in_math = True
else:
out_lines.append(line)
else:
# math body is at least 1 space more indented than the directive, but we tolerate empty lines
if line.startswith(indent + ' ') or line.strip() == '':
out_lines.append(line)
else:
# this line is not properly indented, math block is over
out_lines.append(END)
out_lines.append(line)
in_math = False
if in_math:
# close math tag
out_lines.append(END)
return '\n'.join(out_lines)
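# A minimal sketch of the transformation above (the input is hypothetical):
#   multiline_math_directives_to_matjax("text\n.. math::\n    x^2\nmore")
#   # -> "text\n\\[\\begin{split}\n    x^2\n\\end{split}\\]\nmore"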
|
kenyaapps/loaderio
|
loaderio/resources/servers.py
|
Python
|
mit
| 207 | 0.043478 |
from loaderio.resources.client import Client
class Servers(Client):
"""
"""
def __init__(self, api_key):
Client.__init__(self, api_key)
  def list(self):
return self.request('GET', 'servers')
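  # Illustrative usage (the API key is a hypothetical placeholder):
  #   servers = Servers('my-api-key')
  #   servers.list()  # issues GET /servers through the inherited request() helper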
|
SmartJog/sjconf
|
sjconfparts/type.py
|
Python
|
lgpl-2.1
| 16,215 | 0.001788 |
import re
import sjconfparts.exceptions
class Error(sjconfparts.exceptions.Error):
pass
class ConversionError(Error):
pass
class ConversionList:
"""Custom list imp
|
lementation, linked to the related Conf.
Each modification of the list will auto-update the string representation
of the list directly in the Conf object, via a call to
self.conversion_method().
Nowadays this is considered ugly (maybe it wasn't back in 2008 with Python 2.5?),
but no one wants nor has time to redevelop a big part of SJConf to get rid of this.
(aka don't blame the current dev who just wants to port this mess to Python3 :-p)
Starting from Python3/new style classes, all used special methods must be
explicitly redefined:
https://docs.python.org/3/reference/datamodel.html#special-lookup
"""
    def __add__(self, other):
        result = self.innerList.__add__(other)
        self.conversion_method()
        return result
def __init__(self, conversion_method, list_object=None):
self.conversion_method = conversion_method
        if list_object is None:
list_object = []
self.innerList = list_object
def __contains__(self, item):
return self.innerList.__contains__(item)
def __delitem__(self, key):
self.innerList.__delitem__(key)
self.conversion_method()
    def __getitem__(self, key):
        return self.innerList.__getitem__(key)
    def __iadd__(self, other):
        self.innerList.__iadd__(other)
        self.conversion_method()
        return self
    def __imul__(self, other):
        self.innerList.__imul__(other)
        self.conversion_method()
        return self
    def __iter__(self):
        return self.innerList.__iter__()
    def __len__(self):
        return self.innerList.__len__()
    def __mul__(self, other):
        result = self.innerList.__mul__(other)
        self.conversion_method()
        return result
    def __reversed__(self):
        return self.innerList.__reversed__()
    def __rmul__(self, other):
        result = self.innerList.__rmul__(other)
        self.conversion_method()
        return result
def __setitem__(self, key, value):
self.innerList.__setitem__(key, value)
self.conversion_method()
def __str__(self):
return self.innerList.__str__()
def __getattr__(self, name):
list_method = getattr(self.innerList, name)
def method(*args, **kw):
result = list_method(*args, **kw)
if name in (
"append",
"extend",
"insert",
"pop",
"remove",
"reverse",
"sort",
):
self.conversion_method()
return result
return method
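    # Illustrative behaviour (names are hypothetical): every mutating call is
    # mirrored back into the Conf string form through conversion_method.
    #   values = ConversionList(sync_back, ['a'])
    #   values.append('b')   # list methods pass through __getattr__, sync_back() fires
    #   values[0] = 'c'      # __setitem__ also fires sync_back()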
class Type:
class ConversionBadTypeError(ConversionError):
def __init__(self, type_source, type_dest):
self.msg = "Invalid conversion from type %s to type %s, can only convert from str or to str"
@classmethod
def convert(cls, type_source, type_dest, dict_source, dict_dest, key):
if type_source == "str":
type_class_name = type_dest.capitalize()
elif type_dest == "str":
type_class_name = type_source.capitalize()
else:
raise Type.ConversionBadTypeError(type_source, type_dest)
type_class = getattr(cls, type_class_name)
return getattr(type_class, type_source + "_to_" + type_dest)(
dict_source, dict_dest, key
)
@classmethod
def convert_safe(cls, type_source, type_dest, dict_source, dict_dest, key):
if type_source == "str":
type_class_name = type_dest.capitalize()
elif type_dest == "str":
type_class_name = type_source.capitalize()
else:
raise Type.ConversionBadTypeError(type_source, type_dest)
type_class = getattr(cls, type_class_name)
if hasattr(type_class, type_source + "_to_" + type_dest + "_safe"):
return getattr(type_class, type_source + "_to_" + type_dest + "_safe")(
dict_source, dict_dest, key
)
else:
return getattr(type_class, type_source + "_to_" + type_dest)(
dict_source, dict_dest, key
)
@classmethod
def convert_key(cls, key, type):
return cls._convert_method("key", key, type)
@classmethod
def convert_value(cls, value, type, dict_str, dict_type, key):
return cls._convert_method("value", value, type, dict_str, dict_type, key)
@classmethod
def convert_key_for_search(cls, key, type):
return cls._convert_method("key_for_search", key, type)
@classmethod
def _convert_method(cls, method, value, type, *args):
type_class = getattr(cls, type.capitalize())
if not hasattr(type_class, method):
converted_value = value
else:
converted_value = getattr(type_class, method)(value, *args)
return converted_value
class List:
@classmethod
def value(cls, value, dict_str, dict_type, key):
def conversion_method():
Type.List.list_to_str(dict_type, dict_str, key)
return ConversionList(conversion_method, value)
@classmethod
def str_to_list(cls, dict_source, dict_dest, key):
def conversion_method():
Type.List.list_to_str(dict_dest, dict_source, key)
str_object = dict_source[key]
li = list(map(str.strip, str_object.split(",")))
try:
li.remove("")
except ValueError:
pass
dict_dest[key] = ConversionList(conversion_method, li)
return dict_dest
@classmethod
def str_to_list_safe(cls, dict_source, dict_dest, key):
str_object = dict_source[key]
list_object = list(map(str.strip, str_object.split(",")))
try:
list_object.remove("")
except ValueError:
pass
dict_dest[key] = list_object
return dict_dest
@classmethod
def list_to_str(cls, dict_source, dict_dest, key):
list_object = dict_source[key]
str_object = ", ".join(list_object)
dict_dest[key] = str_object
return dict_dest
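        # Illustrative round-trip (the dicts are hypothetical):
        #   Type.List.str_to_list_safe({'k': 'a, b ,'}, {}, 'k')  # -> {'k': ['a', 'b']}
        #   Type.List.list_to_str({'k': ['a', 'b']}, {}, 'k')     # -> {'k': 'a, b'}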
class Bool:
TRUE_VALUES = ("yes", "on", "true", "enabled", "enable")
FALSE_VALUES = ("no", "off", "false", "disabled", "disable")
class StrToBoolError(ConversionError):
def __init__(self, str_object):
self.msg = (
'Bad value "%s" for str to bool conversion, expected a value in %s'
% (str_object, str(Type.Bool.TRUE_VALUES + Type.Bool.FALSE_VALUES))
)
class BoolToStrError(ConversionError):
def __init__(self, bool_object):
self.msg = (
'Bad value "%s" for bool to str conversion, expected a boolean'
% (bool_object)
)
@classmethod
def str_to_bool(cls, dict_source, dict_dest, key):
str_object = dict_source[key]
if str_object.lower() in Type.Bool.TRUE_VALUES:
bool_object = True
elif str_object.lower() in Type.Bool.FALSE_VALUES:
bool_object = False
else:
raise Type.Bool.StrToBoolError(str_object)
dict_dest[key] = bool_object
return dict_dest
@classmethod
def bool_to_str(cls, dict_source, dict_dest, key):
bool_object = dict_source[key]
if bool_object == True:
str_object = "yes"
elif bool_object == False:
str_object = "no"
else:
raise Type.Bool.BoolToStrError(bool_object)
dict_dest[key] = str_object
return dict_dest
class Size:
class StrToSizeError(ConversionError):
def __init__(self, str_object):
self.msg = (
'Bad value "%s" for str to size conversion, expected a value like, e.g. 10M'
|
cadithealth/genemail
|
genemail/modifier/base.py
|
Python
|
mit
| 2,085 | 0.009592 |
# -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# file: $Id$
# auth: Philip J Grabner <grabner@cadit.com>
# date: 2013/07/31
# copy: (C) Copyright 2013 Cadit Health Inc., All Rights Reserved.
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
class Modifier(object):
#----------------------------------------------------------------------------
def modify(self, mailfrom, recipients, data):
'''
    Modifies the prepared email for sending.
:Parameters:
mailfrom : str
the SMTP-level `MAILFROM` command argument.
recipients : { list, tuple }
an iterable of the SMTP-level `RCPTTO` command arguments.
data : { str, email.MIMEMessage }
represents the SMTP-level `DATA` command argument, and can
either be a subclass of `email.MIMEMessage` or the raw SMTP data
(as generated by a call to `email.MIMEMessage.as_string()`).
:Returns:
tuple
A three-element tuple with the adjusted `mailfrom`, `recipients`
and `data` values.
'''
raise NotImplementedError()
#------------------------------------------------------------------------------
class ChainingModifier(Modifier):
#----------------------------------------------------------------------------
def __init__(self, modifiers=[]):
self.modifiers = modifiers
#----------------------------------------------------------------------------
def addModifier(self, modifier):
self.modifiers.append(modifier)
return self
#----------------------------------------------------------------------------
def modify(self, mailfrom, recipients, data):
for mod in self.modifiers:
mailfrom, recipients, data = mod.modify(mailfrom, recipients, data)
return (mailfrom, recipients, data)
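# A minimal sketch of a concrete modifier chain (the class and addresses are
# hypothetical, not part of genemail):
#
#   class ForceSender(Modifier):
#     def modify(self, mailfrom, recipients, data):
#       return ('noreply@example.com', recipients, data)
#
#   chain = ChainingModifier([ForceSender()])
#   chain.modify('a@example.com', ['b@example.com'], 'raw smtp data')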
#------------------------------------------------------------------------------
# end of $Id$
#------------------------------------------------------------------------------
|
ic-hep/DIRAC
|
src/DIRAC/FrameworkSystem/Service/ProxyManagerHandler.py
|
Python
|
gpl-3.0
| 18,027 | 0.003162 |
""" ProxyManager is the implementation of the ProxyManagement service in the DISET framework
.. literalinclude:: ../ConfigTemplate.cfg
:start-after: ##BEGIN ProxyManager:
:end-before: ##END
:dedent: 2
:caption: ProxyManager options
"""
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.DISET.RequestHandler import RequestHandler, getServiceOption
from DIRAC.Core.Security import Properties
from DIRAC.Core.Utilities.ThreadScheduler import gThreadScheduler
from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader
from DIRAC.ConfigurationSystem.Client.Helpers import Registry
DEFAULT_MAIL_FROM = "proxymanager@diracgrid.org"
class ProxyManagerHandler(RequestHandler):
__maxExtraLifeFactor = 1.5
__proxyDB = None
@classmethod
def initializeHandler(cls, serviceInfoDict):
useMyProxy = cls.srv_getCSOption("UseMyProxy", False)
mailFrom = getServiceOption(serviceInfoDict, "MailFrom", DEFAULT_MAIL_FROM)
try:
result = ObjectLoader().loadObject("FrameworkSystem.DB.ProxyDB")
if not result["OK"]:
gLogger.error("Failed to load ProxyDB class: %s" % result["Message"])
return result
dbClass = result["Value"]
cls.__proxyDB = dbClass(useMyProxy=useMyProxy, mailFrom=mailFrom)
except RuntimeError as excp:
return S_ERROR("Can't connect to ProxyDB: %s" % excp)
gThreadScheduler.addPeriodicTask(900, cls.__proxyDB.purgeExpiredTokens, elapsedTime=900)
gThreadScheduler.addPeriodicTask(900, cls.__proxyDB.purgeExpiredRequests, elapsedTime=900)
gThreadScheduler.addPeriodicTask(21600, cls.__proxyDB.purgeLogs)
gThreadScheduler.addPeriodicTask(3600, cls.__proxyDB.purgeExpiredProxies)
if useMyProxy:
gLogger.info("MyProxy: %s\n MyProxy Server: %s" % (useMyProxy, cls.__proxyDB.getMyProxyServer()))
return S_OK()
def __generateUserProxiesInfo(self):
"""Generate information dict about user proxies
:return: dict
"""
proxiesInfo = {}
credDict = self.getRemoteCredentials()
result = Registry.getDNForUsername(credDict["username"])
if not result["OK"]:
return result
selDict = {"UserDN": result["Value"]}
result = self.__proxyDB.getProxiesContent(selDict, {})
if not result["OK"]:
return result
contents = result["Value"]
userDNIndex = contents["ParameterNames"].index("UserDN")
userGroupIndex = contents["ParameterNames"].index("UserGroup")
expirationIndex = contents["ParameterNames"].index("ExpirationTime")
for record in contents["Records"]:
userDN = record[userDNIndex]
if userDN not in proxiesInfo:
proxiesInfo[userDN] = {}
userGroup = record[userGroupIndex]
proxiesInfo[userDN][userGroup] = record[expirationIndex]
return proxiesInfo
def __addKnownUserProxiesInfo(self, retDict):
"""Given a S_OK/S_ERR add a proxies entry with info of all the proxies a user has uploaded
:return: S_OK(dict)/S_ERROR()
"""
retDict["proxies"] = self.__generateUserProxiesInfo()
return retDict
auth_getUserProxiesInfo = ["authenticated"]
types_getUserProxiesInfo = []
def export_getUserProxiesInfo(self):
"""Get the info about the user proxies in the system
:return: S_OK(dict)
"""
return S_OK(self.__generateUserProxiesInfo())
# WARN: Since v7r1 requestDelegationUpload method use only first argument!
# WARN: Second argument for compatibility with older versions
types_requestDelegationUpload = [int]
def export_requestDelegationUpload(self, requestedUploadTime, diracGroup=None):
"""Request a delegation. Send a delegation request to client
:param int requestedUploadTime: requested live time
:return: S_OK(dict)/S_ERROR() -- dict contain id and proxy as string of the request
"""
if diracGroup:
self.log.warn("Since v7r1 requestDelegationUpload method use only first argument!")
credDict = self.getRemoteCredentials()
user = "%s:%s" % (credDict["username"], credDict["group"])
result = self.__proxyDB.generateDelegationRequest(credDict["x509Chain"], credDict["DN"])
if result["OK"]:
gLogger.info("Upload request by %s given id %s" % (user, result["Value"]["id"]))
else:
gLogger.error("Upload request failed", "by %s : %s" % (user, result["Message"]))
return result
types_completeDelegationUpload = [int, str]
def export_completeDelegationUpload(self, requestId, pemChain):
"""Upload result of delegation
:param int requestId: identity number
:param str pemChain: certificate as string
:return: S_OK(dict)/S_ERROR() -- dict contain proxies
"""
credDict = self.getRemoteCredentials()
userId = "%s:%s" % (credDict["username"], credDict["group"])
retVal = self.__proxyDB.completeDelegation(requestId, credDict["DN"], pemChain)
if not retVal["OK"]:
gLogger.error("Upload proxy failed", "id: %s user: %s message: %s" % (requestId, userId, retVal["Message"]))
return self.__addKnownUserProxiesInfo(retVal)
gLogger.info("Upload %s by %s completed" % (requestId, userId))
return self.__addKnownUserProxiesInfo(S_OK())
types_getRegisteredUsers = []
def export_getRegisteredUsers(self, validSecondsRequired=0):
"""Get the list of users who have a valid proxy in the system
:param int validSecondsRequired: required seconds the proxy is valid for
:return: S_OK(list)/S_ERROR() -- list contain dicts with user name, DN, group
expiration time, persistent flag
"""
credDict = self.getRemoteCredentials()
if Properties.PROXY_MANAGEMENT not in credDict["properties"]:
return self.__proxyDB.getUsers(validSecondsRequired, userMask=credDict["username"])
return self.__proxyDB.getUsers(validSecondsRequired)
def __checkProperties(self, requestedUserDN, requestedUserGroup):
"""Check the properties and return if they can only download limited proxies if authorized
:param str requestedUserDN: user DN
:param str requestedUserGroup: DIRAC group
:return: S_OK(boolean)/S_ERROR()
"""
credDict = self.getRemoteCredentials()
if Properties.FULL_DELEGATION in credDict["properties"]:
return S_OK(False)
if Properties.LIMITED_DELEGATION in credDict["properties"]:
return S_OK(True)
if Properties.PRIVATE_LIMITED_DELEGATION in credDict["properties"]:
if credDict["DN"] != requestedUserDN:
return S_ERROR("You are not allowed to download any proxy")
if Properties.PRIVATE_LIMITED_DELEGATION not in Registry.getPropertiesForGroup(requestedUserGroup):
return S_ERROR("You can't download proxies for that group")
return S_OK(True)
# Not authorized!
return S_ERROR("You can't get proxies!")
types_getProxy = [str, str, str, int]
def export_getProxy(self, userDN, userGroup, requestPem, requiredLifetime):
"""Get a proxy for a userDN/userGroup
:param requestPem: PEM encoded request object for delegation
:param requiredLifetime: Argument for length of proxy
* Properties:
* FullDelegation <- permits full delegation of proxies
          * LimitedDelegation <- permits downloading only limited proxies
* PrivateLimitedDelegation <- permits downloading only limited proxies for one self
"""
credDict = self.getRemoteCredentials()
result = self.__checkProperties(userDN, userGroup)
if not result["OK"]:
return result
forceLimited = result["V
|
alue"]
self.__proxyDB.logAction("download proxy", credDict["DN"], credDict["group"], userDN, userGroup)
return self.__
|
noironetworks/neutron
|
neutron/plugins/ml2/drivers/agent/capabilities.py
|
Python
|
apache-2.0
| 1,161 | 0 |
# Copyright 2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
def notify_init_event(agent_type, agent):
"""Notify init event for the specifie
|
d agent."""
registry.publish(agent_type, events.AFTER_INIT, agent)
def register(callback, agent_type):
"""Subscribe callback to init event for the specified agent.
:param agent_type: an agent type as defined in neutron_lib.constants.
:param callback: a callback that can process the agent init event.
"""
registry.subscribe(callback, agent_type, events.AFTER_INIT)
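# Illustrative usage (the callback body and chosen agent type are hypothetical;
# the exact callback signature depends on the neutron_lib version in use):
#
#   from neutron_lib import constants
#
#   def on_agent_init(resource, event, trigger, payload=None):
#       pass  # inspect or extend the agent here
#
#   register(on_agent_init, constants.AGENT_TYPE_OVS)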
|
root-mirror/root
|
bindings/pyroot/cppyy/cppyy/python/cppyy/_stdcpp_fix.py
|
Python
|
lgpl-2.1
| 524 | 0 |
import sys
# It may be that the interpreter (whether python or pypy-c) was not linked
# with C++; force its loading before doing anything else (note that not
# linking with C++ spells trouble anyway for any C++ libraries ...)
if 'linux' in sys.platform and 'GCC' in sys.version:
# TODO: check executable to see whether linking indeed didn't happen
import ctypes
try:
stdcpp = ctypes.CDLL('libstdc++.so', ctypes.RTLD_GLOBAL)
except Exception:
pass
# TODO: what if Linux/clang and what if Mac?
| |
mr-karan/Udacity-FullStack-ND004
|
Project1/projects/movieServer/app.py
|
Python
|
mit
| 2,980 | 0.034564 |
from flask import Flask
app = Flask(__name__)
from media import Movie
from flask import render_template
import re
@app.route('/')
def index():
'''View function for index page.'''
toy_story = Movie(title = "Toy Story 3", trailer_youtube_url ="https://www.youtube.com/watch?v=QW0sjQFpXTU",
poster_image_url="https://images-na.ssl-images-amazon.com/images/M/MV5BMTgxOTY4Mjc0MF5BMl5BanBnXkFtZTcwNTA4MDQyMw@@._V1_UY268_CR3,0,182,268_AL_.jpg",
storyline='''Andy's toys get mistakenly delivered to a day care centre.
Woody convinces the other toys that they weren't dumped and leads them on an expedition back
home.''')
pulp_fiction = Movie(title = "Pulp Fiction ", trailer_youtube_url ="https://www.youtube.com/watch?v=s7EdQ4FqbhY",
poster_image_url="https://images-na.ssl-images-amazon.com/images/M/MV5BMTkxMTA5OTAzMl5BMl5BanBnXkFtZTgwNjA5MDc3NjE@._V1_UX182_CR0,0,182,268_AL_.jpg",
storyline='''The lives of two mob hit men, a boxer, a gangster's wife, and a pair of diner bandits
intertwine in four tales of violence and redemption''')
shawshank = Movie(title = "The Shawshank Redemption", trailer_youtube_url ="https://www.youtube.com/watch?v=KtwXlIwozog",
poster_image_url="https://images-na.ssl-images-amazon.com/images/M/MV5BODU4MjU4NjIwNl5BMl5BanBnXkFtZTgwMDU2MjEyMDE@._V1_UX182_CR0,0,182,268_AL_.jpg",
storyline='''Two imprisoned men bond over a number of years, finding solace
and eventual redemption through acts of common decency.''')
godfather = Movie(title = "The Godfather ", trailer_youtube_url ="https://www.youtube.com/watch?v=sY1S34973zA",
poster_image_url="https://images-na.ssl-images-amazon.com/images/M/MV5BMjEyMjcyNDI4MF5BMl5BanBn
|
XkFtZTcwMDA5Mzg3OA@@._V1_UX182_CR0,0,182,268_AL_.jpg",
storyline='''The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.''')
dark_knight = Movie(title = "The Dark Knight ", trailer_youtube_url ="https://www.youtube.com/watch?v=EXeTwQWrcwY",
poster_image_url="htt
|
ps://images-na.ssl-images-amazon.com/images/M/MV5BMTMxNTMwODM0NF5BMl5BanBnXkFtZTcwODAyMTk2Mw@@._V1_UX182_CR0,0,182,268_AL_.jpg",
storyline='''Set within a year after the events of Batman Begins, Batman, Lieutenant James Gordon, and new district attorney Harvey Dent successfully begin to round up the criminals''')
movies=[toy_story,pulp_fiction,dark_knight,godfather,shawshank]
# Replace `Youtube URL` with just `Youtube video ID`
for movie in movies:
youtube_id_match = re.search(r'(?<=v=)[^&#]+', movie.trailer_youtube_url)
youtube_id_match = youtube_id_match or re.search(r'(?<=be/)[^&#]+', movie.trailer_youtube_url)
trailer_youtube_id = (youtube_id_match.group(0) if youtube_id_match else None)
movie.trailer_youtube_url = trailer_youtube_id
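    # Illustrative effect of the two regexes above (URLs are hypothetical):
    #   https://www.youtube.com/watch?v=QW0sjQFpXTU  -> QW0sjQFpXTU
    #   https://youtu.be/QW0sjQFpXTU                 -> QW0sjQFpXTU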
return render_template('index.html',
data=movies)
if __name__ == '__main__':
app.run(debug=True)
|
shrinidhi666/rbhus
|
rbhusUI/lib/rbhusPipeSubmitRenderMod.py
|
Python
|
gpl-3.0
| 30,076 | 0.003757 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'rbhusPipeSubmitRenderMod.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_rbhusSubmit(object):
def setupUi(self, rbhusSubmit):
rbhusSubmit.setObjectName(_fromUtf8("rbhusSubmit"))
rbhusSubmit.resize(572, 796)
self.centralwidget = QtGui.QWidget(rbhusSubmit)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.gridLayout = QtGui.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.labelFrange = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelFrange.sizePolicy().hasHeightForWidth())
self.labelFrange.setSizePolicy(sizePolicy)
self.labelFrange.setObjectName(_fromUtf8("labelFrange"))
self.gridLayout.addWidget(self.labelFrange, 7, 0, 1, 1)
self.comboFileType = QtGui.QComboBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboFileType.sizePolicy().hasHeightForWidth())
self.comboFileType.setSizePolicy(sizePolicy)
self.comboFileType.setObjectName(_fromUtf8("comboFileType"))
self.gridLayout.addWidget(self.comboFileType, 2, 1, 1, 1)
self.pushSelectHostGroups = QtGui.QPushButton(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushSelectHostGroups.sizePolicy().hasHeightForWidth())
self.pushSelectHostGroups.setSizePolicy(sizePolicy)
self.pushSelectHostGroups.setObjectName(_fromUtf8("pushSelectHostGroups"))
self.gridLayout.addWidget(self.pushSelectHostGroups, 10, 2, 1, 1)
self.checkPngFlv = QtGui.QCheckBox(self.centralwidget)
self.checkPngFlv.setChecked(False)
self.checkPngFlv.setObjectName(_fromUtf8("checkPngFlv"))
self.gridLayout.addWidget(self.checkPngFlv, 24, 1, 1, 1)
self.lineEditAfterTask = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditAfterTask.sizePolicy().hasHeightForWidth())
self.lineEditAfterTask.setSizePolicy(sizePolicy)
self.lineEditAfterTask.setObjectName(_fromUtf8("lineEditAfterTask"))
self.gridLayout.addWidget(self.lineEditAfterTask, 16, 1, 1, 1)
self.labelFileName = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelFileName.sizePolicy().hasHeightForWidth())
self.labelFileName.setSizePolicy(sizePolicy)
self.labelFileName.setObjectName(_fromUtf8("labelFileName"))
self.gridLayout.addWidget(self.labelFileName, 1, 0, 1, 1)
self.labelPrio = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelPrio.sizePolicy().hasHeightForWidth())
self.labelPrio.setSizePolicy(sizePolicy)
self.labelPrio.setObjectName(_fromUtf8("labelPrio"))
self.gridLayout.addWidget(self.labelPrio, 18, 0, 1, 1)
self.labelImageName = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelImageName.sizePolicy().hasHeightForWidth())
self.labelImageName.setSizePolicy(sizePolicy)
self.labelImageName.setObjectName(_fromUtf8("labelImageName"))
self.gridLayout.addWidget(self.labelImageName, 5, 0, 1, 1)
self.comboRenderer = QtGui.QComboBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.comboRenderer.sizePolicy().hasHeightForWidth())
self.comboRenderer.setSizePolicy(sizePolicy)
self.comboRenderer.setObjectName(_fromUtf8("comboRenderer"))
self.gridLayout.addWidget(self.comboRenderer, 12, 1, 1, 1)
self.labelAfterTime = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.labelAfterTime.sizePolicy().hasHeightForWidth())
self.labelAfterTime.setSizePolicy(sizePolicy)
self.labelAfterTime.setObjectName(_fromUtf8("labelAfterTime"))
self.gridLayout.addWidget(self.labelAfterTime, 17, 0, 1, 1)
self.pushSubmit = QtGui.QPushButton(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushSubmit.sizePolicy().hasHeightForWidth())
self.pushSubmit.setSizePolicy(sizePolicy)
self.pushSubmit.setObjectName(_fromUtf8("pushSubmit"))
self.gridLayout.addWidget(self.pushSubmit, 27, 2, 1, 1)
self.checkAfterTime = QtGui.QCheckBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkAfterTime.sizePolicy().hasHeightForWidth())
self.checkAfterTime.setSizePolicy(sizePolicy)
self.checkAfterTime.setObjectName(_fromUtf8("checkAfterTime"))
self.gridLayout.addWidget(self.checkAfterTime, 17, 2, 1, 1)
self.checkExrMov = QtGui.QCheckBox(self.centralwidget)
self.checkExrMov.setChecked(False)
self.checkExrMov.setObjectName(_fromUtf8("checkExrMov"))
self.gridLayout.addWidget(self.checkExrMov, 26, 1, 1, 1)
self.lineEditDescription = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditDescription.sizePolicy().hasHeightForWidth())
self.lineEditDescription.setSizePolicy(sizePolicy)
self.lineEditDescription.setDragEnabled(True)
self.lineEditDescription.setObjectName(_fromUtf8("lineEditDescription"))
self.gridLayout.addWidget(self.lineEditDescription, 23, 1, 1, 1)
self.labelBatching = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelBatching.sizePolicy().hasHeightForWidth())
self.labelBatching.setSizePolicy(sizePolicy)
self.labelBatching.setObjectName(_fromUtf8("labelBatching"))
self.gridLayout.addWidget(self.labelBatching, 20, 0, 1, 1)
self.labelUser = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label
|
urielka/shaveet
|
shaveet/gc.py
|
Python
|
mit
| 925 | 0.027027 |
#std
import logging
#3rd
from gevent import Greenlet,sleep
#shaveet
from shaveet.config import MAX_CLIENTS_GC,CLIENT_GC_INTERVAL
from shaveet.lookup import all_clients,discard_client
logger = logging.getLogger("shaveet.gc")
class ClientGC(Greenlet):
"""
this greenthread collects the clients that are no longer active
"""
def run(self):
while True:
logger.info("ClientGC:processing clients")
client_processed = 0
      for client_id,client in all_clients().iteritems():
if not client.is_active():
logger.debug("ClientGC:collecting id:%s,ts:%d,waiting:%s",client.id,client.ts,client.is_waiting)
discard_client(client)
        #process in chunks of MAX_CLIENTS_GC, sleep(0) means yield to next greenlet
client_processed+=1
if client_processed % MAX_CLIENTS_GC == 0:
sleep(0)
logger.info("ClientGC:sleeping")
sleep(CLIENT_GC_INTERVAL)
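# Illustrative startup under gevent (scheduling details are an assumption,
# not shown in this file):
#   gc = ClientGC()
#   gc.start()  # runs the collection loop cooperatively alongside other greenlets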
|
planetlabs/datalake-api
|
setup.py
|
Python
|
apache-2.0
| 2,140 | 0 |
# Copyright 2015 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup
from setuptools import distutils
import os
import sys
def get_version_from_pkg_info():
metadata = distutils.dist.DistributionMetadata("PKG-INFO")
return metadata.version
def get_version_from_pyver():
try:
import pyver
except ImportError:
if 'sdist' in sys.argv or 'bdist_wheel' in sys.argv:
raise ImportError('You must install pyver to create a package')
else:
return 'noversion'
version, version_info = pyver.get_version(pkg="datalake_api",
public=True)
return version
def get_version():
if os.path.exists("PKG-INFO"):
return get_version_from_pkg_info()
else:
return get_version_from_pyver()
setup(name='datalake_api',
      url='https://github.com/planetlabs/datalake-api',
version=get_version(),
description='datalake_api ingests datalake metadata records',
author='Brian Cavagnolo',
author_email='brian@planet.com',
packages=['datalake_api'],
install_requires=[
'pyver>=1.0.18',
'memoized_property>=1.0.2',
'simplejson>=3.3.1',
'datalake-common>=0.25',
'Flask>=0.10.1',
'flask-swagger==0.2.8',
'boto3==1.1.3',
'raven[flask]>=5.6.0',
'blinker>=1.4',
],
extras_require={
'test': [
'pytest==2.7.2',
'flake8==2.5.0',
'moto==0.4.23',
],
},
include_package_data=True)
|
airbnb/superset
|
superset/migrations/versions/732f1c06bcbf_add_fetch_values_predicate.py
|
Python
|
apache-2.0
| 1,455 | 0.002062 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add fetch values predicate
Revision ID: 732f1c06bcbf
Revises: d6db5a5cdb5d
Create Date: 2017-03-03 09:15:56.800930
"""
# revision identifiers, used by Alembic.
revision = "732f1c06bcbf"
down_revision = "d6db5a5cdb5d"
import sqlalchemy as sa
from alembic import op
def upgrade():
op.add_column(
"datasources",
sa.Column("fetch_values_from", sa.String(length=100), nullable=True),
)
op.add_column(
"tables",
sa.Column("fetch_values_predicate", sa.String(length=1000), nullable=True),
)
def downgrade():
op.drop_column("tables", "fetch_values_predicate")
op.drop_column("datasources", "fetch_values_from")
|
mhbu50/erpnext
|
erpnext/hr/doctype/employee_advance/employee_advance.py
|
Python
|
gpl-3.0
| 8,463 | 0.025996 |
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.utils import flt, nowdate
import erpnext
from erpnext.accounts.doctype.journal_entry.journal_entry import get_default_bank_cash_account
from erpnext.hr.utils import validate_active_employee
class EmployeeAdvanceOverPayment(frappe.ValidationError):
pass
class EmployeeAdvance(Document):
def onload(self):
self.get("__onload").make_payment_via_journal_entry = frappe.db.get_single_value('Accounts Settings',
'make_payment_via_journal_entry')
def validate(self):
validate_active_employee(self.employee)
self.set_status()
def on_cancel(self):
  self.ignore_linked_doctypes = ('GL Entry',)
def set_status(self):
if self.docstatus == 0:
self.status = "Draft"
if self.docstatus == 1:
if self.claimed_amount and flt(self.claimed_amount) == flt(self.paid_amount):
self.status = "Claimed"
elif self.paid_amount and self.advance_amount == flt(self.paid_amount):
self.status = "Paid"
else:
self.status = "Unpaid"
elif self.docstatus == 2:
self.status = "Cancelled"
def set_total_advance_paid(self):
paid_amount = frappe.db.sql("""
select ifnull(sum(debit), 0) as paid_amount
from `tabGL Entry`
where against_voucher_type = 'Employee Advance'
    and against_voucher = %s
and party_type = 'Employee'
and party = %s
""", (self.name, self.employee), as_dict=1)[0].paid_amount
return_amount = frappe.db.sql("""
select ifnull(sum(credit), 0) as return_amount
from `tabGL Entry`
where against_voucher_type = 'Employee Advance'
and voucher_type != 'Expense Claim'
and against_voucher = %s
and party_type = 'Employee'
and party = %s
""", (self.name, self.employee), as_dict=1)[0].return_amount
if paid_amount != 0:
paid_amount = flt(paid_amount) / flt(self.exchange_rate)
if return_amount != 0:
return_amount = flt(return_amount) / flt(self.exchange_rate)
if flt(paid_amount) > self.advance_amount:
frappe.throw(_("Row {0}# Paid Amount cannot be greater than requested advance amount"),
EmployeeAdvanceOverPayment)
if flt(return_amount) > self.paid_amount - self.claimed_amount:
frappe.throw(_("Return amount cannot be greater unclaimed amount"))
self.db_set("paid_amount", paid_amount)
self.db_set("return_amount", return_amount)
self.set_status()
frappe.db.set_value("Employee Advance", self.name , "status", self.status)
def update_claimed_amount(self):
claimed_amount = frappe.db.sql("""
SELECT sum(ifnull(allocated_amount, 0))
FROM `tabExpense Claim Advance` eca, `tabExpense Claim` ec
WHERE
eca.employee_advance = %s
AND ec.approval_status="Approved"
AND ec.name = eca.parent
AND ec.docstatus=1
AND eca.allocated_amount > 0
""", self.name)[0][0] or 0
frappe.db.set_value("Employee Advance", self.name, "claimed_amount", flt(claimed_amount))
self.reload()
self.set_status()
frappe.db.set_value("Employee Advance", self.name, "status", self.status)
@frappe.whitelist()
def get_pending_amount(employee, posting_date):
employee_due_amount = frappe.get_all("Employee Advance", \
filters = {"employee":employee, "docstatus":1, "posting_date":("<=", posting_date)}, \
fields = ["advance_amount", "paid_amount"])
return sum([(emp.advance_amount - emp.paid_amount) for emp in employee_due_amount])
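# Worked example for get_pending_amount (figures are hypothetical):
#   two submitted advances: (advance_amount=1000, paid_amount=400)
#   and (advance_amount=500, paid_amount=500)
#   pending = (1000 - 400) + (500 - 500) = 600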
@frappe.whitelist()
def make_bank_entry(dt, dn):
doc = frappe.get_doc(dt, dn)
payment_account = get_default_bank_cash_account(doc.company, account_type="Cash",
mode_of_payment=doc.mode_of_payment)
if not payment_account:
frappe.throw(_("Please set a Default Cash Account in Company defaults"))
advance_account_currency = frappe.db.get_value('Account', doc.advance_account, 'account_currency')
advance_amount, advance_exchange_rate = get_advance_amount_advance_exchange_rate(advance_account_currency,doc )
paying_amount, paying_exchange_rate = get_paying_amount_paying_exchange_rate(payment_account, doc)
je = frappe.new_doc("Journal Entry")
je.posting_date = nowdate()
je.voucher_type = 'Bank Entry'
je.company = doc.company
je.remark = 'Payment against Employee Advance: ' + dn + '\n' + doc.purpose
je.multi_currency = 1 if advance_account_currency != payment_account.account_currency else 0
je.append("accounts", {
"account": doc.advance_account,
"account_currency": advance_account_currency,
"exchange_rate": flt(advance_exchange_rate),
"debit_in_account_currency": flt(advance_amount),
"reference_type": "Employee Advance",
"reference_name": doc.name,
"party_type": "Employee",
"cost_center": erpnext.get_default_cost_center(doc.company),
"party": doc.employee,
"is_advance": "Yes"
})
je.append("accounts", {
"account": payment_account.account,
"cost_center": erpnext.get_default_cost_center(doc.company),
"credit_in_account_currency": flt(paying_amount),
"account_currency": payment_account.account_currency,
"account_type": payment_account.account_type,
"exchange_rate": flt(paying_exchange_rate)
})
return je.as_dict()
def get_advance_amount_advance_exchange_rate(advance_account_currency, doc):
if advance_account_currency != doc.currency:
advance_amount = flt(doc.advance_amount) * flt(doc.exchange_rate)
advance_exchange_rate = 1
else:
advance_amount = doc.advance_amount
advance_exchange_rate = doc.exchange_rate
return advance_amount, advance_exchange_rate
def get_paying_amount_paying_exchange_rate(payment_account, doc):
if payment_account.account_currency != doc.currency:
paying_amount = flt(doc.advance_amount) * flt(doc.exchange_rate)
paying_exchange_rate = 1
else:
paying_amount = doc.advance_amount
paying_exchange_rate = doc.exchange_rate
return paying_amount, paying_exchange_rate
@frappe.whitelist()
def create_return_through_additional_salary(doc):
import json
if isinstance(doc, str):
doc = frappe._dict(json.loads(doc))
additional_salary = frappe.new_doc('Additional Salary')
additional_salary.employee = doc.employee
additional_salary.currency = doc.currency
additional_salary.amount = doc.paid_amount - doc.claimed_amount
additional_salary.company = doc.company
additional_salary.ref_doctype = doc.doctype
additional_salary.ref_docname = doc.name
return additional_salary
@frappe.whitelist()
def make_return_entry(employee, company, employee_advance_name, return_amount, advance_account, currency, exchange_rate, mode_of_payment=None):
bank_cash_account = get_default_bank_cash_account(company, account_type='Cash', mode_of_payment = mode_of_payment)
if not bank_cash_account:
frappe.throw(_("Please set a Default Cash Account in Company defaults"))
advance_account_currency = frappe.db.get_value('Account', advance_account, 'account_currency')
je = frappe.new_doc('Journal Entry')
je.posting_date = nowdate()
je.voucher_type = get_voucher_type(mode_of_payment)
je.company = company
je.remark = 'Return against Employee Advance: ' + employee_advance_name
je.multi_currency = 1 if advance_account_currency != bank_cash_account.account_currency else 0
advance_account_amount = flt(return_amount) if advance_account_currency==currency \
else flt(return_amount) * flt(exchange_rate)
je.append('accounts', {
'account': advance_account,
'credit_in_account_currency': advance_account_amount,
'account_currency': advance_account_currency,
'exchange_rate': flt(exchange_rate) if advance_account_currency == currency else 1,
'reference_type': 'Employee Advance',
'reference_name': employee_advance_name,
'party_type': 'Employee',
'party': employee,
'is_advance': 'Yes'
})
bank_amount = flt(return_amount) if bank_cash_account.account_currency==currency \
else flt(return_amount) * flt(exchange_rate)
je.append("accounts", {
"account": bank_cash_account.account,
"debit_in_account_currency": bank_amount,
"account_currency": bank_cash_account.account_currency,
"account_type": bank_cash_account.account_type,
"exchange_rate": flt(exchange_rate) if bank_cash_account.account_currency == currency else 1
})
  return je.as_dict()
|
AddonScriptorDE/plugin.video.dtm_tv
|
default.py
|
Python
|
gpl-2.0
| 6,186 | 0.032368 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib,urllib2,re,xbmcplugin,xbmcgui,sys,xbmcaddon
pluginhandle = int(sys.argv[1])
settings = xbmcaddon.Addon(id='plugin.video.dtm_tv')
translation = settings.getLocalizedString
language=""
language=settings.getSetting("language")
if language=="":
settings.openSettings()
language=settings.getSetting("l
|
anguage")
if language=="0":
language="DE"
elif language=="1":
language="EN"
def cleanTitle(title):
return title.replace("\\u00c4","Ä").replace("\\u00e4","ä").replace("\\u00d6","Ö").replace("\\u00f6","ö").replace("\\u00dc","Ü").replace("\\u00fc","ü").replace("\\u00df","ß").strip()
def index():
addDir(translation(30001),"LATEST","listVideos","")
addDir(translation(30002),"MOST_VIEWED","listVideos","")
addDir(translation(30003),"BEST_RATED","listVideos","")
addDir(translation(30004),"SEARCH","listVideos","")
xbmcplugin.endOfDirectory(pluginhandle)
def listVideos(url):
values = {}
if url=="LATEST":
values = {'string':'*',
'lang':language,
'page':'1',
'order':'date'}
elif url=="MOST_VIEWED":
values = {'string':'*',
'lang':language,
'page':'1',
'order':'views'}
elif url=="BEST_RATED":
values = {'string':'*',
'lang':language,
'page':'1',
'order':'ranking'}
elif url=="SEARCH":
keyboard = xbmc.Keyboard('', translation(30004))
keyboard.doModal()
if keyboard.isConfirmed() and keyboard.getText():
search_string = keyboard.getText()
values = {'string':search_string,
'lang':language,
'page':'1',
'order':'date'}
if len(values)>0:
data = urllib.urlencode(values)
listVideosMain(data)
def listVideosMain(url):
content = getUrl("http://www.dtm.tv/Daten/getSearchData",data=url)
spl=content.split('{"id":')
for i in range(1,len(spl),1):
entry=spl[i]
match=re.compile('"bild":"(.+?)"', re.DOTALL).findall(entry)
thumb="http://www.dtm.tv/media/images/"+match[0]
match=re.compile('"publishdate":"(.+?)"', re.DOTALL).findall(entry)
date=match[0]
match=re.compile('"title":"(.+?)"', re.DOTALL).findall(entry)
title=date+" - "+cleanTitle(match[0])
urls=[]
match=re.compile('"url1":"(.+?)"', re.DOTALL).findall(entry)
if len(match)==1:
urls.append(match[0].replace("\\",""))
match=re.compile('"url2":"(.+?)"', re.DOTALL).findall(entry)
if len(match)==1:
urls.append(match[0].replace("\\",""))
match=re.compile('"url3":"(.+?)"', re.DOTALL).findall(entry)
if len(match)==1:
urls.append(match[0].replace("\\",""))
urlNew=""
for urlTemp in urls:
if urlTemp.find("_HD.mp4")>=0:
urlNew=urlTemp
elif urlTemp.find("_SD.mp4")>=0:
if urlNew=="":
urlNew=urlTemp
elif urlTemp.find(".flv")>=0:
if urlNew=="":
urlNew=urlTemp
addLink(title,urlNew,'playVideo',thumb)
match=re.compile('"nextPage":(.+?),', re.DOTALL).findall(content)
if len(match)==1:
dataNext=url[:url.find("page=")+5]+match[0]
temp=url[url.find("page=")+5:]
if temp.find("&")>=0:
dataNext=dataNext+url[:url.find("&")+1]
addDir("Next Page ("+str(match[0])+")",dataNext,"listVideosMain","")
xbmcplugin.endOfDirectory(pluginhandle)
def playVideo(url):
listitem = xbmcgui.ListItem(path=url)
return xbmcplugin.setResolvedUrl(pluginhandle, True, listitem)
def getUrl(url,data=None,cookie=None):
if data!=None:
req = urllib2.Request(url,data)
req.add_header('Content-Type', 'application/x-www-form-urlencoded')
else:
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:11.0) Gecko/20100101 Firefox/11.0')
if cookie!=None:
req.add_header('Cookie',cookie)
response = urllib2.urlopen(req,timeout=30)
link=response.read()
response.close()
return link
def parameters_string_to_dict(parameters):
''' Convert parameters encoded in a URL to a dict. '''
paramDict = {}
if parameters:
paramPairs = parameters[1:].split("&")
for paramsPair in paramPairs:
paramSplits = paramsPair.split('=')
if (len(paramSplits)) == 2:
paramDict[paramSplits[0]] = paramSplits[1]
return paramDict
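# Illustrative parse (the query string is hypothetical):
#   parameters_string_to_dict('?url=abc&mode=playVideo')
#   # -> {'url': 'abc', 'mode': 'playVideo'}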
def addLink(name,url,mode,iconimage):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty('IsPlayable', 'true')
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz)
return ok
def addDir(name,url,mode,iconimage):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
params=parameters_string_to_dict(sys.argv[2])
mode=params.get('mode')
url=params.get('url')
if type(url)==type(str()):
url=urllib.unquote_plus(url)
if mode == 'listVideos':
listVideos(url)
elif mode == 'playVideo':
playVideo(url)
elif mode == 'listVideosMain':
listVideosMain(url)
else:
index()
|
mephasor/mephaBot
|
addons/onlineRadio.py
|
Python
|
gpl-3.0
| 5,202 | 0.002169 |
import discord
import re
import urllib.request
import xml.etree.ElementTree as ET
radio = {}
radioNames = {}
radioWhosPlaying = {}
radioNowPlaying = ''
playerStatus = 0
defaultChannel = ''
voice = ''
async def botWhatIsPlaying(client, message):
    if playerStatus == 0:
        await client.send_message(message.channel, 'Nothing is playing right now.')
else:
if radioNowPlaying in radioWhosPlaying:
print('Getting whos playing for' + radioNowPlaying)
src = radioWhosPlaying[radioNowPlaying]
response = urllib.request.urlopen(src[0])
html = response.read()
codec = response.info().get_param('charset', 'utf8')
html = html.decode(codec)
p = re.compile(src[1])
result = re.search(p, html)
if result is not None:
gr = result.groups()
                if len(gr) == 3:
                    msg = "{:s} - {:s} ({:s})".format(gr[0], gr[1], gr[2])
                elif len(gr) == 2:
                    msg = "{:s} - {:s}".format(gr[0], gr[1])
                else:
                    msg = "Something is playing, but I can't tell what."
await client.send_message(message.channel, msg)
else:
                await client.send_message(message.channel, "Don't know what's playing.")
else:
await client.send_message(message.channel,
                                   'Information is not available for this station')
async def botJoinVoiceChannel(client, message):
print(message)
if client.is_voice_connected(message.server):
await client.send_message(message.channel,
                                  "I'm already in a voice channel.")
channel_name = defaultChannel
print('Trying to join: %s' % (channel_name))
check = lambda c: c.name == channel_name and c.type == discord.ChannelType.voice
channel = discord.utils.find(check, message.server.channels)
if channel is None:
await client.send_message(message.channel,
                                  "Can't find a channel with that name.")
else:
global voice
voice = await client.join_voice_channel(channel)
client.starter = message.author
async def botStop(client, message):
global playerStatus
client.player.stop()
client.loop.create_task(client.change_status())
playerStatus = 0
async def botPlayRadio(client, message):
global playerStatus
global radioNowPlaying
if not client.is_voice_connected(message.server):
await botJoinVoiceChannel(client, message)
    if playerStatus != 0:
print('Have to stop Radio first')
print('PlayerStatus: ' + str(playerStatus))
client.player.stop()
radioNowPlaying = ''
station = message.content[1:]
    # Handle special shortcuts (desired by discord members)
    if station == '1':
        print("Debug: piterFM")
        station = 'piterfm'
    elif station == '2':
        station = 'nashe'
    elif station == '3':
        print("Debug: chanson")
        station = 'chanson'
if station in radio:
radioUrl = radio[station]
print('Starting to play Radio Station: '+radioNames[station])
client.player = voice.create_ffmpeg_player(radioUrl)
client.player.start()
radioNowPlaying = station
playerStatus = 1
game = discord.Game(name=radioNames[station])
client.loop.create_task(client.change_status(game))
else:
print('No such station in list.')
commands = {
'!bot': botJoinVoiceChannel,
'!1': botPlayRadio,
'!2': botPlayRadio,
'!3': botPlayRadio,
'!0': botStop,
'!stop': botStop,
'!a': botWhatIsPlaying,
}
def load(config):
global radio
global radioNames
global radioWhosPlaying
global defaultChannel
# Open radio config and populate the command list, radio URL list and
# radio name list.
    # configFile = open('cfg/radio.cfg').readlines()
# for line in configFile:
# tmp = line.split(', ')
# radio[tmp[0]] = tmp[1].rstrip('\n')
# radioNames[tmp[0]] = tmp[2].rstrip('\n')
# commands['!'+tmp[0]] = botPlayRadio
# radioWhosPlaying[tmp[0]] = [tmp[3], tmp[4].rstrip('\n')]
defaultChannel = config.getDefaultChannel()
data = open('cfg/radio.xml').read()
root = ET.fromstring(data)
for station in root:
cmd = station.find('command').text
name = station.get('name')
strURL = station.find('streamURL').text
nowURL = station.find('nowPlayingURL').text
nowRE = station.find('nowPlayingRE').text
radio[cmd] = strURL.strip(' \t\n')
radioNames[cmd] = name.strip('\n')
commands['!'+cmd] = botPlayRadio
# If we have now playing settings available
if(nowURL is not None and nowRE is not None):
radioWhosPlaying[cmd] = [nowURL.strip(' \n\t'), nowRE.strip(' \t\n')]
return commands
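# A minimal cfg/radio.xml entry that the loop above would accept; this is an
# illustration of the expected layout, not the real config:
#
#   <stations>
#     <station name="Example FM">
#       <command>example</command>
#       <streamURL>http://stream.example.com/live</streamURL>
#       <nowPlayingURL>http://example.com/now</nowPlayingURL>
#       <nowPlayingRE>(.+?) - (.+?)$</nowPlayingRE>
#     </station>
#   </stations>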
def getName():
return 'onlineRadio'
def getDescription():
return 'Plays online radio stations found in cfg/radio.cfg.'
|
chihongze/girlfriend
|
girlfriend/util/validating.py
|
Python
|
mit
| 4,999 | 0.000236 |
# coding: utf-8
"""参数验证相关工具
"""
import re
import ujson
import types
import numbers
from girlfriend.util.lang import args2fields
from girlfriend.exception import InvalidArgumentException
class Rule(object):
    """Describes an argument validation rule and performs the validation.
    """
@args2fields()
def __init__(self, name,
type=None,
required=False, min=None, max=None,
regex=None, logic=None, default=None):
"""
:param name 参数名称,通常用于错误提示
:param required 如果为True,那么参数是必须的
:param min 如果是字符串,那么该参数为最小长度(等于此长度合法),
如果是数字(numbers.Number类型),那
|
么为该参数最小值(等于此值算合法)
:param max 同上
:param regex 正则验证
:param type 类型验证,多个参数可以传递元组
:param logic 谓词函数,满足更加复杂的业务验证需要,比如查查数据库邮箱是否存在等等
该谓词函数并非返回True和False,如果有错误,那么返回错误消息的字符串,
如果没有错误,那么直接返回None
:param default 该项的默认值
"""
pass
@property
def name(self):
return self._name
@property
def default(self):
return self._default
@property
def required(self):
return self._required
def validate(self, value):
"""执行验证
:param value 要验证的值
"""
if self._required and self._is_empty(value):
raise InvalidArgumentException(
u"参数 '{}' 的值是必须的,不能为空".format(self._name))
# 如果非必须并且为空,那么接下来的验证就不必运行了
if self._is_empty(value):
return
# 检查类型
self._validate_type(value)
# 检查大小、长度
self._validate_min_max(value)
# 检查正则
self._validate_regex(value)
# 检查逻辑
self._validate_logic(value)
def _validate_type(self, value):
if not self._type:
return
if not isinstance(value, self._type):
raise InvalidArgumentException(
u"参数 '{name}' 的类型不正确,只允许以下类型:{types}".format(
name=self._name,
types=self._type
)
)
def _validate_min_max(self, value):
if self._min is not None:
if isinstance(value, numbers.Number):
if self._min > value:
raise InvalidArgumentException(
u"参数 '{name}' 的值不能小于{min}".format(
name=self._name, min=self._min)
)
else:
if self._min > len(value):
raise InvalidArgumentException(
u"参数 '{name}' 的长度不能小于{min}".format(
name=self._name, min=self._min)
)
if self._max is not None:
if isinstance(value, numbers.Number):
if self._max < value:
raise InvalidArgumentException(
u"参数 '{name}' 的值不能大于{max}".format(
name=self._name, max=self._max)
)
else:
if self._max < len(value):
raise InvalidArgumentException(
u"参数 '{name}' 的长度不能大于{max}".format(
name=self._name, max=self._max)
)
def _validate_regex(self, value):
if not self._regex:
return
value = str(value)
if not re.search(self._regex, value):
raise InvalidArgumentException(
u"参数 '{name}' 不符合正则表达式'{regex}'".format(
name=self._name, regex=self._regex)
)
def _validate_logic(self, value):
if self._logic is None:
return
msg = self._logic(value)
if msg:
raise InvalidArgumentException(msg)
def _is_empty(self, value):
"""判断一个值是否为空
如果值为None,那么返回True
如果值为空字符串,那么返回True
如果值为0, 那么不算空,返回False
"""
if value is None:
return True
if isinstance(value, types.StringType) and not value:
return True
return False
def be_json(name):
def _be_json(value):
try:
ujson.loads(value)
except:
return u"参数 '{}' 必须是json格式".format(name)
return _be_json
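# Usage sketch (illustrative only; Rule, be_json and InvalidArgumentException
# are the names defined/imported above):
#
#     age_rule = Rule(u"age", type=numbers.Number, required=True, min=0, max=150)
#     age_rule.validate(20)     # passes silently
#     age_rule.validate(200)    # raises InvalidArgumentException (max exceeded)
#
#     payload_rule = Rule(u"payload", logic=be_json(u"payload"))
#     payload_rule.validate(u'{"a": 1}')  # valid JSON, so no error is raised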
|
TomasTomecek/sen
|
sen/tui/buffer.py
|
Python
|
mit
| 10,717 | 0.001306 |
import logging
from sen.docker_backend import DockerContainer, RootImage
from sen.exceptions import NotifyError
from sen.tui.commands.base import Command
from sen.tui.views.disk_usage import DfBufferView
from sen.tui.views.help import HelpBufferView, HelpCommandView
from sen.tui.views.main import MainListBox
from sen.tui.views.image_info import ImageInfoWidget
from sen.tui.views.container_info import ContainerInfoView
from sen.tui.widgets.list.common import AsyncScrollableListBox, ScrollableListBox
from sen.tui.widgets.list.util import get_operation_notify_widget
from sen.tui.widgets.tree import ImageTree
logger = logging.getLogger(__name__)
class Buffer:
"""
base buffer class
"""
name = None # unique identifier
description = None # for help
display_name = None # display in status bar
widget = None # display this in main frame
# global keybinds which will be available in every buffer
global_keybinds = {
# navigation
"home": "navigate-top",
"gg": "navigate-top",
"end": "navigate-bottom",
"G": "navigate-bottom",
"down": "navigate-down",
"j": "navigate-down",
"up": "navigate-up",
"k": "navigate-up",
"ctrl d": "navigate-downwards",
"ctrl u": "navigate-upwards",
# UI
":": "prompt",
"/": "prompt prompt-text=\"\" initial-text=\"/\"",
"n": "search-next",
"N": "search-previous",
"f4": "prompt initial-text=\"filter \"",
"x": "kill-buffer",
"q": "kill-buffer quit-if-no-buffer",
"ctrl i": "select-next-buffer",
"ctrl o": "select-previous-buffer",
"h": "help",
"?": "help",
"f5": "layers",
}
# buffer specific keybinds
keybinds = {}
def __init__(self):
logger.debug("creating buffer %r", self)
self._keybinds = None # cache
self.refresh()
def __repr__(self):
return "{}(name={!r}, widget={!r})".format(
self.__class__.__name__, self.display_name, self.widget)
def destroy(self):
destroy_method = getattr(self.widget, "destroy", None)
if destroy_method:
destroy_method()
def find_previous(self, s=None):
logger.debug("searching next %r in %r", s, self.__class__.__name__)
try:
self.widget.find_previous(s)
except AttributeError as ex:
logger.debug(repr(ex))
raise NotifyError("Can't search in this buffer.")
def find_next(self, s=None):
logger.debug("searching next %r in %r", s, self.__class__.__name__)
try:
self.widget.find_next(s)
except AttributeError as ex:
logger.debug(repr(ex))
raise NotifyError("Can't search in this buffer.")
def build_status_bar(self):
status_bar = getattr(self.widget, "status_bar", None)
if status_bar:
return status_bar()
def filter(self, s):
logger.debug("filter widget %r with query %r", self.widget, s)
self.widget.filter(s)
def get_keybinds(self):
if self._keybinds is None:
self._keybinds = {}
self._keybinds.update(self.global_keybinds)
self._keybinds.update(self.keybinds)
return self._keybinds
def refresh(self):
refresh_func = getattr(self.widget, "refresh", None)
if refresh_func:
logger.info("refreshing widget %s", self.widget)
refresh_func()
def process_realtime_event(self, event):
logger.info("buffer %s doesn't process realtime events", self)
return
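# Minimal subclass sketch (illustrative, not part of sen itself): a concrete
# buffer only has to set `widget` before calling the base constructor, which
# then performs the initial refresh():
#
#     class ExampleBuffer(Buffer):
#         display_name = "Example"
#         keybinds = {"@": "refresh"}
#
#         def __init__(self, widget):
#             self.widget = widget
#             super().__init__()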
class ImageInfoBuffer(Buffer):
description = "Dashboard for information about selected image.\n" + \
"You can run command `df` to get more detailed info about disk usage."
keybinds = {
"enter": "display-info",
"d": "rm",
"i": "inspect",
"@": "refresh",
}
def __init__(self, docker_image, ui):
"""
:param docker_image:
:param ui: ui object so we refresh
"""
if isinstance(docker_image, RootImage):
raise NotifyError("Image \"scratch\" doesn't provide any more information.")
if docker_image.image_id == "<missing>":
raise NotifyError("This image (layer) is not available due to changes in docker-1.10 "
"image representation.")
self.docker_image = docker_image
self.display_name = docker_image.short_name
self.widget = ImageInfoWidget(ui, docker_image)
super().__init__()
def process_realtime_event(self, event):
if event.get("id", None) == self.docker_image.object_id:
self.widget.refresh()
class ContainerInfoBuffer(Buffer):
description = "Detailed info about selected container presented in a slick dashboard."
keybinds = {
"enter": "display-info",
"@": "
|
refresh",
"i": "inspect",
}
def __init__(self, docker_container, ui):
"""
:param docker_container:
:param ui: ui object so we refresh
"""
self.docker_container = docker_container
self.display_name = docker_container.short_name
self.widget = ContainerInfoView(ui, docker_container)
super().__init__()
def process_realtime_event(self, event):
action = event.get("Action", None)
if action == "top":
return
if event.get("id", None) == self.docker_container.object_id:
self.widget.refresh()
class TreeBuffer(Buffer):
display_name = "Layers"
description = "Tree view of all layers available on your docker engine."
keybinds = {
"enter": "display-info",
}
def __init__(self, ui, docker_backend):
self.widget = ImageTree(ui, docker_backend)
super().__init__()
class MainListBuffer(Buffer):
display_name = "Listing"
description = "List of all known docker images and containers display in a single list"
keybinds = {
"d": "rm",
"D": "rm -f",
"s": "start",
"t": "stop",
"r": "restart",
"X": "kill",
"p": "pause",
"u": "unpause",
"enter": "display-info",
"b": "open-browser",
"l": "logs",
"f": "logs -f",
"i": "inspect",
"!": "toggle-live-updates", # TODO: rfe: move to global so this affects every buffer
"@": "refresh", # FIXME: move to global and refactor & rewrite
}
def __init__(self, ui, docker_backend):
self.ui = ui
self.widget = MainListBox(ui, docker_backend)
super().__init__()
def process_realtime_event(self, event):
self.widget.process_realtime_event(event)
class LogsBuffer(Buffer):
description = "Display logs of selected container."
display_name = "Logs "
def __init__(self, ui, docker_object, follow=False):
"""
:param docker_object: container to display logs
:param ui: ui object so we can refresh
"""
self.display_name += "({})".format(docker_object.short_name)
if isinstance(docker_object, DockerContainer):
try:
pre_message = "Getting logs for container {}...".format(docker_object.short_name)
ui.notify_message(pre_message)
if follow:
# FIXME: this is a bit race-y -- we might lose some logs with this approach
operation = docker_object.logs(follow=follow, lines=0)
static_data = docker_object.logs(follow=False).response
self.widget = AsyncScrollableListBox(operation.response, ui, static_data=static_data)
else:
operation = docker_object.logs(follow=follow)
self.widget = ScrollableListBox(ui, operation.response)
ui.remove_notification_message(pre_message)
ui.notify_widget(get_operation_notify_widget(operation, display_always=False))
except Exception as ex:
                # FIXME: let's catch 404 and print that container doesn't exist
|
Azure/azure-sdk-for-python
|
sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client.py
|
Python
|
mit
| 41,908 | 0.003508 |
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import codecs
from datetime import datetime
import hashlib
import os
import time
try:
from unittest import mock
except ImportError:
import mock
from azure.core.exceptions import AzureError, HttpResponseError
from azure.core.pipeline.policies import SansIOHTTPPolicy
from azure.keyvault.keys import ApiVersion, JsonWebKey, KeyCurveName, KeyOperation, KeyVaultKey
from azure.keyvault.keys.crypto import CryptographyClient, EncryptionAlgorithm, KeyWrapAlgorithm, SignatureAlgorithm
from azure.keyvault.keys.crypto._key_validity import _UTC
from azure.keyvault.keys.crypto._providers import NoLocalCryptography, get_local_cryptography_provider
from azure.mgmt.keyvault.models import KeyPermissions, Permissions
import pytest
from _shared.json_attribute_matcher import json_attribute_matcher
from _shared.test_case import KeyVaultTestCase
from _test_case import client_setup, get_decorator, KeysTestCase
# without keys/get, a CryptographyClient created with a key ID performs all ops remotely
NO_GET = Permissions(keys=[p.value for p in KeyPermissions if p.value != "get"])
all_api_versions = get_decorator()
only_hsm = get_decorator(only_hsm=True)
no_get = get_decorator(permissions=NO_GET)
class CryptoClientTests(KeysTestCase, KeyVaultTestCase):
def __init__(self, *args, **kwargs):
kwargs["match_body"] = False
kwargs["custom_request_matchers"] = [json_attribute_matcher]
super(CryptoClientTests, self).__init__(*args, **kwargs)
plaintext = b"5063e6aaa845f150200547944fd199679c98ed6f99da0a0b2dafeaf1f4684496fd532c1c229968cb9dee44957fcef7ccef59ceda0b362e56bcd78fd3faee5781c623c0bb22b35beabde0664fd30e0e824aba3dd1b0afffc4a3d955ede20cf6a854d52cfd"
iv = codecs.decode("89b8adbfb07345e3598932a09c517441", "hex_codec")
aad = b"test"
def _create_rsa_key(self, client, key_name, **kwargs):
key_ops = kwargs.get("key_operations") or ["encrypt", "decrypt", "sign", "verify", "wrapKey", "unwrapKey"]
hsm = kwargs.get("hardware_protected") or False
if self.is_live:
time.sleep(2) # to avoid throttling by the service
created_key = client.create_rsa_key(key_name, **kwargs)
kty = "RSA-HSM" if hsm else "RSA"
self._validate_rsa_key_bundle(created_key, client.vault_url, key_name, kty, key_ops)
return created_key
def _create_ec_key(self, client, key_name, **kwargs):
key_curve = kwargs.get("curve") or "P-256"
hsm = kwargs.get("hardware_protected") or False
if self.is_live:
time.sleep(2) # to avoid throttling by the service
created_key = client.create_ec_key(key_name, **kwargs)
key_type = "EC-HSM" if hsm else "EC"
self._validate_ec_key_bundle(key_curve, created_key, client.vault_url, key_name, key_type)
return created_key
def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops):
prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name])
key = key_attributes.key
kid = key_attributes.id
self.assertTrue(kid.index(prefix) == 0, "Key Id should start with '{}', but value is '{}'".format(prefix, kid))
self.assertEqual(key.kty, kty, "kty should by '{}', but is '{}'".format(key, key.kty))
self.assertTrue(key.n and key.e, "Bad RSA public material.")
self.assertEqual(
sorted(key_ops), sorted(key.key_ops), "keyOps should be '{}', but is '{}'".format(key_ops, key.key_ops)
)
self.assertTrue(
key_attributes.properties.created_on and key_attributes.properties.updated_on,
"Missing required date attributes.",
)
def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kty):
prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name])
key = key_attributes.key
kid = key_attributes.id
self.assertEqual(key_curve, key.crv)
self.assertTrue(kid.index(prefix) == 0, "Key Id should start with '{}', but value is '{}'".format(prefix, kid))
self.assertEqual(key.kty, kty, "kty should by '{}', but is '{}'".format(key, key.kty))
self.assertTrue(
key_attributes.properties.created_on and key_attributes.properties.updated_on,
"Missing required date attributes.",
)
def _import_test_key(self, client, name, hardware_protected=False):
def _to_bytes(hex):
if len(hex) % 2:
hex = "0{}".format(hex)
return codecs.decode(hex, "hex_codec")
key = JsonWebKey(
kty="RSA-HSM" if hardware_protected else "RSA",
key_ops=["encrypt", "decrypt", "sign", "verify", "wrapKey", "unwrapKey"],
n=_to_bytes(
"00a0914d00234ac683b21b4c15d5bed887bdc959c2e57af54ae734e8f00720d775d275e455207e3784ceeb60a50a4655dd72a7a94d271e8ee8f7959a669ca6e775bf0e23badae991b4529d978528b4bd90521d32dd2656796ba82b6bbfc7668c8f5eeb5053747fd199319d29a8440d08f4412d527ff9311eda71825920b47b1c46b11ab3e91d7316407e89c7f340f7b85a34042ce51743b27d4718403d34c7b438af6181be05e4d11eb985d38253d7fe9bf53fc2f1b002d22d2d793fa79a504b6ab42d0492804d7071d727a06cf3a8893aa542b1503f832b296371b6707d4dc6e372f8fe67d8ded1c908fde45ce03bc086a71487fa75e43aa0e0679aa0d20efe35"
),
e=_to_bytes("10001"),
d=_to_bytes(
"627c7d24668148fe2252c7fa649ea8a5a9ed44d75c766cda42b29b660e99404f0e862d4561a6c95af6a83d213e0a2244b03cd28576473215073785fb067f015da19084ade9f475e08b040a9a2c7ba00253bb8125508c9df140b75161d266be347a5e0f6900fe1d8bbf78ccc25eeb37e0c9d188d6e1fc15169ba4fe12276193d7779
|
0d2326928bd60d0d01d6ead8d6ac4861abadceec95358fd6689c50a1671a4a936d2376440a41445501da4e74bfb98f823bd19c45b94eb01d98fc0d2f284507f018ebd929b8180dbe6381fdd434bffb7800aaabdd973d55f9eaf9bb88a6ea7b28c2a80231e72de1ad244826d665582c2362761019de2e9f10cb8bcc2625649"
),
p=_to_bytes(
"00d1deac8d68ddd2c1fd52d5999655b2cf1565260de5269e43fd2a85f39280e1708ffff0682166cb6106ee5ea5e9ffd9f98d0becc9ff2cda2febc97259215ad84b9051e563e14a051dce438bc6541a24ac4f014cf9732d36ebfc1e61a00d82cbe412090f7793cfbd4b7605be133dfc3991f7e1bed5786f337de5036fc1e2df4cf3"
),
q=_to_bytes(
"00c3dc66b641a9b73cd833bc439cd34fc6574465ab5b7e8a92d32595a224d56d911e74624225b48c15a670282a51c40d1dad4bc2e9a3c8dab0c76f10052dfb053bc6ed42c65288a8e8bace7a8881184323f94d7db17ea6dfba651218f931a93b8f738f3d8fd3f6ba218d35b96861a0f584b0ab88ddcf446b9815f4d287d83a3237"
),
dp=_to_bytes(
"00c9a159be7265cbbabc9afcc4967eb74fe58a4c4945431902d1142da599b760e03838f8cbd26b64324fea6bdc9338503f459793636e59b5361d1e6951e08ddb089e1b507be952a81fbeaf7e76890ea4f536e25505c3f648b1e88377dfc19b4c304e738dfca07211b792286a392a704d0f444c0a802539110b7f1f121c00cff0a9"
),
dq=_to_bytes(
"00a0bd4c0a3d9f64436a082374b5caf2488bac1568696153a6a5e4cd85d186db31e2f58f024c617d29f37b4e6b54c97a1e25efec59c4d1fd3061ac33509ce8cae5c11f4cd2e83f41a8264f785e78dc0996076ee23dfdfc43d67c463afaa0180c4a718357f9a6f270d542479a0f213870e661fb950abca4a14ca290570ba7983347"
),
qi=_to_bytes(
"009fe7ae42e92bc04fcd5780464bd21d0c8ac0c599f9af020fde6ab0a7e7d1d39902f5d8fb6c614184c4c1b103fb46e94cd10a6c8a40f9991a1f28269f326435b6c50276fda6493353c650a833f724d80c7d522ba16c79f0eb61f672736b68fb8be3243d10943c4ab7028d09e76cfb5892222e38bc4d35585bf35a88cd68c73b07"
),
)
imported_key = client.import_key(name, key)
self._validate_rsa_key_bundle(imported_key, client.vault_url, name, key.kty, key.key_ops)
return imported_key
def _import_symmetric_test_key(self, client, name):
key_material = codecs.decode("e27ed0c84512bbd55b6af434d237c11feba311870f80f2c2e3364260f31c82c8", "hex_codec")
key = JsonWebKey(
kty="oct-HSM",
key_ops=["encrypt", "decrypt", "wrapKey", "unwrapKey"],
k=key_material,
)
        imported_key = client.import_key(name, key)
|
google-research/accelerated_gbm
|
solve_libsvm_instances.py
|
Python
|
mit
| 4,858 | 0.004323 |
# Copyright 2020 The Google Authors. All Rights Reserved.
#
# Licensed under the MIT License (the "License");
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ==============================================================================
"""Run tests with LIBSVM dataset.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
from absl import app
from absl import flags
import numpy as np
import sklearn.datasets
from sklearn.model_selection import train_test_split
from agbt import AGBT
from agbt_b import AGBTB
import functional as F
from gbt import GBT
from tree import Dataset
from tensorflow.python.platform import gfile
FLAGS = flags.FLAGS
flags.DEFINE_string("data_folder", None, "The directory of datasets.")
flags.DEFINE_enum("dataset_name", "all_datasets",
["all_datasets", "a1a", "w1a", "housing"],
("The name of instances."
"`all_datasets` means all of the instances in the folder."))
flags.DEFINE_enum("loss", "L2Loss", ["L2Loss", "LogisticLoss"],
"The loss function.")
flags.DEFINE_enum(
"method", "AGBT", ["GBT", "AGBT", "AGBTB"],
("The method to use. GBT is the standard gradient boosted tree. AGBT is our"
"proposed method and AGBTB is the method proposed by Biau et al."))
flags.DEFINE_integer(
"early_stopping_rounds", 100000,
("Stop the algorithm if the validation loss does not improve after this"
"number of iterations."))
flags.DEFINE_float(
"z_shrinkage_parameter", 0.1,
"The shrinkage parameter in the z-update in accelerated method.")
flags.DEFINE_integer("max_depth", 3, "Maximal depth of a tree.")
flags.DEFINE_integer("num_trees", 20, "Number of boosting iterations.")
flags.DEFINE_float("min_split_gain", 0.1, "Minimal gain for splitting a leaf.")
flags.DEFINE_float("learning_rate", 0.3, "Learning rate.")
flags.DEFINE_float("regularizer_const", 1, "Regularizer constant.")
flags.DEFINE_boolean("use_hessian", False, "Whether to use Hessian.")
TEST_SIZE = 0.2
RANDOM_STATE = 40
LOSS = {"L2Loss": F.L2Loss, "LogisticLoss": F.LogisticLoss}
def SetupData(data_folder, dataset_name):
path = os.path.join(data_folder, dataset_name + ".txt")
data = sklearn.datasets.load_svmlight_file(gfile.Open(path, mode="rb"))
x = np.asarray(data[0].todense())
y = np.array(data[1])
return train_test_split(x, y, test_size=TEST_SIZE, random_state=RANDOM_STATE)
def main(argv):
del argv
if FLAGS.data_folder is None:
raise ValueError("Directory with downloaded datasets must be provided.")
if FLAGS.dataset_name == "all_datasets":
names = ["a1a", "w1a", "housing"]
else:
names = [FLAGS.dataset_name]
for name in names:
x_train, x_test, y_train, y_test = SetupData(FLAGS.data_folder, name)
train_data = Dataset(x_train, y_train)
test_data = Dataset(x_test, y_test)
GBTParams = collections.namedtuple("GBTParams", [
"regularizer_const", "min_split_gain", "max_depth", "learning_rate",
"num_trees", "early_stopping_rounds", "loss", "use_hessian",
"z_shrinkage_parameter"
])
params = GBTParams(
regularizer_const=FLAGS.regularizer_const,
min_split_gain=FLAGS.min_split_gain,
max_depth=FLAGS.max_depth,
learning_rate=FLAGS.learning_rate,
num_trees=FLAGS.num_trees,
early_stopping_rounds=FLAGS.early_stopping_rounds,
loss=FLAGS.loss,
use_hessian=FLAGS.use_hessian,
z_shrinkage_parameter=FLAGS.z_shrinkage_parameter)
if FLAGS.method == "GBT":
print("Start training using GBT...")
method = GBT(params)
elif FLAGS.method == "AGBT":
print("Start training using AGBT...")
method = AGBT(params)
elif FLAGS.method == "AGBTB":
print("Start training using AGBTB...")
method = AGBTB(params)
method.train(train_data, valid_set=test_data)
print("Start predicting...")
y_pred = []
for x in x_test:
      y_pred.append(method.predict(x, num_iteration=method.best_iteration))
if params.loss == "L2Loss":
loss = F.L2Loss(params.use_hessian)
elif params.loss == "LogisticLoss":
loss = F.LogisticLoss(params.use_hessian)
print("The mean loss of prediction is:",
np.mean(loss.loss_value(np.array(y_pred), np.array(y_test))))
if __name__ == "__main__":
app.run(main)
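# Example invocation (flag values are illustrative; the flags themselves are
# defined above):
#
#     python solve_libsvm_instances.py --data_folder=/path/to/libsvm_data \
#         --dataset_name=a1a --method=AGBT --loss=LogisticLoss --num_trees=50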
|
r0h4n/commons
|
tendrl/commons/tests/test_init.py
|
Python
|
lgpl-2.1
| 17,194 | 0 |
import __builtin__
import etcd
from etcd import Client
import importlib
import inspect
import maps
from mock import MagicMock
from mock import patch
import os
import pkgutil
import pytest
import yaml
from tendrl.commons import objects
import tendrl.commons.objects.node_context as node
from tendrl.commons import TendrlNS
from tendrl.commons.utils import etcd_utils
@patch.object(etcd, "Client")
@patch.object(Client, "read")
@patch.object(node.NodeContext, '_get_node_id')
@patch.object(etcd_utils, 'read')
@patch.object(node.NodeContext, 'load')
def init(patch_node_load,
patch_etcd_utils_read,
patch_get_node_id,
patch_read,
patch_client):
patch_get_node_id.return_value = 1
patch_read.return_value = etcd.Client()
patch_client.return_value = etcd.Client()
setattr(__builtin__, "NS", maps.NamedDict())
setattr(NS, "_int", maps.NamedDict())
NS._int.etcd_kwargs = {
'port': 1,
'host': 2,
'allow_reconnect': True}
NS._int.client = etcd.Client(**NS._int.etcd_kwargs)
NS["config"] = maps.NamedDict()
NS.config["data"] = maps.NamedDict()
NS.config.data['tags'] = "test"
patch_etcd_utils_read.return_value = maps.NamedDict(
value='{"status": "UP",'
'"pkey": "tendrl-node-test",'
'"node_id": "test_node_id",'
'"ipv4_addr": "test_ip",'
'"tags": "[\\"my_tag\\"]",'
'"sync_status": "done",'
'"locked_by": "fd",'
'"fqdn": "tendrl-node-test",'
'"last_sync": "date"}')
patch_node_load.return_value = node.NodeContext
tendrlNS = TendrlNS()
return tendrlNS
def test_constructor():
with patch.object(TendrlNS, 'setup_common_objects') as \
mocked_method:
mocked_method.return_value = None
tendrlNS = TendrlNS()
tendrlNS = init()
# Default Parameter Testing
assert tendrlNS.ns_name == "tendrl"
assert tendrlNS.ns_src == "tendrl.commons"
# Check for existance and right data type
assert isinstance(NS, maps.NamedDict)
# Testing _list_modules_in_package_path
def test_list_modules_in_package_path():
tendrlNS = init()
modules = [
        ('alert', 'tendrl.commons.objects.alert'),
('block_device',
'tendrl.commons.objects.block_device'),
('cluster',
'tendrl.commons.objects.cluster'),
        ('cluster_alert', 'tendrl.commons.objects.cluster_alert'),
('cluster_alert_counters',
'tendrl.commons.objects.cluster_alert_counters'),
('cluster_node_alert_counters',
'tendrl.commons.objects.cluster_node_alert_counters'),
('cluster_node_context',
'tendrl.commons.objects.cluster_node_context'),
('cluster_tendrl_context',
'tendrl.commons.objects.cluster_tendrl_context'),
('cpu', 'tendrl.commons.objects.cpu'),
('definition', 'tendrl.commons.objects.definition'),
('detected_cluster', 'tendrl.commons.objects.detected_cluster'),
('disk', 'tendrl.commons.objects.disk'),
('geo_replication_session',
'tendrl.commons.objects.geo_replication_session'),
('global_details',
'tendrl.commons.objects.global_details'),
('gluster_brick', 'tendrl.commons.objects.gluster_brick'),
('gluster_volume', 'tendrl.commons.objects.gluster_volume'),
('gluster_peer', 'tendrl.commons.objects.gluster_peer'),
('job', 'tendrl.commons.objects.job'),
('memory', 'tendrl.commons.objects.memory'),
('node', 'tendrl.commons.objects.node'),
('node_alert',
'tendrl.commons.objects.node_alert'),
('node_context', 'tendrl.commons.objects.node_context'),
('node_network', 'tendrl.commons.objects.node_network'),
('notification_only_alert',
'tendrl.commons.objects.notification_only_alert'),
('os', 'tendrl.commons.objects.os'),
('platform', 'tendrl.commons.objects.platform'),
('service', 'tendrl.commons.objects.service'),
('tendrl_context', 'tendrl.commons.objects.tendrl_context'),
('virtual_disk', 'tendrl.commons.objects.virtual_disk')
]
ns_objects_path = os.path.join(os.path.dirname(os.path.abspath(__file__)).
rsplit('/', 1)[0], "objects")
ns_objects_prefix = "tendrl.commons.objects."
ret = tendrlNS._list_modules_in_package_path(ns_objects_path,
ns_objects_prefix)
# TO-DISCUSS : modules is hard coded and might change in future
if len(ret) != len(modules):
raise AssertionError()
ret = tendrlNS._list_modules_in_package_path("test", "test")
assert len(ret) == 0
# Testing _register_subclasses_to_ns
def test_register_subclasses_to_ns(monkeypatch):
tendrlNS = init()
tendrlNS._register_subclasses_to_ns()
assert len(getattr(NS.tendrl, "objects")) > 0
assert len(getattr(NS.tendrl, "flows")) > 0
ns_objects_path = os.path.join(
os.path.dirname(
os.path.abspath(__file__)).rsplit(
'/', 1)[0], "objects")
ns_objects_prefix = "tendrl.commons.objects."
modules = tendrlNS._list_modules_in_package_path(ns_objects_path,
ns_objects_prefix)
for mode_name, mod_cls in modules:
assert hasattr(NS.tendrl.objects, mode_name.title().replace('_', '')) \
is True
def list_package(self_obj, package_path, prefix):
if "flows" in prefix:
return [
('ImportCluster', 'tendrl.commons.flows.import_cluster'),
('UnmanageCluster', 'tendrl.commons.flows.unmanage_cluster')
]
else:
modules = []
for importer, name, ispkg in pkgutil.walk_packages(
path=[package_path]):
modules.append((name, prefix + name))
return modules
monkeypatch.setattr(TendrlNS, '_list_modules_in_package_path',
list_package)
tendrlNS._register_subclasses_to_ns()
assert len(getattr(NS.tendrl, "objects")) > 0
# Testing _add_object
def test_add_object():
tendrlNS = init()
obj_name = "test_obj"
obj = importlib.import_module(
"tendrl.commons.objects.cluster_node_context")
current_ns = tendrlNS._get_ns()
obj_cls = ""
for obj_cls in inspect.getmembers(obj, inspect.isclass):
tendrlNS._add_object(obj_name, obj_cls[1])
break
assert isinstance(getattr(current_ns.objects, "_test_obj")['atoms'],
maps.NamedDict)
assert isinstance(getattr(current_ns.objects, "_test_obj")['flows'],
maps.NamedDict)
with patch.object(TendrlNS, "_get_ns") as mock_add_obj:
mock_add_obj.return_value = maps.NamedDict(
objects=maps.NamedDict(_Service=maps.NamedDict(
atoms=maps.NamedDict())))
tendrlNS._add_object("Service", obj_cls[1])
with patch.object(TendrlNS, "_get_ns") as mock_add_obj:
mock_add_obj.return_value = maps.NamedDict(
objects=maps.NamedDict(
_Service=maps.NamedDict(
flows=maps.NamedDict())))
tendrlNS._add_object("Service", obj_cls[1])
# Testing _get_objects
def test_get_objects():
path = os.path.join(os.path.dirname(
os.path.dirname(os.path.abspath(__file__))), "objects")
objects_list = [d.title().replace('_', '') for d in os.listdir(path)
if os.path.isdir(os.path.join(path, d))]
tendrlNS = init()
ret = tendrlNS._get_objects()
assert isinstance(objects_list, list)
assert ret is not None
# TO-DISCUSS : object_list is hard coded and might change in future
assert set(ret) == set(objects_list)
# Testing _get_object
def test_get_object():
tendrlNS = init()
ret = tendrlNS._get_object("NodeNetwork")
assert (inspect.isclass(ret)) is True
assert (issubclass(ret, objects.BaseObject)) is True
path = os.path.join(os.path.dirname(os.path.dirname(
        os.path.abspath(__file__))), "objects")
|
whyDK37/py_bootstrap
|
samples/context/do_with.py
|
Python
|
apache-2.0
| 269 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from contextlib import contextmanager
@contextmanager
def log(name):
print('[%s] start...' % name)
yield
print('[%s] end.' % name)
with log('DEBUG'):
print('Hello, world!')
print('Hello, Python!')
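# Expected output of the calls above:
#
#     [DEBUG] start...
#     Hello, world!
#     [DEBUG] end.
#     Hello, Python!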
|
akiellor/selenium
|
py/test/selenium/webdriver/common/correct_event_firing_tests.py
|
Python
|
apache-2.0
| 5,557 | 0.003599 |
#!/usr/bin/python
# Copyright 2008-2010 WebDriver committers
# Copyright 2008-2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import tempfile
import time
import shutil
import unittest
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoSuchFrameException
def not_available_on_remote(func):
def testMethod(self):
print self.driver
if type(self.driver) == 'remote':
return lambda x: None
else:
return func(self)
return testMethod
class CorrectEventFiringTests(unittest.TestCase):
def testShouldFireClickEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
        self._assertEventFired("click")
def testShouldFireMouseDownEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("mousedown")
def testShouldFireMouseUpEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("mouseup")
def testShouldIssueMouseDownEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mousedown").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse down")
def testShouldIssueClickEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mouseclick").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse click")
def testShouldIssueMouseUpEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mouseup").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse up")
def testMouseEventsShouldBubbleUpToContainingElements(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("child").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse down")
def testShouldEmitOnChangeEventsWhenSelectingElements(self):
self._loadPage("javascriptPage")
# Intentionally not looking up the select tag. See selenium r7937 for details.
allOptions = self.driver.find_elements_by_xpath("//select[@id='selector']//option")
initialTextValue = self.driver.find_element_by_id("result").text
foo = allOptions[0]
bar = allOptions[1]
foo.select()
self.assertEqual(self.driver.find_element_by_id("result").text, initialTextValue)
bar.select()
self.assertEqual(self.driver.find_element_by_id("result").text, "bar")
def testShouldEmitOnChangeEventsWhenChangingTheStateOfACheckbox(self):
self._loadPage("javascriptPage")
checkbox = self.driver.find_element_by_id("checkbox")
checkbox.select()
self.assertEqual(self.driver.find_element_by_id("result").text, "checkbox thing")
def testShouldEmitClickEventWhenClickingOnATextInputElement(self):
self._loadPage("javascriptPage")
clicker = self.driver.find_element_by_id("clickField")
clicker.click()
self.assertEqual(clicker.get_attribute("value"), "Clicked")
def testClearingAnElementShouldCauseTheOnChangeHandlerToFire(self):
self._loadPage("javascriptPage")
element = self.driver.find_element_by_id("clearMe")
element.clear()
result = self.driver.find_element_by_id("result")
self.assertEqual(result.text, "Cleared");
# TODO Currently Failing and needs fixing
#def testSendingKeysToAnotherElementShouldCauseTheBlurEventToFire(self):
# self._loadPage("javascriptPage")
# element = self.driver.find_element_by_id("theworks")
# element.send_keys("foo")
# element2 = self.driver.find_element_by_id("changeable")
# element2.send_keys("bar")
# self._assertEventFired("blur")
# TODO Currently Failing and needs fixing
#def testSendingKeysToAnElementShouldCauseTheFocusEventToFire(self):
# self._loadPage("javascriptPage")
# element = self.driver.find_element_by_id("theworks")
# element.send_keys("foo")
# self._assertEventFired("focus")
def _clickOnElementWhichRecordsEvents(self):
self.driver.find_element_by_id("plainButton").click()
def _assertEventFired(self, eventName):
result = self.driver.find_element_by_id("result")
text = result.text
self.assertTrue(eventName in text, "No " + eventName + " fired: " + text)
def _pageURL(self, name):
return "http://localhost:%d/%s.html" % (self.webserver.port, name)
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
|
bameda/lektor
|
tests/test_editor.py
|
Python
|
bsd-3-clause
| 1,424 | 0 |
def test_basic_editor(scratch_tree):
sess = scratch_tree.edit('/')
assert sess.id == ''
assert sess.path == '/'
assert sess.record is not None
assert sess['_model'] == 'page'
assert sess['title'] == 'Index'
assert sess['body'] == 'Hello World!'
sess['body'] = 'A new body'
sess.commit()
assert sess.closed
with open(sess.get_fs_path()) as f:
assert f.read().splitlines() == [
'_model: page',
'---',
'title: Index',
'---',
'body: A new body'
]
def test_create_alt(scratch_tree, scratch_pad):
sess = scratch_tree.edit('/', alt='de')
assert sess.id == ''
assert sess.path == '/'
assert sess.record is not None
assert sess['_model'] == 'page'
assert sess['title'] == 'Index'
assert sess['body'] == 'Hello World!'
sess['body'] = 'Hallo Welt!'
sess.commit()
assert sess.closed
    # When we use the editor to change this, we only want the fields that
# changed compared to the base to be included.
    with open(sess.get_fs_path(alt='de')) as f:
assert f.read().splitlines() == [
'body: Hallo Welt!'
]
scratch_pad.cache.flush()
item = scratch_pad.get('/', alt='de')
assert item['_slug'] == ''
assert item['title'] == 'Index'
assert item['body'].source == 'Hallo Welt!'
assert item['_model'] == 'page'
|