repo_name (string, 5–100) | path (string, 4–231) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k) | middle (string, 3–512) | suffix (string, 0–8.17k)
---|---|---|---|---|---|---|---|---

bne/hortee | hortee/production.py | Python | apache-2.0 | 31 | 0
from hortee.settings import *

bgroff/kala-app | django_kala/organizations/views/new_organization.py | Python | mit | 1,353 | 0.002217
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from django.views.generic import TemplateView
from ..forms.settings.details import DetailsForm
class NewOrganizationView(TemplateView):
template_name = 'organizations/new_organization.html'
    def get_context_data(self, **kwargs):
        return {
'form': self.form,
}
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
if not request.user.is_superuser:
raise PermissionDenied(_('You do not have permission to create a new organization.'))
self.form = DetailsForm(request.POST or None, request.FILES or None)
return super(NewOrganizationView, self).dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
if self.form.is_valid():
company = self.form.save()
messages.success(request, _('The organization has been saved.'))
return redirect(reverse('organizations:organization', args=[company.pk]))
return self.render_to_response(self.get_context_data())
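A hedged sketch of how this view might be wired into a URLconf (the route regex, import path, and URL name are assumptions, not shown in the original):

```python
# Hypothetical URLconf entry for NewOrganizationView.
from django.conf.urls import url

from .views.new_organization import NewOrganizationView

urlpatterns = [
    url(r'^new/$', NewOrganizationView.as_view(), name='new_organization'),
]
```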

stormi/tsunami | src/primaires/interpreteur/masque/noeuds/base_noeud.py | Python | bsd-3-clause | 2,736 | 0.000368
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier définissant la classe BaseNoeud détaillée plus bas."""
class BaseNoeud:
"""Classe représentant la base d'un noeud.
Cette classe est héritée par tous les autres types de noeuds.
"""
importeur = None
def __init__(self):
"""Constructeur du noeud de base"""
self.nom = ""
self.suivant = None
def valider(self, personnage, dic_masques, commande, tester_fils=True):
"""Validation du noeud.
Cette méthode est à redéfinir dans chacune des classes-filles créée.
Chaque type de noeud a sa propre méthode de validation.
Dans tous les cas, une booléen doit être retourné :
- True si le noeud a pu être interprété ;
- False sinon.
Note : pour la plupart des noeuds, la validation est aussi fonction
des fils.
"""
raise NotImplementedError
def _get_fils(self):
"""Retourne les fils du noeud sous la forme d'une liste."""
return [self.suivant]
fils = property(_get_fils)
def afficher(self, personnage=None):
"""Retourne un affichage du masque pour les joueurs."""
return ""

theonlydude/RandomMetroidSolver | web/backend/plando.py | Python | mit | 2,305 | 0.003471
from web.backend.utils import loadPresetsList, transition2isolver, getAddressesToRead
from graph.graph_utils import vanillaTransitions, vanillaBossesTransitions, vanillaEscapeTransitions, GraphUtils
from logic.logic import Logic
from utils.version import displayedVersion
from gluon.html import OPTGROUP
class Plando(object):
def __init__(self, session, request, cache):
self.session = session
self.request = request
self.cache = cache
# required for GraphUtils access to access points
Logic.factory('vanilla')
def run(self):
# init session
if self.session.plando is None:
self.session.plando = {
"sta
|
te": {},
"preset": "regular",
"seed": None,
"startLocation": "Landing Site",
# rando params
"rando": {},
# set to False in plando.html
"firstTime": True
}
# load presets list
(stdPresets, tourPresets, comPresets) = loadPresetsList(self.cache)
# access points
vanillaAPs = []
for (src, dest) in vanillaTransitions:
vanillaAPs += [transition2isolver(src), transition2isolver(dest)]
vanillaBossesAPs = []
for (src, dest) in vanillaBossesTransitions:
vanillaBossesAPs += [transition2isolver(src), transition2isolver(dest)]
escapeAPs = []
for (src, dest) in vanillaEscapeTransitions:
escapeAPs += [transition2isolver(src), transition2isolver(dest)]
# generate list of addresses to read in the ROM
addresses = getAddressesToRead(plando=True)
startAPs = GraphUtils.getStartAccessPointNamesCategory()
startAPs = [OPTGROUP(_label="Standard", *startAPs["regular"]),
OPTGROUP(_label="Custom", *startAPs["custom"]),
OPTGROUP(_label="Custom (Area rando only)", *startAPs["area"])]
return dict(stdPresets=stdPresets, tourPresets=tourPresets, comPresets=comPresets,
vanillaAPs=vanillaAPs, vanillaBossesAPs=vanillaBossesAPs, escapeAPs=escapeAPs,
curSession=self.session.plando, addresses=addresses, startAPs=startAPs,
version=displayedVersion)

ayan-usgs/sci-wms | sciwms/__init__.py | Python | gpl-3.0 | 76 | 0
import logging
logger = logging.getLogger('sci-wms')
__version__ = '1.0.0'

MrYsLab/rb4s | redbot_accel.py | Python | gpl-3.0 | 18,018 | 0.003219
#!/usr/bin/env python3
"""
Copyright (c) 2015 Alan Yorinks All rights reserved.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public
License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import asyncio
from pymata_aio.pymata_core import PymataCore
from pymata_aio.constants import Constants
# noinspection PyPep8
class RedBotAccel:
"""
This library is a direct port of: https://github.com/sparkfun/SparkFun_MMA8452Q_Arduino_Library/tree/V_1.1.0
    Special Note: All reads have the Constants.I2C_END_TX_MASK bit set. Most devices do not need to do this, but it
    is required for this chip.
"""
MMA8452Q_Register = {
'STATUS': 0x00,
'OUT_X_MSB': 0x01,
'OUT_Y_MSB': 0x03,
'OUT_Y_LSB': 0x04,
'OUT_Z_MSB': 0x05,
'OUT_Z_LSB': 0x06,
'SYSMOD': 0x0B,
'INT_SOURCE': 0x0C,
'WHO_AM_I': 0x0D,
'XYZ_DATA_CFG': 0x0E,
'HP_FILTER_CUTOFF': 0x0F,
'PL_STATUS': 0x10,
'PL_CFG': 0x11,
'PL_COUNT': 0x12,
'PL_BF_ZCOMP': 0x13,
'P_L_THS_REG': 0x14,
'FF_MT_CFG': 0x15,
'FF_MT_SRC': 0x16,
'FF_MT_THS': 0x17,
'FF_MT_COUNT': 0x18,
'TRANSIENT_CFG': 0x1D,
'TRANSIENT_SRC': 0x1E,
'TRANSIENT_THS': 0x1F,
'TRANSIENT_COUNT': 0x20,
'PULSE_CFG': 0x21,
'PULSE_SRC': 0x22,
'PULSE_THSX': 0x23,
'PULSE_THSY': 0x24,
'PULSE_THSZ': 0x25,
'PULSE_TMLT': 0x26,
'PULSE_LTCY': 0x27,
'PULSE_WIND': 0x28,
'ASLP_COUNT': 0x29,
'CTRL_REG1': 0x2A,
'CTRL_REG2': 0x2B,
'CTRL_REG3': 0x2C,
'CTRL_REG4': 0x2D,
'CTRL_REG5': 0x2E,
'OFF_X': 0x2F,
'OFF_Y': 0x30,
'OFF_Z': 0x31
}
def __init__(self, board, address, scale, output_data_rate):
"""
@param address: Address of the device
@param scale: scale factor
@param output_data_rate: output data rate
@return: no return value
"""
# portrait landscape status values
self.PORTRAIT_U = 0
self.PORTRAIT_D = 1
self.LANDSCAPE_R = 2
self.LANDSCAPE_L = 3
self.LOCKOUT = 0x40
# device id
self.device_id = 42
# device address
self.address = address
# scale factor (fsr)
self.scale = scale
# output data rate (odr)
self.output_data_rate = output_data_rate
# call backs for axis, portrait/landscape and tap results
self.axis = None
self.p_l = None
self.tap = None
        # When a read is performed, data is returned through a callback to this structure.
        # It should be cleared after data is consumed.
self.callback_data = []
        # the returned data begins at position 2
        # (position 0 is the device address)
self.data_start = 2
self.board = board
async def start(self):
# configure firmata for i2c
await self.board.i2c_config()
# reset the device
register = self.MMA8452Q_Register['CTRL_REG2']
await self.board.i2c_write_request(self.address, [register, 0x40])
# verify the device by sending a WHO AM I command and checking the results
id_board = await self.check_who_am_i()
if not id_board:
print("Who am I fails")
await self.board.shutdown()
else:
# Correct device, continue with init
# Must be in standby to change registers
await self.standby()
# set up the scale register
await self.set_scale(self.scale)
# set the output data rate
await self.set_output_data_rate(self.output_data_rate)
# Set up portrait/landscape detection
await self.setup_portrait_landscape()
# Disable x, y, set z to 0.5g
await self.setup_tap(0x80, 0x80, 0x08)
# set device to active state
# self.board.sleep(.3)
await self.set_active()
async def data_val(self, data):
"""
This is the callback method used to save read results
@param data: Data returned from the device
@return: No return value
"""
self.callback_data = data
async def check_who_am_i(self):
"""
        This method verifies the device ID.
@return: True if valid, False if not
"""
register = self.MMA8452Q_Register['WHO_AM_I']
await self.board.i2c_read_request(self.address, register, 1,
Constants.I2C_READ | Constants.I2C_END_TX_MASK,
self.data_val, Constants.CB_TYPE_ASYNCIO)
# await asyncio.sleep(1)
reply = await self.wait_for_read_result()
if reply[self.data_start] == self.device_id:
rval = True
else:
rval = False
return rval
async def standby(self):
"""
Put the device into standby mode so that the registers can be set.
@return: No return value
"""
register = self.MMA8452Q_Register['CTRL_REG1']
await self.board.i2c_read_request(self.address, register, 1,
Constants.I2C_READ | Constants.I2C_END_TX_MASK,
self.data_val, Constants.CB_TYPE_ASYNCIO)
ctrl1 = await self.wait_for_read_result()
ctrl1 = (ctrl1[self.data_start]) & ~0x01
        self.callback_data = []
await self.board.i2c_write_request(self.address, [register, ctrl1])
async def set_scale(self, scale):
"""
Set the device scale register.
Device must be in standby before calling this function
@param scale: scale factor
@return: No return value
"""
register = self.MMA8452Q_Register['XYZ_DATA_CFG']
await self.board.i2c_read_request(self.address, register, 1,
Constants.I2C_READ | Constants.I2C_END_TX_MASK,
self.data_val, Constants.CB_TYPE_ASYNCIO)
config_reg = await self.wait_for_read_result()
config_reg = config_reg[self.data_start]
config_reg &= 0xFC # Mask out scale bits
config_reg |= (scale >> 2)
await self.board.i2c_write_request(self.address, [register, config_reg])
async def set_output_data_rate(self, output_data_rate):
"""
Set the device output data rate.
Device must be in standby before calling this function
@param output_data_rate: Desired data rate
@return: No return value.
"""
# self.standby()
register = self.MMA8452Q_Register['CTRL_REG1']
await self.board.i2c_read_request(self.address, register, 1,
Constants.I2C_READ | Constants.I2C_END_TX_MASK,
self.data_val, Constants.CB_TYPE_ASYNCIO)
control_reg = await self.wait_for_read_result()
control_reg = control_reg[self.data_start]
control_reg &= 0xC7 # Mask out data rate bits
control_reg |= (output_data_rate << 3)
await self.board.i2c_write_request(self.address, [register, control_reg])
async def setup_portrait_landscape(self):
"""
Setup the portrait/landscape registers
Device must be in standby before calling this function
@return: No return value
""

danhuynhdev/taskbuster | taskbuster/urls.py | Python | mit | 1,187 | 0
"""taskburster URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import logout
from django.conf.urls.i18n import i18n_patterns
from .views import home, home_files
urlpatterns = [
url(r'^(?P<filename>(robots.txt)|(humans.txt))$',
home_files, name='home-files'),
url(r'^accounts/logout/$', logout, {'next_page': '/'}),
    url(r'^accounts/', include('allauth.urls')),
]
urlpatterns += i18n_patterns(
url(r'^$', home, name='home'),
url(r'^admin/', include(admin.site.urls)),
)

maciekswat/dolfin_1.3.0 | test/documentation/test.py | Python | gpl-3.0 | 1,132 | 0
# Copyright (C) 2011 Marie E. Rognes
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# First added: 2011-05-22
# Last changed: 2011-05-22
import sys
from instant import get_status_output
tests = ["verify_demo_code_snippets.py"]
failed = []
for test in tests:
command = "%s %s" % (sys.executable, test)
    fail, output = get_status_output(command)
if fail:
failed.append(fail)
print "*** %s failed" % test
print output
else:
print "OK"
sys.exit(len(failed))

the-zebulan/CodeWars | tests/kyu_6_tests/test_compare_versions.py | Python | mit | 729 | 0
import unittest
from katas.kyu_6.compare_versions import compare_versions
class CompareVersionsTestCase(unittest.TestCase):
def test_true(self):
self.assertTrue(compare_versions('11', '10'))
def test_true_2(self):
self.assertTrue(compare_versions('11', '11'))
def test_true_3(self):
self.assertTrue(compare_versions('10.4.6', '10.4'))
def test_false(self):
self.assertFalse(compare_versions('10.4', '10.4.8'))
def test_false_2(self):
self.assertFalse(compare_versions('10.4', '11'))
def test_false_3(self):
self.assertFalse(compare_versions('10.4', '10.10'))
def test_false_4(self):
self.assertFalse(compare_versions('10.4.9', '10.5'))
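A hedged implementation sketch that satisfies the tests above (the kata's actual solution is not shown; this assumes purely numeric, dot-separated versions):

```python
# Hypothetical compare_versions: True when version1 >= version2 under
# numeric, component-wise comparison (so '10.10' > '10.4').
def compare_versions(version1, version2):
    v1 = [int(part) for part in version1.split('.')]
    v2 = [int(part) for part in version2.split('.')]
    return v1 >= v2
```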

nesdis/djongo | tests/django_tests/tests/v21/tests/db_functions/test_pad.py | Python | agpl-3.0 | 1,830 | 0.00165
from django.db.models import CharField, Value
from django.db.models.functions import Length, LPad, RPad
from django.test import TestCase
from .models import Author
class PadTests(TestCase):
def test_pad(self):
Author.objects.create(name='John', alias='j')
tests = (
(LPad('name', 7, Value('xy')), 'xyxJohn'),
(RPad('name', 7, Value('xy')), 'Johnxyx'),
(LPad('name', 6, Value('x')), 'xxJohn'),
(RPad('name', 6, Value('x')), 'Johnxx'),
# The default pad string is a space.
(LPad('name', 6), ' John'),
            (RPad('name', 6), 'John  '),
# If string is longer than length it is truncated.
(LPad('name', 2), 'Jo'),
(RPad('name', 2), 'Jo'),
(LPad('name', 0), ''),
(RPad('name', 0), ''),
)
for function, padded_name in tests:
with self.subTest(function=function):
authors = Author.objects.annotate(padded_name=function)
self.assertQuerysetEqual(authors, [padded_name], lambda a: a.padded_name, ordered=False)
def test_pad_negative_length(self):
for function in (LPad, RPad):
with self.subTest(function=function):
with self.assertRaisesMessage(ValueError, "'length' must be greater or equal to 0."):
function('name', -1)
def test_combined_with_length(self):
Author.objects.create(name='Rhonda', alias='john_smith')
Author.objects.create(name='♥♣♠', alias='bytes')
authors = Author.objects.annotate(filled=LPad('name', Length('alias'), output_field=CharField()))
self.assertQuerysetEqual(
authors.order_by('alias'),
[' ♥♣♠', ' Rhonda'],
lambda a: a.filled,
)

happz/ducky | ducky/mm/__init__.py | Python | mit | 24,453 | 0.010714
from six import iteritems, itervalues
from six.moves import range
from ..interfaces import ISnapshotable
from ..errors import AccessViolationError, InvalidResourceError
from ..util import align, sizeof_fmt, Flags
from ..snapshot import SnapshotNode
import enum
DEFAULT_MEMORY_SIZE = 0x1000000
# Types
from ctypes import c_byte as i8_t # NOQA
from ctypes import c_short as i16_t # NOQA
from ctypes import c_int as i32_t # NOQA
from ctypes import c_int64 as i64_t # NOQA
from ctypes import c_ubyte as u8_t # NOQA
from ctypes import c_ushort as u16_t # NOQA
from ctypes import c_uint as u32_t # NOQA
from ctypes import c_uint64 as u64_t # NOQA
WORD_SIZE = 4
SHORT_SIZE = 2
PAGE_SHIFT = 8
#: Size of memory page, in bytes.
PAGE_SIZE = (1 << PAGE_SHIFT)
PAGE_MASK = (~(PAGE_SIZE - 1))
MINIMAL_SIZE = 16
class MMOperationList(enum.IntEnum):
ALLOC = 3
FREE = 4
UNUSED = 5
MMAP = 6
UNMMAP = 7
from ..util import UINT8_FMT, UINT16_FMT, UINT32_FMT, UINT64_FMT # noqa
def SIZE_FMT(size):
return str(size)
def OFFSET_FMT(offset):
s = '-' if offset < 0 else ''
return '{}0x{:04X}'.format(s, abs(offset))
class MalformedBinaryError(Exception):
pass
def addr_to_page(addr):
return (addr & PAGE_MASK) >> PAGE_SHIFT
def addr_to_offset(addr):
return addr & (PAGE_SIZE - 1)
def area_to_pages(addr, size):
return ((addr & PAGE_MASK) >> PAGE_SHIFT, align(PAGE_SIZE, size) // PAGE_SIZE)
class PageTableEntry(Flags):
_flags = ['read', 'write', 'execute', 'dirty']
_labels = 'RWXD'
READ = 0x01
WRITE = 0x02
EXECUTE = 0x04
DIRTY = 0x08
class MemoryPageState(SnapshotNode):
def __init__(self, *args, **kwargs):
super(MemoryPageState, self).__init__('index', 'content')
class MemoryPage(object):
"""
Base class for all memory pages of any kinds.
Memory page has a set of boolean flags that determine access to and behavior
of the page.
+-------------+-----------------------------------------------------------------------------+-----------+
| Flag | Meaning | Default |
+-------------+-----------------------------------------------------------------------------+-----------+
| ``read`` | page is readable by executed instructions | ``False`` |
+-------------+-----------------------------------------------------------------------------+-----------+
| ``write`` | page is writable by executed instructions | ``False`` |
+-------------+-----------------------------------------------------------------------------+-----------+
| ``execute`` | content of the page can be used as executable instructions | ``False`` |
+-------------+-----------------------------------------------------------------------------+-----------+
| ``dirty`` | there have been write access to this page, its content has changed | ``False`` |
+-------------+-----------------------------------------------------------------------------+-----------+
:param ducky.mm.MemoryController controller: Controller that owns this page.
:param int index: Serial number of this page.
"""
def __init__(self, controller, index):
super(MemoryPage, self).__init__()
self.controller = controller
self.index = index
self.DEBUG = self.controller.DEBUG
self.INFO = self.controller.INFO
self.WARN = self.controller.WARN
self.ERROR = self.controller.ERROR
self.EXCEPTION = self.controller.EXCEPTION
self.base_address = self.index * PAGE_SIZE
def __repr__(self):
return '<%s index=%i, base=%s>' % (self.__class__.__name__, self.index, UINT32_FMT(self.base_address))
def save_state(self, parent):
"""
Create state of this page, and attach it to snapshot tree.
:param parent: Parent snapshot node.
:type parent: ducky.snapshot.SnapshotNode
"""
state = parent.add_child('page_{}'.format(self.index), MemoryPageState())
state.index = self.index
state.content = [ord(i) if isinstance(i, str) else i for i in self.data]
return state
def load_state(self, state):
"""
Restore page from a snapshot.
"""
for i in range(0, PAGE_SIZE):
self.data[i] = state.content[i]
def __len__(self):
"""
:return: length of this page. By default, all pages have the same length.
:rtype: int
"""
return PAGE_SIZE
def clear(self):
"""
Clear page.
This operation is implemented by child classes.
"""
raise NotImplementedError('Not allowed to clear memory on this address: page={}'.format(self.index))
def read_u8(self, offset):
"""
Read byte.
This operation is implemented by child classes.
:param int offset: offset of requested byte.
:rtype: int
"""
        raise NotImplementedError('Not allowed to access memory on this address: page={}, offset={}'.format(self.index, offset))
def read_u16(self, offset):
"""
Read word.
This operation is implemented by child classes.
        :param int offset: offset of requested word.
:rtype: int
"""
raise NotImplementedError('Not allowed to access memory on this address: page={}, offset={}'.format(self.index, offset))
def read_u32(self, offset):
"""
Read longword.
This operation is implemented by child classes.
:param int offset: offset of requested longword.
:rtype: int
"""
raise NotImplementedError('Not allowed to access memory on this address: page={}, offset={}'.format(self.index, offset))
def write_u8(self, offset, value):
"""
Write byte.
This operation is implemented by child classes.
:param int offset: offset of requested byte.
:param int value: value to write into memory.
"""
raise NotImplementedError('Not allowed to access memory on this address: page={}, offset={}'.format(self.index, offset))
def write_u16(self, offset, value):
"""
Write word.
This operation is implemented by child classes.
:param int offset: offset of requested word.
:param int value: value to write into memory.
"""
raise NotImplementedError('Not allowed to access memory on this address: page={}, offset={}'.format(self.index, offset))
def write_u32(self, offset, value):
"""
Write longword.
This operation is implemented by child classes.
:param int offset: offset of requested longword.
:param int value: value to write into memory.
"""
raise NotImplementedError('Not allowed to access memory on this address: page={}, offset={}'.format(self.index, offset))
class AnonymousMemoryPage(MemoryPage):
"""
"Anonymous" memory page - this page is just a plain array of bytes, and is
not backed by any storage. Its content lives only in the memory.
Page is created with all bytes set to zero.
"""
def __init__(self, controller, index):
super(AnonymousMemoryPage, self).__init__(controller, index)
self.data = bytearray([0 for _ in range(0, PAGE_SIZE)])
def clear(self):
self.DEBUG('%s.clear', self.__class__.__name__)
for i in range(0, PAGE_SIZE):
self.data[i] = 0
def read_u8(self, offset):
self.DEBUG('%s.read_u8: page=%s, offset=%s', self.__class__.__name__, self.index, offset)
return self.data[offset]
def read_u16(self, offset):
self.DEBUG('%s.read_u16: page=%s, offset=%s', self.__class__.__name__, self.index, offset)
return self.data[offset] | (self.data[offset + 1] << 8)
def read_u32(self, offset):
self.DEBUG('%s.do_read_u32: page=%s, offset=%s', self.__class__.__name__, self.index, offset)
return self.data[offset] | (self.data[offset + 1] << 8) | (self.data[offset + 2] << 16) | (self.data[offset + 3] << 24)
def write_u8(self, offset, value):
self.DEBUG('%s.do_write_u8: page=%s, offset=%s, value=%s', self.__class__.__name__, self.index, offset, value)
self.data[offset] = value
def write_u16(self, offset, value):
self.DEBUG('%s.write_u16: page=%s, offset=%s, value=%s', self.__class__.__name__, self.index, offset, value)
        self.data[offset] = value & 0xFF
        self.data[offset + 1] = (value >> 8) & 0xFF
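A hedged sketch exercising the byte packing above (the stub controller is an assumption standing in for the real memory controller, which only needs the logging attributes copied in MemoryPage.__init__):

```python
# Illustrative only: a minimal stand-in controller so the page can be
# constructed outside a running ducky machine.
class _StubController(object):
    def _log(self, *args, **kwargs):
        pass
    DEBUG = INFO = WARN = ERROR = EXCEPTION = _log

page = AnonymousMemoryPage(_StubController(), 0)
page.write_u8(0, 0x34)
page.write_u8(1, 0x12)
assert page.read_u16(0) == 0x1234  # little-endian: low byte at offset 0
```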

credativ/gofer | src/gofer/agent/manager.py | Python | lgpl-2.1 | 5,843 | 0
#
# Copyright (c) 2015 Red Hat, Inc.
#
# This software is licensed to you under the GNU Lesser General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (LGPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of LGPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/lgpl-2.0.txt.
#
# Jeff Ortel <jortel@redhat.com>
#
import struct
from logging import getLogger
from socket import socket as Socket
from socket import AF_INET, SOCK_STREAM, IPPROTO_TCP
from socket import TCP_NODELAY, SOL_SOCKET, SO_REUSEADDR, SO_LINGER
from gofer.common import Thread, utf8
from gofer.messaging import Document
from gofer.agent.plugin import Container
from gofer.agent.builtin import Admin
HOST = 'localhost'
PORT = 5650
log = getLogger(__name__)
class Handler(object):
"""
The request handler.
"""
def show(self):
container = Container()
admin = Admin(container)
return admin.help()
def cancel(self, sn=None, criteria=None):
container = Container()
admin = Admin(container)
return admin.cancel(sn=sn, criteria=criteria)
def load(self, path):
container = Container()
return container.load(path)
def reload(self, path):
container = Container()
return container.reload(path)
def unload(self, path):
container = Container()
return container.unload(path)
class Manager(Thread):
"""
The manager thread.
"""
def __init__(self, host=None, port=None, handler=None):
"""
:param host: The host (interface) to listen on.
        :type host: str
        :param port: The port to listen on.
        :type port: int
:param handler: The request handler.
:type handler: Handler
"""
super(Manager, self).__init__(name='manager')
self.host = host or HOST
        self.port = port or PORT
self.handler = handler or Handler()
self.setDaemon(True)
def listen(self):
"""
Bind and listen.
:return: The open socket.
:rtype: socket.socket
"""
address = (self.host, self.port)
socket = Socket(AF_INET, SOCK_STREAM)
socket.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
socket.bind(address)
socket.listen(5)
log.info('listening on: %d', self.port)
return socket
def accept(self, socket):
"""
Accept requests.
:param socket: An open socket.
:type socket: socket.socket
"""
while not Thread.aborted():
client, address = socket.accept()
try:
self.accepted(client)
finally:
client.close()
def accepted(self, client):
"""
Process the request on the accepted socket.
:param client: A client socket.
:type client: socket.socket
"""
try:
client.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1)
client.setsockopt(SOL_SOCKET, SO_LINGER, struct.pack('ii', 1, 1))
message = client.recv(4096)
call = Document()
call.load(message)
reply = self.dispatch(call)
client.send(reply)
except Exception, e:
log.error(utf8(e))
def run(self):
"""
The thread main.
"""
try:
socket = self.listen()
self.accept(socket)
except Exception:
log.exception(self.host)
def dispatch(self, call):
"""
Dispatch the call to the handler.
:param call: A *call* document.
:type call: Document
"""
reply = Document()
try:
method = getattr(self.handler, call.name)
result = method(*call.args, **call.kwargs)
reply.code = 0
reply.result = result
except Exception, e:
reply.code = 1
reply.result = utf8(e)
return reply.dump()
class Method(object):
"""
Remote method.
"""
def __init__(self, host, port, name):
"""
:param host: The host used to connect to the manager.
:type host: str
:param port: The port used to connect to the manager.
        :type port: int
:param name: The method name.
:type name: str
"""
self.name = name
self.address = (host, port)
def call(self, *args, **kwargs):
"""
Remote call.
"""
socket = Socket(AF_INET, SOCK_STREAM)
socket.connect(self.address)
try:
method = Document()
method.name = self.name
method.args = args
method.kwargs = kwargs
socket.send(method.dump())
reply = socket.recv(4096)
result = Document()
result.load(reply)
return result
finally:
socket.close()
def __call__(self, *args, **kwargs):
try:
result = self.call(*args, **kwargs)
except Exception, e:
reply = Document()
reply.code = 1
reply.result = utf8(e)
result = reply
return result
class Client(object):
"""
The remote manager client.
"""
def __init__(self, host=None, port=None):
"""
:param port: The port used to connect to the manager.
        :type port: int
"""
self.host = host or HOST
self.port = port or PORT
def __getattr__(self, name):
return Method(self.host, self.port, name)
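A hedged usage sketch of the remote manager client above (hypothetical; assumes a Manager thread is already listening on the default host and port):

```python
# Illustrative only: Client.__getattr__ returns a Method, so attribute
# calls become remote calls dispatched to Handler on the manager side.
client = Client()            # defaults to localhost:5650
reply = client.show()        # runs Handler.show() remotely
if reply.code == 0:
    log.info('%s', reply.result)
else:
    log.error('%s', reply.result)
```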

navijo/FlOYBD | Django/mysite/floybd/urls.py | Python | mit | 10,036 | 0.005281
import os
from django.conf.urls import url
from . import views
from .earthquakes import viewsEarthquakes
from .weather import viewsWeather
from .gtfs import viewsGTFS
from django.http import HttpResponseRedirect
from .utils.utils import *
from django.db import connection
import simplekml
import time
import subprocess
import logging
logger = logging.getLogger("django")
app_name = 'floybd'
urlpatterns = [
    url(r'^$', views.index, name='index'),
url('clearKML', views.clearKML, name='clearKML'),
url('weatherStats', viewsWeather.weatherStats, name='weatherStats'),
url('dayWeather', viewsWeather.weatherConcreteIndex, name='dayWeather'),
    url('predictWeatherStats', viewsWeather.weatherPredictionsStats, name='predictWeatherStats'),
url('predictWeather', viewsWeather.weatherPredictions, name='predictWeather'),
url('weatherDemos', views.weatherDemos, name='weatherDemos'),
url('weather', views.weatherIndex, name='weather'),
url('currentWeather', viewsWeather.currentWeather, name='currentWeather'),
url('dummyWeather', viewsWeather.dummyWeather, name='dummyWeather'),
url('stopTour', views.stopTourView, name='stopTour'),
url('demoEarthquakes', views.demoEarthquakes, name='demoEarthquakes'),
url('getConcreteDateValues', viewsWeather.getConcreteValues, name='getConcreteDateValues'),
url('sendConcreteValuesToLG', viewsWeather.sendConcreteValuesToLG, name='sendConcreteValuesToLG'),
url('getPredictionStats', viewsWeather.getPredictionStats, name='getPredictionStats'),
url('getPrediction', viewsWeather.getPrediction, name='getPrediction'),
url('sendPredictionsToLG', viewsWeather.sendPredictionsToLG, name='sendPredictionsToLG'),
url('earthquakes', views.eartquakesIndex, name='earthquakes'),
url('getApproxEarthquakes', viewsEarthquakes.getEarthquakesApprox, name='getApproxEarthquakes'),
url('getExactEarthquakes', viewsEarthquakes.getEarthquakesExact, name='getExactEarthquakes'),
url('sendConcreteEarthquakesValuesToLG', viewsEarthquakes.sendConcreteValuesToLG,
name='sendConcreteEarthquakesValuesToLG'),
url('demoLastWeekEarthquakesHeatmap', viewsEarthquakes.demoLastWeekEarthquakesHeatmap,
name='demoLastWeekEarthquakesHeatmap'),
url('demoLastWeekEarthquakes', viewsEarthquakes.demoLastWeekEarthquakes, name='demoLastWeekEarthquakes'),
url('heatMapEarthquakes', views.eartquakesHeatMapIndex, name='heatMapEarthquakes'),
url('getHeatMapEarthquakesKML', viewsEarthquakes.generateHeapMapKml, name='getHeatMapEarthquakesKML'),
url('getHeatMapEarthquakes', viewsEarthquakes.getHeatMap, name='getHeatMapEarthquakes'),
url('getStats', viewsWeather.getStats, name='getStats'),
url('sendStatsToLG', viewsWeather.sendStatsToLG, name='sendStatsToLG'),
url('getGraphDataForStats', viewsWeather.getGraphDataForStats, name='getGraphDataForStats'),
url('launchdemogtfs', viewsGTFS.launchdemogtfs, name='launchdemogtfs'),
url('demogtfsindex', views.demogtfs, name='demogtfsindex'),
url('uploadgtfs', viewsGTFS.uploadgtfs, name='uploadgtfs'),
url('viewgtfs', viewsGTFS.viewgtfs, name='viewgtfs'),
url('gtfs', views.gtfs, name='gtfs'),
url('uploadGTFS', viewsGTFS.uploadGTFS, name='uploadGTFS'),
url('sendGTFSToLG', viewsGTFS.sendGTFSToLG, name='sendGTFSToLG'),
url('getAgenciesAndGenerateKML', viewsGTFS.getAgenciesAndGenerateKML, name='getAgenciesAndGenerateKML'),
url('citydashboard', viewsWeather.citydashboard, name='citydashboard'),
url('viewDashboard', viewsWeather.viewDashboard, name='viewDashboard'),
url('openHelp', views.openHelp, name='openHelp'),
url('launchScreenSaver', views.launchScreenSaver, name='launchScreenSaver'),
url('stopScreenSaver', views.stopScreenSaver, name='stopScreenSaver'),
url('clearCache', views.clearLGCache, name='clearCache'),
url('relaunchLG', views.relaunchLG, name='relaunchLG'),
url('settings', lambda x: HttpResponseRedirect('/admin/floybd/setting/'), name='settings'),
url('webhook', views.webhook, name='webhook'),
url('getSlideImage', views.getSlideImage, name='getSlideImage'),
]
def sendLogos():
if checkPing(getLGIp()):
millis = int(round(time.time() * 1000))
kml = simplekml.Kml(name="Layout")
screen = kml.newscreenoverlay(name='FLOYBD')
screen.icon.href = "http://"+getDjangoIp()+":8000/static/img/ownLogos.png?a="+str(millis)
screen.overlayxy = simplekml.OverlayXY(x=0.0, y=1.0, xunits=simplekml.Units.fraction,
yunits=simplekml.Units.fraction)
screen.screenxy = simplekml.ScreenXY(x=0.0, y=1.0, xunits=simplekml.Units.fraction,
yunits=simplekml.Units.fraction)
screen.rotationxy = simplekml.RotationXY(x=0.0, y=0.0, xunits=simplekml.Units.fraction,
yunits=simplekml.Units.fraction)
screen.size.x = 0.20
screen.size.y = 0.15
screen.size.xunits = simplekml.Units.fraction
screen.size.yunits = simplekml.Units.fraction
screenName = kml.newscreenoverlay(name='App name')
screenName.icon.href = "http://" + getDjangoIp() + ":8000/static/img/FlOYBDLogo.png?a=" + str(millis)
screenName.overlayxy = simplekml.OverlayXY(x=0.0, y=1.0, xunits=simplekml.Units.fraction,
yunits=simplekml.Units.fraction)
screenName.screenxy = simplekml.ScreenXY(x=0.3, y=0.95, xunits=simplekml.Units.fraction,
yunits=simplekml.Units.fraction)
screenName.rotationxy = simplekml.RotationXY(x=0.0, y=0.0, xunits=simplekml.Units.fraction,
yunits=simplekml.Units.fraction)
screenName.size.x = 0.50
screenName.size.y = 0.07
screenName.size.xunits = simplekml.Units.fraction
screenName.size.yunits = simplekml.Units.fraction
screen1 = kml.newscreenoverlay(name='Logos')
screen1.icon.href = "http://" + getDjangoIp() + ":8000/static/img/sharedLogos.png?a="+str(millis)
screen1.overlayxy = simplekml.OverlayXY(x=0.0, y=0.0, xunits=simplekml.Units.fraction,
yunits=simplekml.Units.fraction)
screen1.screenxy = simplekml.ScreenXY(x=0.0, y=0.01, xunits=simplekml.Units.fraction,
yunits=simplekml.Units.fraction)
screen1.rotationxy = simplekml.RotationXY(x=0.0, y=0.0, xunits=simplekml.Units.fraction,
yunits=simplekml.Units.fraction)
screen1.size.x = 0.3
screen1.size.y = 0.25
screen1.size.xunits = simplekml.Units.fraction
screen1.size.yunits = simplekml.Units.fraction
currentDir = os.getcwd()
fileName = "Layout.kml"
dir1 = os.path.join(currentDir, "static/logos")
dirPath2 = os.path.join(dir1, fileName)
logger.info("\033[93m" + "Saving kml: " + str(dirPath2) + "\033[0m")
kml.save(dirPath2)
if db_table_exists("floybd_setting"):
logger.info("\033[93m" + "Sending Logos...from: " + getDjangoIp() + " to: " + getLGIp() + "\033[0m")
getLeftScreenCommand = "sshpass -p " + getLGPass() + " ssh lg@" + getLGIp() + \
" 'head -n 1 personavars.txt | cut -c17-19'"
leftScreenDirty = subprocess.check_output(getLeftScreenCommand, stderr=subprocess.STDOUT, shell=True)
leftScreenClean = leftScreenDirty.rstrip().decode("utf-8")
leftScreenNumber = leftScreenClean[-1:]
logger.debug("Left Screen: " + str(leftScreenClean))
logger.info("\033[93m" + "Left Screen Number: " + str(leftScreenNumber) + "\033[0m")
command = "echo 'http://" + getDjangoIp() + ":8000/static/logos/Layout.kml?a="+str(millis) +\
"' | sshpass -p " + getLGPass() + " ssh lg@" + getLGIp() + " 'cat - > /var/www/html/kmls_" + \
leftScreenNumber+".txt'"
            os.system(command)

ajurcevic/calibre-web | cps/epub.py | Python | gpl-3.0 | 4,225 | 0.003314
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import zipfile
from lxml import etree
import os
import uploader
from iso639 import languages as isoLanguages
def extractCover(zipFile, coverFile, coverpath, tmp_file_name):
if coverFile is None:
return None
else:
zipCoverPath = os.path.join(coverpath , coverFile).replace('\\','/')
cf = zipFile.read(zipCoverPath)
prefix = os.path.splitext(tmp_file_name)[0]
tmp_cover_name = prefix + '.' + os.path.basename(zipCoverPath)
image = open(tmp_cover_name, 'wb')
image.write(cf)
image.close()
return tmp_cover_name
def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
ns = {
'n': 'urn:oasis:names:tc:opendocument:xmlns:container',
'pkg': 'http://www.idpf.org/2007/opf',
'dc': 'http://purl.org/dc/elements/1.1/'
}
epubZip = zipfile.ZipFile(tmp_file_path)
txt = epubZip.read('META-INF/container.xml')
tree = etree.fromstring(txt)
cfname = tree.xpath('n:rootfiles/n:rootfile/@full-path', namespaces=ns)[0]
cf = epubZip.read(cfname)
tree = etree.fromstring(cf)
coverpath = os.path.dirname(cfname)
p = tree.xpath('/pkg:package/pkg:metadata', namespaces=ns)[0]
epub_metadata = {}
for s in ['title', 'description', 'creator', 'language']:
tmp = p.xpath('dc:%s/text()' % s, namespaces=ns)
if len(tmp) > 0:
epub_metadata[s] = p.xpath('dc:%s/text()' % s, namespaces=ns)[0]
else:
epub_metadata[s] = "Unknown"
if epub_metadata['description'] == "Unknown":
description = tree.xpath("//*[local-name() = 'description']/text()")
if len(description) > 0:
epub_metadata['description'] = description
else:
epub_metadata['description'] = ""
if epub_metadata['language'] == "Unknown":
epub_metadata['language'] = ""
else:
lang = epub_metadata['language'].split('-', 1)[0].lower()
if len(lang) == 2:
epub_metadata['language'] = isoLanguages.get(part1=lang).name
elif len(lang) == 3:
epub_metadata['language'] = isoLanguages.get(part3=lang).name
else:
epub_metadata['language'] = ""
coversection = tree.xpath("/pkg:package/pkg:manifest/pkg:item[@id='cover-image']/@href", namespaces=ns)
coverfile = None
if len(coversection) > 0:
        coverfile = extractCover(epubZip, coversection[0], coverpath, tmp_file_path)
else:
meta_cover = tree.xpath("/pkg:package/pkg:metadata/pkg:meta[@name='cover']/@content", namespaces=ns)
if len(meta_cover) > 0:
coversection = tree.xpath("/pkg:package/pkg:manifest/pkg:item[@id='"+meta_cover[0]+"']/@href", namespaces=ns)
if len(coversection) > 0:
filetype = coversection[0].rsplit('.', 1)[-1]
if filetype == "xhtml" or filetype == "html": #if cover is (x)html format
markup = epubZip.read(os.path.join(coverpath, coversection[0]))
markupTree = etree.fromstring(markup)
# no matter xhtml or html with no namespace
imgsrc = markupTree.xpath("//*[local-name() = 'img']/@src")
# imgsrc maybe startwith "../"" so fullpath join then relpath to cwd
filename = os.path.relpath(os.path.join(os.path.dirname(os.path.join(coverpath, coversection[0])), imgsrc[0]))
coverfile = extractCover(epubZip, filename, "", tmp_file_path)
else:
coverfile = extractCover(epubZip, coversection[0], coverpath, tmp_file_path)
if epub_metadata['title'] is None:
title = original_file_name
else:
title = epub_metadata['title']
return uploader.BookMeta(
file_path=tmp_file_path,
extension=original_file_extension,
title=title.encode('utf-8').decode('utf-8'),
author=epub_metadata['creator'].encode('utf-8').decode('utf-8'),
cover=coverfile,
description=epub_metadata['description'],
tags="",
series="",
series_id="",
languages=epub_metadata['language'])
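A hedged call sketch for get_epub_info (the file names are hypothetical; field access assumes uploader.BookMeta is the record type constructed above):

```python
# Illustrative only: parse an uploaded EPUB and read back the metadata
# record that calibre-web's uploader consumes.
meta = get_epub_info('/tmp/calibre_upload_ab12', 'mybook', '.epub')
print(meta.title)
print(meta.author)
```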

kadubarbosa/hydra1 | miles2_table.py | Python | gpl-2.0 | 1,347 | 0.019302
# -*- coding: utf-8 -*-
"""
Created on 26/11/15
@author: Carlos Eduardo Barbosa
Convert table with SSP parameters of MILES II from the original to the
appropriate format for the MCMC run.
"""
import os
import numpy as np
from config import *
if __name__ == "__main__":
os.chdir(os.path.join(home, "tables"))
miles2 = "MILES_BaSTI_un_1.30.LICK.txt"
lick = "BANDS"
    data = np.loadtxt(miles2, dtype=str)
header = data[0]
names = data[:,0]
cols = np.array([2,3,4,5,6,7,8,9,14,15,16,17,18,24,25,26,27,28,29,30,31,
32,33,34,35])
data = data[:,cols]
# lick = np.loadtxt("BANDS", dtype=str, usecols=(0,))
# for a in zip(header[cols], lick):
# print a
table = []
for name, d in zip(names, data):
Z = name[8:13].replace("m", "-").replace("p", "+")
age = name[14:21]
alpha = name[25:29]
scale = name[30:]
if scale not in ["Ep0.00", "Ep0.40"]:
continue
if float(age) < 1.:
continue
table.append(np.hstack((age, Z, alpha, d)))
table = np.array(table)
header = np.hstack(("# Age(Gyr)", "[Z/H]", "[alpha/Fe]", header[cols]))
header = ["{0:12}".format(x) for x in header]
with open("MILESII.txt", "w") as f:
f.write("".join(header))
np.savetxt(f, table, fmt="%12s")

affan2/django-alphabetfilter | alphafilter/admin.py | Python | apache-2.0 | 788 | 0.003807
"""
This file unregisters the admin class for each model specified in
ALPHAFILTER_ADMIN_FIELDS and replaces it with a new admin class that
subclasses both the original admin and one with an alphabet_filter attribute
"""
from django.db.models import get_model
from django.contrib import admin
from django.conf import settings
MODEL_REGISTRY = getattr(settings, 'ALPHAFILTER_ADMIN_FIELDS', {})
FIELDS = {}
for key, val in MODEL_REGISTRY.items():
if isinstance(key, basestring):
FIELDS[get_model(*key.split('.'))] = val
for model, modeladmin in admin.site._registry.items():
if model in FIELDS:
admin.site.unregister(model)
admin.site.register(model, type('newadmin', (modeladmin.__class__,), {
'alphabet_filter': FIELDS[model],
}))
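A hedged example of the setting this module reads (the app and field names are hypothetical; the key/value shapes follow the parsing code above):

```python
# settings.py (illustrative): keys are "app_label.ModelName" strings,
# values are the model field used for the alphabet filter.
ALPHAFILTER_ADMIN_FIELDS = {
    'library.Author': 'name',
    'library.Book': 'title',
}
```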

ycool/apollo | cyber/python/cyber_py/cyber.py | Python | apache-2.0 | 10,449 | 0.000383
# ****************************************************************************
# Copyright 2018 The Apollo Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ****************************************************************************
# -*- coding: utf-8 -*-
"""Module for init environment."""
import sys
import os
import importlib
import time
import threading
import ctypes
from google.protobuf.descriptor_pb2 import FileDescriptorProto
PY_CALLBACK_TYPE = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
PY_CALLBACK_TYPE_T = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
# init vars
CYBER_PATH = os.environ['CYBER_PATH']
CYBER_DIR = os.path.split(CYBER_PATH)[0]
sys.path.append(CYBER_PATH + "/third_party/")
sys.path.append(CYBER_PATH + "/lib/")
sys.path.append(CYBER_PATH + "/python/cyber")
sys.path.append(CYBER_PATH + "/python/cyber_py")
sys.path.append(CYBER_PATH + "/lib/python/")
sys.path.append(CYBER_DIR + "/python/")
sys.path.append(CYBER_DIR + "/cyber/")
_CYBER_INIT = importlib.import_module('_cyber_init')
_CYBER_NODE = importlib.import_module('_cyber_node')
def init(module_name="cyber_py"):
"""
init cyber.
"""
return _CYBER_INIT.py_init(module_name)
def ok():
"""
is cyber envi ok.
"""
return _CYBER_INIT.py_ok()
def shutdown():
"""
shutdown cyber envi.
"""
return _CYBER_INIT.py_shutdown()
def is_shutdown():
"""
is cyber shutdown.
"""
    return _CYBER_INIT.py_is_shutdown()
def waitforshutdown():
"""
waitforshutdown.
"""
return _CYBER_INIT.py_waitforshutdown()
# //////////////////////////////class//////////////////////////////
class Writer(object):
"""
Class for cyber writer wrapper.
"""
def __init__(self, name, writer, data_type):
self.name = name
self.writer = writer
self.data_type = data_type
def write(self, data):
"""
writer msg string
"""
return _CYBER_NODE.PyWriter_write(self.writer, data.SerializeToString())
class Reader(object):
"""
Class for cyber reader wrapper.
"""
def __init__(self, name, reader, data_type):
self.name = name
self.reader = reader
self.data_type = data_type
class Client(object):
"""
Class for cyber service client wrapper.
"""
def __init__(self, client, data_type):
self.client = client
self.data_type = data_type
def send_request(self, data):
"""
send request to service
@param self
@param data: proto message to send
@return : None or response
"""
response_str = _CYBER_NODE.PyClient_send_request(
self.client, data.SerializeToString())
if len(response_str) == 0:
return None
response = self.data_type()
response.ParseFromString(response_str)
return response
class Node(object):
"""
Class for cyber Node wrapper.
"""
def __init__(self, name):
self.node = _CYBER_NODE.new_PyNode(name)
self.list_writer = []
self.list_reader = []
self.subs = {}
self.pubs = {}
self.list_client = []
self.list_service = []
self.mutex = threading.Lock()
self.callbacks = {}
self.services = {}
def __del__(self):
# print("+++ node __del___")
for writer in self.list_writer:
_CYBER_NODE.delete_PyWriter(writer)
for reader in self.list_reader:
_CYBER_NODE.delete_PyReader(reader)
for c in self.list_client:
_CYBER_NODE.delete_PyClient(c)
for s in self.list_service:
_CYBER_NODE.delete_PyService(s)
_CYBER_NODE.delete_PyNode(self.node)
def register_message(self, file_desc):
"""
register proto message desc file.
"""
for dep in file_desc.dependencies:
self.register_message(dep)
proto = FileDescriptorProto()
file_desc.CopyToProto(proto)
proto.name = file_desc.name
desc_str = proto.SerializeToString()
_CYBER_NODE.PyNode_register_message(self.node, desc_str)
def create_writer(self, name, data_type, qos_depth=1):
"""
create a topic writer for send message to topic.
@param self
@param name str: topic name
@param data_type proto: message class for serialization
"""
self.register_message(data_type.DESCRIPTOR.file)
datatype = data_type.DESCRIPTOR.full_name
writer = _CYBER_NODE.PyNode_create_writer(self.node, name,
datatype, qos_depth)
self.list_writer.append(writer)
return Writer(name, writer, datatype)
def reader_callback(self, name):
"""
reader callback
"""
sub = self.subs[name]
msg_str = _CYBER_NODE.PyReader_read(sub[0], False)
if len(msg_str) > 0:
if sub[3] != "RawData":
proto = sub[3]()
proto.ParseFromString(msg_str)
else:
# print "read rawdata-> ",sub[3]
proto = msg_str
if sub[2] is None:
sub[1](proto)
else:
sub[1](proto, sub[2])
return 0
def create_reader(self, name, data_type, callback, args=None):
"""
create a topic reader for receive message from topic.
@param self
@param name str: topic name
@param data_type proto: message class for serialization
@callback fn: function to call (fn(data)) when data is
received. If args is set, the function must
accept the args as a second argument,
i.e. fn(data, args)
@args any: additional arguments to pass to the callback
"""
self.mutex.acquire()
if name in self.subs.keys():
self.mutex.release()
return None
self.mutex.release()
# datatype = data_type.DESCRIPTOR.full_name
reader = _CYBER_NODE.PyNode_create_reader(
self.node, name, str(data_type))
if reader is None:
return None
self.list_reader.append(reader)
sub = (reader, callback, args, data_type, False)
self.mutex.acquire()
self.subs[name] = sub
self.mutex.release()
fun_reader_cb = PY_CALLBACK_TYPE(self.reader_callback)
self.callbacks[name] = fun_reader_cb
f_ptr = ctypes.cast(self.callbacks[name], ctypes.c_void_p).value
_CYBER_NODE.PyReader_register_func(reader, f_ptr)
return Reader(name, reader, data_type)
def create_rawdata_reader(self, name, callback, args=None):
"""
Create RawData reader:listener RawMessage
"""
return self.create_reader(name, "RawData", callback, args)
def create_client(self, name, request_data_type, response_data_type):
datatype = request_data_type.DESCRIPTOR.full_name
c = _CYBER_NODE.PyNode_create_client(self.node, name,
str(datatype))
self.list_client.append(c)
return Client(c, response_data_type)
def service_callback(self, name):
v = self.services[name]
msg_str = _CYBER_NODE.PyService_read(v[0])
if (len(msg_str) > 0):
proto = v[3]()
proto.ParseFromString(msg_str)
response = None
if v[2] is None:
response = v[1](proto)
else:
response = v[1](proto, v[2])
            _CYBER_NODE.PyService_write(v[0], response.SerializeToString())
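A hedged usage sketch of the Node reader API above (the channel name and proto class are hypothetical):

```python
# Illustrative only: create a node, subscribe with a callback, and block
# until shutdown, using init()/waitforshutdown() defined earlier in this file.
from modules.common.proto.chatter_pb2 import Chatter  # hypothetical proto

init()
node = Node("listener_node")

def on_message(msg):
    print("received: {}".format(msg))

node.create_reader("channel/chatter", Chatter, on_message)
waitforshutdown()
shutdown()
```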

EmanueleCannizzaro/scons | test/Fortran/F03FILESUFFIXES.py | Python | mit | 4,014 | 0.001495
#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/Fortran/F03FILESUFFIXES.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
import TestSCons
from common import write_fake_link
_python_ = TestSCons._python_
_exe = TestSCons._exe
test = TestSCons.TestSCons()
write_fake_link(test)
test.write('myfortran.py', r"""
import getopt
import sys
comment = '#' + sys.argv[1]
opts, args = getopt.getopt(sys.argv[2:], 'co:')
for opt, arg in opts:
if opt == '-o': out = arg
infile = open(args[0], 'rb')
outfile = open(out, 'wb')
for l in infile.readlines():
if l[:len(comment)] != comment:
outfile.write(l)
sys.exit(0)
""")
# Test default file suffix: .f90/.F90 for F90
test.write('SConstruct', """
env = Environment(LINK = r'%(_python_)s mylink.py',
                  LINKFLAGS = [],
F03 = r'%(_python_)s myfortran.py f03',
FORTRAN = r'%(_python_)s myfortran.py fortran')
env.Program(target = 'test01', source = 'test01.f')
env.Program(target = 'test02', source = 'test02.F')
env.Program(target = 'test03', source = 'test03.for')
env.Program(target = 'test04', source = 'test04.FOR')
env.Program(target = 'test05', source = 'test05.ftn')
env.Program(target = 'test06', source = 'test06.FTN')
env.Program(target = 'test07', source = 'test07.fpp')
env.Program(target = 'test08', source = 'test08.FPP')
env.Program(target = 'test09', source = 'test09.f03')
env.Program(target = 'test10', source = 'test10.F03')
""" % locals())
test.write('test01.f', "This is a .f file.\n#link\n#fortran\n")
test.write('test02.F', "This is a .F file.\n#link\n#fortran\n")
test.write('test03.for', "This is a .for file.\n#link\n#fortran\n")
test.write('test04.FOR', "This is a .FOR file.\n#link\n#fortran\n")
test.write('test05.ftn', "This is a .ftn file.\n#link\n#fortran\n")
test.write('test06.FTN', "This is a .FTN file.\n#link\n#fortran\n")
test.write('test07.fpp', "This is a .fpp file.\n#link\n#fortran\n")
test.write('test08.FPP', "This is a .FPP file.\n#link\n#fortran\n")
test.write('test09.f03', "This is a .f03 file.\n#link\n#f03\n")
test.write('test10.F03', "This is a .F03 file.\n#link\n#f03\n")
test.run(arguments = '.', stderr = None)
test.must_match('test01' + _exe, "This is a .f file.\n")
test.must_match('test02' + _exe, "This is a .F file.\n")
test.must_match('test03' + _exe, "This is a .for file.\n")
test.must_match('test04' + _exe, "This is a .FOR file.\n")
test.must_match('test05' + _exe, "This is a .ftn file.\n")
test.must_match('test06' + _exe, "This is a .FTN file.\n")
test.must_match('test07' + _exe, "This is a .fpp file.\n")
test.must_match('test08' + _exe, "This is a .FPP file.\n")
test.must_match('test09' + _exe, "This is a .f03 file.\n")
test.must_match('test10' + _exe, "This is a .F03 file.\n")
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

eek6/squeakspace | admin/init_server_db.py | Python | gpl-3.0 | 325 | 0
#!/usr/bin/python2.7
# Run this script as user: www-data
import os
import server_path
import squeakspace.server.db_sqlite3 as db
import config
try:
os.remove(config.db_path)
except OSError:
pass
conn = db.connect(config.db_path)
c = db.cursor(conn)
db.make_db(c, config.total_quota)
db.commit(conn)
db.close(conn)

antljones/saw | saw/views/url.py | Python | mit | 940 | 0.014894
from pyramid.view import view_config
@view_config(route_name='url#index', request_method='GET', renderer='templates/url/index.pt')
def index(request):
return {}
@view_config(route_name='url#show', request_method='GET', renderer='templates/url/show.pt')
def show(request):
return {}
@view_config(route_name='url#new', request_method='GET', renderer='templates/url/new.pt')
def new(request):
return {}
@view_config(route_name='url#create', request_method='GET', renderer='templates/url/create.pt')
def create(request):
return {}
@view_config(route_name='url#edit', request_method='GET', renderer='templates/url/edit.pt')
def edit(request):
return {}
@view_config(route_name='url#update', request_method='GET', renderer='templates/url/update.pt')
def update(request):
return {}
@view_config(route_name='url#destroy', request_method='GET', renderer='templates/url/destroy.pt')
def destroy(request):
return {}

thijstriemstra/acidswf | python/acidswf/application/amf.py | Python | gpl-3.0 | 1,733 | 0.004616
# Copyright (c) The AcidSWF Project.
# See LICENSE.txt for details.
"""
Support for creating a service which runs a web server.
@since: 1.0
"""
import logging
from twisted.python import usage
from twisted.application import service
from acidswf.service import createAMFService
optParameters = [
['log-level', None, logging.INFO, 'Log level.'],
['amf-transport', None, 'http', 'Run the AMF server on HTTP or HTTPS transport.'],
['amf-host', None, 'localhost', 'The interface for the AMF gateway to listen on.'],
['service', None, 'acidswf', 'The remote service name.'],
['amf-port', None, 8000, 'The port number for the AMF gateway to listen on.'],
['crossdomain', None, 'crossdomain.xml', 'Path to a crossdomain.xml file.'],
]
class Options(usage.Options):
"""
Define the options accepted by the I{acidswf amf} plugin.
"""
synopsis = "[amf options]"
optParameters = optParameters
longdesc = """\
This starts an AMF server."""
def postOptions(self):
"""
Set up conditional defaults and check for dependencies.
If SSL is not available but an HTTPS server was configured, raise a
        L{UsageError} indicating that this is not possible.
If no server port was supplied, select a default appropriate for the
        other options supplied.
"""
pass
#if self['https']:
# try:
# from twisted.internet.ssl import DefaultOpenSSLContextFactory
# except ImportError:
# raise usage.UsageError("SSL support not installed")
def makeService(options):
top_service = service.MultiService()
createAMFService(top_service, options)
return top_service

huangkuan/hack | lib/gcloud/monitoring/client.py | Python | apache-2.0 | 7,310 | 0
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the `Google Monitoring API (V3)`_.
Example::
>>> from gcloud import monitoring
>>> client = monitoring.Client()
>>> query = client.query(minutes=5)
>>> print(query.as_dataframe()) # Requires pandas.
At present, the client supports querying of time series, metric descriptors,
and monitored resource descriptors.
.. _Google Monitoring API (V3): https://cloud.google.com/monitoring/api/
"""
from gcloud.client import JSONClient
from gcloud.monitoring.connection import Connection
from gcloud.monitoring.metric import MetricDescriptor
from gcloud.monitoring.query import Query
from gcloud.monitoring.resource import ResourceDescriptor
class Client(JSONClient):
"""Client to bundle configuration needed for API requests.
:type project: string
:param project: The target project. If not passed, falls back to the
default inferred from the environment.
:type credentials: :class:`oauth2client.client.OAuth2Credentials` or
:class:`NoneType`
:param credentials: The OAuth2 Credentials to use for the connection
owned by this client. If not passed (and if no ``http``
object is passed), falls back to the default inferred
from the environment.
:type http: :class:`httplib2.Http` or class that defines ``request()``
:param http: An optional HTTP object to make requests. If not passed, an
``http`` object is created that is bound to the
``credentials`` for the current object.
"""
_connection_class = Connection
def query(self,
metric_type=Query.DEFAULT_METRIC_TYPE,
end_time=None,
days=0, hours=0, minutes=0):
"""Construct a query object for listing time series.
Example::
>>> query = client.query(minutes=5)
>>> print(query.as_dataframe()) # Requires pandas.
:type metric_type: string
:param metric_type: The metric type name. The default value is
:data:`Query.DEFAULT_METRIC_TYPE
<gcloud.monitoring.query.Query.DEFAULT_METRIC_TYPE>`,
but please note that this default value is provided only for
demonstration purposes and is subject to change. See the
`supported metrics`_.
:type end_time: :class:`datetime.datetime` or None
:param end_time: The end time (inclusive) of the time interval
for which results should be returned, as a datetime object.
The default is the start of the current minute.
The start time (exclusive) is determined by combining the
values of ``days``, ``hours``, and ``minutes``, and
subtracting the resulting duration from the end time.
It is also allowed to omit the end time and duration here,
in which case
:meth:`~gcloud.monitoring.query.Query.select_interval`
must be called before the query is executed.
:type days: integer
:param days: The number of days in the time interval.
:type hours: integer
:param hours: The number of hours in the time interval.
:type minutes: integer
:param minutes: The number of minutes in the time interval.
:rtype: :class:`~gcloud.monitoring.query.Query`
:returns: The query object.
:raises: :exc:`ValueError` if ``end_time`` is specified but
``days``, ``hours``, and ``minutes`` are all zero.
If you really want to specify a point in time, use
:meth:`~gcloud.monitoring.query.Query.select_interval`.
.. _supported metrics: https://cloud.google.com/monitoring/api/metrics
"""
return Query(self, metric_type,
end_time=end_time,
days=days, hours=hours, minutes=minutes)
def fetch_metric_descriptor(self, metric_type):
"""Look up a metric descriptor by type.
Example::
>>> METRIC = 'compute.googleapis.com/instance/cpu/utilization'
>>> print(client.fetch_metric_descriptor(METRIC))
:type metric_type: string
:param metric_type: The metric type name.
:rtype: :class:`~gcloud.monitoring.metric.MetricDescriptor`
:returns: The metric descriptor instance.
:raises: :class:`gcloud.exceptions.NotFound` if the metric descriptor
is not found.
"""
return MetricDescriptor._fetch(self, metric_type)
def list_metric_descriptors(self, filter_string=None):
"""List all metric descriptors for the project.
Example::
>>> for descriptor in client.list_metric_descriptors():
... print(descriptor.type)
:type filter_string: string or None
:param filter_string:
An optional filter expression describing the metric descriptors
to be returned. See the `filter documentation`_.
:rtype: list of :class:`~gcloud.monitoring.metric.MetricDescriptor`
:returns: A list of metric descriptor instances.
        .. _filter documentation:
https://cloud.google.com/monitoring/api/v3/filters
"""
return MetricDescriptor._list(self, filter_string)
def fetch_resource_descriptor(self, resource_type):
"""Look up a resource descriptor by type.
Example::
>>> print(client.fetch_resource_descriptor('gce_instance'))
:type resource_type: string
        :param resource_type: The resource type name.
:rtype: :class:`~gcloud.monitoring.resource.ResourceDescriptor`
:returns: The resource descriptor instance.
:raises: :class:`gcloud.exceptions.NotFound` if the resource descriptor
is not found.
"""
return ResourceDescriptor._fetch(self, resource_type)
def list_resource_descriptors(self, filter_string=None):
"""List all resource descriptors for the project.
Example::
>>> for descriptor in client.list_resource_descriptors():
... print(descriptor.type)
:type filter_string: string or None
:param filter_string:
An optional filter expression describing the resource descriptors
to be returned. See the `filter documentation`_.
:rtype: list of :class:`~gcloud.monitoring.resource.ResourceDescriptor`
:returns: A list of resource descriptor instances.
.. _filter documentation:
https://cloud.google.com/monitoring/api/v3/filters
"""
return ResourceDescriptor._list(self, filter_string)
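# A minimal usage sketch (the project ID and filter string are hypothetical;
# the calls are the public methods defined above):
#
#   client = Client(project='my-project')
#   for descriptor in client.list_metric_descriptors(
#           filter_string='metric.type = starts_with("compute")'):
#       print(descriptor.type)
#   print(client.fetch_resource_descriptor('gce_instance'))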
|
endlessm/chromium-browser
|
third_party/llvm/lldb/test/API/lang/c/anonymous/TestAnonymous.py
|
Python
|
bsd-3-clause
| 6,073 | 0.000988 |
"""Test that anonymous structs/unions are transparent to member access"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class AnonymousTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@skipIf(
compiler="icc",
bugnumber="llvm.org/pr15036: LLDB generates an incorrect AST layout for an anonymous struct when DWARF is generated by ICC")
def test_expr_nest(self):
self.build()
self.common_setup(self.line0)
# These should display correctly.
self.expect("expression n->foo.d", VARIABLES_DISPLAYED_CORRECTLY,
substrs=["= 4"])
self.expect("expression n->b", VARIABLES_DISPLAYED_CORRECTLY,
substrs=["= 2"])
def test_expr_child(self):
self.build()
self.common_setup(self.line1)
# These should display correctly.
self.expect("expression c->foo.d", VARIABLES_DISPLAYED_CORRECTLY,
substrs=["= 4"])
self.expect(
"expression c->grandchild.b",
VARIABLES_DISPLAYED_CORRECTLY,
substrs=["= 2"])
@skipIf(
compiler="icc",
bugnumber="llvm.org/pr15036: This particular regression was introduced by r181498")
def test_expr_grandchild(self):
self.build()
self.common_setup(self.line2)
# These should display correctly.
self.expect("expression g.child.foo.d", VARIABLES_DISPLAYED_CORRECTLY,
substrs=["= 4"])
self.expect("expression g.child.b", VARIABLES_DISPLAYED_CORRECTLY,
substrs=["= 2"])
def test_expr_parent(self):
self.build()
if "clang" in self.getCompiler() and "3.4" in self.getCompilerVersion():
self.skipTest(
"llvm.org/pr16214 -- clang emits partial DWARF for structures referenced via typedef")
self.common_setup(self.line2)
# These should display correctly.
self.expect("expression pz", VARIABLES_DISPLAYED_CORRECTLY,
substrs=["(type_z *) $", " = 0x0000"])
self.expect("expression z.y", VARIABLES_DISPLAYED_CORRECTLY,
substrs=["(type_y) $", "dummy = 2"])
def test_expr_null(self):
self.build()
self.common_setup(self.line2)
# This should fail because pz is 0, but it succeeds on OS/X.
# This fails on Linux with an upstream error "Couldn't dematerialize struct", as does "p *n" with "int *n = 0".
# Note that this can also trigger llvm.org/pr15036 when run
# interactively at the lldb command prompt.
self.expect("expression *(type_z *)pz", error=True)
def test_child_by_name(self):
self.build()
# Set debugger into synchronous mode
self.dbg.SetAsync(False)
# Create a target
exe = self.getBuildArtifact("a.out")
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
break_in_main = target.BreakpointCreateBySourceRegex(
'// Set breakpoint 2 here.', lldb.SBFileSpec(self.source))
self.assertTrue(break_in_main, VALID_BREAKPOINT)
process = target.LaunchSimple(
None, None, self.get_process_working_directory())
self.assertTrue(process, PROCESS_IS_VALID)
threads = lldbutil.get_threads_stopped_at_breakpoint(
process, break_in_main)
if len(threads) != 1:
self.fail("Failed to stop at breakpoint in main.")
thread = threads[0]
frame = thread.frames[0]
if not frame.IsValid():
self.fail("Failed to get frame 0.")
var_n = frame.FindVariable("n")
if not var_n.IsValid():
self.fail("Failed to get the variable 'n'")
elem_a = var_n.GetChildMemberWithName("a")
if not elem_a.IsValid():
self.fail("Failed to get the element a in n")
error = lldb.SBError()
value = elem_a.GetValueAsSigned(error, 1000)
if not error.Success() or value != 0:
self.fail("failed to get the correct value for element a in n")
def test_nest_flat(self):
self.build()
self.common_setup(self.line2)
# These should display correctly.
self.expect('frame variable n --flat',
substrs=['n.a = 0',
'n.b = 2',
'n.foo.c = 0',
'n.foo.d = 4'])
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line numbers to break in main.c.
self.source = 'main.c'
self.line0 = line_number(self.source, '// Set breakpoint 0 here.')
self.line1 = line_number(self.source, '// Set breakpoint 1 here.')
self.line2 = line_number(self.source, '// Set breakpoint 2 here.')
def common_setup(self, line):
# Set debugger into synchronous mode
self.dbg.SetAsync(False)
# Create a target
exe = self.getBuildArtifact("a.out")
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
# Set breakpoints inside and outside methods that take pointers to the
# containing struct.
lldbutil.run_break_set_by_file_and_line(
            self, self.source, line, num_expected_locations=1, loc_exact=True)
# Now launch the process, and do not stop at entry point.
        process = target.LaunchSimple(
None, None, self.get_process_working_directory())
self.assertTrue(process, PROCESS_IS_VALID)
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped',
'stop reason = breakpoint'])
# The breakpoint should have a hit count of 1.
self.expect("breakpoint list -f", BREAKPOINT_HIT_ONCE,
substrs=[' resolved, hit count = 1'])
|
buildbot/supybot
|
test/test_ircmsgs.py
|
Python
|
bsd-3-clause
| 10,535 | 0.001804 |
###
# Copyright (c) 2002-2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
#   this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
import copy
import pickle
import supybot.ircmsgs as ircmsgs
import supybot.ircutils as ircutils
# The test framework used to provide these, but now it doesn't. We'll add
# messages to these lists as we find bugs (if indeed we find bugs).
msgs = []
rawmsgs = []
class IrcMsgTestCase(SupyTestCase):
def testLen(self):
for msg in msgs:
if msg.prefix:
strmsg = str(msg)
self.failIf(len(msg) != len(strmsg) and \
strmsg.replace(':', '') == strmsg)
def testRepr(self):
IrcMsg = ircmsgs.IrcMsg
for msg in msgs:
self.assertEqual(msg, eval(repr(msg)))
def testStr(self):
for (rawmsg, msg) in zip(rawmsgs, msgs):
strmsg = str(msg).strip()
self.failIf(rawmsg != strmsg and \
strmsg.replace(':', '') == strmsg)
def testEq(self):
for msg in msgs:
self.assertEqual(msg, msg)
self.failIf(msgs and msgs[0] == []) # Comparison to unhashable type.
def testNe(self):
for msg in msgs:
self.failIf(msg != msg)
## def testImmutability(self):
## s = 'something else'
## t = ('foo', 'bar', 'baz')
## for msg in msgs:
## self.assertRaises(AttributeError, setattr, msg, 'prefix', s)
## self.assertRaises(AttributeError, setattr, msg, 'nick', s)
## self.assertRaises(AttributeError, setattr, msg, 'user', s)
## self.assertRaises(AttributeError, setattr, msg, 'host', s)
## self.assertRaises(AttributeError, setattr, msg, 'command', s)
## self.assertRaises(AttributeError, setattr, msg, 'args', t)
## if msg.args:
## def setArgs(msg):
## msg.args[0] = s
## self.assertRaises(TypeError, setArgs, msg)
def testInit(self):
for msg in msgs:
self.assertEqual(msg, ircmsgs.IrcMsg(prefix=msg.prefix,
command=msg.command,
args=msg.args))
self.assertEqual(msg, ircmsgs.IrcMsg(msg=msg))
self.assertRaises(ValueError,
ircmsgs.IrcMsg,
args=('foo', 'bar'),
prefix='foo!bar@baz')
def testPickleCopy(self):
for msg in msgs:
self.assertEqual(msg, pickle.loads(pickle.dumps(msg)))
self.assertEqual(msg, copy.copy(msg))
def testHashNotZero(self):
zeroes = 0
for msg in msgs:
if hash(msg) == 0:
zeroes += 1
self.failIf(zeroes > (len(msgs)/10), 'Too many zero hashes.')
def testMsgKeywordHandledProperly(self):
msg = ircmsgs.notice('foo', 'bar')
msg2 = ircmsgs.IrcMsg(msg=msg, command='PRIVMSG')
self.assertEqual(msg2.command, 'PRIVMSG')
self.assertEqual(msg2.args, msg.args)
def testMalformedIrcMsgRaised(self):
self.assertRaises(ircmsgs.MalformedIrcMsg, ircmsgs.IrcMsg, ':foo')
self.assertRaises(ircmsgs.MalformedIrcMsg, ircmsgs.IrcMsg,
args=('biff',), prefix='foo!bar@baz')
def testTags(self):
m = ircmsgs.privmsg('foo', 'bar')
self.failIf(m.repliedTo)
m.tag('repliedTo')
self.failUnless(m.repliedTo)
m.tag('repliedTo')
self.failUnless(m.repliedTo)
m.tag('repliedTo', 12)
self.assertEqual(m.repliedTo, 12)
class FunctionsTestCase(SupyTestCase):
def testIsAction(self):
L = [':jemfinch!~jfincher@ts26-2.homenet.ohio-state.edu PRIVMSG'
' #sourcereview :ACTION does something',
':supybot!~supybot@underthemain.net PRIVMSG #sourcereview '
':ACTION beats angryman senseless with a Unix manual (#2)',
':supybot!~supybot@underthemain.net PRIVMSG #sourcereview '
':ACTION beats ang senseless with a 50lb Unix manual (#2)',
':supybot!~supybot@underthemain.net PRIVMSG #sourcereview '
':ACTION resizes angryman\'s terminal to 40x24 (#16)']
msgs = map(ircmsgs.IrcMsg, L)
for msg in msgs:
self.failUnless(ircmsgs.isAction(msg))
def testIsActionIsntStupid(self):
m = ircmsgs.privmsg('#x', '\x01NOTANACTION foo\x01')
self.failIf(ircmsgs.isAction(m))
m = ircmsgs.privmsg('#x', '\x01ACTION foo bar\x01')
self.failUnless(ircmsgs.isAction(m))
def testIsCtcp(self):
self.failUnless(ircmsgs.isCtcp(ircmsgs.privmsg('foo',
'\x01VERSION\x01')))
self.failIf(ircmsgs.isCtcp(ircmsgs.privmsg('foo', '\x01')))
def testIsActionFalseWhenNoSpaces(self):
msg = ircmsgs.IrcMsg('PRIVMSG #foo :\x01ACTIONfoobar\x01')
self.failIf(ircmsgs.isAction(msg))
def testUnAction(self):
s = 'foo bar baz'
msg = ircmsgs.action('#foo', s)
self.assertEqual(ircmsgs.unAction(msg), s)
def testBan(self):
channel = '#osu'
ban = '*!*@*.edu'
exception = '*!*@*ohio-state.edu'
noException = ircmsgs.ban(channel, ban)
self.assertEqual(ircutils.separateModes(noException.args[1:]),
[('+b', ban)])
withException = ircmsgs.ban(channel, ban, exception)
self.assertEqual(ircutils.separateModes(withException.args[1:]),
[('+b', ban), ('+e', exception)])
def testBans(self):
channel = '#osu'
bans = ['*!*@*', 'jemfinch!*@*']
exceptions = ['*!*@*ohio-state.edu']
noException = ircmsgs.bans(channel, bans)
self.assertEqual(ircutils.separateModes(noException.args[1:]),
[('+b', bans[0]), ('+b', bans[1])])
withExceptions = ircmsgs.bans(channel, bans, exceptions)
self.assertEqual(ircutils.separateModes(withExceptions.args[1:]),
[('+b', bans[0]), ('+b', bans[1]),
('+e', exceptions[0])])
def testUnban(self):
channel = '#supybot'
ban = 'foo!bar@baz'
self.assertEqual(str(ircmsgs.unban(channel, ban)),
'MODE %s -b :%s\r\n' % (channel, ban))
def testJoin(self):
channel = '#osu'
key = 'michiganSucks'
self.assertEqual(ircmsgs.join(channel).args, ('#osu',))
self.assertEqual(ircmsgs.join(channel, key).args,
('#osu', 'michiganSucks'))
def testJ
|
sid88in/incubator-airflow
|
airflow/contrib/operators/gcp_compute_operator.py
|
Python
|
apache-2.0
| 7,129 | 0.001964 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
                 gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="ma
|
chineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
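# A minimal usage sketch (the project, zone, instance and machine type are
# hypothetical; the body shape follows the setMachineType request body
# referenced in the docstring above):
#
#   set_machine_type = GceSetMachineTypeOperator(
#       task_id='gce_set_machine_type_example',
#       project_id='example-project',
#       zone='europe-west1-b',
#       resource_id='example-instance',
#       body={'machineType': 'zones/europe-west1-b/machineTypes/n1-standard-2'},
#   )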
|
anryko/ansible
|
lib/ansible/modules/cloud/amazon/rds_snapshot_info.py
|
Python
|
gpl-3.0
| 12,831 | 0.002416 |
#!/usr/bin/python
# Copyright (c) 2014-2017 Ansible Project
# Copyright (c) 2017, 2018 Will Thames
# Copyright (c) 2017, 2018 Michael De La Rue
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: rds_snapshot_info
version_added: "2.6"
short_description: obtain information about one or more RDS snapshots
description:
- Obtain information about one or more RDS snapshots. These can be for unclustered snapshots or snapshots of clustered DBs (Aurora).
  - Aurora snapshot information may be obtained if no identifier parameters are passed or if one of the cluster parameters is passed.
- This module was called C(rds_snapshot_facts) before Ansible 2.9. The usage did not change.
options:
db_snapshot_identifier:
description:
- Name of an RDS (unclustered) snapshot.
- Mutually exclusive with I(db_instance_identifier), I(db_cluster_identifier), I(db_cluster_snapshot_identifier)
required: false
aliases:
- snapshot_name
type: str
db_instance_identifier:
description:
- RDS instance name for which to find snapshots.
- Mutually exclusive with I(db_snapshot_identifier), I(db_cluster_identifier), I(db_cluster_snapshot_identifier)
required: false
type: str
db_cluster_identifier:
description:
- RDS cluster name for which to find snapshots.
- Mutually exclusive with I(db_snapshot_identifier), I(db_instance_identifier), I(db_cluster_snapshot_identifier)
required: false
type: str
db_cluster_snapshot_identifier:
description:
- Name of an RDS cluster snapshot.
- Mutually exclusive with I(db_instance_identifier), I(db_snapshot_identifier), I(db_cluster_identifier)
required: false
type: str
snapshot_type:
description:
- Type of snapshot to find.
- By default both automated and manual snapshots will be returned.
required: false
choices: ['automated', 'manual', 'shared', 'public']
type: str
requirements:
- "python >= 2.6"
- "boto3"
author:
- "Will Thames (@willthames)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Get information about an snapshot
- rds_snapshot_info:
db_snapshot_identifier: snapshot_name
register: new_database_info
# Get all RDS snapshots for an RDS instance
- rds_snapshot_info:
db_instance_identifier: helloworld-rds-master
'''
RETURN = '''
snapshots:
description: List of non-clustered snapshots
returned: When cluster parameters are not passed
type: complex
contains:
allocated_storage:
description: How many gigabytes of storage are allocated
returned: always
type: int
sample: 10
availability_zone:
description: The availability zone of the database from which the snapshot was taken
returned: always
type: str
sample: us-west-2b
db_instance_identifier:
description: Database instance identifier
returned: always
type: str
sample: hello-world-rds
db_snapshot_arn:
description: Snapshot ARN
returned: always
type: str
sample: arn:aws:rds:us-west-2:111111111111:snapshot:rds:hello-world-rds-us1-2018-05-16-04-03
db_snapshot_identifier:
description: Snapshot name
returned: always
type: str
sample: rds:hello-world-rds-us1-2018-05-16-04-03
encrypted:
description: Whether the snapshot was encrypted
returned: always
type: bool
sample: true
engine:
description: Database engine
returned: always
type: str
sample: postgres
engine_version:
description: Database engine version
returned: always
type: str
sample: 9.5.10
iam_database_authentication_enabled:
description: Whether database authentication through IAM is enabled
returned: always
type: bool
sample: false
instance_create_time:
description: Time the Instance was created
returned: always
type: str
sample: '2017-10-10T04:00:07.434000+00:00'
kms_key_id:
description: ID of the KMS Key encrypting the snapshot
returned: always
type: str
sample: arn:aws:kms:us-west-2:111111111111:key/abcd1234-1234-aaaa-0000-1234567890ab
license_model:
description: License model
returned: always
type: str
sample: postgresql-license
master_username:
description: Database master username
returned: always
type: str
sample: dbadmin
option_group_name:
description: Database option group name
returned: always
type: str
sample: default:postgres-9-5
percent_progress:
description: Percent progress of snapshot
returned: always
type: int
sample: 100
snapshot_create_time:
description: Time snapshot was created
returned: always
type: str
sample: '2018-05-16T04:03:33.871000+00:00'
snapshot_type:
description: Type of snapshot
returned: always
type: str
sample: automated
status:
description: Status of snapshot
returned: always
type: str
sample: available
storage_type:
description: Storage type of underlying DB
returned: always
type: str
sample: gp2
tags:
description: Snapshot tags
returned: always
type: complex
contains: {}
vpc_id:
description: ID of VPC containing the DB
returned: always
type: str
sample: vpc-abcd1234
cluster_snapshots:
description: List of cluster snapshots
returned: always
type: complex
contains:
allocated_storage:
description: How many gigabytes of storage are allocated
returned: always
type: int
sample: 1
availability_zones:
description: The availability zones of the database from which the snapshot was taken
returned: always
type: list
sample:
- ca-central-1a
- ca-central-1b
cluster_create_time:
description: Date and time the cluster was created
returned: always
type: str
sample: '2018-05-17T00:13:40.223000+00:00'
db_cluster_identifier:
description: Database cluster identifier
returned: always
type: str
sample: test-aurora-cluster
db_cluster_snapshot_arn:
description: ARN of the database snapshot
returned: always
type: str
sample: arn:aws:rds:ca-central-1:111111111111:cluster-snapshot:test-aurora-snapshot
db_cluster_snapshot_identifier:
description: Snapshot identifier
returned: always
type: str
sample: test-aurora-snapshot
engine:
description: Database engine
returned: always
type: str
sample: aurora
engine_version:
description: Database engine version
returned: always
type: str
sample: 5.6.10a
iam_database_authentication_enabled:
description: Whether database authentication through IAM is enabled
returned: always
type: bool
sample: false
kms_key_id:
description: ID of the KMS Key encrypting the snapshot
returned: always
type: str
sample: arn:aws:kms:ca-central-1:111111111111:key/abcd1234-abcd-1111-aaaa-0123456789ab
license_model:
description: License model
returned: always
type: str
sample: aurora
master_username:
description: Database master username
returned: always
type: str
sample: shertel
percent_progress:
description: Percent progress of snapshot
returned: always
type: int
sample: 0
port:
description: Database port
returned: always
type: int
sample: 0
snapshot_create_time:
description: Date and time when the snapshot was created
returned: always
type: str
sample: '2018-05-17T00:23:23.731000+00:00'
snap
|
skaasj/dl4mt-material
|
session2/nmt.py
|
Python
|
bsd-3-clause
| 39,416 | 0.007687 |
'''
Build a simple neural machine translation model
'''
import theano
import theano.tensor as tensor
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
import cPickle as pkl
import numpy
import copy
import os
import warnings
import sys
import time
from scipy import optimize, stats
from collections import OrderedDict
from sklearn.cross_validation import KFold
from data_iterator import TextIterator
profile = False
# push parameters to Theano shared variables
def zipp(params, tparams):
for kk, vv in params.iteritems():
tparams[kk].set_value(vv)
# pull parameters from Theano shared variables
def unzip(zipped):
new_params = OrderedDict()
for kk, vv in zipped.iteritems():
new_params[kk] = vv.get_value()
return new_params
# get the list of parameters: Note that tparams must be OrderedDict
def itemlist(tparams):
return [vv for kk, vv in tparams.iteritems()]
# dropout
def dropout_layer(state_before, use_noise, trng):
proj = tensor.switch(use_noise,
state_before * trng.binomial(state_before.shape, p=0.5, n=1, dtype=state_before.dtype),
state_before * 0.5)
return proj
# make prefix-appended name
def _p(pp, name):
return '%s_%s'%(pp, name)
# initialize Theano shared variables according to the initial parameters
def init_tparams(params):
tparams = OrderedDict()
for kk, pp in params.iteritems():
tparams[kk] = theano.shared(params[kk], name=kk)
return tparams
# load parameters
def load_params(path, params):
pp = numpy.load(path)
for kk, vv in params.iteritems():
if kk not in pp:
warnings.warn('%s is not in the archive'%kk)
continue
params[kk] = pp[kk]
return params
# layers: 'name': ('parameter initializer', 'feedforward')
layers = {'ff': ('param_init_fflayer', 'fflayer'),
'gru': ('param_init_gru', 'gru_layer'),
'gru_cond': ('param_init_gru_cond', 'gru_cond_layer'),
}
def get_layer(name):
fns = layers[name]
return (eval(fns[0]), eval(fns[1]))
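# For example, get_layer('gru') returns the pair (param_init_gru, gru_layer),
# so parameter initializers and feedforward functions can be looked up by name.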
# some utilities
def ortho_weight(ndim):
W = numpy.random.randn(ndim, ndim)
u, s, v = numpy.linalg.svd(W)
return u.astype('float32')
def norm_weight(nin,nout=None, scale=0.01, ortho=True):
if nout == None:
nout = nin
if nout == nin and ortho:
W = ortho_weight(nin)
else:
W = scale * numpy.random.randn(nin, nout)
return W.astype('float32')
def tanh(x):
return tensor.tanh(x)
def linear(x):
return x
def concatenate(tensor_list, axis=0):
"""
Alternative implementation of `theano.tensor.concatenate`.
This function does exactly the same thing, but contrary to Theano's own
    implementation, the gradient is implemented on the GPU.
Backpropagating through `theano.tensor.concatenate` yields slowdowns
because the inverse operation (splitting) needs to be done on the CPU.
This implementation does not have that problem.
:usage:
>>> x, y = theano.tensor.matrices('x', 'y')
>>> c = concatenate([x, y], axis=1)
:parameters:
- tensor_list : list
list of Theano tensor expressions that should be concatenated.
- axis : int
the tensors will be joined along this axis.
:returns:
- out : tensor
the concatenated tensor expression.
"""
concat_size = sum(tt.shape[axis] for tt in tensor_list)
output_shape = ()
for k in range(axis):
output_shape += (tensor_list[0].shape[k],)
output_shape += (concat_size,)
for k in range(axis + 1, tensor_list[0].ndim):
output_shape += (tensor_list[0].shape[k],)
out = tensor.zeros(output_shape)
offset = 0
for tt in tensor_list:
indices = ()
for k in range(axis):
indices += (slice(None),)
indices += (slice(offset, offset + tt.shape[axis]),)
for k in range(axis + 1, tensor_list[0].ndim):
indices += (slice(None),)
out = tensor.set_subtensor(out[indices], tt)
offset += tt.shape[axis]
return out
# batch preparation
def prepare_data(seqs_x, seqs_y, maxlen=None, n_words_src=30000, n_words=30000):
# x: a list of sentences
lengths_x = [len(s) for s in seqs_x]
lengths_y = [len(s) for s in seqs_y]
if maxlen != None:
new_seqs_x = []
new_seqs_y = []
new_lengths_x = []
new_lengths_y = []
for l_x, s_x, l_y, s_y in zip(lengths_x, seqs_x, lengths_y, seqs_y):
if l_x < maxlen and l_y < maxlen:
new_seqs_x.append(s_x)
new_lengths_x.append(l_x)
new_seqs_y.append(s_y)
new_lengths_y.append(l_y)
lengths_x = new_lengths_x
seqs_x = new_seqs_x
lengths_y = new_lengths_y
seqs_y = new_seqs_y
if len(lengths_x) < 1 or len(lengths_y) < 1:
return None, None, None, None
n_samples = len(seqs_x)
maxlen_x = numpy.max(lengths_x) + 1
maxlen_y = numpy.max(lengths_y) + 1
x = numpy.zeros((maxlen_x, n_samples)).astype('int64')
y = numpy.zeros((maxlen_y, n_samples)).astype('int64')
x_mask = numpy.zeros((maxlen_x, n_samples)).astype('float32')
y_mask = numpy.zeros((maxlen_y, n_samples)).astype('float32')
for idx, [s_x, s_y] in enumerate(zip(seqs_x,seqs_y)):
x[:lengths_x[idx],idx] = s_x
x_mask[:lengths_x[idx]+1,idx] = 1.
y[:lengths_y[idx],idx] = s_y
y_mask[:lengths_y[idx]+1,idx] = 1.
return x, x_mask, y, y_mask
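# A minimal sketch of the padding behaviour (illustrative token ids):
#
#   x, x_mask, y, y_mask = prepare_data([[5, 2], [7]], [[3], [4, 9, 1]])
#   # x has shape (3, 2): max source length 2 plus one end position, 2 samples;
#   # y has shape (4, 2); masks are 1.0 over the tokens plus the end position.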
# feedforward layer: affine transformation + point-wise nonlinearity
def param_init_fflayer(options, params, prefix='ff', nin=None, nout=None, ortho=True):
if nin == None:
nin = options['dim_proj']
if nout == None:
nout = options['dim_proj']
params[_p(prefix,'W')] = norm_weight(nin, nout, scale=0.01, ortho=ortho)
params[_p(prefix,'b')] = numpy.zeros((nout,)).astype('float32')
return params
def fflayer(tparams, state_below, options, prefix='rconv', activ='lambda x: tensor.tanh(x)', **kwargs):
return eval(activ)(tensor.dot(state_below, tparams[_p(prefix,'W')])+tparams[_p(prefix,'b')])
# GRU layer
def param_init_gru(options, params, prefix='gru', nin=None, dim=None, hiero=False):
if nin == None:
nin = options['dim_proj']
if dim == None:
dim = options['dim_proj']
if not hiero:
W = numpy.concatenate([norm_weight(nin,dim),
norm_weight(nin,dim)], axis=1)
params[_p(prefix,'W')] = W
params[_p(prefix,'b')] = numpy.zeros((2 * dim,)).astype('float32')
U = numpy.concatenate([ortho_weight(dim),
ortho_weight(dim)], axis=1)
params[_p(prefix,'U')] = U
Wx = norm_weight(nin, dim)
params[_p(prefix,'Wx')] = Wx
Ux = ortho_weight(dim)
params[_p(prefix,'Ux')] = Ux
params[_p(prefix,'bx')] = numpy.zeros((dim,)).astype('float32')
return params
def gru_layer(tparams, state_below, options, prefix='gru', mask=None, **kwargs):
nsteps = state_below.shape[0]
if state_below.ndim == 3:
n_samples = state_below.shape[1]
else:
n_samples = 1
dim = tparams[_p(prefix,'Ux')].shape[1]
if mask == None:
mask = tensor.alloc(1., state_below.shape[0], 1)
def _slice(_x, n, dim):
if _x.ndim == 3:
return _x[:, :, n*dim:(n+1)*dim]
return _x[:, n*dim:(n+1)*dim]
state_below_ = tensor.dot(state_below, tparams[_p(prefix, 'W')]) + tparams[_p(prefix, 'b')]
state_belowx = tensor.dot(state_below, tparams[_p(prefix, 'Wx')]) + tparams[_p(prefix, 'bx')]
U = tparams[_p(prefix, 'U')]
Ux = tparams[_p(prefix, 'Ux')]
def _step_slice(m_, x_, xx_, h_, U, Ux):
preact = tensor.dot(h_, U)
preact += x_
r = tensor.nnet.sigmoid(_slice(preact, 0, dim))
u = tensor.nnet.sigmoid(_slice(preact, 1, dim))
preactx = tensor.dot(h_, Ux)
preactx = preactx * r
preactx = preactx + xx_
h = tensor.tanh(preactx)
h = u * h_ + (1. - u) * h
h = m_[:,None] * h + (1. - m_)[:,None] * h_
re
|
oznurf/EKSI_HIKAYE
|
hikaye/migrations/0002_auto_20150819_1605.py
|
Python
|
mit
| 342 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
        ('hikaye', '0001_initial'),
]
operations = [
migrations.RenameModel(
old_name='stories',
new_name='Story',
),
]
|
frmdstryr/enamlx
|
enamlx/widgets/plot_area.py
|
Python
|
mit
| 6,354 | 0.000158 |
# -*- coding: utf-8 -*-
"""
Copyright (c) 2015, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on Jun 11, 2015
"""
import sys
from atom.atom import set_default
from atom.api import (
Callable,
Int,
Tuple,
Instance,
Enum,
Float,
ContainerList,
Bool,
FloatRange,
Str,
Dict,
Typed,
ForwardTyped,
observe,
)
from enaml.core.declarative import d_
from enaml.widgets.api import Container
from enaml.widgets.control import Control, ProxyControl
from atom.instance import ForwardInstance
if sys.version_info.major < 3:
str = basestring
def numpy_ndarray():
import numpy
return numpy.ndarray
class ProxyPlotArea(ProxyControl):
declaration = ForwardTyped(lambda: PlotArea)
class PlotArea(Container):
hug_width = set_default("ignore")
hug_height = set_default("ignore")
proxy = Typed(ProxyPlotArea)
setup = d_(Callable(lambda graph: None))
PEN_ARGTYPES = (tuple, list, str, dict)
BRUSH_ARGTYPES = (tuple, list, str, dict, int, float)
class PlotItem(Control):
#: Title of data series
title = d_(Str())
#: Name
name = d_(Str())
#: Row in plot area
row = d_(Int(0))
#: Column in plot area
column = d_(Int(0))
#: Pen type to use for line
line_pen = d_(Instance(PEN_ARGTYPES))
#: Pen type to use for shadow
shadow_pen = d_(Instance(PEN_ARGTYPES))
#: Fill level
fill_level = d_(Float(strict=False))
# ‘c’ one of: r, g, b, c, m, y, k, w
# R, G, B, [A] integers 0-255
# (R, G, B, [A]) tuple of integers 0-255
# float greyscale, 0.0-1.0
# int see intColor()
# (int, hues) see intColor()
# “RGB” hexadecimal strings; may begin with ‘#’
# “RGBA”
# “RRGGBB”
# “RRGGBBAA”
#: Brush fill type
fill_brush = d_(Instance(BRUSH_ARGTYPES))
#: Symbol to use for points
symbol = d_(Enum(None, "o", "s", "t", "d", "+"))
#: Symbol sizes for points
symbol_size = d_(Float(10, strict=False))
#: Symbol pen to use
symbol_pen = d_(Instance(PEN_ARGTYPES))
#: Symbol brush
symbol_brush = d_(Instance(BRUSH_ARGTYPES))
#: Show legend
show_legend = d_(Bool(False))
label_left = d_(Str())
label_right = d_(Str())
label_top = d_(Str())
label_bottom = d_(Str())
# H, V
grid = d_(Tuple(bool, default=(False, False)))
grid_alpha = d_(FloatRange(low=0.0, high=1.0, value=0.5))
#: Display a separate axis for each nested plot
multi_axis = d_(Bool(True))
axis_left_ticks = d_(Callable())
axis_bottom_ticks = d_(Callable())
#: Display the axis on log scale
log_mode = d_(Tuple(bool, default=(False, False))) # x,y
#: Enable antialiasing
antialias = d_(Bool(False))
#: Set auto range for each axis
auto_range = d_(
Enum(True, False, (True, True), (True, False), (False, True), (False, False))
)
# x-range to use if auto_range is disabled
range_x = d_(ContainerList(default=[0, 100]))
#: y-range to use if auto_range is disabled
range_y = d_(ContainerList(default=[0, 100]))
    #: Automatically downsample
auto_downsample = d_(Bool(False))
#: Clip data points to view
clip_to_view = d_(Bool(False))
#: Step mode to use
step_mode = d_(Bool(False))
#: Keep aspect ratio locked when resizing
aspect_locked = d_(Bool(False))
#: Time between updates
refresh_time = d_(Int(100))
@observe(
"line_pen",
"symbol",
"symbol_size",
"symbol_pen",
"symbol_brush",
"fill_brush",
"fill_level",
"multi_axis",
"title",
"label_left",
"label_right",
"label_top",
"label_bottom",
"grid",
"grid_alpha",
"log_mode",
"antialias",
"auto_range",
"auto_downsample",
"clip_to_view",
"step_mode",
"aspect_locked",
"axis_left_ticks",
"axis_bottom_ticks",
"show_legend",
"row",
"column",
)
def _update_proxy(self, change):
"""An observer which sends state change to the proxy."""
# The superclass handler implementation is sufficient.
super(PlotItem, self)._update_proxy(change)
@observe("range_x", "range_y")
def _update_range(self, change):
"""Handle updates and changes"""
getattr(self.proxy, "set_%s" % change["name"])(change["value"])
class PlotItem2D(PlotItem):
#: x-axis values, as a list
x = d_(ContainerList())
#: y-axis values, as a list
y = d_(ContainerList())
@observe("x", "y")
def _update_proxy(self, change):
"""An observer which sends state change to the proxy."""
# The superclass handler implementation is sufficient.
super(PlotItem2D, self)._update_proxy(change)
class PlotItem3D(PlotItem2D):
#: z-axis values, as a list
z = d_(ContainerList())
@observe("z")
def _update_proxy(self, change):
"""An observer which sends state change to the proxy."""
# The superclass handler implementation is sufficient.
super(PlotItem3D, self)._update_proxy(change)
class PlotItemArray(PlotItem2D):
"""Numpy array item"""
#: x-axis values, as a numpy array
x = d_(ForwardInstance(numpy_ndarray))
#: y-axis values, as a numpy array
y = d_(ForwardInstance(numpy_ndarray))
class PlotItemArray3D(PlotItem3D):
"""Numpy array item"""
#: Plot type
type = Enum("line")
#: x-axis values, as a numpy array
x = d_(ForwardInstance(numpy_ndarray))
#: y-axis values, as a numpy array
y = d_(ForwardInstance(numpy_ndarray))
#: z-axis values, as a numpy array
z = d_(ForwardInstance(numpy_ndarray))
class AbstractDataPlotItem(PlotItem):
@observe("data")
def _update_proxy(self, change):
"""An observer which sends state change to the proxy."""
# The superclass handler implementation is sufficient.
super(AbstractDataPlotItem, self)._update_proxy(change)
class PlotItemList(AbstractDataPlotItem):
data = d_(ContainerList())
class PlotItemDict(AbstractDataPlotItem):
data = d_(Dict(default={"x": [], "y": []}))
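# A minimal usage sketch in enaml syntax (the enamldef wrapper and the data
# are assumptions; PlotArea and PlotItemArray are the widgets defined above):
#
#   enamldef Main(Window):
#       Container:
#           PlotArea:
#               PlotItemArray:
#                   line_pen = 'r'
#                   x = numpy.linspace(0, 10, 200)
#                   y = numpy.sin(numpy.linspace(0, 10, 200))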
|
onehao/opensource
|
pyml/crawler/minispider/test/SpiderHtmlParserTest.py
|
Python
|
apache-2.0
| 3,201 | 0.010016 |
# -*- coding:utf-8 -*-
################################################################################
#
# Copyright (c) 2015 Baidu.com, Inc. All Rights Reserved
#
################################################################################
'''
Created on 2015年3月14日
@author: Administrator
'''
import os
import unittest
import urllib
from urlparse import urljoin
import urlparse
from crawler.minispider.SpiderConfigParser import SpiderConfig
from crawler.minispider.SpiderHtmlParser import SpiderHtmlParser
class TestSequenceFunctions(unittest.TestCase):
'''
    As a D-level project, this does not exhaust all the possible exception attributes.
'''
def setUp(self):
self.parser = SpiderHtmlParser()
self.path = os.path.realpath(__file__)
def tearDown(self):
for filename in os.listdir(self.path[0:-23] + 'urls'):
os.remove(self.path[0:-23] + 'urls' + os.sep + filename)
def test_parse_url(self):
'''
test the logic of parse_url
'''
urls = self.parser.parse_url('http://pycm.baidu.com:8081/2/index.html',
'./urls', '.*\.(gif|png|jpg|bmp|html)$',
1, 1)
        #http://pycm.baidu.com:8081/page2_1.html', 'http://pycm.baidu.com:8081/2/3/index.html'
self.assertIn("http://pycm.baidu.com:8081/page2_1.html", urls)
self.assertIn("http://pycm.baidu.com:8081/2/3/index.html", urls)
self.assertTrue(os.path.exists(self.path[0:-23] + 'urls' + os.sep
+ 'http%3A__pycm.baidu.com%3A8081_2_index.html'),
'http%3A__pycm.baidu.com%3A8081_2_index.html expecting to be created.')
def test_parse_url_B(self):
'''
test the logic of parse_url
'''
urls = self.parser.parse_url('http://pycm.baidu.com:8081',
'./urls', '.*\.(gif|png|jpg|bmp|html)$',
1, 1)
#http://pycm.baidu.com:8081/page2_1.html', 'http://pycm.baidu.com:8081/2/3/index.html'
self.assertEqual(5, len(urls), 'there should be 5 urls.')
self.assertIn("http://pycm.baidu.com:8081/page1.html", urls)
self.assertIn("http://pycm.baidu.com:8081/page2.html", urls)
self.assertIn("http://pycm.baidu.com:8081/page3.html", urls)
self.assertIn("http://pycm.baidu.com:8081/mirror/index.html", urls)
self.assertIn("http://pycm.baidu.com:8081/page4.html", urls)
def test_parse_url_404(self):
'''
test the logic of parse_url with the page 404
and the exception should be thrown
'''
urls = self.parser.parse_url('http://pycm.baidu.com:8081/2/index333.html',
'./urls', '.*\.(gif|png|jpg|bmp|html)$',
1, 1)
#http://pycm.baidu.com:8081/page2_1.html', 'http://pycm.baidu.com:8081/2/3/index.html'
self.assertTrue(len(urls) == 0, 'should not contain any element.')
if __name__ == '__main__':
unittest.main()
|
janhui/test_engine
|
dev/plugins/define_boundary_ids/shapefile.py
|
Python
|
lgpl-2.1
| 38,004 | 0.005289 |
"""
shapefile.py
Provides read and write support for ESRI Shapefiles.
author: jlawhead<at>geospatialpython.com
date: 20110927
version: 1.1.4
Compatible with Python versions 2.4-3.x
"""
from struct import pack, unpack, calcsize, error
import os
import sys
import time
import array
#
# Constants for shape types
NULL = 0
POINT = 1
POLYLINE = 3
POLYGON = 5
MULTIPOINT = 8
POINTZ = 11
POLYLINEZ = 13
POLYGONZ = 15
MULTIPOINTZ = 18
POINTM = 21
POLYLINEM = 23
POLYGONM = 25
MULTIPOINTM = 28
MULTIPATCH = 31
PYTHON3 = sys.version_info[0] == 3
def b(v):
if PYTHON3:
if isinstance(v, str):
# For python 3 encode str to bytes.
return v.encode('utf-8')
elif isinstance(v, bytes):
# Already bytes.
return v
else:
# Error.
raise Exception('Unknown input type')
else:
# For python 2 assume str passed in and return str.
return v
def u(v):
if PYTHON3:
if isinstance(v, bytes):
# For python 3 decode bytes to str.
return v.decode('utf-8')
elif isinstance(v, str):
# Already str.
return v
else:
# Error.
raise Exception('Unknown input type')
else:
# For python 2 assume str passed in and return str.
return v
def is_string(v):
if PYTHON3:
return isinstance(v, str)
else:
return isinstance(v, basestring)
class _Array(array.array):
"""Converts python tuples to lits of the appropritate type.
Used to unpack different shapefile header parts."""
def __repr__(self):
return str(self.tolist())
class _Shape:
def __init__(self, shapeType=None):
"""Stores the geometry of the different shape types
specified in the Shapefile spec. Shape types are
usually point, polyline, or polygons. Every shape type
except the "Null" type contains points at some level for
example verticies in a polygon. If a shape type has
multiple shapes containing points within a single
geometry record then those shapes are called parts. Parts
are designated by their starting index in geometry record's
list of shapes."""
self.shapeType = shapeType
self.points = []
class _ShapeRecord:
"""A shape object of any type."""
def __init__(self, shape=None, record=None):
self.shape = shape
self.record = record
class ShapefileException(Exception):
"""An exception to handle shapefile specific problems."""
pass
class Reader:
"""Reads the three files of a shapefile as a unit or
separately. If one of the three files (.shp, .shx,
.dbf) is missing no exception is thrown until you try
to call a method that depends on that particular file.
The .shx index file is used if available for efficiency
but is not required to read the geometry from the .shp
file. The "shapefile" argument in the constructor is the
name of the file you want to open.
You can instantiate a Reader without specifying a shapefile
and then specify one later with the load() method.
Only the shapefile headers are read upon loading. Content
within each file is only accessed when required and as
efficiently as possible. Shapefiles are usually not large
but they can be.
"""
def __init__(self, *args, **kwargs):
self.shp = None
self.shx = None
self.dbf = None
self.shapeName = "Not specified"
self._offsets = []
self.shpLength = None
self.numRecords = None
self.fields = []
self.__dbfHdrLength = 0
# See if a shapefile name was passed as an argument
if len(args) > 0:
if type(args[0]) is type("stringTest"):
self.load(args[0])
return
if "shp" in kwargs.keys():
if hasattr(kwargs["shp"], "read"):
self.shp = kwargs["shp"]
if hasattr(self.shp, "seek"):
self.shp.seek(0)
if "shx" in kwargs.keys():
if hasattr(kwargs["shx"], "read"):
self.shx = kwargs["shx"]
if hasattr(self.shx, "seek"):
self.shx.seek(0)
if "dbf" in kwargs.keys():
if hasattr(kwargs["dbf"], "read"):
self.dbf = kwargs["dbf"]
if hasattr(self.dbf, "seek"):
self.dbf.seek(0)
if self.shp or self.dbf:
self.load()
else:
raise ShapefileException("Shapefile Reader requires a shapefile or file-like object.")
def load(self, shapefile=None):
"""Opens a shapefile from a filename or file-like
object. Normally this method would be called by the
constructor with the file object or file name as an
argument."""
if shapefile:
(shapeName, ext) = os.path.splitext(shapefile)
self.shapeName = shapeName
try:
self.shp = open("%s.shp" % shapeName, "rb")
except IOError:
raise ShapefileException("Unable to open %s.shp" % shapeName)
try:
self.shx = open("%s.shx" % shapeName, "rb")
except IOError:
raise ShapefileException("Unable to open %s.shx" % shapeName)
try:
self.dbf = open("%s.dbf" % shapeName, "rb")
except IOError:
raise ShapefileException("Unable to open %s.dbf" % shapeName)
if self.shp:
self.__shpHeader()
if self.dbf:
self.__dbfHeader()
def __getFileObj(self, f):
"""Checks to see if the requested shapefile file object is
available. If not a ShapefileException is raised."""
if not f:
raise ShapefileException("Shapefile Reader requires a shapefile or file-like object.")
if self.shp and self.shpLength is None:
self.load()
        if self.dbf and len(self.fields) == 0:
self.load()
return f
def __restrictIndex(self, i):
"""Provides list-like handling of a record index with a clearer
error message if the index is out of bounds."""
if self.numRecords:
rmax = self.numRecords - 1
if abs(i) > rmax:
raise IndexError("Shape or Record index out of range.")
if i < 0: i = range(self.numRecords)[i]
return i
def __shpHeader(self):
"""Reads the header information from a .shp or .shx file."""
if not self.shp:
raise ShapefileException("Shapefile Reader requires a shapefile or file-like object. (no shp file found")
shp = self.shp
# File length (16-bit word * 2 = bytes)
shp.seek(24)
self.shpLength = unpack(">i", shp.read(4))[0] * 2
# Shape type
shp.seek(32)
self.shapeType= unpack("<i", shp.read(4))[0]
# The shapefile's bounding box (lower left, upper right)
self.bbox = _Array('d', unpack("<4d", shp.read(32)))
# Elevation
self.elevation = _Array('d', unpack("<2d", shp.read(16)))
# Measure
self.measure = _Array('d', unpack("<2d", shp.read(16)))
def __shape(self):
"""Returns the header info and geometry for a single shape."""
f = self.__getFileObj(self.shp)
record = _Shape()
nParts = nPoints = zmin = zmax = mmin = mmax = None
(recNum, recLength) = unpack(">2i", f.read(8))
shapeType = unpack("<i", f.read(4))[0]
record.shapeType = shapeType
# For Null shapes create an empty points list for consistency
if shapeType == 0:
record.points = []
# All shape types capable of having a bounding box
elif shapeType in (3,5,8,13,15,18,23,25,28,31):
record.bbox = _Array('d', unpack("<4d", f.read(32)))
# Shape types with parts
if shapeType in (3,5,13,15,23,25,31):
nParts = unpack("<i", f.read(4))[0]
# Shape types with points
if shapeType in (3,5,8,13,15,23,25,3
|
lmprice/ansible
|
lib/ansible/modules/network/aci/aci_access_port_to_interface_policy_leaf_profile.py
|
Python
|
gpl-3.0
| 11,226 | 0.002494 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Bruno Calogero <brunocalogero@hotmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_access_port_to_interface_policy_leaf_profile
short_description: Manage Fabric interface policy leaf profile interface selectors (infra:HPortS, infra:RsAccBaseGrp, infra:PortBlk)
description:
- Manage Fabric interface policy leaf profile interface selectors on Cisco ACI fabrics.
notes:
- More information about the internal APIC classes B(infra:HPortS), B(infra:RsAccBaseGrp) and B(infra:PortBlk) from
L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/)
author:
- Bruno Calogero (@brunocalogero)
version_added: '2.5'
options:
leaf_interface_profile:
description:
- The name of the Fabric access policy leaf interface profile.
required: yes
aliases: [ leaf_interface_profile_name ]
access_port_selector:
description:
- The name of the Fabric access policy leaf interface profile access port selector.
required: yes
aliases: [ name, access_port_selector_name ]
description:
description:
- The description to assign to the C(access_port_selector)
leaf_port_blk:
description:
- The name of the Fabric access policy leaf interface profile access port block.
required: yes
aliases: [ leaf_port_blk_name ]
leaf_port_blk_description:
description:
- The description to assign to the C(leaf_port_blk)
required: no
from:
description:
    - The beginning (from range) of the port range block for the leaf access port block.
required: yes
aliases: [ fromPort, from_port_range ]
to:
description:
- The end (to range) of the port range block for the leaf access port block.
required: yes
aliases: [ toPort, to_port_range ]
policy_group:
description:
- The name of the fabric access policy group to be associated with the leaf interface profile interface selector.
aliases: [ policy_group_name ]
interface_type:
version_added: '2.6'
description:
    - The type of interface for the static EPG deployment.
choices: [ fex, port_channel, switch_port, vpc ]
default: switch_port
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: Associate an Interface Access Port Selector to an Interface Policy Leaf Profile with a Policy Group
aci_access_port_to_interface_policy_leaf_profile:
host: apic
username: admin
password: SomeSecretPassword
leaf_interface_profile: leafintprfname
access_port_selector: accessportselectorname
leaf_port_blk: leafportblkname
from: 13
to: 16
policy_group: policygroupname
state: present
- name: Associate an interface access port selector to an Interface Policy Leaf Profile (w/o policy group) (check if this works)
aci_access_port_to_interface_policy_leaf_profile:
host: apic
username: admin
password: SomeSecretPassword
leaf_interface_profile: leafintprfname
access_port_selector: accessportselectorname
leaf_port_blk: leafportblkname
from: 13
to: 16
state: present
- name: Remove an interface access port selector associated with an Interface Policy Leaf Profile
aci_access_port_to_interface_policy_leaf_profile:
host: apic
username: admin
password: SomeSecretPassword
leaf_interface_profile: leafintprfname
access_port_selector: accessportselectorname
state: absent
- name: Query Specific access_port_selector under given leaf_interface_profile
aci_access_port_to_interface_policy_leaf_profile:
host: apic
username: admin
password: SomeSecretPassword
leaf_interface_profile: leafintprfname
access_port_selector: accessportselectorname
state: query
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update({
'leaf_interface_profile': dict(type='str', aliases=['leaf_interface_profile_name']), # Not required for querying all objects
'access_port_selector': dict(type='str', aliases=['name', 'access_port_selector_name']), # Not required for querying all objects
'description': dict(type='str'),
'leaf_port_blk': dict(type='str', aliases=['leaf_port_blk_name']),
'leaf_port_blk_description': dict(type='str'),
# NOTE: Keyword 'from' is a reserved word in python, so we need it as a string
'from': dict(type='str', aliases=['fromPort', 'from_port_range']),
'to': dict(type='str', aliases=['toPort', 'to_port_range']),
'policy_group': dict(type='str', aliases=['policy_group_name']),
'interface_type': dict(type='str', default='switch_port', choices=['fex', 'port_channel', 'switch_port', 'vpc']),
'state': dict(type='str', default='present', choices=['absent', 'present', 'query']),
|
jahuth/convis
|
docs/filters-1.py
|
Python
|
gpl-3.0
| 218 | 0.009174 |
import convis
import numpy as np
import matplotlib.pylab as plt
plt.figure()
plt.imshow(convis.numerical_filters.gauss_filter_2d(4.0,4.0))
plt.figure()
plt.plot(c
|
onvis.numerical_filters.expon
|
ential_filter_1d(tau=0.01))
|
SciGaP/DEPRECATED-Cipres-Airavata-POC
|
saminda/cipres-airavata/sdk/scripts/remote_resource/trestles/submit_v2.py
|
Python
|
apache-2.0
| 5,478 | 0.011135 |
#!/usr/bin/env python
import lib_v2 as lib
import sys
import os
def main(argv=None):
"""
Usage is:
submit.py [--account <chargecode>] [--url <url>] -- <commandline>
Run from the working dir of the job which must contain (in addition
to the job files) a file named scheduler.conf with scheduler properties for the job.
<chargecode>, if present, gives the project to charge the job to.
Url is the url of the submitting website including the taskid parameter.
Returns 0 with "jobid=<jobid>" on stdout if job submitted ok
Returns 1 with multiline error message on stdout if error.
Returns 2 for the specific error of queue limit exceeded.
"""
#COMMAND LINE PARSING
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--account', metavar="ACCOUNT", type=str, default=lib.account,
help="The account string to use when submitting jobs. Default is read from config files.")
parser.add_argument('--url', metavar="URL", dest="URL", type=str,
help="Notification URL")
try:
cmdline_options, cmdline = parser.parse_known_args(argv)
cmdline = cmdline[1:] if not ('--' in cmdline) else cmdline[cmdline.index('--')+1:]
except Exception as e:
print "There was a problem submitting your job"
print e
sys.exit(1)
account = cmdline_options.account
url = cmdline_options.URL
#cmdline as an array (and already set)
tooltype = lib.getToolType(cmdline)
scheduler_properties = lib.getProperties("scheduler.conf")
# print scheduler_properties
scheduler_info = lib.schedulerInfo(scheduler_properties, tooltype)
# print scheduler_info
# If this is a "direct" run type job we don't need to create a qsub script, we'll just run batch_command.cmdline.
if scheduler_info["is_direct"]:
return lib.submitDirectJob(account, url, lib.email, lib.jobname, cmdline)
runtime = int(scheduler_info["runtime"])
useLocalDisk = False
"""
Workaround for problems with file io on oasis and longer mrbayes runs. Instead of running on
oasis, we'll copy the working dir to the compute nodes local storage and copy the results back
when the job completes. Since many mrbayes jobs timeout we need a special trick to copy results
of jobs that timeout: Right before we launch mrbayes we launch a shell script in the background
that sleeps a few min less than the job's runtime and then copies the results. If mrbayes terminates
normally the background sleep is killed automatically.
"""
if (tooltype == "mrbayes" and runtime > 60):
useLocalDisk = True
# I'm backing out the workaround by setting useLocalDisk to false.
useLocalDisk = False
# Write the command line to a file, batch_command.cmdline.
rfile = open(lib.cmdfile, "w")
rfile.write("#!/bin/sh\n")
rfile.writelines((" ".join(cmdline), "\n"))
rfile.close()
os.chmod(lib.cmdfile, 0744)
# Create the qsub script
rfile = open(lib.runfile, "w")
text = """#!/bin/sh
#PBS -q %s
#PBS -N %s
#PBS -l walltime=00:%d:00
#PBS -o scheduler_stdout.txt
#PBS -e scheduler_stderr.txt
#PBS -W umask=0007
##PBS -V
#PBS -v QOS=2
#PBS -M %s
#PBS -m ae
#PBS -A %s
""" % (scheduler_info["queue"], lib.jobname, scheduler_info["runtime"], lib.email, account)
rfile.write(text)
text = "#PBS -l nodes=%d:ppn=%d\n" % (scheduler_info["nodes"], scheduler_info["ppn"])
rfile.write(text)
rfile.write("cd %s\n" % (lib.jobdir, lib.local_jobdir)[useLocalDisk])
if useLocalDisk == True:
# Note that it's critical that newlines in the text string are all within the double
# quotes; otherwise the echo command line would be split across lines and make no sense.
text = """"Due to filesystem problems intermediate results for longer mrbayes runs
will not be available while the job is running. The result files will be
available when mrbayes finishes.
We're working to find a solution." """
rfile.write("echo %s > %s/INTERMEDIATE_RESULTS_README.TXT\n" % (text, lib.jobdir))
rfile.write("cp -r %s/* .\n" % lib.jobdir);
sleepTime = int(scheduler_info["runtime"]) - 10
rfile.write("sleep_cp.sh %s %s &\n" % (sleepTime, lib.jobdir))
text = """
source /etc/profile.d/modules.sh
echo Job starting at `date` > start.txt
curl %s\&status=START
export CIPRES_THREADSPP=%d
export CIPRES_NP=%d
%s 1>stdout.txt 2>stderr.txt
echo Job finished at `date` > done.txt
""" % (url,
int(scheduler_info["threads_per_process"]),
int(scheduler_info["mpi_processes"]),
lib.cmdfile)
rfile.write(text)
if (useLocalDisk):
text = """
echo "Job completed, starting to copy working directory."
echo "mkdir %s.complete"
mkdir %s.complete
echo "cp -r * %s.com
|
plete"
cp -r * %s.complete
echo "mv %s %s.sleep"
mv %s %s.sleep
echo "mv %s.complete %s"
mv %s.complete %s
echo "rm -rf %s.sleep"
rm -rf %s.sleep
echo "Finished copying working directory."
""" % (lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir)
rfile.write(text)
rfil
|
e.write("curl %s\&status=DONE\n" % url)
rfile.close()
return lib.submitJob()
return 0
if __name__ == "__main__":
sys.exit(main())
|
appleseedhq/gaffer
|
python/GafferUI/__init__.py
|
Python
|
bsd-3-clause
| 12,297 | 0.024152 |
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2015, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
# Work around a bug which causes segfaults if uuid is imported after
# PyQt. See here for details :
#
# https://bugs.gentoo.org/show_bug.cgi?id=317557
# http://www.riverbankcomputing.com/pipermail/pyqt/2010-December/028773.html
#
# Using __import__ rather than import so that we don't pollute the GafferUI
# namespace.
__import__( "uuid" )
## Deprecated. This legacy function only supports use with Qt4. For
# combined Qt4/Qt5 support use `from Qt import name` instead.
# Also note that the lazy argument is no longer effective, because Qt.py
# imports all modules at startup.
__qtModuleName = None
def _qtImport( name, lazy=False ) :
# decide which qt bindings to use, and apply any fix-ups we need
# to shield us from PyQt/PySide differences.
global __qtModuleName
if __qtModuleName is None :
import os
if "GAFFERUI_QT_BINDINGS" in os.environ :
__qtModuleName = os.environ["GAFFERUI_QT_BINDINGS"]
else :
# no preference stated via environment - see what we shipped with
if os.path.exists( os.environ["GAFFER_ROOT"] + "/python/PySide" ) :
__qtModuleName = "PySide"
else :
__qtModuleName = "PyQt4"
# PyQt unfortunately uses an implementation-specific
# naming scheme for its new-style signal and slot classes.
# We use this to make it compatible with PySide, accor
|
ding to :
#
# http://qt-project.org/wiki/Differences_Between_PySide_and_PyQt
if "PyQt" in __qtModuleName :
QtCore = __import__( __qtModuleName + ".QtCore" ).QtCore
QtCore.Signal = QtCore.pyqtSignal
# import the submodule from those bindings and return it
if lazy :
import Gaffer
return Gaffer.lazyImport( __qtModuleName + "." + name )
else :
qtModule = __im
|
port__( __qtModuleName + "." + name )
return getattr( qtModule, name )
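# Legacy usage sketch for the deprecated helper above (Qt4-era; 'QtGui' is
# an illustrative module name):
#   QtGui = _qtImport( "QtGui" )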
##########################################################################
# Function to return the C++ address of a wrapped Qt object. This can
# be useful if needing to implement part of the UI in C++ and the rest
# in Python.
##########################################################################
def _qtAddress( o ) :
import Qt
if "PyQt" in Qt.__binding__ :
import sip
return sip.unwrapinstance( o )
else :
return __shiboken().getCppPointer( o )[0]
##########################################################################
# Function to return a wrapped Qt object from the given C++ address.
# This can be useful if needing to implement part of the UI in C++ and
# the rest in Python.
##########################################################################
def _qtObject( address, type ) :
import Qt
if "PyQt" in Qt.__binding__ :
import sip
return sip.wrapinstance( address, type )
else :
return __shiboken().wrapInstance( address, type )
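# Round-trip sketch for the two helpers above (hypothetical 'widget'
# variable; assumes the QtWidgets module from the active binding):
#   address = _qtAddress( widget._qtWidget() )
#   recovered = _qtObject( address, QtWidgets.QWidget )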
##########################################################################
# Determines if the wrapped Qt object is still valid
# Useful when having to deal with the consequences of C++/Python deletion
# order challenges, see:
# https://github.com/GafferHQ/gaffer/pull/3179
##########################################################################
def _qtObjectIsValid( o ) :
import Qt
if "PyQt" in Qt.__binding__ :
import sip
return not sip.isdeleted( o )
else :
return __shiboken().isValid( o )
##########################################################################
# Shiboken lives in a variety of places depending on which PySide it is.
##########################################################################
def __shiboken() :
import Qt
assert( "PyQt" not in Qt.__binding__ )
if Qt.__binding__ == "PySide2" :
try :
import PySide2.shiboken2 as shiboken
except ImportError :
import shiboken2 as shiboken
else :
try :
import PySide.shiboken
except ImportError :
import shiboken
return shiboken
##########################################################################
# now import our actual functionality
##########################################################################
# Import modules that must be imported before _GafferUI, using __import__
# to avoid polluting the GafferUI namespace.
__import__( "IECore" )
__import__( "Gaffer" )
from _GafferUI import *
# general ui stuff first
from Enums import *
from Widget import Widget
from LazyMethod import LazyMethod
from Menu import Menu
from ContainerWidget import ContainerWidget
from Window import Window
from SplitContainer import SplitContainer
from ListContainer import ListContainer
from GridContainer import GridContainer
from MenuBar import MenuBar
from EventLoop import EventLoop
from TabbedContainer import TabbedContainer
from TextWidget import TextWidget
from NumericWidget import NumericWidget
from Button import Button
from MultiLineTextWidget import MultiLineTextWidget
from Label import Label
from GLWidget import GLWidget
from ScrolledContainer import ScrolledContainer
from PathWidget import PathWidget
from PathListingWidget import PathListingWidget
from PathChooserWidget import PathChooserWidget
from Dialogue import Dialogue
from PathChooserDialogue import PathChooserDialogue
from TextInputDialogue import TextInputDialogue
from Collapsible import Collapsible
from ColorSwatch import ColorSwatch
from Slider import Slider
from ShowURL import showURL
from Spacer import Spacer
from BoolWidget import BoolWidget, CheckBox
from Image import Image
from ErrorDialogue import ErrorDialogue
from _Variant import _Variant
from VectorDataWidget import VectorDataWidget
from PathVectorDataWidget import PathVectorDataWidget
from ProgressBar import ProgressBar
from SelectionMenu import SelectionMenu
from PathFilterWidget import PathFilterWidget
from CompoundPathFilterWidget import CompoundPathFilterWidget
from InfoPathFilterWidget import InfoPathFilterWidget
from MatchPatternPathFilterWidget import MatchPatternPathFilterWidget
from FileSequencePathFilterWidget import FileSequencePathFilterWidget
from BusyWidget import BusyWidget
from NumericSlider import NumericSlider
from ColorChooser import ColorChooser
from ColorChooserDialogue import ColorChooserDialogue
from MessageWidget import MessageWidget
from NotificationMessageHandler import NotificationMessageHandler
from MenuButton import MenuButton
from MultiSelectionMenu import MultiSelectionMenu
from PopupWindow import PopupWindow
from ConfirmationDialogue import ConfirmationDi
|
theflofly/tensorflow
|
tensorflow/python/ops/rnn.py
|
Python
|
apache-2.0
| 66,326 | 0.004568 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""RNN helpers for TensorFlow models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.keras.engine import base_layer
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import control_flow_util
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.util import deprecation
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
# pylint: disable=protected-access
_concat = rnn_cell_impl._concat
# pylint: enable=protected-access
def _transpose_batch_time(x):
"""Transposes the batch and time dimensions of a Tensor.
If the input tensor has rank < 2 it returns the original tensor. Retains as
much of the static shape information as possible.
Args:
x: A Tensor.
Returns:
x transposed along the first two dimensions.
"""
x_static_shape = x.get_shape()
if x_static_shape.rank is not None and x_static_shape.rank < 2:
return x
x_rank = array_ops.rank(x)
x_t = array_ops.transpose(
x, array_ops.concat(
([1, 0], math_ops.range(2, x_rank)), axis=0))
x_t.set_shape(
tensor_shape.TensorShape([
x_static_shape.dims[1].value, x_static_shape.dims[0].value
]).concatenate(x_static_shape[2:]))
return x_t
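# Shape sketch: for a batch-major input of shape [batch=32, max_time=100,
# depth=8] this returns a tensor of shape [100, 32, 8]; inputs of rank < 2
# are returned unchanged.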
def _best_effort_input_batch_size(flat_input):
"""Get static input batch size if available, with fallback to the dynamic one.
Args:
flat_input: An iterable of time major input Tensors of shape
`[max_time, batch_size, ...]`.
All inputs should have compatible batch sizes.
Returns:
The batch size in Python integer if available, or a scalar Tensor otherwise.
Raises:
ValueError: if there is any input with an invalid shape.
"""
for input_ in flat_input:
shape = input_.shape
if shape.rank is None:
continue
if shape.rank < 2:
raise ValueError(
"Expected input tensor %s to have rank at least 2" % input_)
batch_size = shape.dims[1].value
if batch_size is not None:
return batch_size
# Fallback to the dynamic batch size of the first input.
return array_ops.shape(flat_input[0])[1]
def _infer_state_dtype(explicit_dtype, state):
"""Infer the dtype of an RNN state.
Args:
explicit_dtype: explicitly declared dtype or None.
state: RNN's hidden state. Must be a Tensor or a nested iterable containing
Tensors.
Returns:
dtype: inferred dtype of hidden state.
Raises:
ValueError: if `state` has heterogeneous dtypes or is empty.
"""
if explicit_dtype is not None:
return explicit_dtype
elif nest.is_sequence(state):
inferred_dtypes = [element.dtype for element in nest.flatten(state)]
if not inferred_dtypes:
raise ValueError("Unable to infer dtype from empty state.")
all_same = all(x == inferred_dtypes[0] for x in inferred_dtypes)
if not all_same:
raise ValueError(
"State has tensors of different inferred_dtypes. Unable to infer a "
"single representative dtype.")
return inferred_dtypes[0]
else:
return state.dtype
def _maybe_tensor_shape_from_tensor(shape):
if isinstance(shape, ops.Tensor):
return tensor_shape.as_shape(tensor_util.constant_value(shape))
else:
return shape
def _should_cache():
"""Returns True if a default caching device should be set, otherwise False."""
if context.executing_eagerly():
return False
# Don't set a caching device when running in a loop, since it is possible that
# train steps could be wrapped in a tf.while_loop. In that scenario caching
# prevents forward computations in loop iterations from re-reading the
# updated weights.
ctxt = ops.get_default_graph()._get_control_flow_context() # pylint: disable=protected-access
return control_flow_util.GetContainingWhileContext(ctxt) is None
def _is_keras_rnn_cell(rnn_cell):
"""Check whether the cell is a Keras RNN cell.
The Keras RNN cell accept the state as a list even the state is a single
tensor, whereas the TF RNN cell does not wrap single state tensor in list.
This behavior difference should be unified in future version.
Args:
rnn_cell: An RNN cell instance that either follow the Keras interface or TF
RNN interface.
Returns:
Boolean, whether the cell is an Keras RNN cell.
"""
# Cell type check is not strict enough since there are cells created by other
# library like Deepmind that didn't inherit tf.nn.rnn_cell.RNNCell.
# Keras cells never had zero_state method, which was from the original
# interface from TF RNN cell.
return (not isinstance(rnn_cell, rnn_cell_impl.RNNCell)
and isinstance(rnn_cell, base_layer.Layer)
and getattr(rnn_cell, "zero_state", None) is None)
# pylint: disable=unused-argument
def _rnn_step(
time, sequence_length, min_sequence_length, max_sequence_length,
zero_output, state, call_cell, state_size, skip_conditionals=False):
"""Calculate one step of a dynamic RNN minibatch.
Returns an (output, state) pair conditioned on `sequence_length`.
When skip_conditionals=False, the pseudocode is something like:
if t >= max_sequence_length:
return (zero_output, state)
if t < min_sequence_length:
return call_cell()
# Selectively output zeros or output, old state or new state depending
# on whether we've finished calculating each row.
new_output, new_state = call_cell()
final_output = np.vstack([
zero_output if time >= sequence_length[r] else new_output_r
for r, new_output_r in enumerate(new_output)
])
final_state = np.vstack([
state[r] if time >= sequence_length[r] else new_state_r
for r, new_state_r in enumerate(new_state)
])
return (final_output, final_state)
Args:
time: int32
|
`Te
|
nsor` scalar.
sequence_length: int32 `Tensor` vector of size [batch_size].
min_sequence_length: int32 `Tensor` scalar, min of sequence_length.
max_sequence_length: int32 `Tensor` scalar, max of sequence_length.
zero_output: `Tensor` vector of shape [output_size].
state: Either a single `Tensor` matrix of shape `[batch_size, state_size]`,
or a list/tuple of such tensors.
call_cell: lambda returning tuple of (new_output, new_state) where
new_output is a `Tensor` matrix of shape `[batch_size, output_size]`.
new_state is a `Tensor` matrix of shape `[batch_size, state_size]`.
state_size: The `cell.state_size` associated with the state.
skip_conditionals: Python bool, whether to skip using the conditional
calculations. This is useful for `dynamic_rnn`, where the input tensor
matches `max_sequence_length`, and using conditionals just slows
everything down.
Returns:
A tuple of (`final_output`, `final_state`) as given by the pseudocode above:
final_output is a `Tensor` matrix of shape [batch_size, output_size]
final_state is either a single `Tensor` matrix, or a tuple of such
matric
|
Modeful/poc
|
modeful/ui/diagram/relationship/composition.py
|
Python
|
gpl-3.0
| 774 | 0.003876 |
from kivy.graphics import Color, Line, Quad
from modeful.ui.diagram.relationship import Trigonometry
from modeful.ui.diagram.relationship.association import Association
class Composition(Association):
def __init__(sel
|
f, *args, **kwargs):
super().__init__(*args, **kwargs)
with self.canvas.before:
Colo
|
r(0, 0, 0)
self._diamond_bg = Quad(points=[0]*8)
Color(0, 0, 0, .5)
self._diamond_line = Line(points=[], width=1, close=True)
def redraw(self, x1, y1, x2, y2):
super().redraw(x1, y1, x2, y2)
points = Trigonometry.get_diamond_points(x1, y1, x2, y2, size=15)
self._diamond_bg.points = points
self._diamond_line.points = points
|
andrefbsantos/Tuxemon
|
tuxemon/core/components/ui/__init__.py
|
Python
|
gpl-3.0
| 8,220 | 0.001825 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Tuxemon
# Copyright (C) 2014, William Edwards <shadowapex@gmail.com>,
# Benjamin Bean <superman2k5@gmail.com>
#
# This file is part of Tuxemon.
#
# Tuxemon is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tuxemon is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tuxemon. If not, see <http://www.gnu.org/licenses/>.
#
# Contributor(s):
#
# William Edwards <shadowapex@gmail.com>
#
#
# core.components.ui User interface handling module.
#
#
import logging
import pygame
import operator
from core.components import pyganim
from core.components import plugin
from core import prepare
# Create a logger for optional handling of debug messages.
logger = logging.getLogger(__name__)
logger.debug("components.ui successfully imported")
class UserInterface(object):
"""A basic user interface object.
:param image: Path to the image to load or surface.
:param position: The [x, y] position to draw the UI element.
:param screen: The pygame surface to draw the element on.
:param scale: Whether or not to scale the surface based on game's scale.
:type image: String or pygame.Surface
:type position: List
:type screen: pygame.Surface
:type scale: Boolean
"""
def __init__(self, images, position, screen, scale=True,
animation_speed=0.2, animation_loop=False):
# Handle loading a single image, multiple images, or surfaces
if type(images) is str or type(images) is unicode:
surface = pygame.image.load(images).convert_alpha()
self.original_width = surface.get_width()
self.original_height = surface.get_height()
if scale:
surface = self.scale_surface(surface)
self.images = [(surface, animation_speed)]
elif type(images) is list or type(images) is tuple:
self.images = []
for item in images:
if type(item) is str or type(item) is unicode:
surface = pygame.image.load(item).convert_alpha()
self.original_width = surface.get_width()
|
self.original_height = surface.get_height()
if scale:
surfa
|
ce = self.scale_surface(surface)
else:
self.original_width = surface.get_width()
self.original_height = surface.get_height()
if scale:
surface = self.scale_surface(surface)
else:
surface = item
self.images.append((surface, animation_speed))
else:
self.original_width = images.get_width()
self.original_height = images.get_height()
if scale:
surface = self.scale_surface(images)
else:
surface = images
self.images = [(surface, animation_speed)]
# Create a pyganimation object using our loaded images.
self.animation = pyganim.PygAnimation(self.images, loop=animation_loop)
self.animation.play()
self.animation.pause()
self.position = position
self.last_position = position
self.screen = screen
self.visible = True
self.state = ""
self.width = self.images[0][0].get_width()
self.height = self.images[0][0].get_height()
self.moving = False
self.move_destination = (0, 0)
self.move_delta = (0, 0)
self.move_duration = 0.
self.move_time = 0.
self.fading = False
self.fade_duration = 0.
self.shaking = False
def scale_surface(self, surface):
"""Scales the interface based on the game's scale.
:param: None
:type: None
"""
width = surface.get_width()
height = surface.get_height()
scaled_surface = pygame.transform.scale(surface,
(width * prepare.SCALE,
height * prepare.SCALE))
return scaled_surface
def update(self, dt):
"""Updates the object based on its current state.
:param dt: Amount of time passed in seconds since the last frame.
:type dt: Float
"""
if self.moving:
self.move_time += dt
dest = self.move_destination
dur = self.move_duration
mdt = self.move_delta
mt = self.move_time
if mt > dur:
self.position = dest
self.moving = False
if self.state == "moving" or self.state == "back":
self.state = ""
elif self.state == "forward":
self.move(self.last_position, self.move_duration)
self.state = "back"
else:
if type(self.position) is tuple:
self.position = list(self.position)
self.position[0] -= (mdt[0] * dt) / dur
self.position[1] -= (mdt[1] * dt) / dur
def draw(self):
"""Draws the UI element to the screen.
:param: None
:type: None
"""
if self.visible:
if self.shaking:
# Do shaking stuff
pos = self.position
else:
pos = self.position
self.animation.blit(self.screen, pos)
def play(self):
self.animation.play()
def pause(self):
self.animation.pause()
def stop(self):
self.animation.stop()
def shake(self, intensity, direction="random"):
"""Shakes the object a given severity.
:param intensity: How much the object will shake.
:param direction: Direction to shake in degrees, defaults to "random".
:type intensity: Int
:type direction: Int or String
"""
pass
def fade_in(self, duration=1.):
"""Fades the object in.
:param duration: Fade the object in over n seconds, defaults to 1.
:type duration: Float
"""
if not self.state == "fading_in":
self.state = "fading_in"
self.fading = "in"
self.fade_duration = duration
def fade_out(self, duration=1.):
"""Fades the object out.
:param duration: Fade the object out over n seconds, defaults to 1.
:type duration: Float
"""
if not self.state == "fading_out":
self.state = "fading_out"
self.fading = "out"
self.fade_duration = duration
def move(self, destination, duration=1.):
"""Moves the object to position over n seconds.
:param destination: The (x, y) screen destination position to move to.
:param duration: Moves the object over n seconds, defaults to 1.
:type destination: Tuple
:type duration: Float
"""
if not self.state == "moving":
self.state = "moving"
self.moving = True
self.last_position = list(self.position)
self.move_destination = destination
self.move_time = 0.
self.move_delta = map(operator.sub, self.position, destination)
self.move_duration = float(duration)
def shake_once(self, destination, duration=0.3):
"""Moves the object to a position and then back to its original position.
"""
if not self.state == "forward" or not self.state == "back":
self.move(destination, duration)
self.state = "forward"
def scale(self, width_height):
self.animation.scale(width_
|
ychab/mymoney
|
mymoney/apps/bankaccounts/models.py
|
Python
|
bsd-3-clause
| 2,268 | 0 |
from django.conf import settings
from django.db import models
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from mymoney.core.utils.currencies import get_currencies
class BankAccountManager(models.Manager):
def get_user_bankaccounts(self, user):
if not hasattr(user, '_cache_bankaccounts'):
user._cache_bankaccounts = user.bankaccounts.order_by('label')
return user._cache_bankaccounts
def delete_orphans(self):
"""
Delete bank account which have no more owners.
"""
self.filter(owners__isnull=True).delete()
class BankAccount(models.Model):
label = models.CharField(max_length=255, verbose_name=_('Label'))
balance = models.DecimalField(
max_digits=10,
decimal_places=2,
default=0,
verbose_name=_('Balance'),
)
balance_initial = models.DecimalField(
max_digits=10,
decimal_places=2,
default=0,
verbose_name=_('Initial balance'),
help_text=_('Initial balance will automatically update the balance.'),
)
currency = models.CharField(
max_length=3,
choices=get_currencies(),
verbose_name=_('Currency'),
)
owners = models.ManyToManyField(
settings.AUTH_USER_MODEL,
limit_choices_to={'is_staff': False, 'is_superuser': False},
verbose_name=_('Owners'),
related_name='bankaccounts',
db_table='bankaccounts_owners',
)
objects = BankAccountManager()
class Meta:
db_table = 'bankaccounts'
permissions = (("administer_owners", "Administer owners"),)
def __str__(self):
return self.label
def save(self, *args, **kwargs):
# Init balance. Merg
|
e both just in case.
if self.pk is None:
self.balance += self.balance_initial
# Otherwise update it with the new delta.
else:
original = BankAccount.objects.get(pk=self.pk)
|
self.balance += self.balance_initial - original.balance_initial
super(BankAccount, self).save(*args, **kwargs)
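# Worked example of the delta logic above: an account created with
# balance_initial=100 starts with balance=100; later changing
# balance_initial to 150 adds only the +50 delta to the current balance.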
def get_absolute_url(self):
return reverse('banktransactions:list', kwargs={
'bankaccount_pk': self.pk,
})
|
ngokevin/cyder
|
cyder/cydns/cname/views.py
|
Python
|
bsd-3-clause
| 808 | 0 |
from cyder.cydns.views import CydnsDeleteView
from cyder.cydns.views import CydnsDetailView
from cyder.cydns.views import CydnsCreateView
from cyder.cydns.views import CydnsUpdateView
from cyder.cydns.views import CydnsListView
from cyder.cydns.cname.models import CNAME
from cyder.cydns.cname.forms import
|
CNAMEForm
class CNAMEView(object):
model = CNAME
form_class = CNAMEForm
queryset = CNAME.objects.all().order_by('fqdn')
class CNAMEDeleteView(CNAMEView, CydnsDeleteView):
""" """
class CNAMEDetailView(CNAMEView, CydnsDetailView):
""" """
templat
|
e_name = "cname/cname_detail.html"
class CNAMECreateView(CNAMEView, CydnsCreateView):
""" """
class CNAMEUpdateView(CNAMEView, CydnsUpdateView):
""" """
class CNAMEListView(CNAMEView, CydnsListView):
""" """
|
appleseedhq/gaffer
|
python/GafferCortexUI/CompoundPlugValueWidget.py
|
Python
|
bsd-3-clause
| 11,940 | 0.039698 |
##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import IECore
import Gaffer
import GafferUI
from Qt import QtWidgets
# This was the predecessor to the far superior GafferUI.PlugLayout
# class that we now use. It survives here as a relic because it is
# still relied upon by CompoundParameterValueWidget and
# ClassVectorParameterValueWidget. Do not use it for anything else!
class CompoundPlugValueWidget( GafferUI.PlugValueWidget ) :
## Possible values for collapsed are :
#
# True : use Collapsible container which starts off collapsed
# False : use Collapsible container which starts off opened
# None : don't use Collapsible container
#
# Note that the True/False values for collapsible just set the initial state -
# after this the current state is stored for the session on a per-node basis
# for user convenience.
#
# If summary is specified it will be called each time a child plug changes value,
# and the result used to provide a summary in the collapsible header.
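# Usage sketch (hypothetical 'plug' variable):
#   w = CompoundPlugValueWidget( plug, collapsed = False, label = "Settings" )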
def __init__( self, plug, collapsed=True, label=None, summary=None, **kw ) :
self.__column = GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Vertical, spacing = 4 )
self.__label = label if label else IECore.CamelCase.toSpaced( plug.getName() )
self.__collapsible = None
if collapsed is not None :
self.__collapsible = GafferUI.Collapsible(
self.__label,
collapsed = self.__getStoredCollapseState( plug, collapsed ),
)
self.__collapsible.setChild( self.__column )
self.__collapsible.setCornerWidget( GafferUI.Label(), True )
## \todo This is fighting the default sizing applied in the Label constructor. Really we need a standard
# way of controlling size behaviours for all widgets in the public API.
self.__collapsible.getCornerWidget()._qtWidget().setSizePolicy( QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed )
self.__collapseStateChangedConnection = self.__collapsible.stateChangedSignal().connect( Gaffer.WeakMethod( self.__collapseStateChanged ) )
GafferUI.PlugValueWidget.__init__(
self,
self.__collapsible if self.__collapsible is not None else self.__column,
plug,
**kw
)
self.__plugAddedConnection = plug.childAddedSignal().connect( Gaffer.WeakMethod( self.__childAddedOrRemoved ) )
self.__plugRemovedConnection = plug.childRemovedSignal().connect( Gaffer.WeakMethod( self.__childAddedOrRemoved ) )
self.__childrenChangedPending = False
# arrange to build the rest of the ui in a deferred fashion. this means that we will be
# fully constructed when we call _childPlugWidget etc, rather than expecting derived
# class' implementations to work even before their constructor has completed.
# it also means we don't pay the cost of building huge uis upfront, and rather do it incrementally
# as the user opens up sections. for non-collapsed uis, we build when a parent is received, which
# allows the top level window to get the sizing right, and for collapsed uis we build when the
# the ui first becomes visible due to being opened.
if collapsed == True :
self.__visibilityChangedConnection = self.__column.visibilityChangedSignal().connect( Gaffer.WeakMethod( self.__visibilityChanged ) )
else :
self.__parentChangedConnection = self.parentChangedSignal().connect( Gaffer.WeakMethod( self.__parentChanged ) )
self.__visibilityChangedConnection = self.__column.visibilityChangedSignal().connect( Gaffer.WeakMethod( self.__visibilityChanged ) )
self.__childPlugUIs = {} # mapping from child plug to PlugWidget
self.__summary = summary
CompoundPlugValueWidget._updateFromPlug( self )
## Returns a PlugValueWidget representing the specified child plug.
# Because the ui is built lazily on demand, this might return None due
# to the user not having opened up the ui - in this case lazy=False may
# be passed to force the creation of the ui.
def childPlugValueWidget( self, childPlug, lazy=True ) :
if not lazy and len( self.__childPlugUIs ) == 0 :
self.__updateChildPlugUIs()
w = self.__childPlugUIs.get( childPlug, None )
if w is None :
return w
elif isinstance( w, GafferUI.PlugValueWidget ) :
return w
else :
return w.plugValueWidget()
def hasLabel( self ) :
return True
## Overridden to propagate status to children.
def setReadOnly( self, readOnly ) :
if readOnly == self.getReadOnly() :
return
GafferUI.PlugValueWidget.setReadOnly( self, readOnly )
for w in self.__childPlugUIs.values() :
if w is None :
continue
if isinstance( w, GafferUI.PlugValueWidget ) :
w.setReadOnly( readOnly )
elif isinstance( w, GafferUI.PlugWidget ) :
w.labelPlugValueWidget().setReadOnly( readOnly )
w.plugValueWidget().setReadOnly( readOnly )
else :
w.plugValueWidget().setReadOnly( readOnly )
def _updateFromPlug( self ) :
if self.__summary is not None and self.__collapsible is not None :
with self.getContext() :
s = self.__summary( self.getPlug() )
if s :
s = "<small>" + " ( " + s + " ) </small>"
self.__collapsible.getCornerWidget().setText( s )
## May be overridden by derived classes to return a widget to be placed
# at the top of the layout.
def _headerWidget( self ) :
return None
## May be overridden by derived classes to customise the creation of widgets
# to represent the child plugs. The returned widget must either derive from
# PlugValueWidget or must have a plugValueWidget() method which returns
# a PlugValueWidget.
def _childPlugWidget( self, childPlug ) :
result = GafferUI.PlugValueWidget.create( childPlug )
if isinstance( result, GafferUI.PlugValueWidget ) and not result.hasLabel() :
result = GafferUI.PlugWidget( result )
return result
## May be overridden by derived classes to return a widget to be placed
# at the bottom of the layout.
def _footerWidget( self ) :
return None
## Retu
|
rns the Collapsible widget used to contain the child widgets,
# or None if this ui is not collapsible.
def _collapsible( self ) :
return self.__collapsible
## May be overridden by derived classes to specify which child plugs
# are represented and in what order.
def _childPlugs( self ) :
return self.getPlug().children()
## \todo Mapping
|
plugName->widget makes us vulnerable to name changes.
# See similar comments in StandardNodeUI and StandardNodeToolbar.
def __updateCh
|
agustin380/scratchbling
|
src/products/api/serializers.py
|
Python
|
gpl-3.0
| 247 | 0 |
from rest_framework.serializers impo
|
rt ModelSe
|
rializer
from ..models import BackScratcher
class ProductsSerializer(ModelSerializer):
class Meta:
model = BackScratcher
fields = ['id', 'name', 'description', 'price', 'sizes']
|
amwelch/a10sdk-python
|
a10sdk/core/interface/interface_loopback_ip.py
|
Python
|
apache-2.0
| 2,191 | 0.008672 |
from a10sdk.common.A10BaseClass import A10BaseClass
class AddressList(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param ipv4_address: {"type": "string", "description": "IP address", "format": "ipv4-address"}
:param ipv4_netmask: {"type": "string", "description": "IP subnet mask", "format": "ipv4-netmask"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "address-list"
self.DeviceProxy = ""
self.ipv4_address = ""
self.ipv4_netmask = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
class Ip(A10BaseClass):
"""Class Description::
Global IP configuration subcommands.
Class ip supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param address_list: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"ipv4-address": {"type": "string", "description": "IP address", "format": "ipv4-address"}, "optional": true, "ipv4-netmask": {"type": "string", "description": "IP subnet mask", "format": "ipv4-netmask"}}}]}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param DeviceProxy: The device proxy for REST operations and session handling. R
|
efer to `common/device_proxy.py`
|
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/interface/loopback/{ifnum}/ip`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "ip"
self.a10_url="/axapi/v3/interface/loopback/{ifnum}/ip"
self.DeviceProxy = ""
self.address_list = []
self.ospf = {}
self.uuid = ""
self.rip = {}
self.router = {}
for keys, value in kwargs.items():
setattr(self,keys, value)
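# Construction sketch (key names follow the schema comments above; the
# address values are illustrative only, and DeviceProxy wiring is omitted):
#   ip = Ip(address_list=[{'ipv4-address': '10.0.0.1',
#                          'ipv4-netmask': '255.255.255.0'}])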
|
fevxie/odoo-saas-tools
|
saas_server/models/__init__.py
|
Python
|
lgpl-3.0
| 37 | 0 |
imp
|
ort sa
|
as_server
import res_config
|
ipmb/PyMetrics
|
PyMetrics/halstead.py
|
Python
|
gpl-2.0
| 10,366 | 0.016593 |
""" Compute HalsteadMetric Metrics.
HalsteadMetric metrics, created by Maurice H. HalsteadMetric in 1977, consist
of a number of measures, including:
Program length (N): N = N1 + N2
Program vocabulary (n): n = n1 + n2
Volume (V): V = N * LOG2(n)
Difficulty (D): D = (n1/2) * (N2/n2)
Effort (E): E = D * V
Average Volume (avgV) avgV = sum(V)/m
Average Effort (avgE) avgE = sum(E)/m
where:
n1 = number of distinct operands
n2 = number of distinct operators
N1 = total number of operands
N2 = total number of operators
m = number of modules
What constitutes an operand or operator is often open to
interpretation. In this implementation for the Python language:
operators are of type OP, INDENT, DEDENT, or NEWLINE since these
serve the same purpose as braces and semicolon in C/C++, etc.
operands are not operators or whitespace or comments
(this means operands include keywords)
$Id: halstead.py,v 1.3 2005/09/17 04:28:12 rcharney Exp $
"""
__version__ = "$Revision: 1.3 $"[11:-2]
__author__ = 'Reg. Charney <pymetrics@charneyday.com>'
import math
import time
from metricbase import MetricBase
from globals import *
class HalsteadMetric( MetricBase ):
""" Compute various HalsteadMetric metrics. """
totalV = 0
totalE = 0
numModules = 0
def __init__( self, context, runMetrics, metrics, pa, *args, **kwds ):
""" Initialization for the HalsteadMetric metrics."""
self.inFile = context['inFile']
self.context = context
self.runMetrics = runMetrics
self.metrics = metrics
self.pa = pa
self.inFile = context['inFile']
self.numOperators = 0
self.numOperands = 0
self.uniqueOperators = {}
self.uniqueOperands = {}
HalsteadMetric.numModules += 1
# initialize category accummulators as dictionaries
self.hsDict = {}
for t in ['token','stmt','block','function','class','module','run']:
self.uniqueOperators[t] = {}
self.uniqueOperands[t] = {}
#for v in ['N','N1','N2','n','n1','n2','V','D','E','avgV','avgE']:
# self.hsDict[(t,v)] = 0
def processToken( self, currentFcn, currentClass, tok, *args, **kwds ):
""" Collect token data for Halstead metrics."""
if tok.type in [WS, EMPTY, ENDMARKER, NEWLINE, EMPTY, COMMENT]:
pass
elif tok.type in [OP, INDENT, DEDENT]:
self.numOperators += 1
self.uniqueOperators['token'][tok.text] = self.uniqueOperators['token'].get(tok.text, 0) + 1
else:
|
self.numOperands += 1
sDict = self
|
.context.__repr__()
k = (sDict,tok.text)
self.uniqueOperands['token'][k] = self.uniqueOperands['token'].get(k, 0) + 1
def processStmt( self, currentFcn, currentClass, stmt, *args, **kwds ):
""" Collect statement data for Halstead metrics."""
result = None
# the two lines following this comment would compute the Halstead
# metrics for each statement in the run, However, it is
# normally overkill, so these lines are commented out.
#lineNum = stmt[0].row
#result = self.computeCategory( 'stmt', lineNum, stmt )
return result
def processBlock( self, currentFcn, currentClass, block, *args, **kwds ):
""" Collect block data for Halstead metrics."""
result = None
# the two lines following this comment would compute the Halstead
# metrics for each statement in the run, However, it is
# normally overkill, so the two lines are commented out.
#blockNum = self.context['blockNum']
#result = self.computeCategory( 'block', blockNum, block )
return result
def processFunction( self, currentFcn, currentClass, fcn, *args, **kwds ):
""" Collect function data for Halstead metrics."""
result = self.computeCategory( 'function', currentFcn, fcn )
return result
def processClass( self, currentFcn, currentClass, cls, *args, **kwds ):
""" Collect class data for Halstead metrics."""
result = self.computeCategory( 'class', currentClass, cls )
return result
def processModule( self, moduleName, mod, *args, **kwds ):
""" Collect module data for Halstead metrics."""
result = self.computeCategory( 'module', moduleName, mod )
return result
def processRun( self, run, *args, **kwds ):
""" Collect run data for Halstead metrics."""
datestamp = time.strftime("%Y-%m-%d.%H:%m%Z",time.localtime())
result = self.computeCategory( 'run', datestamp, run )
return result
def __LOGb( self, x, b ):
""" convert to LOGb(x) from natural logs."""
try:
result = math.log( x ) / math.log ( b )
except OverflowError:
result = 1.0
return result
def computeIncr( self, cat, tok, uniqueOperators, uniqueOperands ):
""" Compute increment for token depending on which category it falls into."""
operatorIncr = operandIncr = 0
if tok.type in [WS, EMPTY, ENDMARKER, NEWLINE, EMPTY, COMMENT]:
return (operatorIncr,operandIncr)
if tok.type in [OP, INDENT, DEDENT]:
operatorIncr = 1
uniqueOperators[tok.text] = uniqueOperators.get(tok.text, 0) + 1
else:
operandIncr = 1
uniqueOperands[tok.text] = uniqueOperands.get(tok.text,0) + 1
return (operatorIncr,operandIncr)
def computeCategory( self, cat, mod, lst ):
""" Collection data for cat of code."""
modID= id( mod )
numOperators = numOperands = 0
for tok in lst:
result = self.computeIncr( cat, tok, self.uniqueOperators[cat], self.uniqueOperands[cat] )
numOperators += result[0]
numOperands += result[1]
result = self.compute( cat, modID, numOperators, numOperands, self.uniqueOperators[cat], self.uniqueOperands[cat] )
return result
def compute( self, cat, modID, numOperators, numOperands, uniqueOperators, uniqueOperands, *args, **kwds ):
""" Do actual calculations here."""
n1 = len( uniqueOperands )
n2 = len( uniqueOperators )
N1 = numOperands
N2 = numOperators
N = N1 + N2
n = n1 + n2
V = float(N) * self.__LOGb( n, 2 )
try:
D = (float(n1)/2.0) * (float(N2)/float(n2))
except ZeroDivisionError:
D = 0.0
E = D * V
HalsteadMetric.totalV += V
HalsteadMetric.totalE += E
avgV = HalsteadMetric.totalV / HalsteadMetric.numModules
avgE = HalsteadMetric.totalE / HalsteadMetric.numModules
self.hsDict[(cat,modID,'n1')] = n1
self.hsDict[(cat,modID,'n2')] = n2
self.hsDict[(cat,modID,'N1')] = N1
self.hsDict[(cat,modID,'N2')] = N2
self.hsDict[(cat,modID,'N')] = N
self.hsDict[(cat,modID,'n')] = n
self.hsDict[(cat,modID,'V')] = V
self.hsDict[(cat,modID,'D')] = D
self.hsDict[(cat,modID,'E')] = E
self.hsDict[(cat,modID,'numModules')] = HalsteadMetric.numModules
self.hsDict[(cat,modID,'avgV')] = avgV
self.hsDict[(cat,modID,'avgE')] = avgE
return self.hsDict
def display( self, cat=None ):
""" Display the computed Halstead Metrics."""
if self.pa.quietSw:
return self.hsDict
hdr = "\nHalstead Metrics for %s" % self.inFile
print hdr
print "-"*len(hdr) + '\n'
if len( self.hsDict ) == 0:
print "%-8s %-30s " % ('**N/A**','All Halstead metrics are zero')
return self.hsDict
keyList = self.hsDict.keys()
keyList.sort()
if 0:
for k,i,v in keyList:
if cat:
if k!=cat:
contin
|
ideascube/ideascube
|
ideascube/library/migrations/0006_auto_20160728_1317.py
|
Python
|
agpl-3.0
| 3,408 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-07-28 13:17
from __future__ import unicode_literals
from django.db import migrations
import ideascube.models
class Migration(migrations.Migration):
dependencies = [
('library', '0005_auto_20160712_1324'),
]
operations = [
migrations.AlterField(
model_name='book',
name='lang',
field=ideascube.models.LanguageField(choices=[
('af', 'Afrikaans'), ('am', 'አማርኛ'), ('ar', 'العربيّة'),
('ast', 'Asturianu'), ('az', 'Azərbaycanca'),
('be', 'Беларуская'), ('bg', 'Български'), ('bm', 'Bambara'),
('bn', 'বাংলা'), ('br', 'Brezhoneg'), ('bs', 'Bosanski'),
('ca', 'Català'), ('cs', 'Česky'), ('cy', 'Cymraeg'),
('da', 'Dansk'), ('de', 'Deutsch'), ('el', 'Ελληνικά'),
('en', 'English'), ('en-au', 'Australian english'),
('en-gb', 'British english'), ('eo', 'Esperanto'),
('es', 'Español'), ('es-ar', 'Español de argentina'),
('es-co', 'Españ
|
ol de colombia'),
('es-mx', 'Español de mexico'),
('es-ni', 'Español de nicaragua'),
('es-ve', 'Español de venezuela'), ('et', 'Eesti'),
('eu', 'Basque'), ('fa', 'فارسی'), ('fi', 'Suomi'),
('fr', 'Français'), ('fy', 'Frysk'), ('ga', 'Gaeilge'),
('gd', 'Gàidhlig'), ('gl', 'Galego'), ('he', 'עברית'),
('hi', 'H
|
indi'), ('hr', 'Hrvatski'), ('hu', 'Magyar'),
('ia', 'Interlingua'), ('id', 'Bahasa indonesia'),
('io', 'Ido'), ('is', 'Íslenska'), ('it', 'Italiano'),
('ja', '日本語'), ('ka', 'ქართული'), ('kk', 'Қазақ'),
('km', 'Khmer'), ('kn', 'Kannada'), ('ko', '한국어'),
('ku', 'Kurdî'), ('lb', 'Lëtzebuergesch'), ('ln', 'Lingála'),
('lt', 'Lietuviškai'), ('lv', 'Latviešu'),
('mk', 'Македонски'), ('ml', 'Malayalam'), ('mn', 'Mongolian'),
('mr', 'मराठी'), ('my', 'မြန်မာဘာသာ'),
('nb', 'Norsk (bokmål)'), ('ne', 'नेपाली'),
('nl', 'Nederlands'), ('nn', 'Norsk (nynorsk)'),
('no', 'Norsk'), ('os', 'Ирон'), ('pa', 'Punjabi'),
('pl', 'Polski'), ('ps', 'پښتو'), ('pt', 'Português'),
('pt-br', 'Português brasileiro'), ('rn', 'Kirundi'),
('ro', 'Română'), ('ru', 'Русский'), ('sk', 'Slovensky'),
('sl', 'Slovenščina'), ('so', 'Af-soomaali'), ('sq', 'Shqip'),
('sr', 'Српски'), ('sr-latn', 'Srpski (latinica)'),
('sv', 'Svenska'), ('sw', 'Kiswahili'), ('ta', 'தமிழ்'),
('te', 'తెలుగు'), ('th', 'ภาษาไทย'), ('ti', 'ትግርኛ'),
('tr', 'Türkçe'), ('tt', 'Татарча'), ('udm', 'Удмурт'),
('uk', 'Українська'), ('ur', 'اردو'), ('vi', 'Tiếng việt'),
('wo', 'Wolof'), ('zh-hans', '简体中文'), ('zh-hant', '繁體中文')
], max_length=10, verbose_name='Language'),
),
]
|
Rjtsahu/School-Bus-Tracking
|
BusTrack/repository/models/UserType.py
|
Python
|
gpl-3.0
| 743 | 0 |
from sqlalchemy import Column, String, Integer
from BusTrack.repository import Base, session
from BusTrack.repositor
|
y.models import STRING_LEN_SMALL
class UserType(Base):
__tablename__ = 'user_type'
id = Column(Integer, primary_key=True)
role_name = Column(String(STRING_LEN_SMALL))
@staticmethod
def __create_default_role__():
if session.query(UserType).count() != 0:
return
driver = UserType()
driver.role_name = 'Driver'
parent = UserType()
parent.role_name = 'Parent'
admin = UserType()
|
admin.role_name = 'Admin'
session.add(driver)
session.add(parent)
session.add(admin)
session.commit()
session.close()
|
maxspad/MGrader
|
autograder/modules/questions/timedsubproc.py
|
Python
|
bsd-3-clause
| 861 | 0.013937 |
import subprocess, threading
from subprocess import PIPE
class TimedSubProc (object):
def __init__(self, cmd):
self.cmd = cmd.split()
self.process = None
|
def run(self, timeout=5, stdin=None, stdout=PIPE, stderr=PIPE):
self.output = None
def target():
self.process = subprocess.Popen(self.cmd, stdin=stdin, stdout=stdout, stderr=stderr)
self.output = self.process.communicate()
thread = threading.Thread(target=target)
thread.start()
thread.join(timeout)
if thread.is_alive():
print '
|
Process timeout! Terminating...'
self.process.terminate()
thread.join()
return False
return (self.process.returncode, self.output[0], self.output[1])
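# Usage sketch (assumes a POSIX 'sleep' binary; run() returns False on
# timeout, otherwise a (returncode, stdout, stderr) tuple):
#   tsp = TimedSubProc('sleep 10')
#   result = tsp.run(timeout=1)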
|
Lorquas/dogtail
|
dogtail/utils.py
|
Python
|
gpl-2.0
| 14,664 | 0.002523 |
# -*- coding: utf-8 -*-
"""
Various utilities
Authors: Ed Rousseau <rousseau@redhat.com>, Zack Cerza <zcerza@redhat.com, David Malcolm <dmalcolm@redhat.com>
"""
__author__ = """Ed Rousseau <rousseau@redhat.com>,
Zack Cerza <zcerza@redhat.com,
David Malcolm <dmalcolm@redhat.com>
"""
import os
import sys
import subprocess
import cairo
import predicate
import errno
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Gdk', '3.0')
from gi.repository import Gtk
from gi.repository import GObject
from config import config
from time import sleep
from logging import debugLogger as logger
from logging import TimeStamp
from __builtin__ import file
def screenshot(file='screenshot.png', timeStamp=True):
"""
This function wraps the ImageMagick import command to take a screenshot.
The file argument may be specified as 'foo', 'foo.png', or using any other
extension that ImageMagick supports. PNG is the default.
By default, screenshot filenames are in the format of foo_YYYYMMDD-hhmmss.png .
The timeStamp argument may be set to False to name the file foo.png.
"""
if not isinstance(timeStamp, bool):
raise TypeError("timeStamp must be True or False")
# config is supposed to create this for us. If it's not there, bail.
assert os.path.isdir(config.scratchDir)
baseName = ''.join(file.split('.')[0:-1])
fileExt = file.split('.')[-1].lower()
if not baseName:
baseName = file
fileExt = 'png'
if timeStamp:
ts = TimeStamp()
newFile = ts.fileStamp(baseName) + '.' + fileExt
path = config.scratchDir + newFile
else:
newFile = baseName + '.' + fileExt
path = config.scratchDir + newFile
from gi.repository import Gdk
from gi.repository import GObject
from gi.repository import GdkPixbuf
rootWindow = Gdk.get_default_root_window()
geometry = rootWindow.get_geometry()
pixbuf = GdkPixbuf.Pixbuf(colorspace=GdkPixbuf.Colorspace.RGB,
has_alpha=False,
bits_per_sample=8,
width=geometry[2],
height=geometry[3])
pixbuf = Gdk.pixbuf_get_from_window(rootWindow, 0, 0,
geometry[2], geometry[3])
# GdkPixbuf.Pixbuf.save() needs 'jpeg' and not 'jpg'
if fileExt == 'jpg':
fileExt = 'jpeg'
try:
pixbuf.savev(path, fileExt, [], [])
except GObject.GError:
raise ValueError("Failed to save screenshot in %s format" % fileExt)
assert os.path.exists(path)
logger.log("Screensh
|
ot taken: " + path)
return path
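# Illustrative usage sketch (not part of the original function): capture a
# timestamped PNG, then a fixed-name JPEG.
#   path = screenshot()                        # foo_YYYYMMDD-hhmmss.png
#   path = screenshot('grab.jpg', timeStamp=False)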
def run(string, timeout=config.runTimeout, interval=config.runInterval, desktop=None, dumb=False, appName=''):
"""
Runs an application. [For simple command execution such as 'rm *', use os.popen() or os.system()]
If dumb is omitted or is False, polls at interval seconds until the application is finished starting, or until timeout is reached.
If dumb is True, returns when timeout is reached.
"""
if not desktop:
from tree import root as desktop
args = string.split()
os.environ['GTK_MODULES'] = 'gail:atk-bridge'
pid = subprocess.Popen(args, env=os.environ).pid
if not appName:
appName = args[0]
if dumb:
# We're starting a non-AT-SPI-aware application. Disable startup
# detection.
doDelay(timeout)
else:
# Startup detection code
# The timing here is not totally precise, but it's good enough for now.
time = 0
while time < timeout:
time = time + interval
try:
for child in desktop.children[::-1]:
if child.name == appName:
for grandchild in child.children:
if grandchild.roleName == 'frame':
from procedural import focus
focus.application.node = child
doDelay(interval)
return pid
except AttributeError: # pragma: no cover
pass
doDelay(interval)
return pid
def doDelay(delay=None):
"""
Utility function to insert a delay (with logging and a configurable
default delay)
"""
if delay is None:
delay = config.defaultDelay
if config.debugSleep:
logger.log("sleeping for %f" % delay)
sleep(delay)
class Highlight (Gtk.Window): # pragma: no cover
def __init__(self, x, y, w, h): # pragma: no cover
super(Highlight, self).__init__()
self.set_decorated(False)
self.set_has_resize_grip(False)
self.set_default_size(w, h)
self.screen = self.get_screen()
self.visual = self.screen.get_rgba_visual()
if self.visual is not None and self.screen.is_composited():
self.set_visual(self.visual)
self.set_app_paintable(True)
self.connect("draw", self.area_draw)
self.show_all()
self.move(x, y)
def area_draw(self, widget, cr): # pragma: no cover
cr.set_source_rgba(.0, .0, .0, 0.0)
cr.set_operator(cairo.OPERATOR_SOURCE)
cr.paint()
cr.set_operator(cairo.OPERATOR_OVER)
cr.set_source_rgb(0.9, 0.1, 0.1)
cr.set_line_width(6)
cr.rectangle(0, 0, self.get_size()[0], self.get_size()[1])
cr.stroke()
class Blinker(object): # pragma: no cover
INTERVAL_MS = 1000
main_loop = GObject.MainLoop()
def __init__(self, x, y, w, h): # pragma: no cover
self.highlight_window = Highlight(x, y, w, h)
if self.highlight_window.screen.is_composited() is not False:
self.timeout_handler_id = GObject.timeout_add(
Blinker.INTERVAL_MS, self.destroyHighlight)
self.main_loop.run()
else:
self.highlight_window.destroy()
def destroyHighlight(self): # pragma: no cover
self.highlight_window.destroy()
self.main_loop.quit()
return False
class Lock(object):
"""
A mutex implementation that uses atomicity of the mkdir operation in UNIX-like
systems. This can be used by scripts to provide for mutual exlusion, either in single
scripts using threads etc. or i.e. to handle sitations of possible collisions among
multiple running scripts. You can choose to make randomized single-script wise locks
or a more general locks if you do not choose to randomize the lockdir name
"""
def __init__(self, location='/tmp', lockname='dogtail_lockdir_', randomize=True):
"""
You can change the default lockdir location or name. Setting randomize to
        False will result in no random string being appended to the lockdir name.
"""
self.lockdir = os.path.join(os.path.normpath(location), lockname)
if randomize:
self.lockdir = "%s%s" % (self.lockdir, self.__getPostfix())
def lock(self):
"""
Creates a lockdir based on the settings on Lock() instance creation.
Raises OSError exception of the lock is already present. Should be
atomic on POSIX compliant systems.
"""
locked_msg = 'Dogtail lock: Already locked with the same lock'
if not os.path.exists(self.lockdir):
try:
os.mkdir(self.lockdir)
return self.lockdir
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(self.lockdir):
raise OSError(locked_msg)
else:
raise OSError(locked_msg)
def unlock(self):
"""
Removes a lock. Will raise OSError exception if the lock was not present.
Should be atomic on POSIX compliant systems.
"""
import os # have to import here for situations when executed from __del__
if os.path.exists(self.lockdir):
try:
os.rmdir(self.lockdir)
except OSError as e:
                if e.errno == errno.EEXIST:
raise OSError('Dogtail unlock: lockdi
|
bluemini/kuma
|
vendor/packages/translate/lang/my.py
|
Python
|
mpl-2.0
| 1,082 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2013 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""This module represents the Burmese language.
.. seealso:: http://en.wikipedia.org/wiki/Burmese_language
"""
from translate.lang import common
class my(common.Common):
"""This class represents Burmese."""
puncdict = {
u".": u"။",
}
ignoretests = ["startcaps", "simplecaps"]
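# Illustrative note (not part of the original file): common.Common provides
# punctranslate(), which applies puncdict when converting punctuation, so a
# sentence-final "." should be rendered as the Burmese full stop:
# my.punctranslate(u"Hello.") -> u"Hello။"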
|
ZhuChara2004/MA-Summer-Practice-2016
|
homeworks/team-2/prof-test/backend/models/question.py
|
Python
|
gpl-3.0
| 438 | 0.004566 |
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from answer import Answer
from base import Base
class Question(Base):
__tablename__ = 'question'
    id = Column(Integer, primary_key=True)
question = Column(String(256))
answers = relationship('Answer', backref='question',
lazy='dynamic')
test_id = Column(Integer, ForeignKey('test.id'))
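# Illustrative usage sketch (not part of the original file): because the
# `answers` relationship is declared lazy='dynamic', it yields a query
# object, so once a Question row is persisted its answers can be filtered
# before loading, e.g. question.answers.limit(1).all()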
|
hamishcunningham/fishy-wifi
|
wegrow-cloudside/elf-data-collector/webserver4/server-again.py
|
Python
|
agpl-3.0
| 6,539 | 0.001682 |
###########################################################################
# Concurrent WSGI server - webserver3h.py #
# #
# Tested with Python 2.7.9 on Ubuntu 14.04 & Mac OS X #
###########################################################################
import errno
import os
import signal
import socket
import StringIO
import sys
def grim_reaper(signum, frame):
while True:
try:
pid, status = os.waitpid(
-1, # Wait for any child process
os.WNOHANG # Do not block and return EWOULDBLOCK error
)
print(
'Child {pid} terminated with status {status}'
'\n'.format(pid=pid, status=status)
)
except OSError:
return
if pid == 0: # no more zombies
return
class WSGIServer(object):
address_family = socket.AF_INET
socket_type = socket.SOCK_STREAM
request_queue_size = 1024
def __init__(self, server_address):
# Create a listening socket
self.listen_socket = listen_socket = socket.socket(
self.address_family,
self.socket_type
)
# Allow to reuse the same address
listen_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Bind
listen_socket.bind(server_address)
# Activate
listen_socket.listen(self.request_queue_size)
# Get server host name and port
host, port = self.listen_socket.getsockname()[:2]
self.server_name = socket.getfqdn(host)
self.server_port = port
# Return headers set by Web framework/Web application
self.headers_set = []
def set_app(self, application):
self.application = application
def serve_forever(self):
listen_socket = self.listen_socket
while True:
try:
self.client_connection, client_address = listen_socket.accept()
except IOError as e:
code, msg = e.args
# restart 'accept' if it was interrupted
if code == errno.EINTR:
continue
else:
raise
pid = os.fork()
if pid == 0: # child
listen_socket.close() # close child copy
# Handle one request and close the client connection.
self.handle_one_request()
os._exit(0)
else: # parent
self.client_connection.close() # close parent copy
def handle_one_request(self):
self.request_data = request_data = self.client_connection.recv(1024)
# Print formatted request data a la 'curl -v'
print(''.join(
'< {line}\n'.format(line=line)
for line in request_data.splitlines()
))
self.parse_request(request_data)
# Construct environment dictionary using request data
env = self.get_environ()
# It's time to call our application callable and get
# back a result that will become HTTP response body
result = self.application(env, self.start_response)
# Construct a response and send it back to the client
self.finish_response(result)
def parse_request(self, text):
request_line = text.splitlines()[0]
request_line = request_line.rstrip('\r\n')
# Break down the request line into components
(self.request_method, # GET
self.path, # /hello
self.request_version # HTTP/1.1
) = request_line.split()
def get_environ(self):
env = {}
# The following code snippet does not follow PEP8 conventions
# but it's formatted the way it is for demonstration purposes
# to emphasize the required variables and their values
#
# Required WSGI variables
env['wsgi.version'] = (1, 0)
env['wsgi.url_scheme'] = 'http'
env['wsgi.input'] = StringIO.StringIO(self.request_data)
env['wsgi.errors'] = sys.stderr
env['wsgi.multithread'] = False
env['wsgi.multiprocess'] = False
env['wsgi.run_once'] = False
# Required CGI variables
env['REQUEST_METHOD'] = self.request_method # GET
env['PATH_INFO'] = self.path # /hello
env['SERVER_NAME'] = self.server_name # localhost
env['SERVER_PORT'] = str(self.server_port) # 8888
return env
def start_response(self, status, response_headers, exc_info=None):
# Add necessary server headers
server_headers = [
('Date', 'Tue, 31 Mar 2015 12:54:48 GMT'),
('Server', 'WSGIServer 0.2'),
]
self.headers_set = [status, response_headers + server_headers]
# To adhere to WSGI specification the start_response must return
        # a 'write' callable. For simplicity's sake we'll ignore that detail
# for now.
# return self.finish_response
def finish_response(self, result):
try:
status, response_headers = self.headers_set
response = 'HTTP/1.1 {status}\r\n'.format(status=status)
for header in response_headers:
response += '{0}: {1}\r\n'.format(*header)
response += '\r\n'
for data in result:
response += data
# Print formatted response data a la 'curl -v'
print(''.join(
'> {line}\n'.format(line=line)
for line in response.splitlines()
))
self.client_connection.sendall(response)
finally:
            self.client_connection.close()
SERVER_ADDRESS = (HOST, PORT) = '', 8888
def make_server(server_address, application):
signal.signal(signal.SIGCHLD, grim_reaper)
    server = WSGIServer(server_address)
server.set_app(application)
return server
if __name__ == '__main__':
if len(sys.argv) < 2:
sys.exit('Provide a WSGI application object as module:callable')
app_path = sys.argv[1]
module, application = app_path.split(':')
module = __import__(module)
application = getattr(module, application)
httpd = make_server(SERVER_ADDRESS, application)
print('WSGIServer: Serving HTTP on port {port} ...\n'.format(port=PORT))
httpd.serve_forever()
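# Illustrative usage sketch (not part of the original file): a minimal WSGI
# callable to serve with this server. Saved as e.g. wsgiapp.py, it would be
# started with `python server-again.py wsgiapp:app`.
# def app(environ, start_response):
#     start_response('200 OK', [('Content-Type', 'text/plain')])
#     return ['Hello from a forked worker!\n']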
|
logandk/serverless-wsgi
|
setup.py
|
Python
|
mit
| 867 | 0.001153 |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="serverless-wsgi",
version="3.0.0",
python_requires=">3.6",
author="Logan Raarup",
author_email="logan@logan.dk",
description="Amazon AWS API Gateway WSGI wrapper",
long_description=long_description,
    long_description_content_type="text/markdown",
url="https://github.com/logandk/serverless-wsgi",
py_modules=["serverless_wsgi"],
install_requires=["werkzeug>2"],
classifiers=(
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"License :: OSI Ap
|
proved :: MIT License",
"Operating System :: OS Independent",
),
keywords="wsgi serverless aws lambda api gateway apigw flask django pyramid",
)
|
lmiccini/sos
|
sos/plugins/dpkg.py
|
Python
|
gpl-2.0
| 1,590 | 0 |
# Copyright (c) 2012 Adam Stokes <adam.stokes@canonical.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, DebianPlugin, UbuntuPlugin
class Dpkg(Plugin, DebianPlugin, UbuntuPlugin):
"""Debian Package Management
"""
plugin_name = 'dpkg'
profiles = ('sysmgmt', 'packagemanager')
def setup(self):
self.add_cmd_output("dpkg -l", root_symlink="installed-debs")
if self.get_option("verify"):
self.add_cmd_output("dpkg -V")
self.add_cmd_output("dpkg -C")
self.add_copy_spec([
"/var/cache/debconf/config.dat",
"/etc/debconf.conf"
])
if not self.get_option("all_logs"):
limit = self.get_option("log_size")
self.add_copy_spec_limit("/var/log/dpkg.log",
sizelimit=limit)
else:
self.add_copy_spec("/var/log/dpkg.log*")
# vim: et ts=4 sw=4
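# Illustrative note (not part of the original file): with sos installed this
# plugin is exercised via the standard CLI, e.g.
#   sosreport -o dpkg -k dpkg.verify=on
# which triggers setup() with the "verify" option enabled.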
|
JPO1/python-docs-samples
|
appengine/ndb/modeling/relation_model_models_test.py
|
Python
|
apache-2.0
| 2,299 | 0 |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes for code snippet for modeling article."""
from google.appengine.ext import ndb
from tests import AppEngineTestbedCase
from . import relation_model_models as models
class ContactTestCase(AppEngineTestbedCase):
"""A test case for the Contact model with relationship model."""
def setUp(self):
"""Creates 1 contact and 1 company.
Assuming the contact belongs to tmatsuo's addressbook.
"""
super(ContactTestCase, self).setUp()
self.myaddressbook_key = ndb.Key('AddressBook', 'tmatsuo')
mary = models.Contact(parent=self.myaddressbook_key, name='Mary')
mary.put()
self.mary_key = mary.key
google = models.Company(name='Google')
google.put()
self.google_key = google.key
candit = models.Company(name='Candit')
candit.put()
self.candit_key = candit.key
def test_relationship(self):
"""Two companies hire Mary."""
mary = self.mary_key.get()
google = self.google_key.get()
candit = self.candit_key.get()
# first google hires Mary
models.ContactCompany(parent=self.myaddressbook_key,
contact=mary.key,
company=google.key,
title='engineer').put()
# then another company named 'candit' hires Mary too
models.ContactCompany(parent=self.myaddressbook_key,
contact=mary.key,
company=candit.key,
title='president').put()
# get the list of companies that Mary belongs to
self.assertEqual(len(mary.companies), 2)
|
danielbair/aeneas
|
aeneas/validator.py
|
Python
|
agpl-3.0
| 28,502 | 0.001193 |
#!/usr/bin/env python
# coding=utf-8
# aeneas is a Python/C library and a set of tools
# to automagically synchronize audio and text (aka forced alignment)
#
# Copyright (C) 2012-2013, Alberto Pettarin (www.albertopettarin.it)
# Copyright (C) 2013-2015, ReadBeyond Srl (www.readbeyond.it)
# Copyright (C) 2015-2017, Alberto Pettarin (www.albertopettarin.it)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module contains the following classes:
* :class:`~aeneas.validator.Validator`, assessing whether user input is well-formed;
* :class:`~aeneas.validator.ValidatorResult`, a record holding validation result and possibly messages.
"""
from __future__ import absolute_import
from __future__ import print_function
import io
from aeneas.analyzecontainer import AnalyzeContainer
from aeneas.container import Container
from aeneas.container import ContainerFormat
from aeneas.executetask import AdjustBoundaryAlgorithm
from aeneas.hierarchytype import HierarchyType
from aeneas.idsortingalgorithm import IDSortingAlgorithm
from aeneas.logger import Loggable
from aeneas.runtimeconfiguration import RuntimeConfiguration
from aeneas.syncmap import SyncMapFormat
from aeneas.syncmap import SyncMapHeadTailFormat
from aeneas.textfile import TextFileFormat
import aeneas.globalconstants as gc
import aeneas.globalfunctions as gf
class Validator(Loggable):
"""
A validator to assess
|
whether user input is well
|
-formed.
:param rconf: a runtime configuration
:type rconf: :class:`~aeneas.runtimeconfiguration.RuntimeConfiguration`
:param logger: the logger object
:type logger: :class:`~aeneas.logger.Logger`
"""
ALLOWED_VALUES = [
#
# NOTE disabling the check on language since now we support multiple TTS
# COMMENTED (
# COMMENTED gc.PPN_JOB_LANGUAGE,
# COMMENTED Language.ALLOWED_VALUES
# COMMENTED ),
# COMMENTED (
# COMMENTED gc.PPN_TASK_LANGUAGE,
# COMMENTED Language.ALLOWED_VALUES
# COMMENTED ),
#
(
gc.PPN_JOB_IS_HIERARCHY_TYPE,
HierarchyType.ALLOWED_VALUES
),
(
gc.PPN_JOB_OS_CONTAINER_FORMAT,
ContainerFormat.ALLOWED_VALUES
),
(
gc.PPN_JOB_OS_HIERARCHY_TYPE,
HierarchyType.ALLOWED_VALUES
),
(
gc.PPN_TASK_IS_TEXT_FILE_FORMAT,
TextFileFormat.ALLOWED_VALUES
),
(
gc.PPN_TASK_OS_FILE_FORMAT,
SyncMapFormat.ALLOWED_VALUES
),
(
gc.PPN_TASK_IS_TEXT_UNPARSED_ID_SORT,
IDSortingAlgorithm.ALLOWED_VALUES
),
(
gc.PPN_TASK_ADJUST_BOUNDARY_ALGORITHM,
AdjustBoundaryAlgorithm.ALLOWED_VALUES
),
(
gc.PPN_TASK_OS_FILE_HEAD_TAIL_FORMAT,
SyncMapHeadTailFormat.ALLOWED_VALUES
)
]
IMPLIED_PARAMETERS = [
(
# is_hierarchy_type=paged => is_task_dir_name_regex
gc.PPN_JOB_IS_HIERARCHY_TYPE,
[HierarchyType.PAGED],
[gc.PPN_JOB_IS_TASK_DIRECTORY_NAME_REGEX]
),
(
# is_text_type=unparsed => is_text_unparsed_id_sort
gc.PPN_TASK_IS_TEXT_FILE_FORMAT,
[TextFileFormat.UNPARSED],
[gc.PPN_TASK_IS_TEXT_UNPARSED_ID_SORT]
),
(
# is_text_type=munparsed => is_text_munparsed_l1_id_regex
gc.PPN_TASK_IS_TEXT_FILE_FORMAT,
[TextFileFormat.MUNPARSED],
[gc.PPN_TASK_IS_TEXT_MUNPARSED_L1_ID_REGEX]
),
(
# is_text_type=munparsed => is_text_munparsed_l2_id_regex
gc.PPN_TASK_IS_TEXT_FILE_FORMAT,
[TextFileFormat.MUNPARSED],
[gc.PPN_TASK_IS_TEXT_MUNPARSED_L2_ID_REGEX]
),
(
# is_text_type=munparsed => is_text_munparsed_l3_id_regex
gc.PPN_TASK_IS_TEXT_FILE_FORMAT,
[TextFileFormat.MUNPARSED],
[gc.PPN_TASK_IS_TEXT_MUNPARSED_L3_ID_REGEX]
),
(
# is_text_type=unparsed => is_text_unparsed_class_regex or
# is_text_unparsed_id_regex
gc.PPN_TASK_IS_TEXT_FILE_FORMAT,
[TextFileFormat.UNPARSED],
[
gc.PPN_TASK_IS_TEXT_UNPARSED_CLASS_REGEX,
gc.PPN_TASK_IS_TEXT_UNPARSED_ID_REGEX
]
),
(
# os_task_file_format=smil => os_task_file_smil_audio_ref
# os_task_file_format=smilh => os_task_file_smil_audio_ref
# os_task_file_format=smilm => os_task_file_smil_audio_ref
gc.PPN_TASK_OS_FILE_FORMAT,
[SyncMapFormat.SMIL, SyncMapFormat.SMILH, SyncMapFormat.SMILM],
[gc.PPN_TASK_OS_FILE_SMIL_AUDIO_REF]
),
(
# os_task_file_format=smil => os_task_file_smil_page_ref
# os_task_file_format=smilh => os_task_file_smil_page_ref
# os_task_file_format=smilm => os_task_file_smil_page_ref
gc.PPN_TASK_OS_FILE_FORMAT,
[SyncMapFormat.SMIL, SyncMapFormat.SMILH, SyncMapFormat.SMILM],
[gc.PPN_TASK_OS_FILE_SMIL_PAGE_REF]
),
(
# task_adjust_boundary_algorithm=percent => task_adjust_boundary_percent_value
gc.PPN_TASK_ADJUST_BOUNDARY_ALGORITHM,
[AdjustBoundaryAlgorithm.PERCENT],
[gc.PPN_TASK_ADJUST_BOUNDARY_PERCENT_VALUE]
),
(
# task_adjust_boundary_algorithm=rate => task_adjust_boundary_rate_value
gc.PPN_TASK_ADJUST_BOUNDARY_ALGORITHM,
[AdjustBoundaryAlgorithm.RATE],
[gc.PPN_TASK_ADJUST_BOUNDARY_RATE_VALUE]
),
(
# task_adjust_boundary_algorithm=rate_aggressive => task_adjust_boundary_rate_value
gc.PPN_TASK_ADJUST_BOUNDARY_ALGORITHM,
[AdjustBoundaryAlgorithm.RATEAGGRESSIVE],
[gc.PPN_TASK_ADJUST_BOUNDARY_RATE_VALUE]
),
(
# task_adjust_boundary_algorithm=currentend => task_adjust_boundary_currentend_value
gc.PPN_TASK_ADJUST_BOUNDARY_ALGORITHM,
[AdjustBoundaryAlgorithm.AFTERCURRENT],
[gc.PPN_TASK_ADJUST_BOUNDARY_AFTERCURRENT_VALUE]
),
(
# task_adjust_boundary_algorithm=rate => task_adjust_boundary_nextstart_value
gc.PPN_TASK_ADJUST_BOUNDARY_ALGORITHM,
[AdjustBoundaryAlgorithm.BEFORENEXT],
[gc.PPN_TASK_ADJUST_BOUNDARY_BEFORENEXT_VALUE]
),
(
# task_adjust_boundary_algorithm=offset => task_adjust_boundary_offset_value
gc.PPN_TASK_ADJUST_BOUNDARY_ALGORITHM,
[AdjustBoundaryAlgorithm.OFFSET],
[gc.PPN_TASK_ADJUST_BOUNDARY_OFFSET_VALUE]
)
]
JOB_REQUIRED_PARAMETERS = [
gc.PPN_JOB_LANGUAGE,
gc.PPN_JOB_OS_CONTAINER_FORMAT,
gc.PPN_JOB_OS_FILE_NAME,
]
TASK_REQUIRED_PARAMETERS = [
gc.PPN_TASK_IS_TEXT_FILE_FORMAT,
gc.PPN_TASK_LANGUAGE,
gc.PPN_TASK_OS_FILE_FORMAT,
gc.PPN_TASK_OS_FILE_NAME,
]
TASK_REQUIRED_PARAMETERS_EXTERNAL_NAME = [
gc.PPN_TASK_IS_TEXT_FILE_FORMAT,
gc.PPN_TASK_LANGUAGE,
gc.PPN_TASK_OS_FILE_FORMAT,
]
TXT_REQUIRED_PARAMETERS = [
gc.PPN_JOB_IS_AUDIO_FILE_NAME_REGEX,
gc.PPN_JOB_IS_AUDIO_FILE_RELATIVE_PATH,
gc.PPN_JOB_IS_HIERARCHY_PREFIX,
gc.PPN_JOB_IS_HIERARCH
|
ccrisan/motioneye
|
motioneye/diskctl.py
|
Python
|
gpl-3.0
| 7,702 | 0.006492 |
# Copyright (c) 2013 Calin Crisan
# This file is part of motionEye.
#
# motionEye is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import re
import subprocess
import utils
def _list_mounts():
logging.debug('listing mounts...')
seen_targets = set()
mounts = []
with open('/proc/mounts', 'r') as f:
for line in f:
line = line.strip()
if not line:
continue
parts = line.split()
if len(parts) < 4:
continue
target = parts[0]
mount_point = parts[1]
fstype = parts[2]
opts = parts[3]
if not os.access(mount_point, os.W_OK):
continue
if target in seen_targets:
continue # probably a bind mount
seen_targets.add(target)
if fstype == 'fuseblk':
fstype = 'ntfs' # most likely
logging.debug('found mount "%s" at "%s"' % (target, mount_point))
mounts.append({
'target': target,
'mount_point': mount_point,
'fstype': fstype,
'opts': opts,
})
return mounts
def _list_disks():
if os.path.exists('/dev/disk/by-id/'):
return _list_disks_dev_by_id()
else: # fall back to fdisk -l
return _list_disks_fdisk()
def _list_disks_dev_by_id():
logging.debug('listing disks using /dev/disk/by-id/')
disks_by_dev = {}
partitions_by_dev = {}
for entry in os.listdir('/dev/disk/by-id/'):
parts = entry.split('-', 1)
if len(parts) < 2:
continue
target = os.path.realpath(os.path.join('/dev/disk/by-id/', entry))
bus, entry = parts
m = re.search('-part(\d+)$', entry)
if m:
part_no = int(m.group(1))
entry = re.sub('-part\d+$', '', entry)
else:
part_no = None
parts = entry.split('_')
if len(parts) < 2:
vendor = parts[0]
model = ''
else:
vendor, model = parts[:2]
if part_no is not None:
logging.debug('found partition "%s" at "%s" on bus "%s": "%s %s"' % (part_no, target, bus, vendor, model))
partitions_by_dev[target] = {
'target': target,
'bus': bus,
'vendor': vendor,
'model': model,
'part_no': part_no,
'unmatched': True
}
else:
logging.debug('found disk at "%s" on bus "%s": "%s %s"' % (target, bus, vendor, model))
disks_by_dev[target] = {
'target': target,
'bus': bus,
'vendor': vendor,
'model': model,
'partitions': []
}
# group partitions by disk
for dev, partition in partitions_by_dev.items():
for disk_dev, disk in disks_by_dev.items():
if dev.startswith(disk_dev):
disk['partitions'].append(partition)
partition.pop('unmatched', None)
# add separate partitions that did not match any disk
for partition in partitions_by_dev.values():
if partition.pop('unmatched', False):
disks_by_dev[partition['target']] = partition
partition['partitions'] = [dict(partition)]
    # prepare flat list of disks
disks = disks_by_dev.values()
disks.sort(key=lambda d: d['vendor'])
for disk in disks:
disk['partitions'].sort(key=lambda p: p['part_no'])
return disks
def _list_disks_fdisk():
try:
output = subprocess.check_output(['fdisk', '-l'], stderr=utils.DEV_NULL)
except Exception as e:
logging.error('failed to list disks using "fdisk -l": %s' % e, exc_info=True)
return []
disks = []
disk = None
def add_disk(d):
logging.debug('found disk at "%s" on bus "%s": "%s %s"' %
(d['target'], d['bus'], d['vendor'], d['model']))
for part in d['partitions']:
logging.debug('found partition "%s" at "%s" on bus "%s": "%s %s"' %
(part['part_no'], part['target'], part['bus'], part['vendor'], part['model']))
disks.append(d)
for line in output.split('\n'):
line = line.replace('*', '')
line = re.sub('\s+', ' ', line.strip())
if not line:
continue
if line.startswith('Disk /dev/'):
if disk and disk['partitions']:
add_disk(disk)
parts = line.split()
disk = {
'target': parts[1].strip(':'),
'bus': '',
'vendor': '',
'model': parts[2] + ' ' + parts[3].strip(','),
'partitions': []
}
elif line.startswith('/dev/') and disk:
parts = line.split()
part_no = re.findall('\d+$', parts[0])
partition = {
'part_no': int(part_no[0]) if part_no else None,
'target': parts[0],
'bus': '',
'vendor': '',
'model': parts[4] + ' ' + ' '.join(parts[6:]),
}
disk['partitions'].append(partition)
if disk and disk['partitions']:
add_disk(disk)
disks.sort(key=lambda d: d['target'])
for disk in disks:
disk['partitions'].sort(key=lambda p: p['part_no'])
return disks
def list_mounted_disks():
mounted_disks = []
try:
disks = _list_disks()
mounts_by_target = dict((m['target'], m) for m in _list_mounts())
for disk in disks:
for partition in disk['partitions']:
mount = mounts_by_target.get(partition['target'])
if mount:
partition.update(mount)
# filter out unmounted partitions
disk['partitions'] = [p for p in disk['partitions'] if p.get('mount_point')]
# filter out unmounted disks
mounted_disks = [d for d in disks if d['partitions']]
except Exception as e:
logging.error('failed to list mounted disks: %s' % e, exc_info=True)
return mounted_disks
def list_mounted_partitions():
mounted_partitions = {}
try:
disks = _list_disks()
mounts_by_target = dict((m['target'], m) for m in _list_mounts())
for disk in disks:
for partition in disk['partitions']:
mount = mounts_by_target.get(partition['target'])
if mount:
partition.update(mount)
mounted_partitions[partition['target']] = partition
except Exception as e:
logging.error('failed to list mounted partitions: %s' % e, exc_info=True)
return mounted_partitions
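# Illustrative usage sketch (not part of the original file): print the mount
# point of every mounted partition found by the helpers above.
# for target, part in list_mounted_partitions().items():
#     print('%s mounted at %s' % (target, part['mount_point']))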
|
sourlows/rating-cruncher
|
src/lib/click/exceptions.py
|
Python
|
apache-2.0
| 6,390 | 0.000156 |
from ._compat import PY2, filename_to_ui, get_text_stderr
from .utils import echo
class ClickException(Exception):
"""An exception that Click can handle and show to the user."""
#: The exit code for this exception
exit_code = 1
def __init__(self, message):
if PY2:
if message is not None:
message = message.encode('utf-8')
Exception.__init__(self, message)
self.message = message
def format_message(self):
return self.message
def show(self, file=None):
if file is None:
file = get_text_stderr()
echo('Error: %s' % self.format_message(), file=file)
class UsageError(ClickException):
"""An internal exception that signals a usage error. This typically
aborts any further handling.
:param message: the error message to display.
:param ctx: optionally the context that caused this error. Click will
fill in the context automatically in some situations.
"""
exit_code = 2
def __init__(self, message, ctx=None):
ClickException.__init__(self, message)
self.ctx = ctx
def show(self, file=None):
if file is None:
file = get_text_stderr()
color = None
if self.ctx is not None:
color = self.ctx.color
echo(self.ctx.get_usage() + '\n', file=file, color=color)
echo('Error: %s' % self.format_message(), file=file, color=color)
class BadParameter(UsageError):
"""An exception that formats out a standardized error message for a
bad parameter. This is useful when thrown from a callback or type as
Click will attach contextual information to it (for instance, which
parameter it is).
.. versionadded:: 2.0
:param param: the parameter object that caused this error. This can
be left out, and Click will attach this info itself
if possible.
:param param_hint: a string that shows up as parameter name. This
can be used as alternative to `param` in cases
where custom validation should happen. If it is
a string it's used as such, if it's a list then
each item is quoted and separated.
"""
def __init__(self, message, ctx=None, param=None,
param_hint=None):
UsageError.__init__(self, message, ctx)
self.param = param
self.param_hint = param_hint
def format_message(self):
if self.param_hint is not None:
param_hint = self.param_hint
elif self.param is not None:
param_hint = self.param.opts or [self.param.human_readable_name]
else:
return 'Invalid value: %s' % self.message
if isinstance(param_hint, (tuple, list)):
param_hint = ' / '.join('"%s"' % x for x in param_hint)
return 'Invalid value for %s: %s' % (param_hint, self.message)
class MissingParameter(BadParameter):
"""Raised if click required an option or argument but it was not
provided when invoking the script.
.. versionadded:: 4.0
:param param_type: a string that indicates the type of the parameter.
The default is to inherit the parameter type from
the given `param`. Valid values are ``'parameter'``,
``'option'`` or ``'argument'``.
"""
def __init__(self, message=None, ctx=None, param=None,
param_hint=None, param_type=None):
BadParameter.__init__(self, message, ctx, param, param_hint)
self.param_type = param_type
def format_message(self):
if self.param_hint is not None:
param_hint = self.param_hint
elif self.param is not None:
param_hint = self.param.opts or [self.param.human_readable_name]
else:
param_hint = None
if isinstance(param_hint, (tuple, list)):
param_hint = ' / '.join('"%s"' % x for x in param_hint)
param_type = self.param_type
if param_type is None and self.param is not None:
param_type = self.param.param_type_name
msg = self.message
msg_extra = self.param.type.get_missing_message(self.param)
if msg_extra:
if msg:
msg += '. ' + msg_extra
else:
msg = msg_extra
return 'Missing %s%s%s%s' % (
param_type,
param_hint and ' %s' % param_hint or '',
msg and '. ' or '.',
msg or '',
)
class NoSuchOption(UsageError):
"""Raised if click attempted to handle an option that does not
exist.
.. versionadded:: 4.0
"""
def __init__(self, option_name, message=None, possibilities=None,
ctx=None):
if message is None:
message = 'no such option: %s' % option_name
UsageError.__init__(self, message, ctx)
self.option_name = option_name
self.possibilities = possibilities
def format_message(self):
bits = [self.message]
if self.possibilities:
if len(self.possibilities) == 1:
bits.append('Did you mean %s?' % self.possibilities[0])
else:
possibilities = sorted(self.possibilities)
bits.append('(Possible options: %s)' % ', '.join(possibilities))
return ' '.join(bits)
class BadOptionUsage(UsageError):
"""Raised if an option is generally supplied but the use of the option
was incorrect. This is for instance raised if the number of arguments
for an option is not correct.
.. versionadded:: 4.0
"""
def __init__(self, option_name, message, ctx=None):
UsageError.__init__(self, message, ctx)
class FileError(ClickException):
"""Raised if a file cannot be opened."""
def __init__(self, filename, hint=None):
ui_filename = filename_to_ui(filename)
if hint is None:
hint = 'unknown error'
ClickException.__init__(self, hint)
self.ui_filename = ui_filename
self.filename = filename
def format_message(self):
return 'Could not open file %s: %s' % (self.ui_filename, self.message)
class Abort(RuntimeError):
"""An internal signalling exception that signals Click to abort."""
|
mistalaba/cookiecutter-django
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/models.py
|
Python
|
bsd-3-clause
| 466 | 0 |
from django.contrib.auth.models import AbstractUser
from django.db.models import CharField
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
class User(AbstractUser):
# First Name and Last Name do not cover name patterns
# around the globe.
name = CharField(_("Name of User"), blank=True, max_length=255)
def get_absolute_url(self):
return reverse("users:detail", kwargs={"username": self.username})
|
shub0/algorithm-data-structure
|
python/sum_roof_to_leaf.py
|
Python
|
bsd-3-clause
| 1,428 | 0.002101 |
#! /usr/bin/python
'''
Given a binary tree containing digits from 0-9 only, each root-to-leaf path could represent a number.
An example is the root-to-leaf path 1->2->3 which represents the number 123.
Find the total sum of all root-to-leaf numbers.
For example,
1
/ \
2 3
The root-to-leaf path 1->2 represents the number 12.
The root-to-leaf path 1->3 represents the number 13.
Return the sum = 12 + 13 = 25.
'''
from node_struct import TreeNode
class Solution:
def leafNode(self, root):
if not root.left and not root.right:
return True
return False
def inOrderTraversal(self, root, currentPath, path):
if not root:
return
# visit()
currentPath = 10 * currentPath + root.val
if self.leafNode(root):
path.append(currentPath)
else:
self.inOrderTraversal(root.left, currentPath, path)
self.inOrderTraversal(root.right, currentPath, path)
# @param root, a tree node
# @return an integer
def sumNumbers(self, root):
path = list()
self.inOrderTraversal(root, 0, path)
return sum(path)
if __name__ == '__main__':
solution = Solution()
root = TreeNode(1)
root.left = TreeNode(2)
root.right = TreeNode(3)
root.left.right = TreeNode(4)
root.left.left = TreeNode(5)
print solution.sumNumbers(root)
print solution.sumNumbers(None)
|
gabriel-samfira/jrunner
|
jrunner/jobqueue/workers/__init__.py
|
Python
|
apache-2.0
| 1,084 | 0 |
# Copyright 2013 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
from jrunner.common import *
from oslo.config import cfg
opts = [
cfg.StrOpt(
'worker_module',
default='jrunner.jobqueue.workers.simple',
help='Worker module'),
]
CONF = cfg.CONF
CONF.register_opts(opts, 'jobqueue')
def get_worker_module():
try:
return importlib.import_module(CONF.jobqueue.worker_module)
except Exception as err:
LOG.exception(err)
raise Exception("Failed to import worker module")
|
wasit7/PythonDay
|
django/mysite2/mysite2/settings.py
|
Python
|
bsd-3-clause
| 3,183 | 0.001257 |
"""
Django settings for mysite2 project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'f@d3+wz7y8uj!+alcvc!6du++db!-3jh6=vr(%z(e^2n5_fml-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'myauthen',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite2.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite2.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
endlessm/chromium-browser
|
tools/swarming_client/third_party/httplib2/python3/httplib2/iri2uri.py
|
Python
|
bsd-3-clause
| 4,153 | 0.000963 |
# -*- coding: utf-8 -*-
"""Converts an IRI to a URI."""
__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = []
__version__ = "1.0.0"
__license__ = "MIT"
import urllib.parse
# Convert an IRI to a URI following the rules in RFC 3987
#
# The characters we need to encode and escape are defined in the spec:
#
# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
# / %xD0000-DFFFD / %xE1000-EFFFD
escape_range = [
(0xA0, 0xD7FF),
(0xE000, 0xF8FF),
(0xF900, 0xFDCF),
(0xFDF0, 0xFFEF),
(0x10000, 0x1FFFD),
(0x20000, 0x2FFFD),
(0x30000, 0x3FFFD),
(0x40000, 0x4FFFD),
(0x50000, 0x5FFFD),
(0x60000, 0x6FFFD),
(0x70000, 0x7FFFD),
(0x80000, 0x8FFFD),
(0x90000, 0x9FFFD),
(0xA0000, 0xAFFFD),
(0xB0000, 0xBFFFD),
(0xC0000, 0xCFFFD),
(0xD0000, 0xDFFFD),
(0xE1000, 0xEFFFD),
(0xF0000, 0xFFFFD),
(0x100000, 0x10FFFD),
]
def encode(c):
retval = c
i = ord(c)
for low, high in escape_range:
if i < low:
break
if i >= low and i <= high:
retval = "".join(["%%%2X" % o for o in c.encode("utf-8")])
break
return retval
def iri2uri(uri):
"""Convert an IRI to a URI. Note that IRIs must be
passed in a unicode strings. That is, do not utf-8 encode
the IRI before passing it into the function."""
if isinstance(uri, str):
(scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri)
authority = authority.encode("idna").decode("utf-8")
# For each character in 'ucschar' or 'iprivate'
# 1. encode as utf-8
# 2. then %-encode each octet of that utf-8
uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment))
uri = "".join([encode(c) for c in uri])
return uri
if __name__ == "__main__":
import unittest
class Test(unittest.TestCase):
def test_uris(self):
"""Test that URIs are invariant under the transformation."""
invariant = [
"ftp://ftp.is.co.za/
|
rfc/rfc1808.txt",
"http://www.ietf.org/rfc/rfc2396.txt",
"ldap://
|
[2001:db8::7]/c=GB?objectClass?one",
"mailto:John.Doe@example.com",
"news:comp.infosystems.www.servers.unix",
"tel:+1-816-555-1212",
"telnet://192.0.2.16:80/",
"urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
]
for uri in invariant:
self.assertEqual(uri, iri2uri(uri))
def test_iri(self):
"""Test that the right type of escaping is done for each part of the URI."""
self.assertEqual(
"http://xn--o3h.com/%E2%98%84",
iri2uri("http://\N{COMET}.com/\N{COMET}"),
)
self.assertEqual(
"http://bitworking.org/?fred=%E2%98%84",
iri2uri("http://bitworking.org/?fred=\N{COMET}"),
)
self.assertEqual(
"http://bitworking.org/#%E2%98%84",
iri2uri("http://bitworking.org/#\N{COMET}"),
)
self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}"))
self.assertEqual(
"/fred?bar=%E2%98%9A#%E2%98%84",
iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"),
)
self.assertEqual(
"/fred?bar=%E2%98%9A#%E2%98%84",
iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")),
)
self.assertNotEqual(
"/fred?bar=%E2%98%9A#%E2%98%84",
iri2uri(
"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode("utf-8")
),
)
unittest.main()
|
gygcnc/gygcnc
|
gygcnc/image_gallery/admin.py
|
Python
|
bsd-3-clause
| 255 | 0.003922 |
from django.contrib import admin
from .models import Gallery, Photo
class PhotoInline(admin.StackedInline):
model = Photo
extra = 1
class GalleryAdmin(admin.ModelAdmin):
inlines = [PhotoInline]
admin.site.register(Gallery, GalleryAdmin)
|
tmc/mutter
|
mutter/notifiers.py
|
Python
|
bsd-2-clause
| 1,358 | 0.003682 |
import os
import platform
import subprocess
class SoundPlayer:
"""Simple audio file player, invokes afplay on macs, mpg123 otherwise."""
def __init__(self):
self.basedir = os.path.dirname(__file__)
self.sounds = {
'startup': 'run.mp3',
'shutdown': 'quit.mp3',
'run': 'run_command.mp3',
'red': 'red.mp3',
'green': 'green.mp3'
}
self.player = 'mpg123'
if platform.system() == 'Darwin':
self.player = 'afplay'
def play(self, name):
if name not in self.sounds:
print 'sound "%s" not found in mapping' % name
sound_file = os.path.join(self.basedir, 'sounds', self.sounds[name])
subprocess.call([self.player, sound_file], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
class BaseNotifier(object):
"Notifiers need on
|
ly to implement notify"
def notify(self, event):
raise NotImplementedError()
class TextNotifier(object):
"Basic text notifier"
def notify(self, event):
print 'Notify: ', event
class SoundNotifier(object):
"Simple notifier that uses SoundPlayer"
def __init__(self):
self.player = SoundPlayer()
def notify(self, event):
print 'mutter: %s' % event
if event in self.player.sounds:
self.player.play(event)
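# Illustrative usage sketch (not part of the original file):
# notifier = SoundNotifier()
# notifier.notify('green')  # prints the event and plays green.mp3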
|
rg3/youtube-dl
|
youtube_dl/extractor/toggle.py
|
Python
|
unlicense
| 8,970 | 0.0019 |
# coding: utf-8
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..utils import (
determine_ext,
ExtractorError,
float_or_none,
int_or_none,
parse_iso8601,
strip_or_none,
)
class ToggleIE(InfoExtractor):
IE_NAME = 'toggle'
_VALID_URL = r'(?:https?://(?:(?:www\.)?mewatch|video\.toggle)\.sg/(?:en|zh)/(?:[^/]+/){2,}|toggle:)(?P<id>[0-9]+)'
_TESTS = [{
'url': 'http://www.mewatch.sg/en/series/lion-moms-tif/trailers/lion-moms-premier/343115',
'info_dict': {
'id': '343115',
'ext': 'mp4',
'title': 'Lion Moms Premiere',
'description': 'md5:aea1149404bff4d7f7b6da11fafd8e6b',
'upload_date': '20150910',
'timestamp': 1441858274,
},
'params': {
'skip_download': 'm3u8 download',
}
}, {
'note': 'DRM-protected video',
'url': 'http://www.mewatch.sg/en/movies/dug-s-special-mission/341413',
'info_dict': {
'id': '341413',
'ext': 'wvm',
'title': 'Dug\'s Special Mission',
'description': 'md5:e86c6f4458214905c1772398fabc93e0',
'upload_date': '20150827',
'timestamp': 1440644006,
},
'params': {
'skip_download': 'DRM-protected wvm download',
}
}, {
# this also tests correct video id extraction
'note': 'm3u8 links are geo-restricted, but Android/mp4 is okay',
'url': 'http://www.mewatch.sg/en/series/28th-sea-games-5-show/28th-sea-games-5-show-ep11/332861',
'info_dict': {
'id': '332861',
'ext': 'mp4',
'title': '28th SEA Games (5 Show) - Episode 11',
'description': 'md5:3cd4f5f56c7c3b1340c50a863f896faa',
'upload_date': '20150605',
'timestamp': 1433480166,
},
'params': {
'skip_download': 'DRM-protected wvm download',
},
'skip': 'm3u8 links are geo-restricted'
}, {
'url': 'http://video.toggle.sg/en/clips/seraph-sun-aloysius-will-suddenly-sing-some-old-songs-in-high-pitch-on-set/343331',
'only_matching': True,
}, {
'url': 'http://www.mewatch.sg/en/clips/seraph-sun-aloysius-will-suddenly-sing-some-old-songs-in-high-pitch-on-set/343331',
'only_matching': True,
}, {
'url': 'http://www.mewatch.sg/zh/series/zero-calling-s2-hd/ep13/336367',
'only_matching': True,
}, {
'url': 'http://www.mewatch.sg/en/series/vetri-s2/webisodes/jeeva-is-an-orphan-vetri-s2-webisode-7/342302',
'only_matching': True,
}, {
'url': 'http://www.mewatch.sg/en/movies/seven-days/321936',
'only_matching': True,
}, {
'url': 'https://www.mewatch.sg/en/tv-show/news/may-2017-cna-singapore-tonight/fri-19-may-2017/512456',
'only_matching': True,
}, {
'url': 'http://www.mewatch.sg/en/channels/eleven-plus/401585',
'only_matching': True,
}]
_API_USER = 'tvpapi_147'
_API_PASS = '11111'
def _real_extract(self, url):
video_id = self._match_id(url)
params = {
'initObj': {
'Locale': {
'LocaleLanguage': '',
'LocaleCountry': '',
'LocaleDevice': '',
'LocaleUserState': 0
},
'Platform': 0,
'SiteGuid': 0,
'DomainID': '0',
'UDID': '',
'ApiUser': self._API_USER,
'ApiPass': self._API_PASS
},
'MediaID': video_id,
'mediaType': 0,
}
info = self._download_json(
'http://tvpapi.as.tvinci.com/v2_9/gateways/jsonpostgw.aspx?m=GetMediaInfo',
video_id, 'Downloading video info json', data=json.dumps(params).encode('utf-8'))
title = info['MediaName']
formats = []
for video_file in info.get('Files', []):
video_url, vid_format = video_file.get('URL'), video_file.get('Format')
if not video_url or video_url == 'NA' or not vid_format:
continue
ext = determine_ext(video_url)
vid_format = vid_format.replace(' ', '')
# if geo-restricted, m3u8 is inaccessible, but mp4 is okay
if ext == 'm3u8':
m3u8_formats = self._extract_m3u8_formats(
video_url, video_id, ext='mp4', m3u8_id=vid_format,
note='Downloading %s m3u8 information' % vid_format,
errnote='Failed to download %s m3u8 information'
|
% vid_format,
fatal=False)
for f in m3u8_formats:
# Apple FairPlay Streaming
if '/fpshls/' in f['url']:
continue
formats.append(f)
elif ext == 'mpd':
formats.extend(self._extract_mpd_formats(
video_url, video_id, mpd_id=vid_format,
note='Downloading %s MPD manifest' % vid_format,
errnote='Failed to download %s MPD manifest' % vid_format,
fatal=False))
elif ext == 'ism':
formats.extend(self._extract_ism_formats(
video_url, video_id, ism_id=vid_format,
note='Downloading %s ISM manifest' % vid_format,
errnote='Failed to download %s ISM manifest' % vid_format,
fatal=False))
elif ext == 'mp4':
formats.append({
'ext': ext,
'url': video_url,
'format_id': vid_format,
})
if not formats:
for meta in (info.get('Metas') or []):
if meta.get('Key') == 'Encryption' and meta.get('Value') == '1':
raise ExtractorError(
'This video is DRM protected.', expected=True)
# Most likely because geo-blocked
raise ExtractorError('No downloadable videos found', expected=True)
self._sort_formats(formats)
thumbnails = []
for picture in info.get('Pictures', []):
if not isinstance(picture, dict):
continue
pic_url = picture.get('URL')
if not pic_url:
continue
thumbnail = {
'url': pic_url,
}
pic_size = picture.get('PicSize', '')
m = re.search(r'(?P<width>\d+)[xX](?P<height>\d+)', pic_size)
if m:
thumbnail.update({
'width': int(m.group('width')),
'height': int(m.group('height')),
})
thumbnails.append(thumbnail)
def counter(prefix):
return int_or_none(
info.get(prefix + 'Counter') or info.get(prefix.lower() + '_counter'))
return {
'id': video_id,
'title': title,
'description': strip_or_none(info.get('Description')),
'duration': int_or_none(info.get('Duration')),
'timestamp': parse_iso8601(info.get('CreationDate') or None),
'average_rating': float_or_none(info.get('Rating')),
'view_count': counter('View'),
'like_count': counter('Like'),
'thumbnails': thumbnails,
'formats': formats,
}
class MeWatchIE(InfoExtractor):
IE_NAME = 'mewatch'
_VALID_URL = r'https?://(?:(?:www|live)\.)?mewatch\.sg/watch/[^/?#&]+-(?P<id>[0-9]+)'
_TESTS = [{
'url': 'https://www.mewatch.sg/watch/Recipe-Of-Life-E1-179371',
'info_dict': {
'id': '1008625',
'ext': 'mp4',
'title': 'Recipe Of Life 味之道',
'timestamp': 1603306526,
'description': 'md5:6e88cde8af2068444fc8e1bc3ebf257c',
'upload_date': '20201021',
},
'params': {
'skip_download': 'm3u8 download',
},
}, {
'url': 'https://ww
|
Acehaidrey/incubator-airflow
|
tests/providers/amazon/aws/operators/test_s3_delete_objects.py
|
Python
|
apache-2.0
| 3,825 | 0.003137 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import io
import unittest
import boto3
from moto import mock_s3
from airflow.providers.amazon.aws.operators.s3 import S3DeleteObjectsOperator
class TestS3DeleteObjectsOperator(unittest.TestCase):
@mock_s3
    def test_s3_delete_single_object(self):
bucket = "testbucket"
key = "path/data.txt"
conn = boto3.client('s3')
conn.create_bucket(Bucket=bucket)
conn.upload_fileobj(Bucket=bucket, Key=key, Fileobj=io.BytesIO(b"input"))
        # The object should be detected before the DELETE action is taken
objects_in_dest_bucket = conn.list_objects(Bucket=bucket, Prefix=key)
assert len(objects_in_dest_bucket['Contents']) == 1
assert objects_in_dest_bucket['Contents'][0]['Key'] == key
op = S3DeleteObjectsOperator(task_id="test_task_s3_delete_single_object", bucket=bucket, keys=key)
op.execute(None)
# There should be no object found in the bucket created earlier
assert 'Contents' not in conn.list_objects(Bucket=bucket, Prefix=key)
@mock_s3
def test_s3_delete_multiple_objects(self):
bucket = "testbucket"
key_pattern = "path/data"
n_keys = 3
keys = [key_pattern + str(i) for i in range(n_keys)]
conn = boto3.client('s3')
conn.create_bucket(Bucket=bucket)
for k in keys:
conn.upload_fileobj(Bucket=bucket, Key=k, Fileobj=io.BytesIO(b"input"))
# The objects should be detected before the DELETE action is taken
objects_in_dest_bucket = conn.list_objects(Bucket=bucket, Prefix=key_pattern)
assert len(objects_in_dest_bucket['Contents']) == n_keys
assert sorted(x['Key'] for x in objects_in_dest_bucket['Contents']) == sorted(keys)
op = S3DeleteObjectsOperator(task_id="test_task_s3_delete_multiple_objects", bucket=bucket, keys=keys)
op.execute(None)
# There should be no object found in the bucket created earlier
assert 'Contents' not in conn.list_objects(Bucket=bucket, Prefix=key_pattern)
@mock_s3
def test_s3_delete_prefix(self):
bucket = "testbucket"
key_pattern = "path/data"
n_keys = 3
keys = [key_pattern + str(i) for i in range(n_keys)]
conn = boto3.client('s3')
conn.create_bucket(Bucket=bucket)
for k in keys:
conn.upload_fileobj(Bucket=bucket, Key=k, Fileobj=io.BytesIO(b"input"))
# The objects should be detected before the DELETE action is taken
objects_in_dest_bucket = conn.list_objects(Bucket=bucket, Prefix=key_pattern)
assert len(objects_in_dest_bucket['Contents']) == n_keys
assert sorted(x['Key'] for x in objects_in_dest_bucket['Contents']) == sorted(keys)
op = S3DeleteObjectsOperator(task_id="test_task_s3_delete_prefix", bucket=bucket, prefix=key_pattern)
op.execute(None)
# There should be no object found in the bucket created earlier
assert 'Contents' not in conn.list_objects(Bucket=bucket, Prefix=key_pattern)
|
piratecb/up1and
|
app/main/__init__.py
|
Python
|
mit
| 98 | 0.010204 |
from flask import Blueprint
main = Blueprint('main', __name__)
from . import views, errors
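# Illustrative note (not part of the original file): the blueprint is
# typically registered in the application factory, e.g.
# app.register_blueprint(main)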
|
JohnVinyard/zounds
|
zounds/datasets/predownload.py
|
Python
|
mit
| 250 | 0 |
from io import BytesIO
class PreDownload(BytesIO):
def __init__(self, initial_bytes, url):
super(PreDownload, self).__init__(initial_bytes)
if not url:
            raise ValueError('url must be provided')
self.url = url
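# Illustrative usage sketch (not part of the original file): wrap bytes that
# were already fetched, keeping the source URL alongside the stream.
# pd = PreDownload(b'RIFF...', url='https://example.com/audio.wav')
# data = pd.read(); origin = pd.url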
|
altnight/individual-sandbox
|
diary/20171022/sample/auth_sample/server.py
|
Python
|
apache-2.0
| 1,243 | 0.006436 |
from flask import Flask, request, redirect, url_for
app = Flask(__name__)
logged_in = False
LOGIN_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title></title>
</head>
<body>
<form method="POST" action="">
<input type="text" name="username">
<input type="password" name="password">
<input type="submit" value="submit">
</form>
</body>
</html>
"""
@app.route("/")
def index():
return "hello world"
@app.route("/two")
def two():
return "two"
@app.route("/login", methods=["GET", "POST"])
def login():
global logged_in
if logged_in:
return redirect(url_for("mypage"))
if request.method == "GET":
return LOGIN_TEMPLATE
if not request.form.get("username") or not request.form.get("password"):
return LOGIN_TEMPLATE
logged_in = True
return "logged in"
@app.route("/mypage")
def mypage():
global logged_in
if not logged_in:
return redirect(url_for("login"))
return "mypage"
@app.route("/logout")
def logout():
global logged_in
if not logged_in:
return redirect(url_for("login"))
logged_in = False
return "logout"
def main():
app.run(debug=True)
if __name__ == "__main__":
main()
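# Illustrative note (not part of the original file): the login flow can be
# exercised with curl while the dev server runs, e.g.
#   curl -X POST -d 'username=u' -d 'password=p' http://127.0.0.1:5000/login
#   curl http://127.0.0.1:5000/mypage
# (state is a process-global flag, so this only works single-process).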
|
assefay/inasafe
|
safe_qgis/ui/options_dialog_base.py
|
Python
|
gpl-3.0
| 23,148 | 0.003629 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'options_dialog_base.ui'
#
# Created: Mon Feb 17 11:50:09 2014
# by: PyQt4 UI code generator 4.10.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_OptionsDialogBase(object):
def setupUi(self, OptionsDialogBase):
OptionsDialogBase.setObjectName(_fromUtf8("OptionsDialogBase"))
OptionsDialogBase.resize(683, 453)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/inasafe/icon.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
OptionsDialogBase.setWindowIcon(icon)
self.gridLayout_2 = QtGui.QGridLayout(OptionsDialogBase)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.buttonBox = QtGui.QDialogButtonBox(OptionsDialogBase)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Help|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.gridLayout_2.addWidget(self.buttonBox, 1, 0, 1, 1)
self.tabWidget = QtGui.QTabWidget(OptionsDialogBase)
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.tab_basic = QtGui.QWidget()
self.tab_basic.setObjectName(_fromUtf8("tab_basic"))
self.gridLayout_4 = QtGui.QGridLayout(self.tab_basic)
self.gridLayout_4.setMargin(0)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.scrollArea = QtGui.QScrollArea(self.tab_basic)
self.scrollArea.setFrameShape(QtGui.QFrame.NoFrame)
self.scrollArea.setFrameShadow(QtGui.QFrame.Sunken)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName(_fromUtf8("scrollArea"))
self.scrollAreaWidgetContents = QtGui.QWidget()
        self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 638, 454))
self.scrollAreaWidgetContents.setObjectName(_fromUtf8("scrollAreaWidgetContents"))
        self.gridLayout = QtGui.QGridLayout(self.scrollAreaWidgetContents)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.cbxVisibleLayersOnly = QtGui.QCheckBox(self.scrollAreaWidgetContents)
self.cbxVisibleLayersOnly.setObjectName(_fromUtf8("cbxVisibleLayersOnly"))
self.gridLayout.addWidget(self.cbxVisibleLayersOnly, 0, 0, 1, 1)
self.cbxSetLayerNameFromTitle = QtGui.QCheckBox(self.scrollAreaWidgetContents)
self.cbxSetLayerNameFromTitle.setEnabled(True)
self.cbxSetLayerNameFromTitle.setObjectName(_fromUtf8("cbxSetLayerNameFromTitle"))
self.gridLayout.addWidget(self.cbxSetLayerNameFromTitle, 1, 0, 1, 1)
self.cbxZoomToImpact = QtGui.QCheckBox(self.scrollAreaWidgetContents)
self.cbxZoomToImpact.setEnabled(True)
self.cbxZoomToImpact.setObjectName(_fromUtf8("cbxZoomToImpact"))
self.gridLayout.addWidget(self.cbxZoomToImpact, 2, 0, 1, 1)
self.cbxHideExposure = QtGui.QCheckBox(self.scrollAreaWidgetContents)
self.cbxHideExposure.setEnabled(True)
self.cbxHideExposure.setObjectName(_fromUtf8("cbxHideExposure"))
self.gridLayout.addWidget(self.cbxHideExposure, 3, 0, 1, 1)
self.cbxClipToViewport = QtGui.QCheckBox(self.scrollAreaWidgetContents)
self.cbxClipToViewport.setChecked(False)
self.cbxClipToViewport.setObjectName(_fromUtf8("cbxClipToViewport"))
self.gridLayout.addWidget(self.cbxClipToViewport, 4, 0, 1, 1)
self.cbxClipHard = QtGui.QCheckBox(self.scrollAreaWidgetContents)
self.cbxClipHard.setObjectName(_fromUtf8("cbxClipHard"))
self.gridLayout.addWidget(self.cbxClipHard, 5, 0, 1, 1)
self.cbxShowPostprocessingLayers = QtGui.QCheckBox(self.scrollAreaWidgetContents)
self.cbxShowPostprocessingLayers.setObjectName(_fromUtf8("cbxShowPostprocessingLayers"))
self.gridLayout.addWidget(self.cbxShowPostprocessingLayers, 6, 0, 1, 1)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.label_6 = QtGui.QLabel(self.scrollAreaWidgetContents)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.horizontalLayout.addWidget(self.label_6)
self.dsbFemaleRatioDefault = QtGui.QDoubleSpinBox(self.scrollAreaWidgetContents)
self.dsbFemaleRatioDefault.setAccelerated(True)
self.dsbFemaleRatioDefault.setMaximum(1.0)
self.dsbFemaleRatioDefault.setSingleStep(0.01)
self.dsbFemaleRatioDefault.setObjectName(_fromUtf8("dsbFemaleRatioDefault"))
self.horizontalLayout.addWidget(self.dsbFemaleRatioDefault)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.gridLayout.addLayout(self.horizontalLayout, 7, 0, 1, 1)
self.grpNotImplemented = QtGui.QGroupBox(self.scrollAreaWidgetContents)
self.grpNotImplemented.setObjectName(_fromUtf8("grpNotImplemented"))
self.gridLayout_3 = QtGui.QGridLayout(self.grpNotImplemented)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.lineEdit_4 = QtGui.QLineEdit(self.grpNotImplemented)
self.lineEdit_4.setEnabled(True)
self.lineEdit_4.setObjectName(_fromUtf8("lineEdit_4"))
self.horizontalLayout_4.addWidget(self.lineEdit_4)
self.toolButton_4 = QtGui.QToolButton(self.grpNotImplemented)
self.toolButton_4.setEnabled(True)
self.toolButton_4.setObjectName(_fromUtf8("toolButton_4"))
self.horizontalLayout_4.addWidget(self.toolButton_4)
self.gridLayout_3.addLayout(self.horizontalLayout_4, 8, 0, 1, 1)
self.label_4 = QtGui.QLabel(self.grpNotImplemented)
self.label_4.setEnabled(True)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout_3.addWidget(self.label_4, 7, 0, 1, 1)
self.cbxBubbleLayersUp = QtGui.QCheckBox(self.grpNotImplemented)
self.cbxBubbleLayersUp.setEnabled(True)
self.cbxBubbleLayersUp.setObjectName(_fromUtf8("cbxBubbleLayersUp"))
self.gridLayout_3.addWidget(self.cbxBubbleLayersUp, 0, 0, 1, 1)
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.label_5 = QtGui.QLabel(self.grpNotImplemented)
self.label_5.setEnabled(True)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.horizontalLayout_5.addWidget(self.label_5)
self.spinBox = QtGui.QSpinBox(self.grpNotImplemented)
self.spinBox.setEnabled(True)
self.spinBox.setObjectName(_fromUtf8("spinBox"))
self.horizontalLayout_5.addWidget(self.spinBox)
self.gridLayout_3.addLayout(self.horizontalLayout_5, 9, 0, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.lineEdit = QtGui.QLineEdit(self.grpNotImplemented)
self.lineEdit.setEnabled(True)
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.horizontalLayout_2.addWidget(self.lineEdit)
self.toolButton = QtGui.QToolButton(self.grpNotImplemented)
self.toolButton.setEnabled(True)
self.toolButton.setObjectName(_fromUtf8("toolButton"))
self.horizontalLayout_2.addWidget(self.toolButton)
self.gridLayout_3.addLayout(self.horizontalLayout_2, 2, 0, 1, 1)
self.label = QtGui.QLabel(self.g
|
blackjax-devs/blackjax
|
setup.py
|
Python
|
apache-2.0
| 1,650 | 0.000606 |
import codecs
import os.path
import sys
import setuptools
# READ README.md for long description on PyPi.
try:
long_description = open("README.md", encoding="utf-8").read()
except Exception as e:
sys.stderr.write(f"Failed to read README.md:\n {e}\n")
sys.stderr.flush()
    long_description = ""
# Get the package's version number from the __init__.py file
def read(rel_path):
"""Read the file located at the provided relative path."""
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, rel_path), "r") as fp:
return fp.read()
def get_version(rel_path):
"""Get the package's version number.
We fetch the version number from the `__version__` variable located in the
package root's `__init__.py` file. This way there is only a single source
of truth for the package's version number.
"""
for line in read(rel_path).splitlines():
if line.startswith("__version__"):
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
else:
raise RuntimeError("Unable to find version string.")
INSTALL_REQS = read("requirements.txt").splitlines()
setuptools.setup(
name="blackjax",
author="The BlackJAX team",
version=get_version("blackjax/__init__.py"),
description="Flexible and fast inference in Python",
long_description=long_description,
packages=setuptools.find_packages(),
install_requires=INSTALL_REQS,
long_description_content_type="text/markdown",
keywords="probabilistic machine learning bayesian statistics sampling algorithms",
license="Apache License 2.0",
)
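# Hedged illustration of the version helper above: get_version() only needs
# a file whose lines include a `__version__ = "..."` assignment, e.g.
#   get_version("blackjax/__init__.py")  # -> "0.9.6" (value illustrative)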
|
supernifty/mgsa
|
mgsa/count_homopolymers.py
|
Python
|
mit
| 550 | 0.038182 |
import collections
import itertools
import sys
def count_homopolymers( fh ):
s = []
print "building..."
for line in fh:
if line.startswith( '>' ):
continue
s.append( line.strip() )
print "counting..."
runs = collections.defaultdict(int)
best = collections.defaultdict(int)
last = None
for c in ''.join(s):
runs[c] += 1
best[c] = max(best[c], runs[c])
    if last is not None and c != last:
runs[last] = 0
last = c
return best
if __name__ == '__main__':
print count_homopolymers( sys.stdin )
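# Hedged self-check (Python 2, matching the prints above): "AAAT" has a
# longest run of 3 for A and 1 for T; StringIO stands in for a FASTA stream.
# import StringIO
# print count_homopolymers( StringIO.StringIO(">x\nAAAT\n") )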
|
sahilshekhawat/sympy
|
sympy/plotting/tests/test_plot.py
|
Python
|
bsd-3-clause
| 8,460 | 0.002719 |
from sympy import (pi, sin, cos, Symbol, Integral, summation, sqrt, log,
oo, LambertW, I, meijerg, exp_polar, Max)
from sympy.plotting import (plot, plot_parametric, plot3d_parametric_line,
plot3d, plot3d_parametric_surface)
from sympy.plotting.plot import unset_show
from sympy.utilities.pytest import skip, raises
from sympy.plotting.experimental_lambdify import lambdify
from sympy.external import import_module
from sympy.core.decorators import wraps
from tempfile import NamedTemporaryFile
import os
import sys
class MockPrint(object):
def write(self, s):
pass
def disable_print(func, *args, **kwargs):
@wraps(func)
def wrapper(*args, **kwargs):
sys.stdout = MockPrint()
func(*args, **kwargs)
sys.stdout = sys.__stdout__
return wrapper
unset_show()
# XXX: We could implement this as a context manager instead
# That would need rewriting the plot_and_save() function
# entirely
class TmpFileManager:
tmp_files = []
@classmethod
def tmp_file(cls, name=''):
cls.tmp_files.append(NamedTemporaryFile(prefix=name, suffix='.png').name)
return cls.tmp_files[-1]
@classmethod
def cleanup(cls):
map(os.remove, cls.tmp_files)
def plot_and_save(name):
tmp_file = TmpFileManager.tmp_file
x = Symbol('x')
y = Symbol('y')
z = Symbol('z')
###
# Examples from the 'introduction' notebook
###
p = plot(x)
p = plot(x*sin(x), x*cos(x))
p.extend(p)
p[0].line_color = lambda a: a
p[1].line_color = 'b'
p.title = 'Big title'
p.xlabel = 'the x axis'
p[1].label = 'straight line'
p.legend = True
p.aspect_ratio = (1, 1)
p.xlim = (-15, 20)
p.save(tmp_file('%s_basic_options_and_colors' % name))
p.extend(plot(x + 1))
p.append(plot(x + 3, x**2)[1])
p.save(tmp_file('%s_plot_extend_append' % name))
p[2] = plot(x**2, (x, -2, 3))
p.save(tmp_file('%s_plot_setitem' % name))
p = plot(sin(x), (x, -2*pi, 4*pi))
p.save(tmp_file('%s_line_explicit' % name))
p = plot(sin(x))
p.save(tmp_file('%s_line_default_range' % name))
p = plot((x**2, (x, -5, 5)), (x**3, (x, -3, 3)))
p.save(tmp_file('%s_line_multiple_range' % name))
raises(ValueError, lambda: plot(x, y))
#parametric 2d plots.
#Single plot with default range.
plot_parametric(sin(x), cos(x)).save(tmp_file())
#Single plot with range.
p = plot_parametric(sin(x), cos(x), (x, -5, 5))
p.save(tmp_file('%s_parametric_range' % name))
#Multiple plots with same range.
p = plot_parametric((sin(x), cos(x)), (x, sin(x)))
p.save(tmp_file('%s_parametric_multiple' % name))
#Multiple plots with different ranges.
p = plot_parametric((sin(x), cos(x), (x, -3, 3)), (x, sin(x), (x, -5, 5)))
p.save(tmp_file('%s_parametric_multiple_ranges' % name))
#depth of recursion specified.
p = plot_parametric(x, sin(x), depth=13)
p.save(tmp_file('%s_recursion_depth' % name))
#No adaptive sampling.
p = plot_parametric(cos(x), sin(x), adaptive=False, nb_of_points=500)
p.save(tmp_file('%s_adaptive' % name))
#3d parametric plots
p = plot3d_parametric_line(sin(x), cos(x), x)
p.save(tmp_file('%s_3d_line' % name))
p = plot3d_parametric_line(
(sin(x), cos(x), x, (x, -5, 5)), (cos(x), sin(x), x, (x, -3, 3)))
p.save(tmp_file('%s_3d_line_multiple' % name))
p = plot3d_parametric_line(sin(x), cos(x), x, nb_of_points=30)
p.save(tmp_file('%s_3d_line_points' % name))
# 3d surface single plot.
p = plot3d(x * y)
p.save(tmp_file('%s_surface' % name))
# Multiple 3D plots with same range.
p = plot3d(-x * y, x * y, (x, -5, 5))
p.save(tmp_file('%s_surface_multiple' % name))
# Multiple 3D plots with different ranges.
p = plot3d(
(x * y, (x, -3, 3), (y, -3, 3)), (-x * y, (x, -3, 3), (y, -3, 3)))
p.save(tmp_file('%s_surface_multiple_ranges' % name))
# Single Parametric 3D plot
p = plot3d_parametric_surface(sin(x + y), cos(x - y), x - y)
p.save(tmp_file('%s_parametric_surface' % name))
    # Multiple Parametric 3D plots.
p = plot3d_parametric_surface(
(x*sin(z), x*cos(z), z, (x, -5, 5), (z, -5, 5)),
(sin(x + y), cos(x - y), x - y, (x, -5, 5), (y, -5, 5)))
p.save(tmp_file('%s_parametric_surface' % name))
###
# Examples from the 'colors' notebook
###
p = plot(sin(x))
p[0].line_color = lambda a: a
p.save(tmp_file('%s_colors_line_arity1' % name))
p[0].line_color = lambda a, b: b
p.save(tmp_file('%s_colors_line_arity2' % name))
p = plot(x*sin(x), x*cos(x), (x, 0, 10))
p[0].line_color = lambda a: a
p.save(tmp_file('%s_colors_param_line_arity1' % name))
p[0].line_color = lambda a, b: a
p.save(tmp_file('%s_colors_param_line_arity2a' % name))
p[0].line_color = lambda a, b: b
p.save(tmp_file('%s_colors_param_line_arity2b' % name))
p = plot3d_parametric_line(sin(x) + 0.1*sin(x)*cos(7*x),
cos(x) + 0.1*cos(x)*cos(7*x),
0.1*sin(7*x),
(x, 0, 2*pi))
p[0].line_color = lambda a: sin(4*a)
p.save(tmp_file('%s_colors_3d_line_arity1' % name))
p[0].line_color = lambda a, b: b
p.save(tmp_file('%s_colors_3d_line_arity2' % name))
p[0].line_color = lambda a, b, c: c
p.save(tmp_file('%s_colors_3d_line_arity3' % name))
p = plot3d(sin(x)*y, (x, 0, 6*pi), (y, -5, 5))
p[0].surface_color = lambda a: a
p.save(tmp_file('%s_colors_surface_arity1' % name))
p[0].surface_color = lambda a, b: b
p.save(tmp_file('%s_colors_surface_arity2' % name))
p[0].surface_color = lambda a, b, c: c
p.save(tmp_file('%s_colors_surface_arity3a' % name))
p[0].surface_color = lambda a, b, c: sqrt((a - 3*pi)**2 + b**2)
p.save(tmp_file('%s_colors_surface_arity3b' % name))
p = plot3d_parametric_surface(x * cos(4 * y), x * sin(4 * y), y,
(x, -1, 1), (y, -1, 1))
p[0].surface_color = lambda a: a
p.save(tmp_file('%s_colors_param_surf_arity1' % name))
p[0].surface_color = lambda a, b: a*b
p.save(tmp_file('%s_colors_param_surf_arity2' % name))
p[0].surface_color = lambda a, b, c: sqrt(a**2 + b**2 + c**2)
p.save(tmp_file('%s_colors_param_surf_arity3' % name))
###
# Examples from the 'advanced' notebook
###
i = Integral(log((sin(x)**2 + 1)*sqrt(x**2 + 1)), (x, 0, y))
p = plot(i, (y, 1, 5))
p.save(tmp_file('%s_advanced_integral' % name))
s = summation(1/x**y, (x, 1, oo))
p = plot(s, (y, 2, 10))
p.save(tmp_file('%s_advanced_inf_sum' % name))
p = plot(summation(1/x, (x, 1, y)), (y, 2, 10), show=False)
p[0].only_integers = True
p[0].steps = True
p.save(tmp_file('%s_advanced_fin_sum' % name))
###
# Test expressions that can not be translated to np and generate complex
# results.
###
plot(sin(x) + I*cos(x)).save(tmp_file())
plot(sqrt(sqrt(-x))).save(tmp_file())
plot(LambertW(x)).save(tmp_file())
plot(sqrt(LambertW(x))).save(tmp_file())
#Characteristic function of a StudentT distribution with nu=10
plot((meijerg(((1 / 2,), ()), ((5, 0, 1 / 2), ()), 5 * x**2 * exp_polar(-I*pi)/2)
+ meijerg(((1/2,), ()), ((5, 0, 1/2), ()),
5*x**2 * exp_polar(I*pi)/2)) / (48 * pi), (x, 1e-6, 1e-2)).save(tmp_file())
def test_matplotlib():
matplotlib = import_module('matplotlib', min_module_version='1.1.0', catch=(RuntimeError,))
if matplotlib:
try:
plot_and_save('test')
finally:
# clean up
TmpFileManager.cleanup()
else:
skip("Matplotlib not the default backend")
# Tests for exception handling in experimental_lambdify
def test_experimental_lambify():
x = Symbol('x')
f = lambdify([x], Max(x, 5))
# XXX should f be tested? If f(2) is attempted, an
# error is raised because a complex produced during wrapping of the arg
# is being compared with an int.
assert Max(2, 5) == 5
assert Max(5, 7) == 7
x = Symbol('x-3')
f = lambdify([x], x + 1)
assert f(1) == 2
@disable_print
def test_appen
|
osuripple/lets
|
pp/__init__.py
|
Python
|
agpl-3.0
| 272 | 0 |
from common.constants import gameModes
from pp import ez
from pp import wifipiano3
from pp import cicciobello
PP_CALCULATORS = {
gameModes.STD: ez.Ez,
gameModes.TAIKO: ez.Ez,
gameModes.CTB: cicciobello.Cicciobello,
    gameModes.MANIA: wifipiano3.WiFiPiano
}
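# Hedged lookup sketch: resolve the calculator class for a play's game mode;
# constructor arguments depend on the concrete class and are not shown here.
# calculator_cls = PP_CALCULATORS[gameModes.STD]  # -> ez.Ez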
|
felipewaku/compiladores-p2
|
lambda_compiler/__main__.py
|
Python
|
mit
| 195 | 0.010256 |
from compiler.compiler import LambdaCompiler
def main():
f = open('input.txt', 'r')
compiler = LambdaCompiler(f)
compiler.perform('output.py')
if __name__ == "__main__":
main()
|
marcore/pok-eco
|
xapi/patterns/manage_wiki.py
|
Python
|
agpl-3.0
| 1,746 | 0.001145 |
import re
from django.conf import settings
from tincan import (
Activity,
ActivityDefinition,
LanguageMap
)
from xapi.patterns.base import BasePattern
from xapi.patterns.eco_verbs import (
LearnerCreatesWikiPageVerb,
LearnerEditsWikiPageVerb
)
class BaseWikiRule(BasePattern): # pylint: disable=abstract-method
def convert(self, evt, course_id):
title = None
obj = None
try:
            # We need to do this because we receive a string rather than a dictionary
# event_data = json.loads(evt['event'])
event_data = evt['event']
title = event_data['POST'].get('title', None)
except: # pylint: disable=bare-except
pass
if title:
title = title[0] # from parametervalues to single value
verb = self.get_verb() # pylint: disable=no-member
obj = Activity(
id=self.fix_id(self.base_url, evt['context']['path']),
definition=ActivityDefinition(
name=LanguageMap({'en-US': title}),
type="http://www.ecolearning.eu/expapi/activitytype/wiki"
)
)
else:
verb = None # Skip the not really created pages
return verb, obj
class CreateWikiRule(BaseWikiRule, LearnerCreatesWikiPageVerb):
def match(self, evt, course_id):
return re.match(
'/courses/'+settings.COURSE_ID_PATTERN+'/wiki/_create/?',
evt['event_type'])
class EditWikiRule(BaseWikiRule, LearnerEditsWikiPageVerb):
def match(self, evt, course_id):
return re.match(
            '/courses/'+settings.COURSE_ID_PATTERN+r'/wiki/\w+/_edit/?',
evt['event_type'])
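# Hedged driver sketch (constructor arguments come from BasePattern, which is
# not shown here; the event shape is inferred from convert() above):
# rule = CreateWikiRule()
# if rule.match(evt, course_id):
#     verb, obj = rule.convert(evt, course_id)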
|
narayanaditya95/aldryn-wow
|
aldryn_wow/south_migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 5,058 | 0.007513 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Animation'
db.create_table(u'aldryn_wow_animation', (
(u'cmsplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['cms.CMSPlugin'], unique=True, primary_key=True)),
('animation_class', self.gf('django.db.models.fields.CharField')(default='bounce', max_length=25)),
('infinite', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
))
db.send_create_signal(u'aldryn_wow', ['Animation'])
# Adding model 'WOWAnimation'
db.create_table(u'aldryn_wow_wowanimation', (
(u'cmsplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['cms.CMSPlugin'], unique=True, primary_key=True)),
('animation_class', self.gf('django.db.models.fields.CharField')(default='bounce', max_length=25)),
('duration', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True, blank=True)),
('delay', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True, blank=True)),
            ('offset', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True, blank=True)),
('iteration', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True, blank=True)),
))
db.send_create_signal(u'aldryn_wow', ['WOWAnimation'])
    def backwards(self, orm):
# Deleting model 'Animation'
db.delete_table(u'aldryn_wow_animation')
# Deleting model 'WOWAnimation'
db.delete_table(u'aldryn_wow_wowanimation')
models = {
u'aldryn_wow.animation': {
'Meta': {'object_name': 'Animation', '_ormbases': ['cms.CMSPlugin']},
'animation_class': ('django.db.models.fields.CharField', [], {'default': "'bounce'", 'max_length': '25'}),
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'infinite': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'})
},
u'aldryn_wow.wowanimation': {
'Meta': {'object_name': 'WOWAnimation', '_ormbases': ['cms.CMSPlugin']},
'animation_class': ('django.db.models.fields.CharField', [], {'default': "'bounce'", 'max_length': '25'}),
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'delay': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'iteration': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'offset': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
}
}
complete_apps = ['aldryn_wow']
|
REBradley/WineArb
|
winearb/articles/serializers.py
|
Python
|
bsd-3-clause
| 435 | 0.002299 |
from rest_framework import serializers
from ..upload_handling.serializers import ArticleImageSerializer
class ArticleSerializer(serializers.Serializer):
main_title = serializers.CharField(max_length=255)
sub_title = serializers.CharField(max_length=255)
author = serializers.CharField(max_length=255)
image = ArticleImageSerializer()
date = serializers.CharField(max_length=40)
text = serializers.CharField()
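# Hedged usage sketch (field values illustrative): validate an incoming
# payload; note that the nested image field is required as declared above.
# serializer = ArticleSerializer(data={'main_title': 'On Tannins',
#                                      'sub_title': 'A primer',
#                                      'author': 'J. Doe',
#                                      'image': {...},
#                                      'date': '2017-01-01',
#                                      'text': '...'})
# serializer.is_valid()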
|
hfp/tensorflow-xsmm
|
tensorflow/contrib/gan/python/features/python/spectral_normalization_impl.py
|
Python
|
apache-2.0
| 12,318 | 0.003491 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras-like layers and utilities that implement Spectral Normalization.
Based on "Spectral Normalization for Generative Adversarial Networks" by Miyato,
et al in ICLR 2018. https://openreview.net/pdf?id=B1QRgziT-
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import numbers
import re
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras.engine import base_layer_utils as keras_base_layer_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
__all__ = [
'compute_spectral_norm', 'spectral_normalize', 'spectral_norm_regularizer',
'spectral_normalization_custom_getter', 'keras_spectral_normalization'
]
# tf.bfloat16 should work, but tf.matmul converts those to tf.float32 which then
# can't directly be assigned back to the tf.bfloat16 variable.
_OK_DTYPES_FOR_SPECTRAL_NORM = (dtypes.float16, dtypes.float32, dtypes.float64)
_PERSISTED_U_VARIABLE_SUFFIX = 'spectral_norm_u'
def compute_spectral_norm(w_tensor, power_iteration_rounds=1, name=None):
"""Estimates the largest singular value in the weight tensor.
Args:
w_tensor: The weight matrix whose spectral norm should be computed.
power_iteration_rounds: The number of iterations of the power method to
      perform. A higher number yields a better approximation.
name: An optional scope name.
Returns:
The largest singular value (the spectral norm) of w.
"""
with variable_scope.variable_scope(name, 'spectral_norm'):
# The paper says to flatten convnet kernel weights from
# (C_out, C_in, KH, KW) to (C_out, C_in * KH * KW). But TensorFlow's Conv2D
# kernel weight shape is (KH, KW, C_in, C_out), so it should be reshaped to
# (KH * KW * C_in, C_out), and similarly for other layers that put output
# channels as last dimension.
# n.b. this means that w here is equivalent to w.T in the paper.
w = array_ops.reshape(w_tensor, (-1, w_tensor.get_shape()[-1]))
# Persisted approximation of first left singular vector of matrix `w`.
u_var = variable_scope.get_variable(
_PERSISTED_U_VARIABLE_SUFFIX,
shape=(w.shape[0], 1),
dtype=w.dtype,
initializer=init_ops.random_normal_initializer(),
trainable=False)
u = u_var
# Use power iteration method to approximate spectral norm.
for _ in range(power_iteration_rounds):
# `v` approximates the first right singular vector of matrix `w`.
v = nn.l2_normalize(math_ops.matmul(array_ops.transpose(w), u))
u = nn.l2_normalize(math_ops.matmul(w, v))
# Update persisted approximation.
with ops.control_dependencies([u_var.assign(u, name='update_u')]):
u = array_ops.identity(u)
u = array_ops.stop_gradient(u)
v = array_ops.stop_gradient(v)
# Largest singular value of `w`.
spectral_norm = math_ops.matmul(
math_ops.matmul(array_ops.transpose(u), w), v)
spectral_norm.shape.assert_is_fully_defined()
spectral_norm.shape.assert_is_compatible_with([1, 1])
return spectral_norm[0][0]
def spectral_normalize(w, power_iteration_rounds=1, name=None):
"""Normalizes a weight matrix by its spectral norm.
Args:
w: The weight matrix to be normalized.
power_iteration_rounds: The number of iterations of the power method to
      perform. A higher number yields a better approximation.
name: An optional scope name.
Returns:
A normalized weight matrix tensor.
"""
with variable_scope.variable_scope(name, 'spectral_normalize'):
w_normalized = w / compute_spectral_norm(
w, power_iteration_rounds=power_iteration_rounds)
return array_ops.reshape(w_normalized, w.get_shape())
def spectral_norm_regularizer(scale, power_iteration_rounds=1, scope=None):
"""Returns a functions that can be used to apply spectral norm regularization.
Small spectral norms enforce a small Lipschitz constant, which is necessary
for Wasserstein GANs.
Args:
scale: A scalar multiplier. 0.0 disables the regularizer.
power_iteration_rounds: The number of iterations of the power method to
      perform. A higher number yields a better approximation.
scope: An optional scope name.
Returns:
A function with the signature `sn(weights)` that applies spectral norm
regularization.
Raises:
ValueError: If scale is negative or if scale is not a float.
"""
if isinstance(scale, numbers.Integral):
raise ValueError('scale cannot be an integer: %s' % scale)
if isinstance(scale, numbers.Real):
if scale < 0.0:
raise ValueError(
'Setting a scale less than 0 on a regularizer: %g' % scale)
if scale == 0.0:
logging.info('Scale of 0 disables regularizer.')
return lambda _: None
def sn(weights, name=None):
"""Applies spectral norm regularization to weights."""
with ops.name_scope(scope, 'SpectralNormRegularizer', [weights]) as name:
scale_t = ops.convert_to_tensor(
scale, dtype=weights.dtype.base_dtype, name='scale')
return math_ops.multiply(
scale_t,
          compute_spectral_norm(
weights, power_iteration_rounds=power_iteration_rounds),
name=name)
return sn
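# Hedged usage sketch: the returned `sn` plugs in wherever weight
# regularizers are accepted, e.g. summed into the discriminator loss.
# sn_reg = spectral_norm_regularizer(scale=1e-4)
# disc_loss += sn_reg(disc_kernel)  # disc_kernel: a weight tensor (assumed)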
def _default_name_filter(name):
"""A filter function to identify common names of weight variables.
Args:
name: The variable name.
Returns:
Whether `name` is a standard name for a weight/kernel variables used in the
Keras, tf.layers, tf.contrib.layers or tf.contrib.slim libraries.
"""
  match = re.match(r'(.*\/)?(depthwise_|pointwise_)?(weights|kernel)$', name)
return match is not None
def spectral_normalization_custom_getter(name_filter=_default_name_filter,
power_iteration_rounds=1):
"""Custom getter that performs Spectral Normalization on a weight tensor.
Specifically it divides the weight tensor by its largest singular value. This
is intended to stabilize GAN training, by making the discriminator satisfy a
local 1-Lipschitz constraint.
Based on [Spectral Normalization for Generative Adversarial Networks][sn-gan].
[sn-gan]: https://openreview.net/forum?id=B1QRgziT-
To reproduce an SN-GAN, apply this custom_getter to every weight tensor of
your discriminator. The last dimension of the weight tensor must be the number
of output channels.
Apply this to layers by supplying this as the `custom_getter` of a
`tf.variable_scope`. For example:
with tf.variable_scope('discriminator',
custom_getter=spectral_norm_getter()):
net = discriminator_fn(net)
IMPORTANT: Keras does not respect the custom_getter supplied by the
VariableScope, so Keras users should use `keras_spectral_normalization`
instead of (or in addition to) this approach.
It is important to carefully select to which weights you want to apply
Spectral Normalization. In general you want to normalize the kernels of
convolution and dense layers, but you do not want to normalize biases. You
also want to avoid normalizing batch normalization (and similar) variables,
but in general such layers play poorly with Spectral Normalization, since the
gamma can canc
|
LLNL/spack
|
var/spack/repos/builtin/packages/pygmo/package.py
|
Python
|
lgpl-2.1
| 1,202 | 0.002496 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Pygmo(CMakePackage):
"""Parallel Global Multiobjective Optimizer (and its Python alter ego
PyGMO) is a C++ / Python platform to perform parallel computations of
optimisation tasks (global and local) via the asynchronous generalized
island model."""
homepage = "https://esa.github.io/pygmo2/"
url = "https://github.com/esa/pygmo2/archive/v2.18.0.tar.gz"
git = "https://github.com/esa/pygmo2.git"
version('master', branch='master')
version('2.18.0', sha256='9f081cc973297894af09f713f889870ac452bfb32b471f9f7ba08a5e0bb9a125')
depends_on('pagmo2', type=('build', 'link'))
depends_on('mpi', type='build')
depends_on('py-pybind11@2.6.0:2.6.2', type='build')
depends_on('cmake@3.1:', type='build')
variant('shared', default=True, description='Build shared libraries')
def cmake_args(self):
args = [
self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
]
return args
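# Hedged usage note: with this recipe on a Spack repo path, a build can be
# requested as, for example:
#   spack install pygmo@2.18.0 +shared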
|
viaict/viaduct
|
app/forms/examination.py
|
Python
|
mit
| 1,224 | 0 |
from app import constants
from flask_babel import lazy_gettext as _
from flask_wtf import FlaskForm
from flask_wtf.file import FileField
from wtforms import StringField, SelectField, DateField
from wtforms.validators import InputRequired, Optional
from app.models.examination import test_type_default
class CourseForm(FlaskForm):
title = StringField(_('Title'), validators=[InputRequired()])
    description = StringField(_('Description'))
class EducationForm(FlaskForm):
title = StringField(_('Title'), validators=[InputRequired()])
class EditForm(FlaskForm):
date = DateField(_('Date'),
validators=[Optional()],
format=constants.DATE_FORMAT)
course = SelectField(_('Course'), coerce=int,
validators=[InputRequired()])
education = SelectField(_('Education'), coerce=int,
validators=[InputRequired()])
test_type = SelectField(_('Examination type'), coerce=str,
default=test_type_default,
validators=[InputRequired()])
comment = StringField(_('Comment'))
examination = FileField(_('Examination'))
answers = FileField(_('Answers'))
|
synergeticsedx/deployment-wipro
|
openedx/core/djangoapps/monitoring/startup.py
|
Python
|
agpl-3.0
| 131 | 0 |
"""
Registers signal handlers at startup.
"""
# pylint: disable=unused-import
import openedx.core.djangoapps.monitoring.exceptions
|
hackday-profilers/flocker
|
flocker/dockerplugin/_script.py
|
Python
|
apache-2.0
| 3,412 | 0 |
# Copyright ClusterHQ Inc. See LICENSE file for details.
"""
Command to start up the Docker plugin.
"""
from os import umask
from stat import S_IRUSR, S_IWUSR, S_IXUSR
from twisted.python.usage import Options
from twisted.internet.endpoints import serverFromString
from twisted.application.internet import StreamServerEndpointService
from twisted.web.server import Site
from twisted.python.filepath import FilePath
from twisted.internet.address import UNIXAddress
from ..common.script import (
flocker_standard_options, FlockerScriptRunner, main_for_service)
from ._api import VolumePlugin
from ..node.script import get_configuration
from ..apiclient import FlockerClient
from ..control.httpapi import REST_API_PORT
PLUGIN_PATH = FilePath("/run/docker/plugins/flocker/flocker.sock")
@flocker_standard_options
class DockerPluginOptions(Options):
"""
Command-line options for the Docker plugin.
"""
optParameters = [
["rest-api-port", "p", REST_API_PORT,
"Port to connect to for control service REST API."],
["agent-config", "c", "/etc/flocker/agent.yml",
"The configuration file for the local agent."],
]
def postOptions(self):
self['agent-config'] = FilePath(self['agent-config'])
class DockerPluginScript(object):
"""
Start the Docker plugin.
"""
def _create_listening_directory(self, directory_path):
"""
Create the parent directory for the Unix socket if it doesn't exist.
:param FilePath directory_path: The directory to create.
"""
original_umask = umask(0)
try:
if not directory_path.exists():
directory_path.makedirs()
directory_path.chmod(S_IRUSR | S_IWUSR | S_IXUSR)
finally:
umask(original_umask)
def main(self, reactor, options):
# Many places in both twisted.web and Klein are unhappy with
# listening on Unix socket, e.g.
# https://twistedmatrix.com/trac/ticket/5406 "fix" that by
# pretending we have a port number. Yes, I feel guilty.
UNIXAddress.port = 0
# We can use /etc/flocker/agent.yml and /etc/flocker/node.crt to load
# some information we need:
agent_config = get_configuration(options)
        control_host = agent_config['control-service']['hostname']
node_id = agent_config['node-credential'].uuid
certificates_path = options["agent-config"].parent()
control_port = options["rest-api-port"]
flocker_client = FlockerClient(reactor, control_host, control_port,
certificates_path.child(b"cluster.crt"),
certificates_path.child(b"plugin.crt"),
certificates_path.child(b"plugin.key"))
self._create_listening_directory(PLUGIN_PATH.parent())
endpoint = serverFromString(
reactor, "unix:{}:mode=600".format(PLUGIN_PATH.path))
service = StreamServerEndpointService(endpoint, Site(
VolumePlugin(reactor, flocker_client, node_id).app.resource()))
return main_for_service(reactor, service)
def docker_plugin_main():
"""
Script entry point that runs the Docker plugin.
"""
return FlockerScriptRunner(script=DockerPluginScript(),
options=DockerPluginOptions()).main()
|
hwaf/hwaf
|
py-hwaftools/orch/tools.py
|
Python
|
bsd-3-clause
| 7,185 | 0.010717 |
#!/usr/bin/env python
'''
Main entry to worch from a waf wscript file.
Use the following in the options(), configure() and build() waf wscript methods:
ctx.load('orch.tools', tooldir='.')
'''
def options(opt):
opt.add_option('--orch-config', action = 'store', default = 'orch.cfg',
help='Give an orchestration configuration file.')
opt.add_option('--orch-start', action = 'store', default = 'start',
help='Set the section to start the orchestration')
def configure(cfg):
import orch.configure
orch.configure.configure(cfg)
def build(bld):
import orch.build
orch.build.build(bld)
# the stuff below is for augmenting waf
import time
from orch.wafutil import exec_command
from orch.util import string2list
default_step_cwd = dict(
download = '{download_dir}',
unpack = '{source_dir}',
    patch = '{source_dir}',
prepare = '{build_dir}',
build = '{build_dir}',
install = '{build_dir}',
)
# Main interface to worch configuration items
class WorchConfig(object):
def __init__(self, **pkgcfg):
self._config = pkgcfg
def __getattr__(self, name):
return self._config[name]
def get(self, name, default = None):
return self._config.get(name,default)
def format(self, string, **kwds):
'''
Return a string formatted with kwds and configuration items
'''
d = dict(self._config, **kwds)
return string.format(**d)
def depends_step(self, step):
'''
Return a list of steps that this step depends on
'''
d = self._config.get('depends')
if not d: return list()
ds = [x[1] for x in [s.split(':') for s in string2list(d)] if x[0] == step]
return ds
def dependencies(self):
'''
Return all dependencies set via "depends" configuration items
return list of tuples: (mystep, package, package_step)
eg: ('prepare', 'gcc', 'install')
'''
ret = list()
try:
deps = getattr(self, 'depends', None)
except KeyError:
return list()
for dep in string2list(deps):
mystep, other = dep.split(':')
pkg,pkg_step = other.split('_',1)
ret.append((mystep, pkg, pkg_step))
return ret
def exports(self):
'''
Return all environment settings via export_* configuration items
return list of tuples: (variable, value, operator) for exports
eg: ('PATH', '/blah/blah', 'prepend')
'''
ret = list()
for key,val in self._config.items():
if not key.startswith('export_'):
continue
var = key[len('export_'):]
oper = 'set'
for maybe in ['prepend', 'append', 'set']:
if val.startswith(maybe+':'):
oper = maybe
val = val[len(maybe)+1:]
ret.append((var, val, oper))
return ret
# Augment the task generator with worch-specific methods
from waflib.TaskGen import taskgen_method
@taskgen_method
def worch_hello(self):
'Just testing'
print ("%s" % self.worch.format('Hi from worch, my name is "{package}/{version}" and I am using "{dumpenv_cmd}" with extra {extra}', extra='spice'))
print ('My bld.env: %s' % (self.bld.env.keys(),))
print ('My all_envs: %s' % (sorted(self.bld.all_envs.keys()),))
print ('My env: %s' % (self.env.keys(),))
print ('My groups: %s' % (self.env['orch_group_dict'].keys(),))
print ('My packages: %s' % (self.env['orch_package_list'],))
# print ('My package dict: %s' % '\n'.join(['%s=%s' %kv for kv in sorted(self.bld.env['orch_package_dict'][self.worch.package].items())]))
@taskgen_method
def step(self, name, rule, **kwds):
'''
Make a worch installation step.
This invokes the build context on the rule with the following augmentations:
- the given step name is prefixed with the package name
- if the rule is a string (scriptlet) then the worch exec_command is used
- successful execution of the rule leads to a worch control file being produced.
'''
step_name = '%s_%s' % (self.worch.package, name)
# append control file as an additional output
target = string2list(kwds.get('target', ''))
if not isinstance(target, list):
target = [target]
cn = self.control_node(name)
if not cn in target:
target.append(cn)
kwds['target'] = target
kwds.setdefault('env', self.env)
cwd = kwds.get('cwd')
if not cwd:
cwd = default_step_cwd.get(name)
if cwd:
cwd = self.worch.format(cwd)
cwd = self.make_node(cwd)
msg.debug('orch: using cwd for step "%s": %s' % (step_name, cwd.abspath()))
kwds['cwd'] = cwd.abspath()
depends = self.worch.depends_step(name)
after = string2list(kwds.get('after',[])) + depends
if after:
kwds['after'] = after
msg.debug('orch: run %s AFTER: %s' % (step_name, after))
# functionalize scriptlet
rulefun = rule
if isinstance(rule, type('')):
rulefun = lambda t: exec_command(t, rule)
# curry the real rule function in order to write control file if successful
def runit(t):
rc = rulefun(t)
if not rc:
msg.debug('orch: successfully ran %s' % step_name)
cn.write(time.asctime(time.localtime()) + '\n')
return rc
# msg.debug('orch: step "%s" with %s in %s\nsource=%s\ntarget=%s' % \
# (step_name, rulefun, cwd, kwds.get('source'), kwds.get('target')))
# have to switch group each time as steps are called already asynchronously
self.bld.set_group(self.worch.group)
return self.bld(name=step_name, rule = runit, **kwds)
@taskgen_method
def control_node(self, step, package = None):
'''
Return a node for the control file given step of this package or optionally another package.
'''
if not package:
package = self.worch.package
filename = '%s_%s' % (package, step)
path = self.worch.format('{control_dir}/{filename}', filename=filename)
return self.path.find_or_declare(path)
@taskgen_method
def make_node(self, path, parent_node=None):
if not parent_node:
if path.startswith('/'):
parent_node = self.bld.root
else:
parent_node = self.bld.bldnode
return parent_node.make_node(path)
import waflib.Logs as msg
from waflib.Build import BuildContext
def worch_package(ctx, worch_config, *args, **kw):
# transfer waf-specific keywords explicitly
kw['name'] = worch_config['package']
kw['features'] = ' '.join(string2list(worch_config['features']))
kw['use'] = worch_config.get('use')
# make the TaskGen object for the package
worch=WorchConfig(**worch_config)
tgen = ctx(*args, worch=worch, **kw)
tgen.env = ctx.all_envs[worch.package]
tgen.env.env = tgen.env.munged_env
msg.debug('orch: package "%s" with features: %s' % \
(kw['name'], ', '.join(kw['features'].split())))
return tgen
BuildContext.worch_package = worch_package
del worch_package
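# Hedged illustration of WorchConfig.format(): configuration items and
# keyword overrides share one namespace during interpolation.
# wc = WorchConfig(package='gcc', version='4.9')
# wc.format('{package}-{version}-{tag}', tag='debug')  # -> 'gcc-4.9-debug'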
|
kammmoun/PFE
|
codes/Ewens&uniform+RSK_rho_1.py
|
Python
|
apache-2.0
| 2,011 | 0.03083 |
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 27 13:35:59 2017
@author: mkammoun.lct
"""
import numpy as np
import matplotlib.pyplot as pl
from bisect import bisect
import math
n=200
n2=10000
def per(theta,n):
perm=[]
for i in range(1,n+1):
if np.random.binomial(1,theta/(float(theta)+i-1))==1:
perm.append(i)
else:
j=np.random.randint(i-1)
            k=perm[j]
perm[j]=i
perm.append(k)
return perm
per(0.1,1000)
def RSK(p):
'''Given a permutation p, spit out a pair of Young tableaux'''
P = []; Q = []
def insert(m, n=0):
'''Insert m into P, then place n in Q at the same place'''
for r in range(len(P)):
if m > P[r][-1]:
P[r].append(m);
return
c = bisect(P[r], m)
            P[r][c], m = m, P[r][c]
P.append([m])
return P
for i in range(len(p)):
insert(int(p[i]), i+1)
return map(len,P)
def pointspos(per):
rsk=RSK(per)
return [rsk[i]-i-1 for i in range(len(rsk)) if (rsk[i]-i -1) >=0]
pointspos([1,2,3])
## seulement les points entre [-3 rac(n) et 3 rac(n)]
alea1={}
alea2={}
for i in range(int(3*n**0.5)+1):
alea1[i]=0
alea2[i]=0
for j in range(n2):
per_unif=np.random.permutation(range(1,np.random.poisson(n)+1))
per_ewens=per(0.1,np.random.poisson(n))
print j
p1=pointspos(per_unif)
p2=pointspos(per_ewens)
for i in p1 :
if i<3*n**0.5:
alea1[i]+=1
for i in p2 :
if i<3*n**0.5:
alea2[i]+=1
x=range(int(3*n**0.5+1))
a1=np.array([alea1[i]for i in x])/float(n2)
a2=np.array([alea2[i]for i in x])/float(n2)
x2=np.array(range(int(1000*2*n**0.5+1)))/1000
a3=np.array(np.arccos(np.array(x2)/(2*n**0.5)))/math.pi
pl.plot(x,a1,"*",label="uniform")
pl.plot(x,a2,"+",label="Ewens")
pl.plot(x2,a3,label="approximation sinus")
pl.legend()
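# Hedged sanity check (Python 2, matching the prints above): RSK([3, 1, 2])
# bumps 3 out of the first row, giving shape [2, 1], so pointspos is [1].
# print RSK([3, 1, 2])        # -> [2, 1]
# print pointspos([3, 1, 2])  # -> [1]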
|
epochblue/nanogen
|
tests/test_models.py
|
Python
|
mit
| 10,406 | 0.001057 |
import datetime
import os
from unittest import mock
import pytest
from nanogen import models
example_post = """\
# Test Post
And this is my _markdown_ **content**.
Look, it also has:
* an
* unordered
* list
"""
example_config = """\
[site]
author = Example user
email = user@example.com
description = A test description
url = http://www.example.com
title = Test Example
"""
def test_post(tmpdir):
f = tmpdir.mkdir('blog').join('2018-01-01-test-post.md')
f.write(example_post)
file_path = os.path.join(str(tmpdir), 'blog', '2018-01-01-test-post.md')
p = models.Post(str(tmpdir), file_path)
assert p.filename == '2018-01-01-test-post.md'
assert p.title == 'Test Post'
assert p.raw_content == example_post
expected_markdown = example_post.strip().splitlines()
assert p.markdown_content == '\n'.join(expected_markdown[2:])
assert p.pub_date == datetime.datetime(2018, 1, 1, 0, 0, 0)
assert p.slug == 'test-post'
assert p.html_filename == 'test-post.html'
assert p.permapath == os.path.join(str(tmpdir), '2018', '01', 'test-post.html')
assert p.permalink == os.path.join('2018', '01', 'test-post.html')
def test_blog_create(tmpdir):
path = tmpdir.mkdir('blog')
config_file = path.join('blog.cfg')
config_file.write(example_config)
blog = models.Blog(str(path))
assert len(blog.posts) == 0
assert blog.config['site']['author'] == 'Example user'
assert blog.config['site']['email'] == 'user@example.com'
assert blog.config['site']['description'] == 'A test description'
assert blog.config['site']['url'] == 'http://www.example.com'
assert blog.config['site']['title'] == 'Test Example'
def test_blog_init(tmpdir):
path = tmpdir.mkdir('blog')
blog = models.Blog(str(path))
blog.init()
listing = [os.path.basename(str(file)) for file in path.listdir()]
assert len(listing) == 4
assert 'blog.cfg' in listing
assert '_layout' in listing
assert '_posts' in listing
assert '_drafts' in listing
def test_blog_new_post(tmpdir):
path = tmpdir.mkdir('blog')
blog = models.Blog(str(path))
blog.init()
before_posts = blog.collect_posts()
assert len(before_posts) == 0
with mock.patch('subprocess.call'):
blog.new_post('Test title', draft=False)
after_posts = blog.collect_posts()
assert len(after_posts) == 1
today = datetime.date.today()
expected_filename = '{}-{:02d}-{:02d}-test-title.md'.format(
today.year,
today.month,
today.day
)
assert after_posts[0].filename == expected_filename
def test_blog_new_draft(tmpdir):
path = tmpdir.mkdir('blog')
blog = models.Blog(str(path))
blog.init()
before_posts = blog.collect_posts()
assert len(before_posts) == 0
with mock.patch('subprocess.call'):
blog.new_post('Test title', draft=True)
after_posts = blog.collect_posts()
assert len(after_posts) == 0
def test_blog_copy_static_files(tmpdir):
path = tmpdir.mkdir('blog')
site_path = path.mkdir('_site')
    # Add a static file to the project
blog = models.Blog(str(path))
blog.init()
blog.copy_static_files()
site_static_path = site_path.join('static')
static_files = [os.path.basename(str(file)) for file in site_static_path.listdir()]
assert 'blog.css' in static_files
def test_blog_generate_posts(tmpdir):
path = tmpdir.mkdir('blog')
site_path = path.mkdir('_site')
# Set up a nanogen blog for posts
blog = models.Blog(str(path))
blog.init()
with mock.patch('subprocess.call'):
blog.new_post('Test title 1', draft=False)
post_template = path.join('_layout').join('post.html')
post_template.write("""\
<!doctype html>
<html>
<body>Single post template would go here.</body>
</html>
""")
# Refresh the blog instance to better emulate real-world usage
blog = models.Blog(str(path))
blog.generate_posts()
today = datetime.date.today()
expected_post_dir = site_path.join('{}'.format(today.year)).join('{:02d}'.format(today.month))
generated_posts = [os.path.basename(str(file)) for file in expected_post_dir.listdir()]
assert len(generated_posts) == 1
assert 'test-title-1.html' in generated_posts
def test_blog_generate_index_page(tmpdir):
path = tmpdir.mkdir('blog')
site_path = path.mkdir('_site')
# Set up a nanogen blog for posts
blog = models.Blog(str(path))
blog.init()
with mock.patch('subprocess.call'):
blog.new_post('Test title 1', draft=False)
# Refresh the blog instance to better emulate real-world usage
blog = models.Blog(str(path))
blog.generate_index_page()
site_dir = [os.path.basename(str(file)) for file in site_path.listdir()]
assert 'index.html' in site_dir
def test_blog_generate_feeds_no_feed_files(tmpdir):
path = tmpdir.mkdir('blog')
site_path = path.mkdir('_site')
# Set up a nanogen blog for posts
blog = models.Blog(str(path))
blog.init()
# Remove the feed files
os.unlink(os.path.join(blog.PATHS['layout'], 'rss.xml'))
os.unlink(os.path.join(blog.PATHS['layout'], 'feed.json'))
# Refresh the blog instance to better emulate real-world usage
blog = models.Blog(str(path))
blog.generate_feeds()
site_dir = [os.path.basename(str(file)) for file in site_path.listdir()]
assert 'rss.xml' not in site_dir
assert 'feed.json' not in site_dir
def test_blog_feeds(tmpdir):
path = tmpdir.mkdir('blog')
site_path = path.mkdir('_site')
# Set up a nanogen blog for posts
blog = models.Blog(str(path))
blog.init()
with mock.patch('subprocess.call'):
blog.new_post('Test title 1', draft=False)
# Refresh the blog instance to better emulate real-world usage
blog = models.Blog(str(path))
blog.generate_feeds()
site_dir = [os.path.basename(str(file)) for file in site_path.listdir()]
assert 'rss.xml' in site_dir
assert 'feed.json' in site_dir
def test_blog_build_and_clean(tmpdir):
path = tmpdir.mkdir('blog')
site_path = path.mkdir('_site')
# Set up a nanogen blog for posts
blog = models.Blog(str(path))
blog.init()
with mock.patch('subprocess.call'):
blog.new_post('Test title 1', draft=False)
post_template = path.join('_layout').join('post.html')
post_template.write("""\
<!doctype html>
<html>
<body>Post template would go here.</body>
</html>
""")
index_template = path.join('_layout').join('index.html')
index_template.write("""\
<!doctype html>
<html>
<body>Index template would go here.</body>
</html>
""")
blog_config = path.join('_layout').join('blog.cfg')
blog_config.write(example_config)
# Refresh the blog instance to better emulate real-world usage
blog = models.Blog(str(path))
blog.build()
site_dir = [os.path.basename(str(file)) for file in site_path.listdir()]
assert 'index.html' in site_dir
today = datetime.date.today()
expected_post_dir = site_path.join('{}'.format(today.year)).join('{:02d}'.format(today.month))
generated_posts = [os.path.basename(str(file)) for file in expected_post_dir.listdir()]
assert len(generated_posts) == 1
assert 'test-title-1.html' in generated_posts
blog.clean()
assert not os.path.isdir(str(site_path))
def test_blog_build_and_clean_with_drafts(tmpdir):
path = tmpdir.mkdir('blog')
preview_path = path.mkdir('_preview')
# Set up a nanogen blog for posts
blog = models.Blog(str(path))
blog.init()
    with mock.patch('subprocess.call'):
blog.new_post('Test post', draft=False)
blog.new_post('Draft post', draft=True)
post_template = path.join('_layout').join('post.html')
post_template.write("""\
<!doctype html>
<html>
<body>Post template would go here.</body>
</html>
""")
index_template = path.join('_layout').join('index.html')
index_template.write("""\
<!doctype html>
<html>
<body>Index template would go here.</body>
</html>
""")
blog_config = path.join('_layout').join('blog
|
adrienverge/yamllint
|
tests/test_yamllint_directives.py
|
Python
|
gpl-3.0
| 17,973 | 0 |
# -*- coding: utf-8 -*-
# Copyright (C) 2016 Adrien Vergé
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from tests.common import RuleTestCase
class YamllintDirectivesTestCase(RuleTestCase):
conf = ('commas: disable\n'
'trailing-spaces: {}\n'
'colons: {max-spaces-before: 1}\n')
def test_disable_directive(self):
self.check('---\n'
'- [valid , YAML]\n'
'- trailing spaces \n'
'- bad : colon\n'
'- [valid , YAML]\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(3, 18, 'trailing-spaces'),
problem2=(4, 8, 'colons'),
problem3=(6, 7, 'colons'),
problem4=(6, 26, 'trailing-spaces'))
self.check('---\n'
'- [valid , YAML]\n'
'- trailing spaces \n'
'# yamllint disable\n'
'- bad : colon\n'
'- [valid , YAML]\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem=(3, 18, 'trailing-spaces'))
self.check('---\n'
'- [valid , YAML]\n'
'# yamllint disable\n'
'- trailing spaces \n'
'- bad : colon\n'
'- [valid , YAML]\n'
'# yamllint enable\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(8, 7, 'colons'),
problem2=(8, 26, 'trailing-spaces'))
def test_disable_directive_with_rules(self):
self.check('---\n'
'- [valid , YAML]\n'
'- trailing spaces \n'
'# yamllint disable rule:trailing-spaces\n'
'- bad : colon\n'
'- [valid , YAML]\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(3, 18, 'trailing-spaces'),
problem2=(5, 8, 'colons'),
problem3=(7, 7, 'colons'))
self.check('---\n'
'- [valid , YAML]\n'
'# yamllint disable rule:trailing-spaces\n'
'- trailing spaces \n'
'- bad : colon\n'
'- [valid , YAML]\n'
'# yamllint enable rule:trailing-spaces\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(5, 8, 'colons'),
problem2=(8, 7, 'colons'),
problem3=(8, 26, 'trailing-spaces'))
self.check('---\n'
'- [valid , YAML]\n'
'# yamllint disable rule:trailing-spaces\n'
'- trailing spaces \n'
'- bad : colon\n'
'- [valid , YAML]\n'
'# yamllint enable\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(5, 8, 'colons'),
problem2=(8, 7, 'colons'),
problem3=(8, 26, 'trailing-spaces'))
self.check('---\n'
'- [valid , YAML]\n'
'# yamllint disable\n'
'- trailing spaces \n'
'- bad : colon\n'
'- [valid , YAML]\n'
'# yamllint enable rule:trailing-spaces\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem=(8, 26, 'trailing-spaces'))
self.check('---\n'
'- [valid , YAML]\n'
'# yamllint disable rule:colons\n'
'- trailing spaces \n'
'# yamllint disable rule:trailing-spaces\n'
'- bad : colon\n'
'- [valid , YAML]\n'
'# yamllint enable rule:colons\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(4, 18, 'trailing-spaces'),
problem2=(9, 7, 'colons'))
def test_disable_line_directive(self):
self.check('---\n'
'- [valid , YAML]\n'
'- trailing spaces \n'
'# yamllint disable-line\n'
'- bad : colon\n'
'- [valid , YAML]\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(3, 18, 'trailing-spaces'),
problem2=(7, 7, 'colons'),
problem3=(7, 26, 'trailing-spaces'))
self.check('---\n'
'- [valid , YAML]\n'
'- trailing spaces \n'
'- bad : colon # yamllint disable-line\n'
'- [valid , YAML]\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(3, 18, 'trailing-spaces'),
problem2=(6, 7, 'colons'),
problem3=(6, 26, 'trailing-spaces'))
self.check('---\n'
'- [valid , YAML]\n'
'- trailing spaces \n'
'- bad : colon\n'
'- [valid , YAML] # yamllint disable-line\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(3, 18, 'trailing-spaces'),
problem2=(4, 8, 'colons'),
problem3=(6, 7, 'colons'),
problem4=(6, 26, 'trailing-spaces'))
def test_disable_line_directive_with_rules(self):
self.check('---\n'
'- [valid , YAML]\n'
'# yamllint disable-line rule:colons\n'
'- trailing spaces \n'
'- bad : colon\n'
'- [valid , YAML]\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(4, 18, 'trailing-spaces'),
problem2=(5, 8, 'colons'),
problem3=(7, 7, 'colons'),
problem4=(7, 26, 'trailing-spaces'))
self.check('---\n'
'- [valid , YAML]\n'
'- trailing spaces # yamllint disable-line rule:colons \n'
'- bad : colon\n'
'- [valid , YAML]\n'
'- bad : colon and spaces \n'
'- [valid , YAML]\n',
self.conf,
problem1=(3, 55, 'trailing-spaces'),
problem2=(4, 8, 'colons'),
problem3=(6, 7, 'colons'),
problem4=(6, 26, 'trailing-spaces'))
self.check('---\n'
'- [valid , YAML]\n'
'- trailing spaces \n'
'# yamllint disable-line rule:colons\n'
'- bad : colon\n'
'- [valid
|
|
ercius/openNCEM
|
ncempy/algo/distortion.py
|
Python
|
gpl-3.0
| 8,278 | 0.010993 |
"""
Module to handle distortions in diffraction patterns.
"""
import numpy as np
import scipy.optimize
def filter_ring(points, center, rminmax):
"""Filter points to be in a certain radial distance range from center.
Parameters
----------
points : np.ndarray
Candidate points.
center : np.ndarray or tuple
Center position.
rminmax : tuple
Tuple of min and max radial distance.
Returns
-------
: np.ndarray
List of filtered points, two column array.
"""
try:
# points have to be 2D array with 2 columns
assert(isinstance(points, np.ndarray))
assert(points.shape[1] == 2)
assert(len(points.shape) == 2)
# center can either be tuple or np.array
center = np.array(center)
center = np.reshape(center, 2)
rminmax = np.array(rminmax)
rminmax = np.reshape(rminmax, 2)
except:
raise TypeError('Something wrong with the input!')
# calculate radii
rs = np.sqrt( np.square(points[:,0]-center[0]) + np.square(points[:,1]-center[1]) )
# filter by given limits
sel = (rs>=rminmax[0])*(rs<=rminmax[1])
if sel.any():
return points[sel]
else:
return None
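# Illustrative usage sketch of filter_ring (the coordinates below are made up):
def _demo_filter_ring():
    pts = np.array([[64., 64.], [100., 128.], [250., 250.]])
    # radii from (128, 128): ~90.5, 28.0, ~172.5 -> only the first point survives
    print(filter_ring(pts, center=(128., 128.), rminmax=(50., 120.)))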
def points_topolar(points, center):
"""Convert points to polar coordinate system.
Can be either in pixel or real dim, but should be the same for points and center.
Parameters
----------
points : np.ndarray
Positions as two column array.
center : np.ndarray or tuple
Origin of the polar coordinate system.
Returns
-------
: np.ndarray
Positions in polar coordinate system as two column array (r, theta).
"""
try:
# points have to be 2D array with 2 columns
assert(isinstance(points, np.ndarray))
assert(points.shape[1] == 2)
assert(len(points.shape) == 2)
# center can either be tuple or np.array
        center = np.array(center)
center = np.reshape(center, 2)
except:
raise TypeError('Something wrong with the input!')
# calculate radii
rs = np.sqrt( np.square(points[:,0]-center[0]) + np.square(points[:,1]-center[1]) )
# calculate angle
thes = np.arctan2(points[:,1]-center[1], points[:,0]-center[0])
return np.array( [rs, thes] ).transpose()
def residuals_center(param, data):
    """Residual function for minimizing the deviations from the mean radial distance.
Parameters
----------
param : np.ndarray
The center to optimize.
data : np.ndarray
The points in x,y coordinates of the original image.
Returns
-------
: np.ndarray
Residuals.
"""
# manually calculating the radii, as we do not need the thetas
rs = np.sqrt( np.square(data[:,0]-param[0]) + np.square(data[:,1]-param[1]) )
return rs-np.mean(rs)
def optimize_center(points, center, maxfev=1000, verbose=None):
"""Optimize the center by minimizing the sum of square deviations from the mean radial distance.
Parameters
----------
points : np.ndarray
The points to which the optimization is done (x,y coords in org image).
center : np.ndarray or tuple
Initial center guess.
maxfev : int
Max number of iterations forwarded to scipy.optimize.leastsq().
verbose : bool
Set to get verbose output.
Returns
-------
: np.ndarray
The optimized center.
"""
try:
# points have to be 2D array with 2 columns
assert(isinstance(points, np.ndarray))
assert(points.shape[1] == 2)
assert(len(points.shape) == 2)
# center can either be tuple or np.array
center = np.array(center)
center = np.reshape(center, 2)
except:
raise TypeError('Something wrong with the input!')
# run the optimization
popt, flag = scipy.optimize.leastsq(residuals_center, center, args=points, maxfev=maxfev)
if flag not in [1, 2, 3, 4]:
print('WARNING: center optimization failed.')
if verbose:
        print('optimized center: ({}, {})'.format(popt[0], popt[1]))
return popt
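# Minimal sketch of center refinement on synthetic data (all values assumed):
# points on a circle of radius 100 around (10, -5), starting guess (0, 0).
def _demo_optimize_center():
    thetas = np.linspace(0., 2. * np.pi, 36, endpoint=False)
    pts = np.stack([10. + 100. * np.cos(thetas),
                    -5. + 100. * np.sin(thetas)], axis=1)
    print(optimize_center(pts, (0., 0.), verbose=True))  # approx. [10., -5.]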
def rad_dis(theta, alpha, beta, order=2):
"""Radial distortion due to ellipticity or higher order distortion.
Relative distortion, to be multiplied with radial distance.
Parameters
----------
theta : np.ndarray
Angles at which to evaluate. Must be float.
alpha : float
Orientation of major axis.
beta : float
        Strength of distortion (beta = (1 - r_min/r_max) / (1 + r_min/r_max)).
order : int
Order of distortion.
Returns
-------
: np.ndarray
Distortion factor.
"""
return (1.-np.square(beta))/np.sqrt(1.+np.square(beta)-2.*beta*np.cos(order*(theta+alpha)))
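# Quick numerical check of the distortion factor (angles and values are
# illustrative): beta = 0 gives a factor of 1 everywhere, while a small beta
# with order=2 gives the elliptical modulation used for diffraction rings.
def _demo_rad_dis():
    thetas = np.linspace(0., 2. * np.pi, 8, endpoint=False)
    print(rad_dis(thetas, alpha=0., beta=0.))   # all ones
    print(rad_dis(thetas, alpha=0., beta=0.1))  # modulated around one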
def residuals_dis(param, points, ns):
"""Residual function for distortions.
Parameters
----------
param : np.ndarray
Parameters for distortion.
points : np.ndarray
Points to fit to.
ns : tuple
List of orders to account for.
Returns
-------
: np.ndarray
Residuals.
"""
est = param[0]*np.ones(points[:, 1].shape)
for i in range(len(ns)):
        est *= rad_dis(points[:, 1], param[i*2+1], param[i*2+2], ns[i])
return points[:, 0] - est
def optimize_distortion(points, ns, maxfev=1000, verbose=False):
"""Optimize distortions.
    The orders in ns are first fitted one after another; the result is then refined in a final fit of all orders simultaneously.
Parameters
----------
points : np.ndarray
Points to optimize to (in polar coords).
ns : tuple
List of orders to correct for.
maxfev : int
Max number of iterations forwarded to scipy.optimize.leastsq().
verbose : bool
Set for verbose output.
Returns
-------
: np.ndarray
Optimized parameters according to ns.
"""
try:
assert(isinstance(points, np.ndarray))
assert(points.shape[1] == 2)
# check points to be sufficient for fitting
assert(points.shape[0] >= 3)
# check orders
assert(len(ns)>=1)
except:
raise TypeError('Something wrong with the input!')
# init guess for full fit
init_guess = np.ones(len(ns)*2+1)
init_guess[0] = np.mean(points[:,0])
# make a temporary copy
points_tmp = np.copy(points)
if verbose:
print('correction for {} order distortions.'.format(ns))
print('starting with subsequent fitting:')
# subsequently fit the orders
for i in range(len(ns)):
# optimize order to points_tmp
popt, flag = scipy.optimize.leastsq(residuals_dis, np.array((init_guess[0], 0.1, 0.1)),
args=(points_tmp, (ns[i],)), maxfev=maxfev)
if flag not in [1, 2, 3, 4]:
print('WARNING: optimization of distortions failed.')
# information
if verbose:
print('fitted order {}: R={} alpha={} beta={}'.format(ns[i], popt[0], popt[1], popt[2]))
# save for full fit
init_guess[i*2+1] = popt[1]
init_guess[i*2+2] = popt[2]
# do correction
points_tmp[:, 0] /= rad_dis(points_tmp[:, 1], popt[1], popt[2], ns[i])
# full fit
if verbose:
print('starting the full fit:')
popt, flag = scipy.optimize.leastsq(residuals_dis, init_guess, args=(points, ns), maxfev=maxfev)
if flag not in [1, 2, 3, 4]:
print('WARNING: optimization of distortions failed.')
if verbose:
print('fitted to: R={}'.format(popt[0]))
for i in range(len(ns)):
|
jasonsbrooks/ARTIST
|
src/artist_generator/analyze/chords.py
|
Python
|
mit
| 10,666 | 0.008063 |
from db import get_engines,get_sessions,Song,Track,Note
from iter import TimeIterator
from utils import Counter
from sqlalchemy.orm import sessionmaker
from preference_rules import *
import music21,sys
from optparse import OptionParser
from multiprocessing import Process,Queue
class ChordSpan(object):
"""
A ChordSpan is a series of TimeInstances that all have the same root.
Each ChordSpan also maintains a pointer (prev_cs) to the previous ChordSpan computed in the song.
"""
def __init__(self,initial_ts,prev_cs):
"""
Initialize a ChordSpan
Args:
initial_ts: the first TimeInstance to consider
prev_cs: the previous TimeInstance
"""
self.tss = [initial_ts]
self.root = None
# a back-pointer to the previous best chord-span
self.prev_cs = prev_cs
def __repr__(self):
return "<ChordSpan: root=%r>" % (self.root)
def last_ts(self):
"""
Calculate and return the last TimeInstance in this ChordSpan.
Returns:
TimeInstance: the last time instance in the ChordSpan
"""
return max(self.tss,key=lambda ts: ts.time)
def add(self,ts):
"""
Add a TimeInstance to this ChordSpan
Args:
ts: the TimeInstance to add
"""
self.tss.append(ts)
def remove(self,ts):
"""
Remove a TimeInstance from this ChordSpan
Args:
ts: the TimeInstance to remove
"""
self.tss.remove(ts)
def notes(self):
"""
Flatten all notes in the TimeInstances that comprise this ChordSpan.
Returns:
All notes played in this ChordSpan
"""
res = []
# iterate through all chords
for ts in self.tss:
# all notes in this time instance
            for note in ts.notes():
res.append(note)
return res
def roman_numeral(self,track):
"""
Calculate the roman numeral corresponding to the computed root and key of the corresponding track
Args:
            track: The track to which a Note in this ChordSpan belongs. Note: Here we assume that at any moment in
time, there is only one key signature in all tracks of the song.
Returns:
            the scale degree of the music21 roman numeral (an int).
"""
pitch = music21.key.sharpsToPitch(track.key_sig_top)
key = music21.key.Key(pitch)
if track.key_sig_bottom == 0:
scale = music21.scale.MajorScale(self.root.name)
else:
scale = music21.scale.MelodicMinorScale(self.root.name)
chord = music21.chord.Chord([scale.chord.root(),scale.chord.third,scale.chord.fifth])
return music21.roman.romanNumeralFromChord(chord,key).scaleDegree
def label(self,depth=0):
"""
Label all the notes in this ChordSpan with the determined root.
Then proceed to recursively label the preceding ChordSpan
"""
rn = None
# label all the notes in this chord span
for note in self.notes():
if self.root:
note.root = self.root.midi
note.iso_root = self.root.name
if not rn:
rn = self.roman_numeral(note.track)
note.roman = rn
# label the previous chord span (assuming we haven't surpassed max recursion limit)
if self.prev_cs and depth < sys.getrecursionlimit() - 1:
            self.prev_cs.label(depth + 1)
def pr_score(self,m_root):
"""
Calculate the preference rule score, when using m_root as a root for this ChordSpan.
Note this method is the core of the Preference Rule approach to Harmonic Analysis
Such an approach is heavily inspired by the work of Daniel Sleater and David Temperley at CMU
in their Melisma Music Analyzer: http://www.link.cs.cmu.edu/melisma/
Args:
m_root (Music21.note.Note): a note representing the proposed root of this chord
Returns:
the score obtained using this note as a root
"""
last_ts = self.last_ts()
ts_notes = last_ts.notes()
# calculate the beat strength
stren = beat_strength(ts_notes)
# compatibility scores
comp_score = compatibility(ts_notes,m_root)
# difference from previous chord root on line of fifths
lof = (lof_difference(self.prev_cs.root,m_root) if self.prev_cs else 0)
return STRENGTH_MULTIPLIER * stren + COMPATIBILITY_MULTIPLIER * comp_score + LOF_MULTIPLIER * lof
def calc_best_root(self):
"""
Calculate the best root for this chord span
Returns:
the combined score of this ChordSpan and its predecessor
"""
# start with C, weight of 0
best_root,best_weight = music21.note.Note('C'),-len(line_of_fifths)
# try all possible roots
for m_root in music21.scale.ChromaticScale('C').pitches:
val = self.pr_score(m_root)
if val > best_weight:
best_root,best_weight = m_root,val
# use this as the chord-span root
self.root = best_root
# calculate the combined score
prev_cs_score = (self.prev_cs.score if self.prev_cs else 0)
return prev_cs_score + best_weight
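# Standalone sketch of the music21 roman-numeral lookup used in
# ChordSpan.roman_numeral (the chord and key below are made-up examples):
def _demo_roman_numeral():
    chord = music21.chord.Chord(['C4', 'E4', 'G4'])
    key = music21.key.Key('C')
    print(music21.roman.romanNumeralFromChord(chord, key).scaleDegree)  # 1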
# increased recursion limit for dynamic programming back-pointer labelling process
RECURSION_LIMIT = 10000
class HarmonicAnalyzer(Process):
"""
Run Harmonic Analysis in a separate process.
Such an approach is heavily inspired by the work of Daniel Sleater and David Temperley at CMU
in their Melisma Music Analyzer: http://www.link.cs.cmu.edu/melisma/
"""
def __init__(self,durk_step,engine,counter):
"""
Initialize the Harmonic Analyzer process
Args:
durk_step: steps between TimeInstances
engine: the database engine to draw songs from
counter (Counter): atomic song counter
"""
# Initialize the Process
Process.__init__(self)
# time step used in TimeIterator
self.durk_step = durk_step
# session to pull songs from
Session = sessionmaker(bind=engine)
self.session = Session()
# Counter object representing number of songs that have been processed
self.counter = counter
# increase the recursion limit
sys.setrecursionlimit(RECURSION_LIMIT)
def run(self):
"""
Start the Process. Note that this method overrides Process.run()
"""
# Iterate through every song in the database attached to this process
for song in self.session.query(Song).all():
# Atomically increment the song counter
count = self.counter.incrementAndGet()
print count, ". ", song
# skip songs that have already been analyzed
if song.analyzed:
print count, ". Already analyzed. Skipping."
continue
# and run the analysis
try:
self.analyze(song)
# mark this song as analyzed
song.analyzed = True
self.session.commit()
except Exception,e:
sys.stderr.write("Exception when processing " + str(song) + ":\n")
sys.stderr.write("\t" + str(e) + "\n")
def analyze(self,song):
"""
Run Harmonic Analysis on a particular Song
Args:
song (Song): the song to analyze
"""
cs,idx = None,0
try:
# construct the iterator
ti = TimeIterator(song,self.durk_step)
except ValueError,e:
# something is very wrong with this song... let's skip it!
sys.stderr.write("Exception when processing " + str(song) + ":\n")
sys.stderr.write("\t" + str(e) + "\n")
return False
# iterate through every TimeInstance in the song
for ts in ti:
# and consider what to do...
cs = self.consider_ts(cs,ts)
# print idx, ts, "--", cs.score, ":", cs
|
EmanueleCannizzaro/scons
|
src/engine/SCons/Tool/MSCommon/sdk.py
|
Python
|
mit
| 14,245 | 0.00702 |
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/MSCommon/sdk.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
__doc__ = """Module to detect the Platform/Windows SDK
PSDK 2003 R1 is the earliest version detected.
"""
import os
import SCons.Errors
import SCons.Util
import common
debug = common.debug
# SDK Checks. This is of course a mess as everything else on MS platforms. Here
# is what we do to detect the SDK:
#
# For Windows SDK >= 6.0: just look into the registry entries:
# HKLM\Software\Microsoft\Microsoft SDKs\Windows
# All the keys in there are the available versions.
#
# For Platform SDK before 6.0 (2003 server R1 and R2, etc...), there does not
# seem to be any sane registry key, so the precise location is hardcoded.
#
# For versions below 2003R1, it seems the PSDK is included with Visual Studio?
#
# Also, per the following:
# http://benjamin.smedbergs.us/blog/tag/atl/
# VC++ Professional comes with the SDK, VC++ Express does not.
# Location of the SDK (checked for 6.1 only)
_CURINSTALLED_SDK_HKEY_ROOT = \
r"Software\Microsoft\Microsoft SDKs\Windows\CurrentInstallFolder"
class SDKDefinition(object):
"""
An abstract base class for trying to find installed SDK directories.
"""
def __init__(self, version, **kw):
self.version = version
self.__dict__.update(kw)
def find_sdk_dir(self):
"""Try to find the MS SDK from the registry.
Return None if failed or the directory does not exist.
"""
if not SCons.Util.can_read_reg:
debug('find_sdk_dir(): can not read registry')
return None
hkey = self.HKEY_FMT % self.hkey_data
debug('find_sdk_dir(): checking registry:%s'%hkey)
try:
sdk_dir = common.read_reg(hkey)
except SCons.Util.WinError, e:
debug('find_sdk_dir(): no SDK registry key %s' % repr(hkey))
return None
debug('find_sdk_dir(): Trying SDK Dir: %s'%sdk_dir)
if not os.path.exists(sdk_dir):
debug('find_sdk_dir(): %s not on file system' % sdk_dir)
return None
ftc = os.path.join(sdk_dir, self.sanity_check_file)
if not os.path.exists(ftc):
debug("find_sdk_dir(): sanity check %s not found" % ftc)
return None
return sdk_dir
def get_sdk_dir(self):
"""Return the MSSSDK given the version string."""
try:
return self._sdk_dir
except AttributeError:
sdk_dir = self.find_sdk_dir()
self._sdk_dir = sdk_dir
return sdk_dir
def get_sdk_vc_script(self,host_arch, target_arch):
""" Return the script to initialize the VC compiler installed by SDK
"""
if (host_arch == 'amd64' and target_arch == 'x86'):
# No cross tools needed compiling 32 bits on 64 bit machine
host_arch=target_arch
arch_string=target_arch
if (host_arch != target_arch):
arch_string='%s_%s'%(host_arch,target_arch)
debug("sdk.py: get_sdk_vc_script():arch_string:%s host_arch:%s target_arch:%s"%(arch_string,
|
host_arch,
target_arch))
file=self.vc_setup_scripts.get(arch_string,None)
debug("sdk.py: get_sdk_vc_script():file:%s"%file)
return file
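# Sketch of the arch_string rule implemented above, isolated from SCons
# (the host/target pairs one would pass in are hypothetical):
def _demo_arch_string(host_arch, target_arch):
    # compiling 32-bit on a 64-bit host counts as a native x86 build
    if host_arch == 'amd64' and target_arch == 'x86':
        host_arch = target_arch
    if host_arch == target_arch:
        return target_arch                     # e.g. 'x86'
    return '%s_%s' % (host_arch, target_arch)  # e.g. 'x86_amd64'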
class WindowsSDK(SDKDefinition):
"""
A subclass for trying to find installed Windows SDK directories.
"""
HKEY_FMT = r'Software\Microsoft\Microsoft SDKs\Windows\v%s\InstallationFolder'
    def __init__(self, *args, **kw):
SDKDefinition.__init__(self, *args, **kw)
self.hkey_data = self.version
class PlatformSDK(SDKDefinition):
"""
A subclass for trying to find installed Platform SDK directories.
"""
HKEY_FMT = r'Software\Microsoft\MicrosoftSDK\InstalledSDKS\%s\Install Dir'
def __init__(self, *args, **kw):
SDKDefinition.__init__(self, *args, **kw)
self.hkey_data = self.uuid
#
# The list of VC initialization scripts installed by the SDK
# These should be tried if the vcvarsall.bat TARGET_ARCH fails
preSDK61VCSetupScripts = { 'x86' : r'bin\vcvars32.bat',
'amd64' : r'bin\vcvarsamd64.bat',
'x86_amd64': r'bin\vcvarsx86_amd64.bat',
'x86_ia64' : r'bin\vcvarsx86_ia64.bat',
'ia64' : r'bin\vcvarsia64.bat'}
SDK61VCSetupScripts = {'x86' : r'bin\vcvars32.bat',
'amd64' : r'bin\amd64\vcvarsamd64.bat',
'x86_amd64': r'bin\x86_amd64\vcvarsx86_amd64.bat',
'x86_ia64' : r'bin\x86_ia64\vcvarsx86_ia64.bat',
'ia64' : r'bin\ia64\vcvarsia64.bat'}
SDK70VCSetupScripts = { 'x86' : r'bin\vcvars32.bat',
'amd64' : r'bin\vcvars64.bat',
'x86_amd64': r'bin\vcvarsx86_amd64.bat',
'x86_ia64' : r'bin\vcvarsx86_ia64.bat',
'ia64' : r'bin\vcvarsia64.bat'}
# The list of support SDKs which we know how to detect.
#
# The first SDK found in the list is the one used by default if there
# are multiple SDKs installed. Barring good reasons to the contrary,
# this means we should list SDKs from most recent to oldest.
#
# If you update this list, update the documentation in Tool/mssdk.xml.
SupportedSDKList = [
WindowsSDK('7.1',
sanity_check_file=r'bin\SetEnv.Cmd',
include_subdir='include',
lib_subdir={
'x86' : ['lib'],
'x86_64' : [r'lib\x64'],
'ia64' : [r'lib\ia64'],
},
vc_setup_scripts = SDK70VCSetupScripts,
),
WindowsSDK('7.0A',
sanity_check_file=r'bin\SetEnv.Cmd',
include_subdir='include',
lib_subdir={
'x86' : ['lib'],
'x86_64' : [r'lib\x64'],
'ia64' : [r'lib\ia64'],
},
vc_setup_scripts = SDK70VCSetupScripts,
),
WindowsSDK('7.0',
sanity_check_file=r'bin\SetEnv.Cmd',
include_subdir='include',
lib_subdir={
'x86' : ['lib'],
'x86_64' : [r'lib\x64'],
'ia64' : [r'lib\ia64'],
},
vc_setup_scripts = SDK70VCSetupScripts,
),
WindowsSDK('6.1',
sanity_check_file=r'bin\SetEnv.Cmd',
include_subdir='include',
lib_subdir={
'x86' : ['lib'],
'x86_64' : [r'lib\x64'],
'ia64' : [r'lib\ia64'],
},
vc_setup_scripts = SDK61VCSetupScripts,
),
WindowsSDK('6.0A',
|
beckjake/python3-hglib
|
tests/test-paths.py
|
Python
|
mit
| 512 | 0.001953 |
from . import common
import os
import hglib
class test_paths(common.basetest):
def test_basic(self):
        f = open('.hg/hgrc', 'a')
f.write('[paths]\nfoo = bar\n')
f.close()
        # hgrc isn't watched for changes yet, have to reopen
self.client = hglib.open()
paths = self.client.paths()
self.assertEquals(len(paths), 1)
self.assertEquals(paths['foo'], os.path.abspath('bar'))
self.assertEquals(self.client.paths('foo'), os.path.abspath('bar'))
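# Minimal standalone sketch of the hglib calls exercised above; assumes an
# existing Mercurial repository in the current working directory.
def _demo_paths():
    client = hglib.open()
    print(client.paths())       # dict mapping alias -> absolute path/URL
    print(client.paths('foo'))  # single-alias lookup, as asserted above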
|
crmccreary/openerp_server
|
openerp/addons/document_ftp/wizard/ftp_browse.py
|
Python
|
agpl-3.0
| 2,556 | 0.004304 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
# from tools.translate import _
from .. import ftpserver
class document_ftp_browse(osv.osv_memory):
_name = 'document.ftp.browse'
_description = 'Document FTP Browse'
_columns = {
'url' : fields.char('FTP Server', size=64, required=True),
}
def default_get(self, cr, uid, fields, context=None):
res = {}
if 'url' in fields:
user_pool = self.pool.get('res.users')
current_user = user_pool.browse(cr, uid, uid, context=context)
data_pool = self.pool.get('ir.model.data')
aid = data_pool._get_id(cr, uid, 'document_ftp', 'action_document_browse')
aid = data_pool.browse(cr, uid, aid, context=context).res_id
ftp_url = self.pool.get('ir.actions.url').browse(cr, uid, aid, context=context)
url = ftp_url.url and ftp_url.url.split('ftp://') or []
if url:
url = url[1]
if url[-1] == '/':
url = url[:-1]
else:
url = '%s:%s' %(ftpserver.HOST, ftpserver.PORT)
res['url'] = 'ftp://%s@%s'%(current_user.login, url)
return res
    def browse_ftp(self, cr, uid, ids, context=None):
data_id = ids and ids[0] or False
data = self.browse(cr, uid, data_id, context=context)
final_url = data.url
return {
            'type': 'ir.actions.act_url',
            'url': final_url,
'target': 'new'
}
document_ftp_browse()
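# Illustrative sketch of the URL assembly performed in default_get above
# (the login and stored action URL are made-up values):
def _demo_ftp_url(login='demo', action_url='ftp://myserver:8021/'):
    url = action_url.split('ftp://')[1]
    if url[-1] == '/':
        url = url[:-1]
    return 'ftp://%s@%s' % (login, url)  # -> 'ftp://demo@myserver:8021'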
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Cloudlie/pythonlearning
|
ex18.py
|
Python
|
mit
| 514 | 0.015564 |
# this one is like your scripts with argv
def print_two(*args):
arg1,arg2 = args
print "arg1: %r, arg2: %r" % (arg1, arg2)
# ok, that *args is actually pointless, we can just do this
def print_two_again(arg1, arg2):
print "arg1: %r, arg2: %r" % (arg1, arg2)
# this just takes one arguments
def print_one(arg1):
print "arg1: %r" % arg1
# this just takes no arguments
def print_none():
print "I got nothin'."
print_two("Zed", "Shaw
|
")
print_two_again("Zed", "Shaw")
print_one("First")
print_none()
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractCnoveltranslationsCom.py
|
Python
|
bsd-3-clause
| 784 | 0.026786 |
def extractCnoveltranslationsCom(item):
'''
Parser for 'cnoveltranslations.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('what if my brother is too good?', 'What if My Brother is Too Good?', 'translated'),
        ('i am this type of woman', 'I Am This Type of Woman', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
DanteOnline/free-art
|
venv/bin/painter.py
|
Python
|
gpl-3.0
| 2,138 | 0.000935 |
#!/home/dante/Projects/free-art/venv/bin/python3
#
# The Python Imaging Library
# $Id$
#
# this demo script illustrates pasting into an already displayed
# photoimage. note that the current version of Tk updates the whole
# image every time we paste, so to get decent performance, we split
# the image into a set of tiles.
#
try:
from tkinter import Tk, Canvas, NW
except ImportError:
from Tkinter import Tk, Canvas, NW
from PIL import Image, ImageTk
import sys
#
# painter widget
class PaintCanvas(Canvas):
def __init__(self, master, image):
Canvas.__init__(self, master, width=image.size[0], height=image.size[1])
# fill the canvas
self.tile = {}
self.tilesize = tilesize = 32
xsize, ysize = image.size
for x in range(0, xsize, tilesize):
for y in range(0, ysize, tilesize):
box = x, y, min(xsize, x+tilesize), min(ysize, y+tilesize)
tile = ImageTk.PhotoImage(image.crop(box))
self.create_image(x, y, image=tile, anchor=NW)
self.tile[(x, y)] = box, tile
self.image = image
self.bind("<B1-Motion>", self.paint)
def paint(self, event):
xy = event.x - 10, event.y - 10, event.x + 10, event.y + 10
im = self.image.crop(xy)
# process the image in some fashion
im = im.convert("L")
self.image.paste(im, xy)
self.repair(xy)
def repair(self, box):
# update canvas
dx = box[0] % self.tilesize
dy = box[1] % self.tilesize
for x in range(box[0]-dx, box[2]+1, self.tilesize):
for y in range(box[1]-dy, box[3]+1, self.tilesize):
try:
xy, tile = self.tile[(x, y)]
tile.paste(self.image.crop(xy))
                except KeyError:
pass # outside the image
self.update_idletasks()
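# Sketch of the tile-grid snapping done in repair() (the box values are made
# up): the dirty rectangle is aligned down to the 32-px tile grid so only the
# affected tiles get repasted.
def _demo_tile_grid(box=(37, 70, 55, 90), tilesize=32):
    dx, dy = box[0] % tilesize, box[1] % tilesize
    xs = list(range(box[0] - dx, box[2] + 1, tilesize))  # [32]
    ys = list(range(box[1] - dy, box[3] + 1, tilesize))  # [64]
    return xs, ys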
#
# main
if len(sys.argv) != 2:
print("Usage: painter file")
sys.exit(1)
root = Tk()
im = Image.open(sys.argv[1])
if im.mode != "RGB":
im = im.convert("RGB")
PaintCanvas(root, im).pack()
root.mainloop()
|
MattPerron/esper
|
esper/query/management/commands/gender_tracks.py
|
Python
|
apache-2.0
| 2,055 | 0.001946 |
from django.core.management.base import BaseCommand
from faceDB.face_db import FaceDB
from faceDB.face import FaceCluster
from faceDB.util import * # only required for saving cluster images
from carnie_helper import RudeCarnie
from query.models import *
import random
import json
class Command(BaseCommand):
help = 'Find genders for all the detected faces'
def add_arguments(self, parser):
parser.add_argument('path')
def handle(self, *args, **options):
with open(options['path']) as f:
paths = [s.strip() for s in f.readlines()]
model_dir = '/app/deps/rude-carnie/inception_gender_checkpoint'
rc = RudeCarnie(model_dir=model_dir)
for path in paths:
            confident = 0
if path == '':
return
video = Video.objects.filter(path=path).get()
labelset = video.detected_labelset()
tracks = Track.objects.filter(first_frame__labelset=labelset).all()
for track in tracks:
if track.gender != '0':
print 'skipping_track', track.id
continue
faces = Face.objects.filter(track=track)
print track.id
print("len of faces for path {}, is {}".format(path, len(faces)))
imgs = ['./assets/thumbnails/{}_{}.jpg'.format(labelset.id, f.id)
for f in faces]
best = rc.get_gender(imgs)
# Update each of the faces.
male_sum = 0.0
female_sum = 0.0
for i, face in enumerate(faces):
if best[i] is None:
# couldn't get gender output for some reason
continue
if best[i][0] == 'M':
male_sum += best[i][1]
elif best[i][0] == 'F':
female_sum += best[i][1]
track.gender = 'M' if male_sum>female_sum else 'F'
track.save()
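# Standalone sketch of the confidence-weighted vote above; the (label, score)
# pairs stand in for rc.get_gender output and are invented for illustration.
def _demo_track_gender(best):
    male_sum, female_sum = 0.0, 0.0
    for entry in best:
        if entry is None:  # classifier gave no output for this face
            continue
        if entry[0] == 'M':
            male_sum += entry[1]
        elif entry[0] == 'F':
            female_sum += entry[1]
    return 'M' if male_sum > female_sum else 'F'
# e.g. _demo_track_gender([('M', 0.9), ('F', 0.6), None, ('F', 0.5)]) -> 'F'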
|
welltempered/rpy2-heroku
|
rpy/robjects/tests/__init__.py
|
Python
|
gpl-2.0
| 1,832 | 0.003821 |
import unittest
import testRObject
import testVector
import testArray
import testDataFrame
import testFormula
import testFunction
import testEnvironment
import testRobjects
import testMethods
import testPackages
import testHelp
import testLanguage
# wrap this nicely so a warning is issued if no numpy present
import testNumpyConversions
def suite():
suite_RObject = testRObject.suite()
suite_Vector = testVector.suite()
suite_Array = testArray.suite()
suite_DataFrame = testDataFrame.suite()
suite_Function = testFunction.suite()
suite_Environment = testEnvironment.suite()
suite_Formula = testFormula.suite()
suite_Robjects = testRobjects.suite()
suite_NumpyConversions = testNumpyConversions.suite()
suite_Methods = testMethods.suite()
    suite_Packages = testPackages.suite()
suite_Help = testHelp.suite()
suite_Language = testLanguage.suite()
alltests = unittest.TestSuite([suite_RObject,
suite_Vector,
suite_Array,
suite_DataFrame,
suite_Function,
suite_Environment,
suite_Formula,
suite_Robjects,
suite_Methods,
suite_NumpyConversions,
suite_Packages,
suite_Help,
suite_Language
])
return alltests
def main():
r = unittest.TestResult()
suite().run(r)
return r
if __name__ == '__main__':
tr = unittest.TextTestRunner(verbosity = 2)
suite = suite()
tr.run(suite)
|
Lapin-Blanc/AS_STAGES
|
django_calendar/views.py
|
Python
|
mit
| 8,052 | 0.007211 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.shortcuts import render
from django.http import HttpResponse, JsonResponse
from django.utils import formats, dateparse, timezone
from .models import Period, Traineeship, Student
from django.core.exceptions import ValidationError
from datetime import datetime, date
from io import BytesIO
from docx import Document
from docx.shared import Pt
def json_access_error(request):
return JsonResponse(
{
"errors": [
{
"status": "403",
"source": { "pointer": request.path },
"detail": "vous n'êtes plus autorisé à utiliser cette période"
},
]
},
status=403
)
def time_limit():
    # Cutoff used below: midnight of this week's Thursday (weekday 3). Note
    # that from Friday on the offset is negative, so the limit lies in the past.
    today = timezone.localdate()
    days_offset = 3 - today.weekday()
    return timezone.make_aware(datetime.combine(today + timezone.timedelta(days=days_offset), datetime.min.time()))
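# Worked example of the cutoff above (the date is illustrative): for Tuesday
# 2020-01-07, weekday() == 1, so days_offset == 2 and the limit is midnight
# on Thursday 2020-01-09.
def _demo_time_limit(today=date(2020, 1, 7)):
    days_offset = 3 - today.weekday()
    return today + timezone.timedelta(days=days_offset)  # date(2020, 1, 9)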
def calendar(request, action, traineeship):
user = request.user
traineeship = Traineeship.objects.get(id=int(traineeship))
try:
student = user.student
except Student.DoesNotExist:
student = None
# calendar read
if action=='read':
time_start = timezone.make_aware(datetime.combine(dateparse.parse_date(request.GET['start']), datetime.min.time()))
        time_end = timezone.make_aware(datetime.combine(dateparse.parse_date(request.GET['end']), datetime.min.time()))
base_criteria = {
'traineeship' : traineeship
}
if request.GET['type']=='past':
base_criteria['start__gte'] = time_start
            base_criteria['end__lt'] = time_limit()
if request.GET['type']=='future':
base_criteria['start__gte'] = time_limit()
base_criteria['end__lt'] = time_end
ps = Period.objects.filter(**base_criteria)
d = []
for p in ps:
d.append({
'id': p.id,
'start': p.start,
'end': p.end,
})
return JsonResponse(d, safe=False)
# create period
if action=='create':
time_start = dateparse.parse_datetime(request.GET['start'])
time_end = dateparse.parse_datetime(request.GET['end'])
if student and time_start<time_limit():
return json_access_error(request)
try:
p = traineeship.periods.create(start=time_start, end=time_end)
return JsonResponse({"event_id" : p.id}, safe=False)
except ValidationError as e:
return JsonResponse(
{
"errors": [
{
"status": "422",
"source": { "pointer": request.path },
"detail": "%s" % e.args[0]
},
]
},
status=422
)
# delete event
if action=='delete':
p = traineeship.periods.get(id=int(request.GET['event_id']))
if student and p.start<time_limit():
return json_access_error(request)
p.delete()
return JsonResponse({"event_id" : 0}, safe=False)
# update event
if action=='update':
try:
p = traineeship.periods.get(id=int(request.GET['event_id']))
time_start = dateparse.parse_datetime(request.GET['start'])
time_end = dateparse.parse_datetime(request.GET['end'])
if student and time_start<time_limit():
return json_access_error(request)
p.start = time_start
p.end = time_end
p.save()
return JsonResponse({"event_id" : p.id}, safe=False)
except ValidationError as e:
return JsonResponse(
{
"errors": [
{
"status": "422",
"source": { "pointer": request.path },
"detail": "%s" % e.args[0]
},
]
},
status=422
)
    # We should never get here...
return JsonResponse(
{
"errors": [
{
"status": "400",
"source": { "pointer": request.path },
"detail": "action not found"
},
]
},
status=400
)
# DOCX
def download_schedule(request, traineeship):
user = request.user
ts = Traineeship.objects.get(id=int(traineeship))
try:
student = user.student
except Student.DoesNotExist:
student = None
# Create the HttpResponse object with the appropriate docx headers.
response = HttpResponse(content_type='application/vnd.openxmlformats-officedocument.wordprocessingml.document')
response['Content-Disposition'] = 'attachment; filename="horaire.docx"'
buffer = BytesIO()
document = Document()
document.add_heading("%s %s : Stage d'%s" % (ts.student.first_name, ts.student.last_name, ts.category), 0)
document.save(buffer)
# Get the value of the BytesIO buffer and write it to the response.
doc = buffer.getvalue()
buffer.close()
response.write(doc)
return response
JOURS = ['Lundi', 'Mardi', 'Mercredi', 'Jeudi', 'Vendredi', 'Samedi', 'Dimanche']
def download_schedule_for_student(request, student, from_date=None):
    # Downloads a particular student's schedule for the week following the
    # given date, or following today if no date is given. (The default is
    # resolved at call time; a date default in the signature would be frozen
    # at import time.)
    if from_date is None:
        from_date = timezone.localdate()
    next_monday = from_date + timezone.timedelta(days=7-from_date.weekday())
student = Student.objects.get(id=student)
#ts = student.traineeships.filter(date_start__lte=from_date, is_closed=False)[0]
ts = student.traineeships.filter(is_closed=False)[0]
# TODO : pas de stage ouvert, plus d'un stage ouvert, étudiant n'existant pas
# Create the HttpResponse object with the appropriate docx headers.
response = HttpResponse(content_type='application/vnd.openxmlformats-officedocument.wordprocessingml.document')
response['Content-Disposition'] = 'attachment; filename="horaire %s %s.docx"' % (student.last_name,
student.first_name)
buffer = BytesIO()
document = Document()
document.styles["Title"].font.size = Pt(18)
document.styles["Subtitle"].font.size = Pt(16)
document.add_heading("%s %s : du %s au %s" % (
ts.student.first_name,
ts.student.last_name,
next_monday.strftime("%d-%m-%Y"),
(next_monday + timezone.timedelta(days=6)).strftime("%d-%m-%Y"),
)
,0)
document.add_paragraph("Stage d'%s - %s" % (ts.category, ts.place,), style="Subtitle")
table = document.add_table(rows=1, cols=5)
table.style = 'Light Shading Accent 1'
hdr_cells = table.rows[0].cells
hdr_cells[0].text = 'Jour'
hdr_cells[1].text = 'De'
hdr_cells[2].text = 'A'
hdr_cells[3].text = 'Périodes'
hdr_cells[4].text = 'Heures'
for x in range(7):
row_day = next_monday + timezone.timedelta(days=x)
day_periods = ts.periods.filter(start__date=row_day).order_by('start')
row_cells = table.add_row().cells
row_cells[0].text = JOURS[x]
num_p = 0
for p in day_periods :
num_p += 1
row_cells[1].text = timezone.localtime(p.start).strftime("%H:%M")
row_cells[2].text = timezone.localtime(p.end).strftime("%H:%M")
row_cells[3].text = str(p.period_duration())
row_cells[4].text = str(p.hour_duration())
if not num_p == len(day_periods):
row_cells = table.add_row().cells
document.save(buffer)
# Get the value of the BytesIO buffer and write it to the response.
doc = buffer.getvalue()
buffer.close()
response.write(doc)
return response
|
SKA-ScienceDataProcessor/legion-sdp-clone
|
language/travis.py
|
Python
|
apache-2.0
| 2,295 | 0.004793 |
#!/usr/bin/env python
# Copyright 2015 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os, platform, subprocess
def install_dependencies():
env = dict(os.environ.iteritems())
if platform.system() == 'Darwin':
clang_tarball = 'clang+llvm-3.4.2-x86_64-apple-darwin10.9.xz'
clang_dir = os.path.abspath('clang+llvm-3.4.2-x86_64-apple-darwin10.9')
print('http://llvm.org/releases/3.4.2/%s' % clang_tarball)
subprocess.check_call(
['curl', '-O', 'http://llvm.org/releases/3.4.2/%s' % clang_tarball])
shasum = subprocess.Popen(['shasum', '-c'], stdin=subprocess.PIPE)
shasum.communicate(
'b182ca49f8e4933041daa8ed466f1e4a589708bf %s' % clang_tarball)
assert shasum.wait() == 0
subprocess.check_call(['tar', 'xfJ', clang_tarball])
env.update({
'PATH': ':'.join(
[os.path.join(clang_dir, 'bin'), os.environ['PATH']]),
'DYLD_LIBRARY_PATH': ':'.join(
[os.path.join(clang_dir, 'lib')] +
([os.environ['DYLD_LIBRARY_PATH']]
if 'DYLD_LIBRARY_PATH' in os.environ else [])),
})
return env
def test(root_dir, install_args, install_env):
subprocess.check_call(
['./install.py'] + install_args,
env = install_env,
cwd = root_dir)
subprocess.check_call(
['./test.py'],
cwd = root_dir)
if __name__ == '__main__':
root_dir = os.path.realpath(os.path.dirname(__file__))
legion_dir = os.path.dirname(root_dir)
runtime_dir = os.path.join(legion_dir, 'runtime')
env = install_dependencies()
env.update({
'LG_RT_DIR': runtime_dir,
})
test(root_dir, ['--debug'], env)
test(root_dir, [], env)
|