text
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
|
---|---|---|---|---|---|---|
# pylint: disable=W0511
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
""" Copyright (c) 2000-2010 LOGILAB S.A. (Paris, FRANCE).
http://www.logilab.fr/ -- mailto:contact@logilab.fr
Check source code is ascii only or has an encoding declaration (PEP 263)
"""
import re, sys
from pylint.interfaces import IRawChecker
from pylint.checkers import BaseChecker
# Message table for this checker.
# W0511 is emitted with the offending line fragment as its argument whenever
# one of the configured note tags (FIXME, XXX, TODO, ...) appears in a line.
MSGS = {
    'W0511': ('%s',
              'Used when a warning note as FIXME or XXX is detected.'),
    }
class EncodingChecker(BaseChecker):
    """Raw-source checker that flags leftover warning notes.

    Scans each physical line of a module for the configured note tags
    (FIXME, XXX, TODO by default) and reports W0511 for every hit.
    """
    __implements__ = IRawChecker

    # configuration section name
    name = 'miscellaneous'
    msgs = MSGS
    options = (('notes',
                {'type' : 'csv', 'metavar' : '<comma separated values>',
                 'default' : ('FIXME', 'XXX', 'TODO'),
                 'help' : 'List of note tags to take in consideration, \
separated by a comma.'
                 }),
               )

    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)

    def process_module(self, node):
        """Scan the raw file stream of *node* and report each line that
        contains one of the configured note tags.
        """
        stream = node.file_stream
        stream.seek(0)
        # compile one pattern per configured tag, once per module
        patterns = [re.compile(tag) for tag in self.config.notes]
        for linenum, line in enumerate(stream.readlines(), 1):
            for pattern in patterns:
                hit = pattern.search(line)
                if hit:
                    # report from the tag up to (but excluding) the last char
                    self.add_message('W0511', args=line[hit.start():-1],
                                     line=linenum)
                    break
def register(linter):
    """Auto-registration hook invoked by pylint's plugin loader."""
    checker = EncodingChecker(linter)
    linter.register_checker(checker)
|
isohybrid/dotfile
|
vim/bundle/git:--github.com-klen-python-mode/pylibs/pylint/checkers/misc.py
|
Python
|
bsd-2-clause
| 2,594 | 0.003084 |
import factory
import factory.django
from faker import Faker
from machina.core.db.models import get_model
from machina.test.factories.auth import UserFactory
from machina.test.factories.conversation import TopicFactory
# Shared Faker instance used to build default field values.
faker = Faker()

# Resolve machina's swappable models at import time.
TopicPoll = get_model('forum_polls', 'TopicPoll')
TopicPollOption = get_model('forum_polls', 'TopicPollOption')
TopicPollVote = get_model('forum_polls', 'TopicPollVote')
class TopicPollFactory(factory.django.DjangoModelFactory):
    """Factory producing machina ``TopicPoll`` instances."""
    topic = factory.SubFactory(TopicFactory)
    # BUGFIX: ``faker.text(max_nb_chars=200)`` was evaluated once at import
    # time, so every poll built during a test run shared the exact same
    # question string.  ``factory.Faker`` defers generation, producing a
    # fresh value per created instance.
    question = factory.Faker('text', max_nb_chars=200)

    class Meta:
        model = TopicPoll
class TopicPollOptionFactory(factory.django.DjangoModelFactory):
    """Factory producing machina ``TopicPollOption`` instances."""
    poll = factory.SubFactory(TopicPollFactory)
    # BUGFIX: ``faker.text(max_nb_chars=100)`` was evaluated once at import
    # time, giving every option the same text.  ``factory.Faker`` generates
    # a distinct value for each instance instead.
    text = factory.Faker('text', max_nb_chars=100)

    class Meta:
        model = TopicPollOption
class TopicPollVoteFactory(factory.django.DjangoModelFactory):
    """Factory producing machina ``TopicPollVote`` instances."""

    class Meta:
        model = TopicPollVote

    poll_option = factory.SubFactory(TopicPollOptionFactory)
    voter = factory.SubFactory(UserFactory)
|
ellmetha/django-machina
|
machina/test/factories/polls.py
|
Python
|
bsd-3-clause
| 1,023 | 0 |
import math as mth
import numpy as np
#----------------------
# J Matthews, 21/02
# This is a file containing useful constants for python coding
#
# Units in CGS unless stated
#
#----------------------
#H=6.62606957E-27
#C=29979245800.0
#BOLTZMANN=1.3806488E-16
VERY_BIG = 1e50
H = 6.6262e-27              # Planck's constant, erg s
HC = 1.98587e-16            # h * c
# BUGFIX: HEV was defined twice (identical value); the duplicate is removed.
HEV = 4.13620e-15           # Planck's constant in eV
HRYD = 3.04005e-16          # NSH 1204 Planck's constant in Rydberg
C = 2.997925e10             # speed of light, cm/s
G = 6.670e-8                # gravitational constant
BOLTZMANN = 1.38062e-16
WIEN = 5.879e10             # NSH 1208 Wien Disp Const in frequency units
H_OVER_K = 4.799437e-11
STEFAN_BOLTZMANN = 5.6696e-5
THOMPSON = 0.66524e-24      # Thomson scattering cross-section
PI = 3.1415927
MELEC = 9.10956e-28         # electron mass, g
E = 4.8035e-10              # Electric charge in esu
MPROT = 1.672661e-24        # proton mass, g
MSOL = 1.989e33             # solar mass, g
PC = 3.08e18                # parsec, cm
# NOTE(review): PC (3.08e18) and PARSEC (3.086E18) disagree in the third
# decimal -- confirm which value downstream code should use.
YR = 3.1556925e7            # year, s
PI_E2_OVER_MC = 0.02655103  # Classical cross-section
PI_E2_OVER_M = 7.96e8
ALPHA = 7.297351e-3         # Fine structure constant
BOHR = 0.529175e-8          # Bohr radius
CR = 3.288051e15            # Rydberg frequency for H != Ryd freq for infinite mass
ANGSTROM = 1.e-8            # Definition of an Angstrom in units of this code, e.g. cm
EV2ERGS = 1.602192e-12
RADIAN = 57.29578
RYD2ERGS = 2.1798741e-11
PARSEC = 3.086E18
|
jhmatthews/panlens
|
constants.py
|
Python
|
gpl-2.0
| 1,157 | 0.057044 |
import math
import random
import GameData
from Util.TileTypes import *
from Util import Line, StarCallback
def initializeRandom( x, y ):
    """Seed function for the map cellular automaton.

    Combines distance from the origin, a scaled polar angle, and a random
    jitter, then thresholds the result mod 10 to return 0 or 1.
    """
    distance = math.sqrt( x ** 2 + y ** 2 )
    angle = math.atan2( x, y ) / math.pi * 5
    jitter = ( random.random() * 7 ) - 3.5
    return 1 if ( ( distance + angle + jitter ) % 10 ) > 5 else 0
def circle(x0, y0, radius, endRadius, cb):
stepSize = 1.0 / endRadius
angle = math.pi / 2
while angle >= 0:
c = math.cos( angle )
s = math.sin( angle )
r = radius
while r < endRadius:
cb( int( c * r ) + x0, int( s * r ) + y0 )
cb( int( s * r ) + x0, int( c * r ) + y0 )
cb(-int( c * r ) + x0, int( s * r ) + y0 )
cb(-int( s * r ) + x0, int( c * r ) + y0 )
cb( int( c * r ) + x0,-int( s * r ) + y0 )
cb( int( s * r ) + x0,-int( c * r ) + y0 )
cb(-int( c * r ) + x0,-int( s * r ) + y0 )
cb(-int( s * r ) + x0,-int( c * r ) + y0 )
r += 0.5
angle -= stepSize
def buildFixedWalls( self, I, _buffer, val ):
    """Clear the center room and ring it with walls of tile value ``val``.

    The room's half-extents come from GameData.MapGen_CenterRoom_Size; the
    cleared area includes the one-cell border the walls are then drawn on.
    """
    centerX = int( self.width / 2 )
    centerY = int( self.height / 2 )
    half_w = GameData.MapGen_CenterRoom_Size[0]
    half_h = GameData.MapGen_CenterRoom_Size[1]
    x_span = range( centerX - half_w - 1, centerX + half_w + 1 )
    y_span = range( centerY - half_h - 1, centerY + half_h + 1 )
    # Clear center room (including the cells the walls will occupy)
    for x in x_span:
        for y in y_span:
            _buffer[ I( x, y ) ] = 0
    # Horizontal wall runs (top and bottom edges)
    for x in x_span:
        _buffer[ I( x, centerY - half_h - 1 ) ] = val
        _buffer[ I( x, centerY + half_h ) ] = val
    # Vertical wall runs (left and right edges)
    for y in y_span:
        _buffer[ I( centerX - half_w - 1, y ) ] = val
        _buffer[ I( centerX + half_w, y ) ] = val
def preIterInit( self, I, _buffer ):
    """Prime the buffer before cellular-automaton iteration.

    Draws a solid 1-valued outer border, forces the ring just inside it to
    0, and stamps the fixed center-room walls.
    """
    w = self.width
    h = self.height
    # Outer wall: solid border of 1s
    for x in range( w ):
        _buffer[ I( x, 0 ) ] = 1
        _buffer[ I( x, h - 1 ) ] = 1
    for y in range( h ):
        _buffer[ I( 0, y ) ] = 1
        _buffer[ I( w - 1, y ) ] = 1
    # Guaranteed-open ring just inside the border
    for x in range( 1, w - 1 ):
        _buffer[ I( x, 1 ) ] = 0
        _buffer[ I( x, h - 2 ) ] = 0
    for y in range( 1, h - 1 ):
        _buffer[ I( 1, y ) ] = 0
        _buffer[ I( w - 2, y ) ] = 0
    buildFixedWalls( self, I, _buffer, 1 )
def postInit( self, I, _buffer ):
    """Final map-generation pass.

    Translates raw 0/1 cells into tile constants, rebuilds the fixed outer
    border and center-room walls, then carves concentric ring walls (with
    radial spokes separating sections) outward until the configured maximum
    radius, clearing everything beyond the outermost ring.
    """
    centerX = int( self.width / 2 )
    centerY = int( self.height / 2 )
    # Translate automaton values into tile constants.
    for x in range( self.width ):
        for y in range( self.height ):
            i = I( x, y )
            val = _buffer[ i ]
            if val == 0:
                _buffer[ i ] = TILE_AIR #NOOP, but for clarity
            elif val == 1:
                _buffer[ i ] = TILE_WALL
            else:
                raise Exception( "Incorrect tile type in postInit!" )
    # Indestructible border around the whole map.
    for x in range( self.width ):
        _buffer[ I( x, 0 ) ] = TILE_FIXED_WALL
        _buffer[ I( x, self.height - 1 ) ] = TILE_FIXED_WALL
    for y in range( self.height ):
        _buffer[ I( 0, y ) ] = TILE_FIXED_WALL
        _buffer[ I( self.width - 1, y ) ] = TILE_FIXED_WALL
    buildFixedWalls( self, I, _buffer, TILE_FIXED_WALL )
    # Ring growth state: current enclosed surface area and radius.
    curSurface = ( GameData.MapGen_CenterRoom_Size[0] * 2 ) * ( GameData.MapGen_CenterRoom_Size[1] * 2 )
    curRadius = -1
    def setFixedWall( x, y ):
        # Plot callback shared by circle() and Line() below.
        _buffer[ I( int( x ), int( y ) ) ] = TILE_FIXED_WALL
    circleNum = 0
    while curRadius < GameData.MapGen_MaxCircleRadius:
        # Each layer is sized so every section adds MapGen_BaseSurface area.
        sectionCount = max( circleNum * GameData.MapGen_CircleSectionsPerLayer, 1 )
        nextSurface = curSurface + ( GameData.MapGen_BaseSurface * sectionCount )
        nextRadius = int( math.sqrt( nextSurface / math.pi ) )
        circle( centerX, centerY, nextRadius, nextRadius + 2, setFixedWall )
        # Separate sections in circle with radial spoke walls.
        if sectionCount > 1:
            for i in range( sectionCount ):
                angle = i * math.pi * 2 / sectionCount
                s = math.sin( angle )
                c = math.cos( angle )
                Line( int( s * ( curRadius + 1 ) ) + centerX, int( c * ( curRadius + 1 ) ) + centerY, int( s * nextRadius ) + centerX, int( c * nextRadius ) + centerY, StarCallback( setFixedWall ) )
        curRadius = nextRadius
        curSurface = int( curRadius ** 2 * math.pi )
        circleNum += 1
        print( curRadius )
    curRadius += 1
    curRadiusSquared = curRadius ** 2
    # Clear everything beyond the outermost ring.  NOTE(review): unlike the
    # translation pass above, this overwrite is NOT a no-op -- it erases any
    # generated walls outside the city radius.
    for x in range( self.width ):
        for y in range( self.height ):
            if ( ( x - centerX ) ** 2 + ( y - centerY ) ** 2 ) > curRadiusSquared:
                _buffer[ I( x, y ) ] = TILE_AIR
|
KevinVDVelden/7DRL_2015
|
Game/MapGen.py
|
Python
|
gpl-2.0
| 4,996 | 0.055244 |
#!/usr/bin/env python3
from argparse import ArgumentParser
from os import environ
from sys import argv
from requests import put
def read_file(file_path):  # pragma: no cover
    """Return the entire contents of *file_path* as bytes."""
    with open(file_path, 'rb') as handle:
        return handle.read()
def upload(file_path, repository, repository_path, url, username, password):
    """PUT a local file into a Nexus3 repository.

    Returns the HTTP status code (201) on success; raises OSError with the
    status and response body on any other status.
    """
    target = f'{url}/repository/{repository}/{repository_path}'
    payload = read_file(file_path)
    response = put(
        target,
        data=payload,
        headers={'Content-Type': 'application/octet-stream'},
        auth=(username, password),
    )
    if response.status_code != 201:
        raise OSError(f'{response.status_code}, {response.content}')
    return response.status_code
def parse_args(args):
    """Parse the upload CLI arguments.

    Connection settings fall back to the NEXUS3_REST_URL, NEXUS3_USERNAME
    and NEXUS3_PASSWORD environment variables when not given on the
    command line.
    """
    default_url = environ.get('NEXUS3_REST_URL', None)
    default_username = environ.get('NEXUS3_USERNAME', None)
    default_password = environ.get('NEXUS3_PASSWORD', None)
    parser = ArgumentParser(description='Get assets')
    parser.add_argument('file_path',
                        help='File to upload, e.g. ./myartifact-1.0.0.jar')
    parser.add_argument('repository',
                        help='Nexus3 repository, e.g. maven-releases')
    parser.add_argument('repository_path',
                        help='Path within Nexus3 repository, e.g com/myorg/myartifact/1.0.0/myartifact-1.0.0.jar')
    parser.add_argument('-l', '--url', default=default_url,
                        help='Nexus3 url, e.g. http://nexus_host:8080')
    parser.add_argument('-u', '--username', default=default_username,
                        help='Nexus3 username')
    parser.add_argument('-p', '--password', default=default_password,
                        help='Nexus3 password')
    return parser.parse_args(args)
def main(file_path, repository, repository_path, url, username, password):
    """CLI entry point: perform the upload and echo the status code."""
    status = upload(file_path, repository, repository_path, url, username, password)
    print(status)
if __name__ == '__main__': # pragma: no cover
main(**parse_args(argv[1:]).__dict__)
|
bjuvensjo/scripts
|
vang/nexus3/upload.py
|
Python
|
apache-2.0
| 1,766 | 0.003964 |
# -*- coding: utf-8 -*-
import sys
import os
import logging
import random
import PyQt4
from PyQt4.QtCore import *
#from PyQt4.QtCore import QAbstractTableModel
import constants
class Model(QAbstractTableModel):
    """Qt table model over ``self.album.rows`` (a list of dict-like rows).

    ``keys`` names the columns; ``modelType`` controls editability (cells
    are editable only when it equals constants.ModelType.ModelTypeFinal).
    """
    # Column keys; expected to be populated by users/subclasses of the model.
    keys = list()
    # One of constants.ModelType.* -- selects the editable behaviour.
    modelType = None

    def __init__(self, parent = None):
        '''Create the model and its logger.'''
        self.log = logging.getLogger('Model')
        # BUGFIX: was super(QAbstractTableModel, self).__init__(parent),
        # which starts MRO lookup *after* QAbstractTableModel and therefore
        # skips the Qt base-class initialiser entirely.
        super(Model, self).__init__(parent)

    def rowCount(self, parent = None):
        '''Number of rows in the current album; 0 when no album is loaded.'''
        if hasattr(self, 'album') and self.album:
            if hasattr(self.album, 'rows'):
                return len(self.album.rows)
        return 0

    def columnCount(self, parent = None):
        '''Number of columns, i.e. the number of declared keys.'''
        return len(self.keys)

    def data(self, index, role = None):
        '''Return the cell value for display, tooltip and edit roles.'''
        if index.isValid():
            # BUGFIX: the original bounds check used ``or`` (always true for
            # non-negative rows) and the non-existent attribute ``self.rows``.
            if 0 <= index.row() < len(self.album.rows):
                if role == Qt.DisplayRole or role == Qt.ToolTipRole or role == Qt.EditRole:
                    return self.album.rows[index.row()][self.keys[index.column()]]
        return QVariant()

    def setData(self, index, value, role):
        '''Store an edited value back into the underlying album row.'''
        if index.isValid() and role == Qt.EditRole:
            key = self.keys[index.column()]
            # QVariant -> unicode conversion (Python 2 / PyQt4 API)
            self.album.rows[index.row()][key] = unicode(value.toString())
            self.emit(SIGNAL('dataChanged'), index, index)
        return True

    def headerData(self, section, orientation, role):
        '''Horizontal headers show the column key; all others are empty.'''
        if section >= 0 and section < len(self.keys):
            if orientation == Qt.Horizontal and role == Qt.DisplayRole:
                return self.keys[section]
        return QVariant()

    def flags(self, index):
        '''Make cells editable only for the "final" model type.'''
        # BUGFIX: same MRO issue as __init__ -- start lookup at Model.
        if self.modelType == constants.ModelType.ModelTypeFinal:
            return super(Model, self).flags(index) | Qt.ItemIsEditable
        return super(Model, self).flags(index)

    def getModelType(self):
        '''Accessor for the model type.'''
        return self.modelType
|
CaesarTjalbo/musictagger
|
mp3names/model_classes.py
|
Python
|
gpl-3.0
| 3,029 | 0.028722 |
from n5a import make_type
from n5a.generate import generate
from .test_definitions import get_pos3d_definition
def test_generate_string():
    """Rendered output must declare the Pos3D struct."""
    rendered = generate(get_pos3d_definition())
    assert 'struct Pos3D' in rendered
def test_generate_file():
    """Render Pos3D and persist the header for the C++ test suite."""
    rendered = generate(get_pos3d_definition())
    with open('test/test_cpp/generated/pos3d.hpp', 'w') as out:
        out.write(rendered)
|
sschaetz/n5a
|
test/test_generate.py
|
Python
|
mit
| 361 | 0.00831 |
# Version 5
'''This takes a base MineCraft level and adds or edits trees.
Place it in the folder where the save files are (usually .../.minecraft/saves)
Requires mcInterface.py in the same folder.'''
# Here are the variables you can edit.
# This is the name of the map to edit.
# Make a backup if you are experimenting!
LOADNAME = "LevelSave"
# How many trees do you want to add?
TREECOUNT = 12
# Where do you want the new trees?
# X, and Z are the map coordinates
X = 66
Z = -315
# How large an area do you want the trees to be in?
# for example, RADIUS = 10 will make place trees randomly in
# a circular area 20 blocks wide.
RADIUS = 80
# NOTE: tree density will be higher in the center than at the edges.
# Which shapes would you like the trees to be?
# these first three are best suited for small heights, from 5 - 10
# "normal" is the normal minecraft shape, it only gets taller and shorter
# "bamboo" a trunk with foliage, it only gets taller and shorter
# "palm" a trunk with a fan at the top, only gets taller and shorter
# "stickly" selects randomly from "normal", "bamboo" and "palm"
# these last five are best suited for very large trees, heights greater than 8
# "round" procedural spherical shaped tree, can scale up to immense size
# "cone" procedural, like a pine tree, also can scale up to immense size
# "procedural" selects randomly from "round" and "conical"
# "rainforest" many slender trees, most at the lower range of the height,
# with a few at the upper end.
# "mangrove" makes mangrove trees (see PLANTON below).
SHAPE = "procedural"
# What height should the trees be?
# Specifies the average height of the tree
# Examples:
# 5 is normal minecraft tree
# 3 is minecraft tree with foliage flush with the ground
# 10 is very tall trees, they will be hard to chop down
# NOTE: for round and conical, this affects the foliage size as well.
# CENTERHEIGHT is the height of the trees at the center of the area
# ie, when radius = 0
CENTERHEIGHT = 55
# EDGEHEIGHT is the height at the trees at the edge of the area.
# ie, when radius = RADIUS
EDGEHEIGHT = 25
# What should the variation in HEIGHT be?
# actual value +- variation
# default is 1
# Example:
# HEIGHT = 8 and HEIGHTVARIATION = 3 will result in
# trunk heights from 5 to 11
# value is clipped to a max of HEIGHT
# for a good rainforest, set this value not more than 1/2 of HEIGHT
HEIGHTVARIATION = 12
# Do you want branches, trunk, and roots?
# True makes all of that
# False does not create the trunk and branches, or the roots (even if they are
# enabled further down)
WOOD = True
# Trunk thickness multiplier
# from zero (super thin trunk) to whatever huge number you can think of.
# Only works if SHAPE is not a "stickly" subtype
# Example:
# 1.0 is the default, it makes decently normal sized trunks
# 0.3 makes very thin trunks
# 4.0 makes a thick trunk (good for HOLLOWTRUNK).
# 10.5 will make a huge thick trunk. Not even kidding. Makes spacious
# hollow trunks though!
TRUNKTHICKNESS = 1.0
# Trunk height, as a fraction of the tree
# Only works on "round" shaped trees
# Sets the height of the crown, where the trunk ends and splits
# Examples:
# 0.7 the default value, a bit more than half of the height
# 0.3 good for a fan-like tree
# 1.0 the trunk will extend to the top of the tree, and there will be no crown
# 2.0 the trunk will extend out the top of the foliage, making the tree appear
# like a cluster of green grapes impaled on a spike.
TRUNKHEIGHT = 0.7
# Do you want the trunk and tree broken off at the top?
# removes about half of the top of the trunk, and any foliage
# and branches that would attach above it.
# Only works if SHAPE is not a "stickly" subtype
# This results in trees that are shorter than the height settings
# True does that stuff
# False makes a normal tree (default)
BROKENTRUNK = False
# Note, this works well with HOLLOWTRUNK (below) turned on as well.
# Do you want the trunk to be hollow (or filled) inside?
# Only works with larger sized trunks.
# Only works if SHAPE is not a "stickly" subtype
# True makes the trunk hollow (or filled with other stuff)
# False makes a solid trunk (default)
HOLLOWTRUNK = False
# Note, this works well with BROKENTRUNK set to true (above)
# Further note, you may want to use a large value for TRUNKTHICKNESS
# How many branches should there be?
# General multiplier for the number of branches
# However, it will not make more branches than foliage clusters
# so to guarantee a branch to every foliage cluster, set it very high, like 10000
# this also affects the number of roots, if they are enabled.
# Examples:
# 1.0 is normal
# 0.5 will make half as many branches
# 2.0 will make twice as many branches
# 10000 will make a branch to every foliage cluster (I'm pretty sure)
BRANCHDENSITY = 1.0
# do you want roots from the bottom of the tree?
# Only works if SHAPE is "round" or "cone" or "procedural"
# "yes" roots will penetrate anything, and may enter underground caves.
# "tostone" roots will be stopped by stone (default see STOPSROOTS below).
# There may be some penetration.
# "hanging" will hang downward in air. Good for "floating" type maps
# (I really miss "floating" terrain as a default option)
# "no" roots will not be generated
ROOTS = "tostone"
# Do you want root buttresses?
# These make the trunk not-round at the base, seen in tropical or old trees.
# This option generally makes the trunk larger.
# Only works if SHAPE is "round" or "cone" or "procedural"
# Options:
# True makes root butresses
# False leaves them out
ROOTBUTTRESSES = True
# Do you want leaves on the trees?
# True there will be leaves
# False there will be no leaves
FOLIAGE = True
# How thick should the foliage be
# General multiplier for the number of foliage clusters
# Examples:
# 1.0 is normal
# 0.3 will make very sparse spotty trees, half as many foliage clusters
# 2.0 will make dense foliage, better for the "rainforests" SHAPE
FOLIAGEDENSITY = 1.0
# Limit the tree height to the top of the map?
# True the trees will not grow any higher than the top of the map
# False the trees may be cut off by the top of the map
MAPHEIGHTLIMIT = True
# add lights in the middle of foliage clusters
# for those huge trees that get so dark underneath
# or for enchanted forests that should glow and stuff
# Only works if SHAPE is "round" or "cone" or "procedural"
# 0 makes just normal trees
# 1 adds one light inside the foliage clusters for a bit of light
# 2 adds two lights around the base of each cluster, for more light
# 4 adds lights all around the base of each cluster for lots of light
LIGHTTREE = 0
# Do you want to only place trees near existing trees?
# True will only plant new trees near existing trees.
# False will not check for existing trees before planting.
# NOTE: the taller the tree, the larger the forest needs to be to qualify
# OTHER NOTE: this feature has not been extensively tested.
# IF YOU HAVE PROBLEMS: SET TO False
ONLYINFORESTS = False
#####################
# Advanced options! #
#####################
# What kind of material should the "wood" be made of?
# defaults to 17
WOODMAT = 17
# What data value should the wood blocks have?
# Some blocks, like wood, leaves, and cloth change
# appearance with different data values
# defaults to 0
WOODDATA = 0
# What kind of material should the "leaves" be made of?
# defaults to 18
LEAFMAT = 18
# What data value should the leaf blocks have?
# Some blocks, like wood, leaves, and cloth change
# appearance with different data values
# defaults to 0
LEAFDATA = 0
# What kind of material should the "lights" be made of?
# defaults to 89 (glowstone)
LIGHTMAT = 89
# What data value should the light blocks have?
# defaults to 0
LIGHTDATA = 0
# What kind of material would you like the "hollow" trunk filled with?
# defaults to 0 (air)
TRUNKFILLMAT = 0
# What data value would you like the "hollow" trunk filled with?
# defaults to 0
TRUNKFILLDATA = 0
# What kind of blocks should the trees be planted on?
# Use the Minecraft index.
# Examples
# 2 is grass (the default)
# 3 is dirt
# 1 is stone (an odd choice)
# 12 is sand (for beach or desert)
# 9 is water (if you want an aquatic forest)
# this is a list, and comma separated.
# example: [2, 3]
# will plant trees on grass or dirt
PLANTON = [2]
# What kind of blocks should stop the roots?
# a list of block id numbers like PLANTON
# Only works if ROOTS = "tostone"
# default, [1] (stone)
# if you want it to be stopped by other block types, add it to the list
STOPSROOTS = [1]
# What kind of blocks should stop branches?
# same as STOPSROOTS above, but is always turned on
# defaults to stone, cobblestone, and glass
# set it to [] if you want branches to go through everything
STOPSBRANCHES = [1, 4, 20]
# How do you want to interpolate from center to edge?
# "linear" makes a cone-shaped forest
# This is the only option at present
INTERPOLATION = "linear"
# Do a rough recalculation of the lighting?
# Slows it down to do a very rough and incomplete re-light.
# If you want to really fix the lighting, use a separate re-lighting tool.
# True do the rough fix
# False don't bother
LIGHTINGFIX = True
# How many times do you want to try to find a location?
# it will stop planing after MAXTRIES has been exceeded.
# Set to smaller numbers to abort quicker, or larger numbers
# if you want to keep trying for a while.
# NOTE: the number of trees will not exceed this number
# Default: 1000
MAXTRIES = 1000
# Do you want lots of text telling you what is going on?
# True lots of text (default). Good for debugging.
# False no text
VERBOSE = True
##############################################################
# Don't edit below here unless you know what you are doing #
##############################################################
# input filtering: clamp / validate every user-editable setting above,
# falling back to safe defaults (with a console note when VERBOSE).
TREECOUNT = int(TREECOUNT)
if TREECOUNT < 0:
    TREECOUNT = 0
if SHAPE not in ["normal", "bamboo", "palm", "stickly",
                 "round", "cone", "procedural",
                 "rainforest", "mangrove"]:
    if VERBOSE:
        print("SHAPE not set correctly, using 'procedural'.")
    SHAPE = "procedural"
if CENTERHEIGHT < 1:
    CENTERHEIGHT = 1
if EDGEHEIGHT < 1:
    EDGEHEIGHT = 1
# variation may not exceed the smaller of the two heights
minheight = min(CENTERHEIGHT, EDGEHEIGHT)
if HEIGHTVARIATION > minheight:
    HEIGHTVARIATION = minheight
if INTERPOLATION not in ["linear"]:
    if VERBOSE:
        print("INTERPOLATION not set correctly, using 'linear'.")
    INTERPOLATION = "linear"
if WOOD not in [True, False]:
    if VERBOSE:
        print("WOOD not set correctly, using True")
    WOOD = True
if TRUNKTHICKNESS < 0.0:
    TRUNKTHICKNESS = 0.0
if TRUNKHEIGHT < 0.0:
    TRUNKHEIGHT = 0.0
if ROOTS not in ["yes", "tostone", "hanging", "no"]:
    if VERBOSE:
        print("ROOTS not set correctly, using 'no' and creating no roots")
    ROOTS = "no"
if ROOTBUTTRESSES not in [True, False]:
    if VERBOSE:
        print("ROOTBUTTRESSES not set correctly, using False")
    ROOTBUTTRESSES = False
if FOLIAGE not in [True, False]:
    if VERBOSE:
        print("FOLIAGE not set correctly, using True")
    # BUGFIX: this branch previously assigned ROOTBUTTRESSES = True
    # (copy-paste error), leaving the invalid FOLIAGE value in place.
    FOLIAGE = True
if FOLIAGEDENSITY < 0.0:
    FOLIAGEDENSITY = 0.0
if BRANCHDENSITY < 0.0:
    BRANCHDENSITY = 0.0
if MAPHEIGHTLIMIT not in [True, False]:
    if VERBOSE:
        print("MAPHEIGHTLIMIT not set correctly, using False")
    MAPHEIGHTLIMIT = False
if LIGHTTREE not in [0, 1, 2, 4]:
    if VERBOSE:
        print("LIGHTTREE not set correctly, using 0 for no torches")
    LIGHTTREE = 0
# assemble the material dictionaries ({'B': block id, 'D': data value})
WOODINFO = {'B': WOODMAT, 'D': WOODDATA}
LEAFINFO = {'B': LEAFMAT, 'D': LEAFDATA}
LIGHTINFO = {'B': LIGHTMAT, 'D': LIGHTDATA}
TRUNKFILLINFO = {'B': TRUNKFILLMAT, 'D': TRUNKFILLDATA}
# The following is an interface class for .mclevel data for minecraft savefiles.
# The following also includes a useful coordinate to index convertor and several
# other useful functions.
import mcInterface
#some handy functions
def dist_to_mat(cord, vec, matidxlist, mcmap, invert=False, limit=False):
    '''Step from cord along vec and return the number of steps taken.

    Travel stops at the first block whose material id is in matidxlist
    (or, when invert is truthy, the first block NOT in matidxlist), when
    the edge of the map is reached, or when the optional step limit is
    exceeded.  The step count is returned in every case.
    '''
    assert isinstance(mcmap, mcInterface.SaveFile)
    block = mcmap.block
    # start from the center of the given block
    position = [axis_value + .5 for axis_value in cord]
    steps = 0
    while True:
        info = block(int(position[0]), int(position[1]), int(position[2]))
        if info is None:
            # walked off the edge of the map
            break
        material = info['B']
        if (material in matidxlist) and (invert is False):
            break
        if (material not in matidxlist) and invert:
            break
        # advance one step along the direction vector
        position = [position[axis] + vec[axis] for axis in range(3)]
        steps += 1
        if limit and steps > limit:
            break
    return steps
# This is the end of the MCLevel interface.
# Now, on to the actual code.
from random import random, choice, sample
from math import sqrt, sin, cos, pi
def calc_column_lighting(x, z, mclevel):
    '''Recalculate the sky lighting of the column at (x, z).

    Walks from the top of the world (y = 255) downward carrying the current
    sky-light level, writing each block's stored light value and updating
    the heightmap at the first light-reducing block.  mclevel is expected
    to be an mcInterface.SaveFile-like object (block / set_block /
    retrieve_heightmap / set_heightmap).
    '''
    # Begin at the top with sky light level 15 (full daylight).
    cur_light = 15
    # traverse the column until cur_light == 0
    # and the existing light values are also zero.
    y = 255
    get_block = mclevel.block
    set_block = mclevel.set_block
    get_height = mclevel.retrieve_heightmap
    set_height = mclevel.set_heightmap
    #get the current heightmap
    cur_height = get_height(x, z)
    # set a flag that the highest point has been updated
    height_updated = False
    # if this doesn't exist, the block doesn't exist either, abort.
    if cur_height is None:
        return None
    # Sky light lost per block type; ids 0 and 20 are fully transparent,
    # 18 costs 1, 8 and 79 cost 2.  Anything else is treated as opaque.
    # NOTE(review): presumably Minecraft block ids (air/glass/leaves/water/
    # ice) -- confirm against the game's id table.
    light_reduction_lookup = {0: 0, 20: 0, 18: 1, 8: 2, 79: 2}
    while True:
        #get the block sky light and type
        block_info = get_block(x, y, z, 'BS')
        block_light = block_info['S']
        block_type = block_info['B']
        # update the height map if it hasn't been updated yet,
        # and the current block reduces light
        if (not height_updated) and (block_type not in (0, 20)):
            new_height = y + 1
            if new_height == 256:
                new_height = 255
            set_height(x, new_height, z)
            height_updated = True
        #compare block with cur_light, escape if both 0
        if block_light == 0 and cur_light == 0:
            break
        #set the block light if necessary
        if block_light != cur_light:
            set_block(x, y, z, {'S': cur_light})
        #set the new cur_light
        if block_type in light_reduction_lookup:
            # partial light reduction
            light_reduction = light_reduction_lookup[block_type]
        else:
            # full light reduction
            light_reduction = 16
        cur_light += -light_reduction
        if cur_light < 0:
            cur_light = 0
        #increment and check y
        y += -1
        if y < 0:
            break
class ReLight(object):
    '''Collect the (x, z) columns touched by editing, then relight them.'''

    def __init__(self):
        # set of (x, z) tuples awaiting a relight pass
        self.all_columns = set()
        # mcInterface.SaveFile; assigned externally before calc_lighting()
        self.save_file = None

    def add(self, x, z):
        '''Mark column (x, z) as needing relighting (idempotent).'''
        self.all_columns.add((x, z))

    def calc_lighting(self):
        '''Run the rough sky-light recalculation on every queued column.'''
        mclevel = self.save_file
        for x, z in self.all_columns:
            calc_column_lighting(x, z, mclevel)

relight_master = ReLight()
def assign_value(x, y, z, values, save_file):
    '''Write block ``values`` at (x, y, z) and queue the column for relight.

    Returns None when y is above the world ceiling (255); otherwise the
    result of SaveFile.set_block (which reflects whether the write landed
    on the map).
    '''
    if y > 255:
        return None
    outcome = save_file.set_block(x, y, z, values)
    if LIGHTINGFIX:
        relight_master.add(x, z)
    return outcome
class Tree(object):
    '''Set up the interface for tree objects. Designed for subclassing.
    '''
    def prepare(self, mcmap):
        '''initialize the internal values for the Tree object.
        '''
        return None

    def maketrunk(self, mcmap):
        '''Generate the trunk and enter it in mcmap.
        '''
        return None

    def makefoliage(self, mcmap):
        """Generate the foliage and enter it in mcmap.

        Note, foliage will disintegrate if there is no log nearby"""
        return None

    def copy(self, other):
        '''Copy the essential values of the other tree object into self.
        '''
        self.pos = other.pos
        self.height = other.height

    def __init__(self, pos=None, height=1):
        '''Accept values for the position ([x, y, z]) and height of a tree.

        BUGFIX: pos previously defaulted to the mutable literal [0, 0, 0],
        so every tree created without an explicit position shared (and
        could corrupt) the same list object.  A fresh list is now created
        per instance; passing an explicit pos behaves exactly as before.
        '''
        self.pos = [0, 0, 0] if pos is None else pos
        self.height = height
class StickTree(Tree):
    '''Trunk logic shared by the simple width-1 tree shapes.

    Only the trunk is produced here; subclasses add the foliage.
    '''
    def maketrunk(self, mcmap):
        '''Stack self.height wood blocks upward from the base position.'''
        base_x = self.pos[0]
        base_y = self.pos[1]
        base_z = self.pos[2]
        for level in range(self.height):
            assign_value(base_x, base_y + level, base_z, WOODINFO, mcmap)
class NormalTree(StickTree):
    '''Default Minecraft-like tree: one leaf bulb on a width-1 trunk.'''

    def makefoliage(self, mcmap):
        """Place the four-layer leaf bulb around the trunk top.

        (In-game, leaves decay unless a log block is within a 2-block
        square at the same level or one level below.)"""
        top_y = self.pos[1] + self.height - 1
        start = top_y - 2
        for y in range(start, top_y + 2):
            # narrow radius for the top two layers, wide for the lower two
            rad = 1 if y > start + 1 else 2
            for xoff in range(-rad, rad + 1):
                for zoff in range(-rad, rad + 1):
                    # randomly clip the corners of the leaf square
                    if (random() > 0.618
                        and abs(xoff) == abs(zoff)
                        and abs(xoff) == rad
                        ):
                        continue
                    assign_value(self.pos[0] + xoff, y,
                                 self.pos[2] + zoff, LEAFINFO, mcmap)
class BambooTree(StickTree):
    '''Bamboo: sparse leaves hugging the trunk along its whole height.'''

    def makefoliage(self, mcmap):
        '''Attach two random diagonal leaf blocks at every trunk level.'''
        base_y = self.pos[1]
        for y in range(base_y, base_y + self.height + 1):
            for _ in range(2):
                leaf_x = self.pos[0] + choice([-1, 1])
                leaf_z = self.pos[2] + choice([-1, 1])
                assign_value(leaf_x, y, leaf_z, LEAFINFO, mcmap)
class PalmTree(StickTree):
    '''Palm: a diagonal fan of leaves at the very top of the trunk.'''

    def makefoliage(self, mcmap):
        '''Place leaves on the two diagonals of a 5x5 square at trunk top.'''
        y = self.pos[1] + self.height
        for xoff in range(-2, 3):
            for zoff in range(-2, 3):
                if abs(xoff) == abs(zoff):
                    assign_value(self.pos[0] + xoff, y,
                                 self.pos[2] + zoff, LEAFINFO, mcmap)
class ProceduralTree(Tree):
'''Set up the methods for a larger more complicated tree.
This tree type has roots, a trunk, and branches all of varying width,
and many foliage clusters.
MUST BE SUBCLASSED. Specifically, self.foliage_shape must be set.
Subclass 'prepare' and 'shapefunc' to make different shaped trees.
'''
@staticmethod
def crossection(center, radius, diraxis, matidx, mcmap):
    '''Create a round section of material matidx in mcmap.

    Passed values:
    center = [x, y, z] for the coordinates of the center block
    radius = <number> as the radius of the section. May be a float or int.
    diraxis: The list index for the axis to make the section
    perpendicular to. 0 indicates the x axis, 1 the y, 2 the z. The
    section will extend along the other two axies.
    matidx = the material dict ({'B': ..., 'D': ...}) to fill the disc with
    mcmap = the SaveFile object the blocks are written into
    '''
    # +0.618 fudge rounds the integer scan radius up slightly
    rad = int(radius + .618)
    if rad <= 0:
        return None
    # indices of the two axes that span the disc (e.g. diraxis=1 -> x, z)
    secidx1 = (diraxis - 1) % 3
    secidx2 = (1 + diraxis) % 3
    coord = [0, 0, 0]
    for off1 in range(-rad, rad + 1):
        for off2 in range(-rad, rad + 1):
            # distance from the disc center, sampled at the block center
            thisdist = sqrt((abs(off1) + .5) ** 2 + (abs(off2) + .5) ** 2)
            if thisdist > radius:
                continue
            pri = center[diraxis]
            sec1 = center[secidx1] + off1
            sec2 = center[secidx2] + off2
            coord[diraxis] = pri
            coord[secidx1] = sec1
            coord[secidx2] = sec2
            assign_value(coord[0], coord[1], coord[2], matidx, mcmap)
    def shapefunc(self, y):
        '''Take y and return a radius for the location of the foliage cluster.
        If no foliage cluster is to be created, return None
        Designed for sublcassing. Only makes clusters close to the trunk.
        '''
        # Sparse random twig clusters along the trunk; taller trees get
        # proportionally fewer (probability ~ 100 / height**2 per call).
        if random() < 100. / (self.height ** 2) and y < self.trunkheight:
            return self.height * .12
        return None
def foliagecluster(self, center, mcmap):
'''generate a round cluster of foliage at the location center.
The shape of the cluster is defined by the list self.foliage_shape.
This list must be set in a subclass of ProceduralTree.
'''
level_radius = self.foliage_shape
x = center[0]
y = center[1]
z = center[2]
for i in level_radius:
self.crossection([x, y, z], i, 1, LEAFINFO, mcmap)
y += 1
    def taperedcylinder(self, start, end, startsize, endsize, mcmap, blockdata):
        '''Create a tapered cylinder in mcmap.

        start and end are the beginning and ending coordinates of form [x, y, z].
        startsize and endsize are the beginning and ending radius.
        The cylinder is filled with the block info passed in blockdata.
        '''
        # delta is the coordinate vector for the difference between
        # start and end.
        delta = [int(end[i] - start[i]) for i in range(3)]
        # primidx is the index (0, 1, or 2 for x, y, z) for the coordinate
        # which has the largest overall delta.
        maxdist = max(delta, key=abs)
        if maxdist == 0:
            # Degenerate case: start and end are the same block.
            return None
        primidx = delta.index(maxdist)
        # secidx1 and secidx2 are the remaining indicies out of [0, 1, 2].
        secidx1 = (primidx - 1) % 3
        secidx2 = (1 + primidx) % 3
        # primsign is the digit 1 or -1 depending on whether the limb is headed
        # along the positive or negative primidx axis.
        primsign = int(delta[primidx] / abs(delta[primidx]))
        # secdelta1 and ...2 are the amount the associated values change
        # for every step along the prime axis.
        secdelta1 = delta[secidx1]
        secfac1 = float(secdelta1) / delta[primidx]
        secdelta2 = delta[secidx2]
        secfac2 = float(secdelta2) / delta[primidx]
        # Initialize coord. These values could be anything, since
        # they are overwritten.
        coord = [0, 0, 0]
        # Loop through each crossection along the primary axis,
        # from start to end.
        endoffset = delta[primidx] + primsign
        for primoffset in range(0, endoffset, primsign):
            primloc = start[primidx] + primoffset
            # Interpolate the secondary coordinates linearly along the
            # primary axis.
            secloc1 = int(start[secidx1] + primoffset * secfac1)
            secloc2 = int(start[secidx2] + primoffset * secfac2)
            coord[primidx] = primloc
            coord[secidx1] = secloc1
            coord[secidx2] = secloc2
            primdist = abs(delta[primidx])
            # Radius tapers linearly from startsize to endsize.
            radius = endsize + (startsize - endsize) * abs(delta[primidx]
                                                           - primoffset) / primdist
            self.crossection(coord, radius, primidx, blockdata, mcmap)
def makefoliage(self, mcmap):
'''Generate the foliage for the tree in mcmap.
'''
"""note, foliage will disintegrate if there is no foliage below, or
if there is no "log" block within range 2 (square) at the same level or
one level below"""
foliage_coords = self.foliage_cords
for coord in foliage_coords:
self.foliagecluster(coord, mcmap)
for cord in foliage_coords:
assign_value(cord[0], cord[1], cord[2], WOODINFO, mcmap)
if LIGHTTREE == 1:
assign_value(cord[0], cord[1] + 1, cord[2], LIGHTINFO, mcmap)
elif LIGHTTREE in [2, 4]:
assign_value(cord[0] + 1, cord[1], cord[2], LIGHTINFO, mcmap)
assign_value(cord[0] - 1, cord[1], cord[2], LIGHTINFO, mcmap)
if LIGHTTREE == 4:
assign_value(cord[0], cord[1], cord[2] + 1, LIGHTINFO, mcmap)
assign_value(cord[0], cord[1], cord[2] - 1, LIGHTINFO, mcmap)
    def makebranches(self, mcmap):
        '''Generate the branches and enter them in mcmap.

        One branch is attempted from the trunk to each foliage cluster;
        most are randomly rejected, weighted so small trees still get
        branches while large trees are not overwhelmed.
        '''
        treeposition = self.pos
        height = self.height
        # topy is the y level of the top of the trunk.
        topy = treeposition[1] + int(self.trunkheight + 0.5)
        # endrad is the base radius of the branches at the trunk
        endrad = self.trunkradius * (1 - self.trunkheight / height)
        if endrad < 1.0:
            endrad = 1.0
        for coord in self.foliage_cords:
            # Horizontal and vertical distance from the cluster to the
            # base of the tree.
            dist = (sqrt(float(coord[0] - treeposition[0]) ** 2 +
                         float(coord[2] - treeposition[2]) ** 2))
            ydist = coord[1] - treeposition[1]
            # value is a magic number that weights the probability
            # of generating branches properly so that
            # you get enough on small trees, but not too many
            # on larger trees.
            # Very difficult to get right... do not touch!
            value = (self.branchdensity * 220 * height) / ((ydist + dist) ** 3)
            if value < random():
                continue
            posy = coord[1]
            # Perturb the branch slope slightly per branch.
            slope = self.branchslope + (0.5 - random()) * .16
            if coord[1] - dist * slope > topy:
                # Another random rejection, for branches between
                # the top of the trunk and the crown of the tree
                threshhold = 1 / float(height)
                if random() < threshhold:
                    continue
                branchy = topy
                basesize = endrad
            else:
                # The branch meets the trunk below its top; interpolate
                # its base radius between trunk and branch-tip sizes.
                branchy = posy - dist * slope
                basesize = (endrad + (self.trunkradius - endrad) *
                            (topy - branchy) / self.trunkheight)
            startsize = (basesize * (1 + random()) * .618 *
                         (dist / height) ** 0.618)
            # Offset the branch's attachment point radially by a random
            # amount, so branches don't all start at the trunk axis.
            rndr = sqrt(random()) * basesize * 0.618
            rndang = random() * 2 * pi
            rndx = int(rndr * sin(rndang) + 0.5)
            rndz = int(rndr * cos(rndang) + 0.5)
            startcoord = [treeposition[0] + rndx,
                          int(branchy),
                          treeposition[2] + rndz]
            if startsize < 1.0:
                startsize = 1.0
            endsize = 1.0
            self.taperedcylinder(startcoord, coord, startsize, endsize,
                                 mcmap, WOODINFO)
    def makeroots(self, rootbases, mcmap):
        '''generate the roots and enter them in mcmap.

        rootbases = [[x, z, base_radius], ...] and is the list of locations
        the roots can originate from, and the size of that location.
        One root is attempted per foliage cluster, mirroring the branch
        layout below ground.
        '''
        treeposition = self.pos
        height = self.height
        for coord in self.foliage_cords:
            # First, set the threshhold for randomly selecting this
            # coordinate for root creation.
            dist = (sqrt(float(coord[0] - treeposition[0]) ** 2 +
                         float(coord[2] - treeposition[2]) ** 2))
            ydist = coord[1] - treeposition[1]
            value = (self.branchdensity * 220 * height) / ((ydist + dist) ** 3)
            # Randomly skip roots, based on the above threshold
            if value < random():
                continue
            # initialize the internal variables from a selection of
            # starting locations.
            rootbase = choice(rootbases)
            rootx = rootbase[0]
            rootz = rootbase[1]
            rootbaseradius = rootbase[2]
            # Offset the root origin location by a random amount
            # (radialy) from the starting location.
            rndr = (sqrt(random()) * rootbaseradius * .618)
            rndang = random() * 2 * pi
            rndx = int(rndr * sin(rndang) + 0.5)
            rndz = int(rndr * cos(rndang) + 0.5)
            rndy = int(random() * rootbaseradius * 0.5)
            startcoord = [rootx + rndx, treeposition[1] + rndy, rootz + rndz]
            # offset is the distance from the root base to the root tip;
            # the tip points away from the matching foliage cluster.
            offset = [startcoord[i] - coord[i] for i in range(3)]
            # If this is a mangrove tree, make the roots longer.
            if SHAPE == "mangrove":
                offset = [int(val * 1.618 - 1.5) for val in offset]
            endcoord = [startcoord[i] + offset[i] for i in range(3)]
            rootstartsize = (rootbaseradius * 0.618 * abs(offset[1]) /
                             (height * 0.618))
            if rootstartsize < 1.0:
                rootstartsize = 1.0
            endsize = 1.0
            # If ROOTS is set to "tostone" or "hanging" we need to check
            # along the distance for collision with existing materials.
            if ROOTS in ["tostone", "hanging"]:
                offlength = sqrt(float(offset[0]) ** 2 +
                                 float(offset[1]) ** 2 +
                                 float(offset[2]) ** 2)
                if offlength < 1:
                    continue
                rootmid = endsize
                # vec is a unit vector along the direction of the root.
                vec = [offset[i] / offlength for i in range(3)]
                if ROOTS == "tostone":
                    searchindex = STOPSROOTS
                elif ROOTS == "hanging":
                    # Hanging roots search for block id 0 (air).
                    searchindex = [0]
                # startdist is how many steps to travel before starting to
                # search for the material. It is used to ensure that large
                # roots will go some distance before changing directions
                # or stopping.
                startdist = int(random() * 6 * sqrt(rootstartsize) + 2.8)
                # searchstart is the coordinate where the search should begin
                searchstart = [startcoord[i] + startdist * vec[i]
                               for i in range(3)]
                # dist stores how far the search went (including searchstart)
                # before encountering the expected marterial.
                dist = startdist + dist_to_mat(searchstart, vec,
                                               searchindex, mcmap, limit=offlength)
                # If the distance to the material is less than the length
                # of the root, change the end point of the root to where
                # the search found the material.
                if dist < offlength:
                    # rootmid is the size of the crossection at endcoord.
                    rootmid += (rootstartsize -
                                endsize) * (1 - dist / offlength)
                    # endcoord is the midpoint for hanging roots,
                    # and the endpoint for roots stopped by stone.
                    endcoord = [startcoord[i] + int(vec[i] * dist)
                                for i in range(3)]
                    if ROOTS == "hanging":
                        # remaining_dist is how far the root had left
                        # to go when it was stopped.
                        remaining_dist = offlength - dist
                        # Initialize bottomcord to the stopping point of
                        # the root, and then hang straight down
                        # a distance of remaining_dist.
                        bottomcord = endcoord[:]
                        bottomcord[1] += -int(remaining_dist)
                        # Make the hanging part of the hanging root.
                        self.taperedcylinder(endcoord, bottomcord,
                                             rootmid, endsize, mcmap, WOODINFO)
                # make the beginning part of hanging or "tostone" roots
                self.taperedcylinder(startcoord, endcoord,
                                     rootstartsize, rootmid, mcmap, WOODINFO)
            # If you aren't searching for stone or air, just make the root.
            else:
                self.taperedcylinder(startcoord, endcoord,
                                     rootstartsize, endsize, mcmap, WOODINFO)
    def maketrunk(self, mcmap):
        '''Generate the trunk, roots, and branches in mcmap.
        '''
        height = self.height
        trunkheight = self.trunkheight
        trunkradius = self.trunkradius
        treeposition = self.pos
        starty = treeposition[1]
        # The trunk is drawn as two tapered segments, split ~38% up.
        midy = treeposition[1] + int(trunkheight * .382)
        topy = treeposition[1] + int(trunkheight + 0.5)
        # In this method, x and z are the position of the trunk.
        x = treeposition[0]
        z = treeposition[2]
        end_size_factor = trunkheight / height
        midrad = trunkradius * (1 - end_size_factor * .5)
        endrad = trunkradius * (1 - end_size_factor)
        if endrad < 1.0:
            endrad = 1.0
        if midrad < endrad:
            midrad = endrad
        # Make the root buttresses, if indicated
        if ROOTBUTTRESSES or SHAPE == "mangrove":
            # The start radius of the trunk should be a little smaller if we
            # are using root buttresses.
            startrad = trunkradius * .8
            # rootbases is used later in self.makeroots(...) as
            # starting locations for the roots.
            rootbases = [[x, z, startrad]]
            buttress_radius = trunkradius * 0.382
            # posradius is how far the root buttresses should be offset
            # from the trunk.
            posradius = trunkradius
            # In mangroves, the root buttresses are much more extended.
            if SHAPE == "mangrove":
                posradius *= 2.618
            num_of_buttresses = int(sqrt(trunkradius) + 3.5)
            for i in range(num_of_buttresses):
                rndang = random() * 2 * pi
                thisposradius = posradius * (0.9 + random() * .2)
                # thisx and thisz are the x and z position for the base of
                # the root buttress.
                thisx = x + int(thisposradius * sin(rndang))
                thisz = z + int(thisposradius * cos(rndang))
                # thisbuttressradius is the radius of the buttress.
                # Currently, root buttresses do not taper.
                thisbuttressradius = buttress_radius * (0.618 + random())
                if thisbuttressradius < 1.0:
                    thisbuttressradius = 1.0
                # Make the root buttress.
                self.taperedcylinder([thisx, starty, thisz], [x, midy, z],
                                     thisbuttressradius, thisbuttressradius,
                                     mcmap, WOODINFO)
                # Add this root buttress as a possible location at
                # which roots can spawn.
                rootbases += [[thisx, thisz, thisbuttressradius]]
        else:
            # If root buttresses are turned off, set the trunk radius
            # to normal size.
            startrad = trunkradius
            rootbases = [[x, z, startrad]]
        # Make the lower and upper sections of the trunk.
        self.taperedcylinder([x, starty, z], [x, midy, z], startrad, midrad,
                             mcmap, WOODINFO)
        self.taperedcylinder([x, midy, z], [x, topy, z], midrad, endrad,
                             mcmap, WOODINFO)
        # Make the branches
        self.makebranches(mcmap)
        # Make the roots, if indicated.
        if ROOTS in ["yes", "tostone", "hanging"]:
            self.makeroots(rootbases, mcmap)
        # Hollow the trunk, if specified
        # check to make sure that the trunk is large enough to be hollow
        if trunkradius > 2 and HOLLOWTRUNK:
            # wall thickness is actually the double the wall thickness
            # it is a diameter difference, not a radius difference.
            wall_thickness = (1 + trunkradius * 0.1 * random())
            if wall_thickness < 1.3:
                wall_thickness = 1.3
            base_radius = trunkradius - wall_thickness
            if base_radius < 1:
                base_radius = 1.0
            mid_radius = midrad - wall_thickness
            top_radius = endrad - wall_thickness
            # the starting x and y can be offset by up to the wall thickness.
            base_offset = int(wall_thickness)
            x_choices = [i for i in range(x - base_offset,
                                          x + base_offset + 1)]
            start_x = choice(x_choices)
            z_choices = [i for i in range(z - base_offset,
                                          z + base_offset + 1)]
            start_z = choice(z_choices)
            self.taperedcylinder([start_x, starty, start_z], [x, midy, z],
                                 base_radius, mid_radius,
                                 mcmap, TRUNKFILLINFO)
            # Extend the hollow a little above the trunk top so branch
            # junctions are carved out too.
            hollow_top_y = int(topy + trunkradius + 1.5)
            self.taperedcylinder([x, midy, z], [x, hollow_top_y, z],
                                 mid_radius, top_radius,
                                 mcmap, TRUNKFILLINFO)
    def prepare(self, mcmap):
        '''Initialize the internal values for the Tree object.
        Primarily, sets up the foliage cluster locations.
        '''
        treeposition = self.pos
        self.trunkradius = .618 * sqrt(self.height * TRUNKTHICKNESS)
        if self.trunkradius < 1:
            self.trunkradius = 1
        if BROKENTRUNK:
            # A broken trunk stops at 30-70% of the full height.
            self.trunkheight = self.height * (.3 + random() * .4)
            yend = int(treeposition[1] + self.trunkheight + .5)
        else:
            self.trunkheight = self.height
            yend = int(treeposition[1] + self.height)
        self.branchdensity = BRANCHDENSITY / FOLIAGEDENSITY
        topy = treeposition[1] + int(self.trunkheight + 0.5)
        foliage_coords = []
        ystart = treeposition[1]
        num_of_clusters_per_y = int(1.5 + (FOLIAGEDENSITY *
                                           self.height / 19.) ** 2)
        if num_of_clusters_per_y < 1:
            num_of_clusters_per_y = 1
        # make sure we don't spend too much time off the top of the map
        if yend > 255:
            yend = 255
        if ystart > 255:
            ystart = 255
        for y in range(yend, ystart, -1):
            for i in range(num_of_clusters_per_y):
                # shapefunc decides whether this level gets a cluster,
                # and how far from the trunk it may sit.
                shapefac = self.shapefunc(y - ystart)
                if shapefac is None:
                    continue
                r = (sqrt(random()) + .328) * shapefac
                theta = random() * 2 * pi
                x = int(r * sin(theta)) + treeposition[0]
                z = int(r * cos(theta)) + treeposition[2]
                # if there are values to search in STOPSBRANCHES
                # then check to see if this cluster is blocked
                # by stuff, like dirt or rock, or whatever
                if len(STOPSBRANCHES):
                    dist = (sqrt(float(x - treeposition[0]) ** 2 +
                                 float(z - treeposition[2]) ** 2))
                    slope = self.branchslope
                    if y - dist * slope > topy:
                        # the top of the tree
                        starty = topy
                    else:
                        starty = y - dist * slope
                    # the start position of the search
                    start = [treeposition[0], starty, treeposition[2]]
                    offset = [x - treeposition[0],
                              y - starty,
                              z - treeposition[2]]
                    offlength = sqrt(offset[0] ** 2 + offset[1] ** 2 + offset[2] ** 2)
                    # if the branch is as short as... nothing, don't bother.
                    if offlength < 1:
                        continue
                    # unit vector for the search
                    vec = [offset[i] / offlength for i in range(3)]
                    mat_dist = dist_to_mat(start, vec, STOPSBRANCHES,
                                           mcmap, limit=offlength + 3)
                    # after all that, if you find something, don't add
                    # this coordinate to the list
                    if mat_dist < offlength + 2:
                        continue
                foliage_coords += [[x, y, z]]
        self.foliage_cords = foliage_coords
class RoundTree(ProceduralTree):
    '''This kind of tree is designed to resemble a deciduous tree.
    '''
    def prepare(self, mcmap):
        self.branchslope = 0.382
        ProceduralTree.prepare(self, mcmap)
        # Wide layered foliage clusters and a slightly thinner trunk
        # than the base class default.
        self.foliage_shape = [2, 3, 3, 2.5, 1.6]
        self.trunkradius *= 0.8
        self.trunkheight *= TRUNKHEIGHT
    def shapefunc(self, y):
        # Random trunk-hugging twigs from the parent class win first.
        twigs = ProceduralTree.shapefunc(self, y)
        if twigs is not None:
            return twigs
        # No foliage on roughly the bottom third of the tree.
        if y < self.height * (.282 + .1 * sqrt(random())):
            return None
        # Otherwise the cluster radius traces a circle centered halfway
        # up the tree, giving a roughly spherical crown.
        radius = self.height / 2.
        adj = self.height / 2. - y
        if adj == 0:
            dist = radius
        elif abs(adj) >= radius:
            dist = 0
        else:
            dist = sqrt((radius ** 2) - (adj ** 2))
        dist *= .618
        return dist
class ConeTree(ProceduralTree):
    '''this kind of tree is designed to resemble a conifer tree.
    '''
    # woodType is the kind of wood the tree has, a data value
    woodType = 1
    def prepare(self, mcmap):
        # Conifers have shallow branches and a narrow trunk.
        self.branchslope = 0.15
        ProceduralTree.prepare(self, mcmap)
        self.foliage_shape = [3, 2.6, 2, 1]
        self.trunkradius *= 0.5
    def shapefunc(self, y):
        # Random trunk-hugging twigs from the parent class win first.
        twigs = ProceduralTree.shapefunc(self, y)
        if twigs is not None:
            return twigs
        # No foliage on roughly the bottom quarter of the tree.
        if y < self.height * (.25 + .05 * sqrt(random())):
            return None
        # The canopy narrows linearly toward the top: a cone.
        radius = (self.height - y) * 0.382
        if radius < 0:
            radius = 0
        return radius
class RainforestTree(ProceduralTree):
    '''This kind of tree is designed to resemble a rainforest tree.
    '''
    def prepare(self, mcmap):
        self.foliage_shape = [3.4, 2.6]
        self.branchslope = 1.0
        ProceduralTree.prepare(self, mcmap)
        self.trunkradius *= 0.382
        self.trunkheight *= .9
    def shapefunc(self, y):
        # Below 80% of the height only occasional small twig clusters
        # appear; the wide flat canopy sits in the top 20%.
        if y < self.height * 0.8:
            # NOTE(review): this comparison appears to restrict low twigs
            # to trees taller than the forest-edge height -- confirm the
            # intended EDGEHEIGHT semantics.
            if EDGEHEIGHT < self.height:
                twigs = ProceduralTree.shapefunc(self, y)
                if (twigs is not None) and random() < 0.07:
                    return twigs
            return None
        else:
            width = self.height * .382
            # topdist runs from 0 at the very top to 1 at the canopy base.
            topdist = (self.height - y) / (self.height * 0.2)
            dist = width * (0.618 + topdist) * (0.618 + random()) * 0.382
            return dist
class MangroveTree(RoundTree):
    '''This kind of tree is designed to resemble a mangrove tree.
    '''
    def prepare(self, mcmap):
        '''Use the round-tree setup, but with steeper branches and a
        thinner trunk.
        '''
        self.branchslope = 1.0
        RoundTree.prepare(self, mcmap)
        self.trunkradius *= 0.618
    def shapefunc(self, y):
        '''Scale the round-tree foliage radius up by the golden ratio;
        pass through None when the parent produces no cluster here.
        '''
        val = RoundTree.shapefunc(self, y)
        if val is not None:
            val *= 1.618
        return val
def planttrees(mcmap, treelist):
    '''Take mcmap and add trees to random locations on the surface to treelist.
    '''
    assert isinstance(mcmap, mcInterface.SaveFile)
    # keep looping until all the trees are placed
    # calc the radius difference, for interpolation
    in_out_dif = EDGEHEIGHT - CENTERHEIGHT
    if VERBOSE:
        print('Tree Locations: x, y, z, tree height')
    tries = 0
    max_tries = MAXTRIES
    while len(treelist) < TREECOUNT:
        if tries > max_tries:
            if VERBOSE:
                print("Stopping search for tree locations after {0} tries".format(tries))
                print("If you don't have enough trees, check X, Y, RADIUS, and PLANTON")
            break
        tries += 1
        # choose a location
        rad_fraction = random()
        # this is some kind of square interpolation, biasing locations
        # toward the edge of the circle
        rad_fraction = 1.0 - rad_fraction
        rad_fraction **= 2
        rad_fraction = 1.0 - rad_fraction
        rad = rad_fraction * RADIUS
        ang = random() * pi * 2
        x = X + int(rad * sin(ang) + .5)
        z = Z + int(rad * cos(ang) + .5)
        # check to see if this location is suitable
        y_top = mcmap.surface_block(x, z)
        if y_top is None:
            # this location is off the map!
            continue
        if y_top['B'] in PLANTON:
            # plant the tree on the block above the ground
            # hence the " + 1"
            y = y_top['y'] + 1
        else:
            continue
        # this is linear interpolation also.
        base_height = CENTERHEIGHT + (in_out_dif * rad_fraction)
        height_rand = (random() - .5) * 2 * HEIGHTVARIATION
        height = int(base_height + height_rand)
        # if the option is set, check the surrounding area for trees
        if ONLYINFORESTS:
            '''we are looking for foliage
            it should show up in the "surface_block" search
            check every fifth block in a square pattern,
            offset around the trunk
            and equal to the trees height
            if the area is not at least one third foliage,
            don't build the tree'''
            # spacing is how far apart each sample should be
            spacing = 5
            # search_size is how many blocks to check
            # along each axis
            search_size = 2 + (height // spacing)
            # check at least 3 x 3
            search_size = max([search_size, 3])
            # set up the offset values to offset the starting corner
            offset = ((search_size - 1) * spacing) // 2
            # foliage_count is the total number of foliage blocks found
            foliage_count = 0
            # check each sample location for foliage
            for step_x in range(search_size):
                # search_x is the x location to search this sample
                search_x = x - offset + (step_x * spacing)
                for step_z in range(search_size):
                    # same as for search_x
                    search_z = z - offset + (step_z * spacing)
                    search_block = mcmap.surface_block(search_x, search_z)
                    if search_block is None:
                        continue
                    # block id 18 -- matches the foliage check described
                    # above (presumably the leaf block id; confirm).
                    if search_block['B'] == 18:
                        # this sample contains foliage!
                        # add it to the total
                        foliage_count += 1
            # now that we have the total count, find the ratio
            total_searched = search_size ** 2
            foliage_ratio = foliage_count / total_searched
            # the acceptable amount is about a third
            acceptable_ratio = .3
            if foliage_ratio < acceptable_ratio:
                # after all that work, there wasn't enough foliage around!
                # try again!
                continue
        # generate the new tree
        newtree = Tree([x, y, z], height)
        if VERBOSE:
            print(x, y, z, height)
        treelist += [newtree]
def processtrees(mcmap, treelist):
    '''Initalize all of the trees in treelist.
    Set all of the trees to the right type, and run prepare. If indicated
    limit the height of the trees to the top of the map.
    '''
    assert isinstance(mcmap, mcInterface.SaveFile)
    if SHAPE == "stickly":
        shape_choices = ["normal", "bamboo", "palm"]
    elif SHAPE == "procedural":
        shape_choices = ["round", "cone"]
    else:
        shape_choices = [SHAPE]
    # Map shape names to their tree classes. This replaces a long
    # if/elif chain and fails with a clear KeyError on an unknown
    # shape instead of a confusing NameError on an unbound variable.
    shape_classes = {
        "normal": NormalTree,
        "bamboo": BambooTree,
        "palm": PalmTree,
        "round": RoundTree,
        "cone": ConeTree,
        "rainforest": RainforestTree,
        "mangrove": MangroveTree,
    }
    # initialize mapheight, just in case
    mapheight = 255
    for i, oldtree in enumerate(treelist):
        newshape = choice(shape_choices)
        newtree = shape_classes[newshape]()
        # Get the height and position of the existing trees in
        # the list.
        newtree.copy(oldtree)
        # Now check each tree to ensure that it doesn't stick
        # out the top of the map. If it does, shorten it until
        # the top of the foliage just touches the top of the map.
        if MAPHEIGHTLIMIT:
            height = newtree.height
            ybase = newtree.pos[1]
            # Rainforest canopies are shallower than the other shapes.
            if SHAPE == "rainforest":
                foliageheight = 2
            else:
                foliageheight = 4
            if ybase + height + foliageheight > mapheight:
                newtree.height = mapheight - ybase - foliageheight
        # Even if it sticks out the top of the map, every tree
        # should be at least one unit tall.
        if newtree.height < 1:
            newtree.height = 1
        newtree.prepare(mcmap)
        treelist[i] = newtree
def main(the_map):
    '''create the trees
    '''
    treelist = []
    if VERBOSE:
        print("Planting new trees")
    # Pick surface locations and heights for up to TREECOUNT trees.
    planttrees(the_map, treelist)
    if VERBOSE:
        print("Processing tree changes")
    # Convert the placeholder Tree objects into the configured shapes
    # and run each tree's prepare().
    processtrees(the_map, treelist)
    if FOLIAGE:
        if VERBOSE:
            print("Generating foliage ")
        for i in treelist:
            i.makefoliage(the_map)
        if VERBOSE:
            print(' completed')
    if WOOD:
        if VERBOSE:
            print("Generating trunks, roots, and branches ")
        for i in treelist:
            i.maketrunk(the_map)
        if VERBOSE:
            print(' completed')
    return None
def standalone():
    '''Load the save file, run the tree filter on it, and write the
    result back (with an optional rough relighting pass).
    '''
    if VERBOSE:
        print("Importing the map")
    try:
        the_map = mcInterface.SaveFile(LOADNAME)
    except IOError:
        if VERBOSE:
            print('File name invalid or save file otherwise corrupted. Aborting')
        return None
    main(the_map)
    if LIGHTINGFIX:
        if VERBOSE:
            print("Rough re-lighting the map")
        # Point the relighting module at this save file and recalculate.
        relight_master.save_file = the_map
        relight_master.calc_lighting()
    if VERBOSE:
        print("Saving the map, this could be a while")
    the_map.write()
    if VERBOSE:
        print("finished")
# Script entry point when run directly (rather than imported).
if __name__ == '__main__':
    standalone()
# to do:
# get height limits from map
# set "limit height" or somesuch to respect level height limits
|
DragonQuiz/MCEdit-Unified
|
stock-filters/Forester.py
|
Python
|
isc
| 51,634 | 0.000562 |
import re
from coalib.bearlib.abstractions.Lint import Lint
from coalib.bears.LocalBear import LocalBear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
class InferBear(LocalBear, Lint):
    """Lint Java files with Facebook's ``infer`` static analyzer."""
    executable = 'infer'
    # NOTE(review): '(unknown)' looks like a mangled placeholder for the
    # file under analysis -- confirm against the Lint argument handling.
    arguments = '-npb -- javac (unknown)'
    # Matches "<file>:<line>: <severity>: <message>" output lines.
    output_regex = re.compile(
        r'(.+):(?P<line>.+): (?P<severity>error|warning): (?P<message>.*)')
    severity_map = {
        "error": RESULT_SEVERITY.MAJOR,
        "warning": RESULT_SEVERITY.NORMAL,
    }
    LANGUAGES = {"Java"}
    AUTHORS = {'The coala developers'}
    AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
    LICENSE = 'AGPL-3.0'
    ASCIINEMA_URL = 'https://asciinema.org/a/1g2k0la7xo5az9t8f1v5zy66q'
    CAN_DETECT = {'Security'}

    def run(self, filename, file):
        '''
        Checks the code with ``infer``.
        '''
        return self.lint(filename)
|
chriscoyfish/coala-bears
|
bears/java/InferBear.py
|
Python
|
agpl-3.0
| 896 | 0 |
from __future__ import absolute_import, unicode_literals
from django.template import Context, Template
from django.utils.translation import ugettext_lazy as _
from time import time
from .base import CallRecordingPanel
from ..utils.function_wrapper import FunctionWrapper
from ..utils.patch_context import PatchContext
TEMPLATE = Template(
"""
{% load i18n %}
<h4>{% trans "Requests" %}</h4>
<table>
<thead>
<tr>
<th>{% trans "Duration" %}</th>
<th>{% trans "Command" %}</th>
<th>{% trans "Args" %}</th>
</tr>
</thead>
<tbody>
{% for call in calls %}
<tr>
<td>{{ call.duration }} ms</td>
<td>{{ call.command }}</td>
<td>{{ call.args }} {{ call.kwargs }}</td>
</tr>
{% endfor %}
</tbody>
</table>
"""
)
class RedisPipelineWrapper(FunctionWrapper):
    """Records timing and the queued command stack of a pipeline execute."""
    def __call__(self, func, pipeline, *args, **kwargs):
        __traceback_hide__ = True  # NOQA
        # Snapshot the queued commands up front -- presumably execute()
        # resets the pipeline's command_stack; confirm against redis-py.
        command_stack = pipeline.command_stack[:]
        start = time()
        try:
            return func(pipeline, *args, **kwargs)
        finally:
            # Record in finally so failed executions are captured too.
            end = time()
            data = {
                'name': 'pipeline',
                'args': repr(command_stack),
                'kwargs': repr({}),
                'start': start,
                'end': end,
            }
            self.record(data)
class RedisWrapper(FunctionWrapper):
    """Times a single Redis command and records the call details."""

    def __call__(self, func, *args, **kwargs):
        __traceback_hide__ = True  # NOQA
        started_at = time()
        try:
            return func(*args, **kwargs)
        finally:
            # Record even when the command raises. args[1] is the
            # command name (args[0] is presumably the client instance).
            self.record({
                'name': args[1],
                'args': repr(args[2:]),
                'kwargs': repr(kwargs),
                'start': started_at,
                'end': time(),
            })
class RedisPanel(CallRecordingPanel):
    """Debug panel that records and displays Redis calls for a request."""
    title = nav_title = _("Redis")
    @classmethod
    def get_context(cls, collector):
        # Patch both single commands and pipeline executions so every
        # Redis round-trip during the request is recorded.
        return [
            PatchContext('redis.client.StrictRedis.execute_command', RedisWrapper(collector)),
            PatchContext('redis.client.BasePipeline.execute', RedisPipelineWrapper(collector)),
        ]
    @property
    def content(self):
        # Render the recorded calls with the module-level TEMPLATE.
        stats = self.get_stats()
        return TEMPLATE.render(Context(stats))
    def process_response(self, request, response):
        # Summarize the recorded calls: per-call duration in whole
        # milliseconds plus the total across the request.
        calls = []
        total_time = 0
        for call in self.calls:
            duration = int((call['end'] - call['start']) * 1000)
            total_time += duration
            calls.append(
                {
                    'duration': duration,
                    'command': call['name'],
                    'args': call['args'],
                    'kwargs': call['kwargs'],
                }
            )
        self.record_stats({
            'calls': calls,
            'total_time': total_time,
        })
|
looker/sentry
|
src/sentry/debug/panels/redis.py
|
Python
|
bsd-3-clause
| 2,952 | 0.000678 |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"Test the function for mapping Terraform arguments."
import pytest
import tftest
# Each testcase maps a parse_args keyword argument to the exact list of
# Terraform CLI flags it should produce (empty list = no flag emitted).
ARGS_TESTS = (
    ({'auto_approve': True}, ['-auto-approve']),
    ({'auto_approve': False}, []),
    ({'backend': True}, []),
    ({'backend': None}, []),
    ({'backend': False}, ['-backend=false']),
    ({'color': True}, []),
    ({'color': False}, ['-no-color']),
    ({'color': False, 'input': False}, ['-no-color', '-input=false']),
    ({'force_copy': True}, ['-force-copy']),
    ({'force_copy': None}, []),
    ({'force_copy': False}, []),
    ({'input': True}, []),
    ({'input': False}, ['-input=false']),
    ({'json_format': True}, ['-json']),
    ({'json_format': False}, []),
    ({'lock': True}, []),
    ({'lock': False}, ['-lock=false']),
    ({'plugin_dir': ''}, []),
    ({'plugin_dir': 'abc'}, ['-plugin-dir', 'abc']),
    ({'refresh': True}, []),
    ({'refresh': None}, []),
    ({'refresh': False}, ['-refresh=false']),
    ({'upgrade': True}, ['-upgrade']),
    ({'upgrade': False}, []),
    ({'tf_var_file': None}, []),
    ({'tf_var_file': 'foo.tfvar'}, ['-var-file=foo.tfvar']),
)
@pytest.mark.parametrize("kwargs, expected", ARGS_TESTS)
def test_args(kwargs, expected):
    """No arguments yields no flags; each kwarg yields its exact flags."""
    assert tftest.parse_args() == []
    assert tftest.parse_args(**kwargs) == expected
# Terragrunt-specific keyword arguments and the --terragrunt-* flags they
# should produce. Boolean options emit a bare flag (or nothing when False).
TERRAGRUNT_ARGS_TESTCASES = [
    ({"tg_config": "Obama"}, ['--terragrunt-config', 'Obama']),
    ({"tg_tfpath": "Barrack"}, ['--terragrunt-tfpath', 'Barrack']),
    ({"tg_no_auto_init": True}, ['--terragrunt-no-auto-init']),
    ({"tg_no_auto_init": False}, []),
    ({"tg_no_auto_retry": True}, ['--terragrunt-no-auto-retry']),
    ({"tg_no_auto_retry": False}, []),
    ({"tg_non_interactive": True}, ['--terragrunt-non-interactive']),
    ({"tg_non_interactive": False}, []),
    ({"tg_working_dir": "George"}, ['--terragrunt-working-dir', 'George']),
    ({"tg_download_dir": "Bush"}, ['--terragrunt-download-dir', 'Bush']),
    ({"tg_source": "Clinton"}, ['--terragrunt-source', 'Clinton']),
    ({"tg_source_update": True}, ['--terragrunt-source-update']),
    ({"tg_source_update": False}, []),
    ({"tg_iam_role": "Bill"}, ['--terragrunt-iam-role', 'Bill']),
    ({"tg_ignore_dependency_errors": True}, ['--terragrunt-ignore-dependency-errors']),
    ({"tg_ignore_dependency_errors": False}, []),
    ({"tg_ignore_dependency_order": True}, ['--terragrunt-ignore-dependency-order']),
    ({"tg_ignore_dependency_order": False}, []),
    ({"tg_ignore_external_dependencies": "dont care what is here"},
     ['--terragrunt-ignore-external-dependencies']),
    ({"tg_include_external_dependencies": True}, ['--terragrunt-include-external-dependencies']),
    ({"tg_include_external_dependencies": False}, []),
    # NOTE(review): this expectation is a single "flag value" string,
    # unlike the two-element lists above -- presumably it mirrors the
    # implementation; confirm it is intentional.
    ({"tg_parallelism": 20}, ['--terragrunt-parallelism 20']),
    ({"tg_exclude_dir": "Ronald"}, ['--terragrunt-exclude-dir', 'Ronald']),
    ({"tg_include_dir": "Reagan"}, ['--terragrunt-include-dir', 'Reagan']),
    ({"tg_check": True}, ['--terragrunt-check']),
    ({"tg_check": False}, []),
    ({"tg_hclfmt_file": "Biden"}, ['--terragrunt-hclfmt-file', 'Biden']),
    ({"tg_override_attr": {"Iron": "Man", "Captain": "America"}},
     ['--terragrunt-override-attr=Iron=Man', '--terragrunt-override-attr=Captain=America']),
    ({"tg_debug": True}, ['--terragrunt-debug']),
    ({"tg_debug": False}, []),
]
@pytest.mark.parametrize("kwargs, expected", TERRAGRUNT_ARGS_TESTCASES)
def test_terragrunt_args(kwargs, expected):
    """Each terragrunt kwarg maps to its exact --terragrunt-* flags."""
    assert tftest.parse_args(**kwargs) == expected
def test_var_args():
    """init_vars become -backend-config flags, tf_vars become -var pairs.

    Order of emitted flags is not guaranteed, hence sorted() on both sides.
    """
    assert sorted(tftest.parse_args(init_vars={'a': 1, 'b': '["2"]'})) == sorted(
        ["-backend-config=a=1", '-backend-config=b=["2"]'])
    assert sorted(tftest.parse_args(tf_vars={'a': 1, 'b': '["2"]'})) == sorted(
        ['-var', 'b=["2"]', '-var', 'a=1'])
def test_targets():
    """Each entry in *targets* becomes its own -target=<name> flag."""
    # The expected list is written directly: wrapping an already-ordered
    # literal in sorted() added nothing.
    assert tftest.parse_args(targets=['one', 'two']) == [
        '-target=one', '-target=two']
|
GoogleCloudPlatform/terraform-python-testing-helper
|
test/test_args.py
|
Python
|
apache-2.0
| 4,448 | 0.002248 |
#-- GAUDI jobOptions generated on Mon Jul 20 10:20:49 2015
#-- Contains event types :
#-- 11134011 - 42 files - 900254 events - 251.92 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-125836
#-- StepId : 125836
#-- StepName : Stripping20-NoPrescalingFlagged for Sim08 - Implicit merging.
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v32r2p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-127969
#-- StepId : 127969
#-- StepName : Reco14c for MC - 2012
#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p11
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/DST-multipleTCK-2012.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r218
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper

# LFNs for production 00046297: files 1-27 and 29-43 (file 28 is absent
# from the bookkeeping dump in the header above), 42 files in total.
_LFN_TEMPLATE = ('LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/'
                 '00046297_%08d_2.AllStreams.dst')
_FILE_NUMBERS = list(range(1, 28)) + list(range(29, 44))

IOHelper('ROOT').inputFiles([_LFN_TEMPLATE % n for n in _FILE_NUMBERS],
                            clear=True)
|
Williams224/davinci-scripts
|
ksteta3pi/Consideredbkg/MC_12_11134011_MagUp.py
|
Python
|
mit
| 4,905 | 0.026911 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Two-step field addition: AddField creates ``is_stopped`` as a nullable
    # boolean with no default (existing rows stay NULL), then AlterField
    # attaches default=False for rows created afterwards. Presumably split
    # this way to keep the schema change cheap on a large contacts table --
    # confirm against the deployment notes.

    dependencies = [
        ('contacts', '0020_unset_suspend_from_dynamic'),
    ]

    operations = [
        migrations.AddField(
            model_name='contact',
            name='is_stopped',
            field=models.NullBooleanField(help_text='Whether this contact opted out of receiving messages'),
        ),
        migrations.AlterField(
            model_name='contact',
            name='is_stopped',
            field=models.NullBooleanField(default=False,
                                          help_text='Whether this contact opted out of receiving messages'),
        ),
    ]
|
xkmato/casepro
|
casepro/contacts/migrations/0021_contact_is_stopped_pt1.py
|
Python
|
bsd-3-clause
| 745 | 0.002685 |
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.storage.databases.main.directory import DirectoryWorkerStore
from ._base import BaseSlavedStore
class DirectoryStore(DirectoryWorkerStore, BaseSlavedStore):
    """Replicated room-directory store.

    All query logic is inherited from DirectoryWorkerStore; mixing in
    BaseSlavedStore wires the class into the replication machinery, so no
    extra members are needed here.
    """
    pass
|
matrix-org/synapse
|
synapse/replication/slave/storage/directory.py
|
Python
|
apache-2.0
| 767 | 0 |
# =============================================================================
# Copyright (C) 2014 Ryan Holmes
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
from service.const import PortMultiBuyOptions
from service.price import Price as sPrc
# Export options for the multibuy dialog, as tuples of
# (option constant, UI label, tooltip, flag) -- the final bool is
# presumably the default-enabled state; confirm in the GUI code that
# consumes this table.
MULTIBUY_OPTIONS = (
    (PortMultiBuyOptions.LOADED_CHARGES, 'Loaded Charges', 'Export charges loaded into modules', True),
    (PortMultiBuyOptions.IMPLANTS, 'Implants && Boosters', 'Export implants and boosters', False),
    (PortMultiBuyOptions.CARGO, 'Cargo', 'Export cargo contents', True),
    (PortMultiBuyOptions.OPTIMIZE_PRICES, 'Optimize Prices', 'Replace items by cheaper alternatives', False),
)
def exportMultiBuy(fit, options, callback):
    """Export a fit as multibuy text.

    Collects item quantities from modules, drones, fighters and (depending
    on *options*) cargo, implants and boosters. With price optimization the
    result is always delivered asynchronously through *callback*; otherwise
    the string is passed to *callback* when given, or returned directly.
    """
    amounts = {}
    for mod in fit.modules:
        if not mod.item:
            continue
        # Mutated items are of no use for multibuy
        if mod.isMutated:
            continue
        _addItem(amounts, mod.item)
        if mod.charge and options[PortMultiBuyOptions.LOADED_CHARGES]:
            _addItem(amounts, mod.charge, mod.numCharges)
    for drone in fit.drones:
        _addItem(amounts, drone.item, drone.amount)
    for fighter in fit.fighters:
        _addItem(amounts, fighter.item, fighter.amount)
    if options[PortMultiBuyOptions.CARGO]:
        for cargoEntry in fit.cargo:
            _addItem(amounts, cargoEntry.item, cargoEntry.amount)
    if options[PortMultiBuyOptions.IMPLANTS]:
        for implant in fit.implants:
            _addItem(amounts, implant.item)
        for booster in fit.boosters:
            _addItem(amounts, booster.item)
    if options[PortMultiBuyOptions.OPTIMIZE_PRICES]:
        # Price lookup is asynchronous; render and deliver once the cheaper
        # replacements are known.
        def onCheaperFound(replacementsCheaper):
            optimized = {}
            for itm, qty in amounts.items():
                _addItem(optimized, replacementsCheaper.get(itm, itm), qty)
            callback(_prepareString(fit.ship.item, optimized))
        priceSvc = sPrc.getInstance()
        priceSvc.findCheaperReplacements(amounts, onCheaperFound)
    else:
        exportString = _prepareString(fit.ship.item, amounts)
        if callback:
            callback(exportString)
        else:
            return exportString
def _addItem(container, item, quantity=1):
if item not in container:
container[item] = 0
container[item] += quantity
def _prepareString(shipItem, itemAmounts):
exportLines = []
exportLines.append(shipItem.name)
for item in sorted(itemAmounts, key=lambda i: (i.group.category.name, i.group.name, i.name)):
count = itemAmounts[item]
if count == 1:
exportLines.append(item.name)
else:
exportLines.append('{} x{}'.format(item.name, count))
return "\n".join(exportLines)
|
DarkFenX/Pyfa
|
service/port/multibuy.py
|
Python
|
gpl-3.0
| 3,507 | 0.001426 |
"""Generic wrapper for read-eval-print-loops, a.k.a. interactive shells
"""
import os.path
import signal
import sys
import re
import pexpect
# Python 2/3 compatibility shim: u() normalises prompt literals to unicode
# text, and ``basestring`` is aliased so the isinstance check in REPLWrapper
# works on both interpreters.
PY3 = (sys.version_info[0] >= 3)
if PY3:
    def u(s): return s
    basestring = str
else:
    def u(s): return s.decode('utf-8')

# Deliberately odd, unbalanced-bracket prompts: unlikely to collide with
# real program output, and matched with expect_exact (they are not valid
# regexes).
PEXPECT_PROMPT = u('[PEXPECT_PROMPT>')
PEXPECT_CONTINUATION_PROMPT = u('[PEXPECT_PROMPT+')
class REPLWrapper(object):
    """Wrapper for a REPL.

    :param cmd_or_spawn: This can either be an instance of :class:`pexpect.spawn`
      in which a REPL has already been started, or a str command to start a new
      REPL process.
    :param str orig_prompt: The prompt to expect at first.
    :param str prompt_change: A command to change the prompt to something more
      unique. If this is ``None``, the prompt will not be changed. This will
      be formatted with the new and continuation prompts as positional
      parameters, so you can use ``{}`` style formatting to insert them into
      the command.
    :param str new_prompt: The more unique prompt to expect after the change.
    :param str extra_init_cmd: Commands to do extra initialisation, such as
      disabling pagers.
    """
    def __init__(self, cmd_or_spawn, orig_prompt, prompt_change,
                 new_prompt=PEXPECT_PROMPT,
                 continuation_prompt=PEXPECT_CONTINUATION_PROMPT,
                 extra_init_cmd=None):
        if isinstance(cmd_or_spawn, basestring):
            self.child = pexpect.spawnu(cmd_or_spawn, echo=False)
        else:
            self.child = cmd_or_spawn
        if self.child.echo:
            # Existing spawn instance has echo enabled, disable it
            # to prevent our input from being repeated to output.
            self.child.setecho(False)
            self.child.waitnoecho()
        if prompt_change is None:
            self.prompt = orig_prompt
        else:
            self.set_prompt(orig_prompt,
                            prompt_change.format(new_prompt, continuation_prompt))
            self.prompt = new_prompt
        self.continuation_prompt = continuation_prompt
        # Consume output up to the (possibly new) prompt so the child is in a
        # known ready state before any command is run.
        self._expect_prompt()
        if extra_init_cmd is not None:
            self.run_command(extra_init_cmd)

    def set_prompt(self, orig_prompt, prompt_change):
        """Wait for the original prompt, then send the prompt-change command."""
        self.child.expect(orig_prompt)
        self.child.sendline(prompt_change)

    def _expect_prompt(self, timeout=-1):
        """Wait for either prompt; returns 0 for the main prompt, 1 for the
        continuation prompt."""
        return self.child.expect_exact([self.prompt, self.continuation_prompt],
                                       timeout=timeout)

    def run_command(self, command, timeout=-1):
        """Send a command to the REPL, wait for and return output.

        :param str command: The command to send. Trailing newlines are not needed.
          This should be a complete block of input that will trigger execution;
          if a continuation prompt is found after sending input, :exc:`ValueError`
          will be raised.
        :param int timeout: How long to wait for the next prompt. -1 means the
          default from the :class:`pexpect.spawn` object (default 30 seconds).
          None means to wait indefinitely.
        """
        # Split up multiline commands and feed them in bit-by-bit
        cmdlines = command.splitlines()
        # splitlines ignores trailing newlines - add it back in manually
        if command.endswith('\n'):
            cmdlines.append('')
        if not cmdlines:
            raise ValueError("No command was given")
        self.child.sendline(cmdlines[0])
        for line in cmdlines[1:]:
            # Each intermediate line should yield a continuation prompt
            # (or the main prompt) before the next one is sent.
            self._expect_prompt(timeout=1)
            self.child.sendline(line)
        # Command was fully submitted, now wait for the next prompt
        if self._expect_prompt(timeout=timeout) == 1:
            # We got the continuation prompt - command was incomplete.
            # Interrupt the child to get back to a clean prompt, then report.
            self.child.kill(signal.SIGINT)
            self._expect_prompt(timeout=1)
            raise ValueError("Continuation prompt found - input was incomplete:\n"
                             + command)
        return self.child.before
def python(command="python"):
    """Launch a Python interpreter wrapped in a :class:`REPLWrapper`."""
    # Rebind sys.ps1/ps2 so the wrapper can reliably detect prompts.
    prompt_change = u("import sys; sys.ps1={0!r}; sys.ps2={1!r}")
    return REPLWrapper(command, u(">>> "), prompt_change)
def bash(command="bash"):
    """Launch a bash shell wrapped in a :class:`REPLWrapper`."""
    # Use our own rcfile so the shell comes up in a known configuration.
    rcfile = os.path.join(os.path.dirname(__file__), 'bashrc.sh')
    session = pexpect.spawnu(command, ['--rcfile', rcfile], echo=False)
    prompt_change = u("PS1='{0}' PS2='{1}' PROMPT_COMMAND=''")
    return REPLWrapper(session, u'\$', prompt_change,
                       extra_init_cmd="export PAGER=cat")
|
Wakeupbuddy/pexpect
|
pexpect/replwrap.py
|
Python
|
isc
| 4,604 | 0.002389 |
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, François-Xavier Thomas.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Use command-line tools to check for audio file corruption.
"""
from __future__ import division, absolute_import, print_function
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand
from beets.util import displayable_path, confit
from beets import ui
from subprocess import check_output, CalledProcessError, list2cmdline, STDOUT
import shlex
import os
import errno
import sys
class BadFiles(BeetsPlugin):
    """Check for corrupt audio files by invoking external checker tools."""

    def run_command(self, cmd):
        """Run *cmd* and return (exit status, error count, non-empty output lines)."""
        self._log.debug(u"running command: {}",
                        displayable_path(list2cmdline(cmd)))
        try:
            output = check_output(cmd, stderr=STDOUT)
            errors = 0
            status = 0
        except CalledProcessError as e:
            # A non-zero exit is reported, not raised: callers decide how to
            # present checker failures.
            output = e.output
            errors = 1
            status = e.returncode
        except OSError as e:
            if e.errno == errno.ENOENT:
                ui.print_(u"command not found: {}".format(cmd[0]))
                sys.exit(1)
            else:
                raise
        output = output.decode(sys.getfilesystemencoding())
        return status, errors, [line for line in output.split("\n") if line]

    def check_mp3val(self, path):
        """Check an MP3 with mp3val; on success, only WARNING lines count as errors."""
        status, errors, output = self.run_command(["mp3val", path])
        if status == 0:
            output = [line for line in output if line.startswith("WARNING:")]
            errors = len(output)
        return status, errors, output

    def check_flac(self, path):
        """Check a FLAC file with the reference decoder (-w warnings-as-errors, -s silent, -t test)."""
        return self.run_command(["flac", "-wst", path])

    def check_custom(self, command):
        """Build a checker callable that runs a user-configured command on a path."""
        def checker(path):
            cmd = shlex.split(command)
            cmd.append(path)
            return self.run_command(cmd)
        return checker

    def get_checker(self, ext):
        """Return the checker for a file extension: user-configured command
        first, then the built-in mp3/flac checkers; None if no checker applies."""
        ext = ext.lower()
        try:
            command = self.config['commands'].get(dict).get(ext)
        except confit.NotFoundError:
            command = None
        if command:
            return self.check_custom(command)
        elif ext == "mp3":
            return self.check_mp3val
        elif ext == "flac":
            return self.check_flac

    def check_bad(self, lib, opts, args):
        """Run the appropriate checker over every library item matching the query."""
        for item in lib.items(ui.decargs(args)):
            # First, check whether the path exists. If not, the user
            # should probably run `beet update` to cleanup your library.
            dpath = displayable_path(item.path)
            self._log.debug(u"checking path: {}", dpath)
            if not os.path.exists(item.path):
                ui.print_(u"{}: file does not exist".format(
                    ui.colorize('text_error', dpath)))
            # Run the checker against the file if one is found
            ext = os.path.splitext(item.path)[1][1:]
            checker = self.get_checker(ext)
            if not checker:
                continue
            path = item.path
            # NOTE(review): `unicode` exists only on Python 2; this branch
            # would raise NameError under Python 3 - confirm intended runtime.
            if not isinstance(path, unicode):
                path = item.path.decode(sys.getfilesystemencoding())
            status, errors, output = checker(path)
            if status > 0:
                # Fixed typo in user-facing message ("withs" -> "with").
                ui.print_(u"{}: checker exited with status {}"
                          .format(ui.colorize('text_error', dpath), status))
                for line in output:
                    ui.print_("  {}".format(displayable_path(line)))
            elif errors > 0:
                ui.print_(u"{}: checker found {} errors or warnings"
                          .format(ui.colorize('text_warning', dpath), errors))
                for line in output:
                    ui.print_(u"  {}".format(displayable_path(line)))
            else:
                ui.print_(u"{}: ok".format(ui.colorize('text_success', dpath)))

    def commands(self):
        """Register the ``beet bad`` subcommand."""
        bad_command = Subcommand('bad',
                                 help=u'check for corrupt or missing files')
        bad_command.func = self.check_bad
        return [bad_command]
|
bbsan2k/nzbToMedia
|
libs/beetsplug/badfiles.py
|
Python
|
gpl-3.0
| 4,564 | 0 |
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from connector import channel
from google3.cloud.graphite.mmv2.services.google.monitoring import service_pb2
from google3.cloud.graphite.mmv2.services.google.monitoring import service_pb2_grpc
from typing import List
class Service(object):
    """Generated client wrapper for a Cloud Monitoring Service resource.

    Mirrors the resource's fields as plain attributes and converts them to
    and from the `service_pb2.MonitoringService` proto for the apply /
    delete / list RPCs issued over the shared connector channel.
    """

    def __init__(
        self,
        name: str = None,
        display_name: str = None,
        custom: dict = None,
        telemetry: dict = None,
        user_labels: dict = None,
        project: str = None,
        service_account_file: str = "",
    ):
        channel.initialize()
        self.name = name
        self.display_name = display_name
        self.custom = custom
        self.telemetry = telemetry
        self.user_labels = user_labels
        self.project = project
        self.service_account_file = service_account_file

    def apply(self):
        """Create or update the remote resource, then refresh local fields
        from the server response."""
        stub = service_pb2_grpc.MonitoringServiceServiceStub(channel.Channel())
        request = service_pb2.ApplyMonitoringServiceRequest()
        if Primitive.to_proto(self.name):
            request.resource.name = Primitive.to_proto(self.name)

        if Primitive.to_proto(self.display_name):
            request.resource.display_name = Primitive.to_proto(self.display_name)

        # Message fields must be cleared explicitly when unset so the server
        # can distinguish "absent" from "empty".
        if ServiceCustom.to_proto(self.custom):
            request.resource.custom.CopyFrom(ServiceCustom.to_proto(self.custom))
        else:
            request.resource.ClearField("custom")
        if ServiceTelemetry.to_proto(self.telemetry):
            request.resource.telemetry.CopyFrom(
                ServiceTelemetry.to_proto(self.telemetry)
            )
        else:
            request.resource.ClearField("telemetry")
        if Primitive.to_proto(self.user_labels):
            request.resource.user_labels = Primitive.to_proto(self.user_labels)

        if Primitive.to_proto(self.project):
            request.resource.project = Primitive.to_proto(self.project)

        request.service_account_file = self.service_account_file

        response = stub.ApplyMonitoringService(request)
        self.name = Primitive.from_proto(response.name)
        self.display_name = Primitive.from_proto(response.display_name)
        self.custom = ServiceCustom.from_proto(response.custom)
        self.telemetry = ServiceTelemetry.from_proto(response.telemetry)
        self.user_labels = Primitive.from_proto(response.user_labels)
        self.project = Primitive.from_proto(response.project)

    def delete(self):
        """Delete the remote resource identified by the current fields."""
        stub = service_pb2_grpc.MonitoringServiceServiceStub(channel.Channel())
        request = service_pb2.DeleteMonitoringServiceRequest()
        request.service_account_file = self.service_account_file
        if Primitive.to_proto(self.name):
            request.resource.name = Primitive.to_proto(self.name)

        if Primitive.to_proto(self.display_name):
            request.resource.display_name = Primitive.to_proto(self.display_name)

        if ServiceCustom.to_proto(self.custom):
            request.resource.custom.CopyFrom(ServiceCustom.to_proto(self.custom))
        else:
            request.resource.ClearField("custom")
        if ServiceTelemetry.to_proto(self.telemetry):
            request.resource.telemetry.CopyFrom(
                ServiceTelemetry.to_proto(self.telemetry)
            )
        else:
            request.resource.ClearField("telemetry")
        if Primitive.to_proto(self.user_labels):
            request.resource.user_labels = Primitive.to_proto(self.user_labels)

        if Primitive.to_proto(self.project):
            request.resource.project = Primitive.to_proto(self.project)

        response = stub.DeleteMonitoringService(request)

    @classmethod
    def list(self, project, service_account_file=""):
        """List all MonitoringService resources in *project*."""
        stub = service_pb2_grpc.MonitoringServiceServiceStub(channel.Channel())
        request = service_pb2.ListMonitoringServiceRequest()
        request.service_account_file = service_account_file
        # NOTE(review): capitalised attribute presumably matches the
        # generated request proto's field name - do not "fix" the casing.
        request.Project = project

        return stub.ListMonitoringService(request).items

    def to_proto(self):
        """Serialise the local fields into a MonitoringService proto."""
        resource = service_pb2.MonitoringService()
        if Primitive.to_proto(self.name):
            resource.name = Primitive.to_proto(self.name)
        if Primitive.to_proto(self.display_name):
            resource.display_name = Primitive.to_proto(self.display_name)
        if ServiceCustom.to_proto(self.custom):
            resource.custom.CopyFrom(ServiceCustom.to_proto(self.custom))
        else:
            resource.ClearField("custom")
        if ServiceTelemetry.to_proto(self.telemetry):
            resource.telemetry.CopyFrom(ServiceTelemetry.to_proto(self.telemetry))
        else:
            resource.ClearField("telemetry")
        if Primitive.to_proto(self.user_labels):
            resource.user_labels = Primitive.to_proto(self.user_labels)
        if Primitive.to_proto(self.project):
            resource.project = Primitive.to_proto(self.project)
        return resource
class ServiceCustom(object):
    """Converter for the (field-less) MonitoringServiceCustom message."""

    @classmethod
    def to_proto(self, resource):
        if not resource:
            return None
        # The message carries no fields; its mere presence is the signal.
        res = service_pb2.MonitoringServiceCustom()
        return res

    @classmethod
    def from_proto(self, resource):
        if not resource:
            return None
        return ServiceCustom()
class ServiceCustomArray(object):
    """List-valued counterpart of ServiceCustom's converters."""

    @classmethod
    def to_proto(self, resources):
        if resources:
            return [ServiceCustom.to_proto(entry) for entry in resources]
        # Preserve the falsy input (None or empty list) unchanged.
        return resources

    @classmethod
    def from_proto(self, resources):
        return [ServiceCustom.from_proto(entry) for entry in resources]
class ServiceTelemetry(object):
    """Converter between a plain holder object and the
    MonitoringServiceTelemetry proto (single ``resource_name`` field)."""

    def __init__(self, resource_name: str = None):
        self.resource_name = resource_name

    @classmethod
    def to_proto(self, resource):
        if not resource:
            return None
        res = service_pb2.MonitoringServiceTelemetry()
        if Primitive.to_proto(resource.resource_name):
            res.resource_name = Primitive.to_proto(resource.resource_name)
        return res

    @classmethod
    def from_proto(self, resource):
        if not resource:
            return None
        return ServiceTelemetry(
            resource_name=Primitive.from_proto(resource.resource_name),
        )
class ServiceTelemetryArray(object):
    """List-valued counterpart of ServiceTelemetry's converters."""

    @classmethod
    def to_proto(self, resources):
        if resources:
            return [ServiceTelemetry.to_proto(entry) for entry in resources]
        # Preserve the falsy input (None or empty list) unchanged.
        return resources

    @classmethod
    def from_proto(self, resources):
        return [ServiceTelemetry.from_proto(entry) for entry in resources]
class Primitive(object):
    """Pass-through converter for scalar fields."""

    @classmethod
    def to_proto(self, s):
        # Protos represent unset scalars as the empty string, so any falsy
        # value is normalised to "".
        return s if s else ""

    @classmethod
    def from_proto(self, s):
        return s
|
GoogleCloudPlatform/declarative-resource-client-library
|
python/services/monitoring/service.py
|
Python
|
apache-2.0
| 7,348 | 0.001225 |
#
# Copyright (c) 2008--2018 Red Hat, Inc.
# Copyright (c) 2010--2011 SUSE Linux Products GmbH
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import os
import re
import shutil
import sys
from datetime import datetime
from xml.dom import minidom
import gzip
import ConfigParser
import gettext
import errno
from rhn.connections import idn_puny_to_unicode
from spacewalk.server import rhnPackage, rhnSQL, rhnChannel
from spacewalk.common.usix import raise_with_tb
from spacewalk.common import fileutils, rhnLog, rhnCache, rhnMail
from spacewalk.common.rhnLib import isSUSE
from spacewalk.common.checksum import getFileChecksum
from spacewalk.common.rhnConfig import CFG, initCFG
from spacewalk.common.rhnException import rhnFault
from spacewalk.server.importlib import importLib, mpmSource, packageImport, errataCache
from spacewalk.server.importlib.packageImport import ChannelPackageSubscription
from spacewalk.server.importlib.backendOracle import SQLBackend
from spacewalk.server.importlib.errataImport import ErrataImport
from spacewalk.satellite_tools.download import ThreadedDownloader, ProgressBarLogger, TextLogger
from spacewalk.satellite_tools.repo_plugins import CACHE_DIR
from spacewalk.server import taskomatic, rhnPackageUpload
from spacewalk.satellite_tools.satCerts import verify_certificate_dates
from syncLib import log, log2, log2disk, dumpEMAIL_LOG, log2background
# Server-side message catalogue; fall back to untranslated strings when the
# catalogue is not installed.
translation = gettext.translation('spacewalk-backend-server', fallback=True)
_ = translation.ugettext

# Filesystem layout and tuning knobs for repo syncing.
default_log_location = '/var/log/rhn/'
relative_comps_dir = 'rhn/comps'
relative_modules_dir = 'rhn/modules'
checksum_cache_filename = 'reposync/checksum_cache'
default_import_batch_size = 10

# Maps repository updateinfo advisory types onto the advisory-type labels
# stored in the database.
errata_typemap = {
    'security': 'Security Advisory',
    'recommended': 'Bug Fix Advisory',
    'bugfix': 'Bug Fix Advisory',
    'optional': 'Product Enhancement Advisory',
    'feature': 'Product Enhancement Advisory',
    'enhancement': 'Product Enhancement Advisory'
}
def send_mail(sync_type="Repo"):
    """ Send email summary """
    body = dumpEMAIL_LOG()
    if not body:
        print(_("+++ email requested, but there is nothing to send +++"))
        return
    print(_("+++ sending log as an email +++"))
    host_label = idn_puny_to_unicode(os.uname()[1])
    # Prefer the configured sender; fall back to root on this host.
    sender = CFG.default_mail_from or ("root@%s" % host_label)
    headers = {
        'Subject': _("%s sync. report from %s") % (sync_type, host_label),
    }
    rhnMail.send(headers, body, sender=sender)
class KSDirParser:
    """Extract kickstart-relevant entries from an HTML directory listing.

    Anchors that are absolute paths, contain a query string or ``..``,
    carry a URL scheme, or point at RPMs are skipped; everything else is
    classified as a directory (trailing slash) or plain file, unless it is
    blacklisted.
    """
    file_blacklist = ["release-notes/"]

    def __init__(self, dir_html, additional_blacklist=None):
        self.dir_content = []

        if additional_blacklist is None:
            additional_blacklist = []
        elif not isinstance(additional_blacklist, list):
            # Accept a single entry as a convenience.
            additional_blacklist = [additional_blacklist]
        # Set membership instead of scanning a concatenated list per entry.
        blacklist = set(self.file_blacklist) | set(additional_blacklist)

        for href in (m.group(1) for m in re.finditer(r'(?i)<a href="(.+?)"', dir_html)):
            if (re.match(r'/', href) or re.search(r'\?', href)
                    or re.search(r'\.\.', href) or re.match(r'[a-zA-Z]+:', href)
                    or re.search(r'\.rpm$', href)):
                continue
            file_type = 'DIR' if href.endswith('/') else 'FILE'
            if href not in blacklist:
                self.dir_content.append({'name': href, 'type': file_type})

    def get_content(self):
        """Return the parsed entries as a list of {'name', 'type'} dicts."""
        return self.dir_content
class TreeInfoError(Exception):
    """Raised when a .treeinfo file cannot be parsed."""
    pass
class TreeInfoParser(object):
    """Thin wrapper around a parsed .treeinfo file (installer tree metadata)."""

    def __init__(self, filename):
        self.parser = ConfigParser.RawConfigParser()
        # do not lowercase option names - image paths are case-sensitive
        self.parser.optionxform = str
        fp = open(filename)
        try:
            try:
                self.parser.readfp(fp)
            except ConfigParser.ParsingError:
                raise TreeInfoError("Could not parse treeinfo file!")
        finally:
            if fp is not None:
                fp.close()

    def get_images(self):
        """Return every file path from the images-* and stage2 sections."""
        files = []
        for section_name in self.parser.sections():
            if section_name.startswith('images-') or section_name == 'stage2':
                for item in self.parser.items(section_name):
                    files.append(item[1])
        return files

    def _general_option(self, option):
        """Return one option from the [general] section, or None if absent."""
        for section_name in self.parser.sections():
            if section_name == 'general':
                for key, value in self.parser.items(section_name):
                    if key == option:
                        return value
        return None

    def get_family(self):
        """Return the 'family' value from the [general] section."""
        return self._general_option('family')

    def get_major_version(self):
        """Return the major component of [general]/version, or None."""
        version = self._general_option('version')
        if version is not None:
            return version.split('.')[0]
        return None

    def get_package_dir(self):
        """Return the 'packagedir' value from the [general] section."""
        return self._general_option('packagedir')

    def get_addons(self):
        """Return repository paths of addon sections, matched either by the
        addon- name prefix or by an explicit type=addon option."""
        addons_dirs = []
        for section_name in self.parser.sections():
            # check by name
            if section_name.startswith('addon-'):
                for item in self.parser.items(section_name):
                    if item[0] == 'repository':
                        addons_dirs.append(item[1])
            # check by type
            else:
                repository = None
                repo_type = None
                for item in self.parser.items(section_name):
                    if item[0] == 'repository':
                        repository = item[1]
                    elif item[0] == 'type':
                        repo_type = item[1]
                if repo_type == 'addon' and repository is not None:
                    addons_dirs.append(repository)
        return addons_dirs
def set_filter_opt(option, opt_str, value, parser):
    """optparse callback: record an include (+) or exclude (-) filter list."""
    # pylint: disable=W0613
    f_type = '+' if opt_str in ['--include', '-i'] else '-'
    parser.values.filters.append(
        (f_type, [v.strip() for v in value.split(',') if v.strip()]))
def getChannelRepo():
    """Map each channel label to the list of repo source URLs attached to it."""
    initCFG('server.satellite')
    rhnSQL.initDB()
    items = {}
    sql = """
           select s.source_url, c.label
                       from rhnContentSource s,
                       rhnChannelContentSource cs,
                       rhnChannel c
                       where s.id = cs.source_id and cs.channel_id=c.id
           """
    h = rhnSQL.prepare(sql)
    h.execute()
    while True:
        row = h.fetchone_dict()
        if not row:
            break
        items.setdefault(row['label'], []).append(row['source_url'])
    return items
def getParentsChilds(b_only_custom=False):
    """Map each parent-channel label to the labels of its child channels.

    Children whose parent is not already present as a key are promoted to
    top-level entries. With b_only_custom=True, non-custom channels are
    skipped entirely.
    """
    initCFG('server.satellite')
    rhnSQL.initDB()
    # Ordering parents first (desc on the parent label) guarantees a parent
    # row is processed before its children.
    sql = """
        select c1.label, c2.label parent_channel, c1.id
        from rhnChannel c1 left outer join rhnChannel c2 on c1.parent_channel = c2.id
        order by c2.label desc, c1.label asc
    """
    h = rhnSQL.prepare(sql)
    h.execute()
    d_parents = {}
    while 1:
        row = h.fetchone_dict()
        if not row:
            break
        if not b_only_custom or rhnChannel.isCustomChannel(row['id']):
            parent_channel = row['parent_channel']
            if not parent_channel:
                d_parents[row['label']] = []
            else:
                # If the parent is not a custom channel treat the child like
                # it's a parent for our purposes
                if parent_channel not in d_parents:
                    d_parents[row['label']] = []
                else:
                    d_parents[parent_channel].append(row['label'])
    return d_parents
def getCustomChannels():
    """Return labels of all custom channels together with their children."""
    tree = getParentsChilds(True)
    labels = []
    for parent, children in tree.items():
        labels.append(parent)
        labels.extend(children)
    return labels
def write_ssl_set_cache(ca_cert, client_cert, client_key):
    """Write one SSL set into cache directory and return path to files.

    Each argument is a (name, pem, org) tuple; entries whose name or pem
    is missing yield None in place of a path.
    """
    def create_dir_tree(path):
        # os.makedirs has no exist_ok on Python 2, so swallow EEXIST by hand.
        try:
            os.makedirs(path, int('0750', 8))
        except OSError:
            exc = sys.exc_info()[1]
            if exc.errno == errno.EEXIST and os.path.isdir(path):
                pass
            else:
                raise

    filenames = {}
    for cert in (ca_cert, client_cert, client_key):
        (name, pem, org) = cert
        filenames[cert] = None
        if name is not None and pem is not None:
            if not org:
                # Org-less certs share a single "NULL" directory.
                org = "NULL"
            else:
                org = str(org)
            ssldir = os.path.join(CACHE_DIR, '.ssl-certs', org)
            cert_file = os.path.join(ssldir, "%s.pem" % name)
            if not os.path.exists(cert_file):
                create_dir_tree(ssldir)
                # with-statement closes the handle even if the write fails
                # (the original leaked the descriptor on error).
                with open(cert_file, "w") as f:
                    f.write(str(pem))
            filenames[cert] = cert_file

    return filenames[ca_cert], filenames[client_cert], filenames[client_key]
def clear_ssl_cache():
    """Remove the entire cached SSL certificate tree, ignoring errors."""
    shutil.rmtree(os.path.join(CACHE_DIR, '.ssl-certs'), True)
def get_single_ssl_set(keys, check_dates=False):
    """Picks one of available SSL sets for given repository.

    With check_dates=True, returns the first set whose CA certificate (and
    client certificate, when one is present) is currently within its
    validity window, or None when no such set exists. Otherwise simply
    returns the first configured set.
    """
    if check_dates:
        for ssl_set in keys:
            if verify_certificate_dates(str(ssl_set['ca_cert'])) and \
                (not ssl_set['client_cert'] or
                 verify_certificate_dates(str(ssl_set['client_cert']))):
                return ssl_set
    # Get first
    else:
        return keys[0]
    return None
class RepoSync(object):
    def __init__(self, channel_label, repo_type=None, url=None, fail=False,
                 filters=None, no_errata=False, sync_kickstart=False, latest=False,
                 metadata_only=False, strict=0, excluded_urls=None, no_packages=False,
                 log_dir="reposync", log_level=None, force_kickstart=False, force_all_errata=False,
                 check_ssl_dates=False, force_null_org_content=False, show_packages_only=False):
        """Prepare a sync of one channel: record the option flags, initialise
        logging and the DB connection, load the channel row, and collect the
        repo URLs to process (either the explicit ``url`` list or the sources
        attached to the channel in the DB, minus ``excluded_urls``)."""
        # Set when imported content requires repodata regeneration.
        self.regen = False
        self.fail = fail
        self.filters = filters or []
        self.no_packages = no_packages
        self.no_errata = no_errata
        self.sync_kickstart = sync_kickstart
        self.force_all_errata = force_all_errata
        self.force_kickstart = force_kickstart
        self.latest = latest
        self.metadata_only = metadata_only
        self.ks_tree_type = 'externally-managed'
        self.ks_install_type = None
        self.show_packages_only = show_packages_only
        initCFG('server.satellite')
        rhnSQL.initDB()
        # setup logging: one log file per channel under the reposync log dir
        log_filename = channel_label + '.log'
        log_path = default_log_location + log_dir + '/' + log_filename
        if log_level is None:
            log_level = 0
        CFG.set('DEBUG', log_level)
        rhnLog.initLOG(log_path, log_level)
        # os.fchown isn't in 2.4 :/  (hand the log file to the web server group)
        if isSUSE():
            os.system("chgrp www " + log_path)
        else:
            os.system("chgrp apache " + log_path)
        log2disk(0, "Command: %s" % str(sys.argv))
        log2disk(0, "Sync of channel started.")
        self.channel_label = channel_label
        self.channel = self.load_channel()
        if not self.channel:
            # NOTE(review): this only logs - execution continues and the
            # subscript below would fail on a missing channel; confirm that
            # load_channel() aborts on its own in that case.
            log(0, "Channel %s does not exist." % channel_label)
        if not self.channel['org_id'] or force_null_org_content:
            self.org_id = None
        else:
            self.org_id = int(self.channel['org_id'])
        if not url:
            # No explicit URLs given: pull the sources attached in the DB.
            # TODO:need to look at user security across orgs
            h = rhnSQL.prepare("""select s.id, s.source_url, s.label as repo_label, cst.label as repo_type_label
                                  from rhnContentSource s,
                                       rhnChannelContentSource cs,
                                       rhnContentSourceType cst
                                 where s.id = cs.source_id
                                   and cst.id = s.type_id
                                   and cs.channel_id = :channel_id""")
            h.execute(channel_id=int(self.channel['id']))
            source_data = h.fetchall_dict()
            self.urls = []
            if excluded_urls is None:
                excluded_urls = []
            if source_data:
                for row in source_data:
                    if row['source_url'] not in excluded_urls:
                        # Override repo type DB value using parameter
                        if repo_type:
                            repo_type_label = repo_type
                        else:
                            repo_type_label = row['repo_type_label']
                        self.urls.append((row['id'], row['source_url'], repo_type_label, row['repo_label']))
        else:
            if repo_type:
                repo_type_label = repo_type
            else:
                repo_type_label = 'yum'
            # Explicit URLs carry no DB id or repo label.
            self.urls = [(None, u, repo_type_label, None) for u in url]
        if not self.urls:
            log2(0, 0, "Channel %s has no URL associated" % channel_label, stream=sys.stderr)
        self.strict = strict
        self.all_packages = set()
        self.all_errata = set()
        self.check_ssl_dates = check_ssl_dates
        # Init cache for computed checksums to not compute it on each reposync run again
        self.checksum_cache = rhnCache.get(checksum_cache_filename)
        if self.checksum_cache is None:
            self.checksum_cache = {}
        self.import_batch_size = default_import_batch_size
def set_import_batch_size(self, batch_size):
self.import_batch_size = int(batch_size)
def set_urls_prefix(self, prefix):
"""If there are relative urls in DB, set their real location in runtime"""
for index, url in enumerate(self.urls):
# Make list, add prefix, make tuple and save
url = list(url)
url[1] = "%s%s" % (prefix, url[1])
url = tuple(url)
self.urls[index] = url
    def sync(self, update_repodata=True):
        """Trigger a reposync.

        Iterates over every configured repository URL, loads the matching
        plugin, resolves SSL client material from the DB, and imports groups,
        modules, packages, errata and (optionally) kickstart trees.  In strict
        mode, packages/errata not seen in any repo are unlinked afterwards.

        :param update_repodata: clear the plugin's metadata cache first
        :returns: (elapsed_time, sync_error) where sync_error is 0 on success,
                  -1 on a repository-level error, or the number of failed
                  packages when only individual packages failed
        """
        failed_packages = 0
        sync_error = 0
        start_time = datetime.now()
        for (repo_id, url, repo_type, repo_label) in self.urls:
            log(0, '')
            log(0, "  Processing repository with URL: %s" % url)
            if self.metadata_only:
                log(0, '  * WARNING: processing RPM metadata only.')
            plugin = None
            # pylint: disable=W0703
            try:
                if '://' not in url:
                    raise Exception("Unknown protocol in repo URL: %s" % url)
                # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
                if url.startswith("uln://"):
                    repo_type = "uln"
                repo_plugin = self.load_plugin(repo_type)
                if repo_label:
                    repo_name = repo_label
                else:
                    # use modified relative_url as name of repo plugin, because
                    # it used as name of cache directory as well
                    relative_url = '_'.join(url.split('://')[1].split('/')[1:])
                    repo_name = relative_url.replace("?", "_").replace("&", "_").replace("=", "_")
                (ca_cert_file, client_cert_file, client_key_file) = (None, None, None)
                if repo_id is not None:
                    # DB-backed repos may carry SSL CA/client cert/key crypto keys
                    keys = rhnSQL.fetchall_dict("""
                        select k1.description as ca_cert_name, k1.key as ca_cert, k1.org_id as ca_cert_org,
                               k2.description as client_cert_name, k2.key as client_cert, k2.org_id as client_cert_org,
                               k3.description as client_key_name, k3.key as client_key, k3.org_id as client_key_org
                        from rhncontentsource cs inner join
                             rhncontentsourcessl csssl on cs.id = csssl.content_source_id inner join
                             rhncryptokey k1 on csssl.ssl_ca_cert_id = k1.id left outer join
                             rhncryptokey k2 on csssl.ssl_client_cert_id = k2.id left outer join
                             rhncryptokey k3 on csssl.ssl_client_key_id = k3.id
                        where cs.id = :repo_id
                        """, repo_id=int(repo_id))
                    if keys:
                        ssl_set = get_single_ssl_set(keys, check_dates=self.check_ssl_dates)
                        if ssl_set:
                            (ca_cert_file, client_cert_file, client_key_file) = write_ssl_set_cache(
                                (ssl_set['ca_cert_name'], ssl_set['ca_cert'], ssl_set['ca_cert_org']),
                                (ssl_set['client_cert_name'], ssl_set['client_cert'], ssl_set['client_cert_org']),
                                (ssl_set['client_key_name'], ssl_set['client_key'], ssl_set['client_key_org']))
                        else:
                            raise ValueError("No valid SSL certificates were found for repository.")
                plugin = repo_plugin(url, repo_name,
                                     org=str(self.org_id or ''),
                                     channel_label=self.channel_label,
                                     ca_cert_file=ca_cert_file,
                                     client_cert_file=client_cert_file,
                                     client_key_file=client_key_file)
                if self.show_packages_only:
                    self.show_packages(plugin, repo_id)
                else:
                    if update_repodata:
                        plugin.clear_cache()
                    if not self.no_packages:
                        self.import_groups(plugin)
                        if repo_type == "yum":
                            self.import_modules(plugin)
                        ret = self.import_packages(plugin, repo_id, url)
                        failed_packages += ret
                    if not self.no_errata:
                        self.import_updates(plugin)
                    # only for repos obtained from the DB
                    if self.sync_kickstart and repo_label:
                        try:
                            self.import_kickstart(plugin, repo_label)
                        except:
                            # kickstart import commits incrementally; undo partial work
                            rhnSQL.rollback()
                            raise
            except rhnSQL.SQLError:
                raise
            except Exception:
                e = sys.exc_info()[1]
                log2(0, 0, "ERROR: %s" % e, stream=sys.stderr)
                log2disk(0, "ERROR: %s" % e)
                # pylint: disable=W0104
                sync_error = -1
        # In strict mode unlink all packages from channel which are not synced from current repositories
        if self.strict and sync_error == 0:
            if not self.no_packages:
                channel_packages = rhnSQL.fetchall_dict("""
                    select p.id, ct.label as checksum_type, c.checksum
                    from rhnChannelPackage cp,
                         rhnPackage p,
                         rhnChecksumType ct,
                         rhnChecksum c
                    where cp.channel_id = :channel_id
                      and cp.package_id = p.id
                      and p.checksum_id = c.id
                      and c.checksum_type_id = ct.id
                    """, channel_id=int(self.channel['id'])) or []
                for package in channel_packages:
                    if (package['checksum_type'], package['checksum']) not in self.all_packages:
                        self.disassociate_package(package['checksum_type'], package['checksum'])
                        self.regen = True
            # For custom channels unlink also errata
            if not self.no_errata and self.channel['org_id']:
                channel_errata = self.list_errata()
                for erratum in channel_errata:
                    if erratum not in self.all_errata:
                        self.disassociate_erratum(erratum)
                        self.regen = True
        # Update cache with package checksums
        rhnCache.set(checksum_cache_filename, self.checksum_cache)
        if self.regen:
            # schedule repodata / errata cache regeneration in taskomatic
            taskomatic.add_to_repodata_queue_for_channel_package_subscription(
                [self.channel_label], [], "server.app.yumreposync")
            taskomatic.add_to_erratacache_queue(self.channel_label)
        self.update_date()
        rhnSQL.commit()
        # update permissions
        fileutils.createPath(os.path.join(CFG.MOUNT_POINT, 'rhn'))  # if the directory exists update ownership only
        for root, dirs, files in os.walk(os.path.join(CFG.MOUNT_POINT, 'rhn')):
            for d in dirs:
                fileutils.setPermsPath(os.path.join(root, d), group='apache')
            for f in files:
                fileutils.setPermsPath(os.path.join(root, f), group='apache')
        elapsed_time = datetime.now() - start_time
        log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
        # if there is no global problems, but some packages weren't synced
        if sync_error == 0 and failed_packages > 0:
            sync_error = failed_packages
        return elapsed_time, sync_error
def set_ks_tree_type(self, tree_type='externally-managed'):
self.ks_tree_type = tree_type
def set_ks_install_type(self, install_type='generic_rpm'):
self.ks_install_type = install_type
    def update_date(self):
        """ Updates the last sync time"""
        # Stamp rhnChannel.LAST_SYNCED with the DB server's clock so repeated
        # syncs of this channel can be tracked; committed later by the caller.
        h = rhnSQL.prepare("""update rhnChannel set LAST_SYNCED = current_timestamp
                              where label = :channel""")
        h.execute(channel=self.channel['label'])
@staticmethod
def load_plugin(repo_type):
name = repo_type + "_src"
mod = __import__('spacewalk.satellite_tools.repo_plugins', globals(), locals(), [name])
submod = getattr(mod, name)
return getattr(submod, "ContentSource")
def import_updates(self, plug):
notices = plug.get_updates()
log(0, '')
log(0, " Errata in repo: %s." % len(notices))
if notices:
self.upload_updates(notices)
def copy_metadata_file(self, filename, comps_type, relative_dir):
old_checksum = None
basename = os.path.basename(filename)
log(0, '')
log(0, " Importing %s file %s." % (comps_type, basename))
relativedir = os.path.join(relative_dir, self.channel_label)
absdir = os.path.join(CFG.MOUNT_POINT, relativedir)
if not os.path.exists(absdir):
os.makedirs(absdir)
relativepath = os.path.join(relativedir, basename)
abspath = os.path.join(absdir, basename)
for suffix in ['.gz', '.bz', '.xz']:
if basename.endswith(suffix):
abspath = abspath.rstrip(suffix)
relativepath = relativepath.rstrip(suffix)
h = rhnSQL.prepare("""select relative_filename
from rhnChannelComps
where channel_id = :cid
and comps_type_id = (select id from rhnCompsType where label = :ctype)""")
if h.execute(cid=self.channel['id'], ctype=comps_type):
old_checksum = getFileChecksum('sha256', os.path.join(CFG.MOUNT_POINT, h.fetchone()[0]))
src = fileutils.decompress_open(filename)
dst = open(abspath, "w")
shutil.copyfileobj(src, dst)
dst.close()
src.close()
if old_checksum and old_checksum != getFileChecksum('sha256', abspath):
self.regen = True
# update or insert
hu = rhnSQL.prepare("""update rhnChannelComps
set relative_filename = :relpath,
modified = current_timestamp
where channel_id = :cid
and comps_type_id = (select id from rhnCompsType where label = :ctype)""")
hu.execute(cid=self.channel['id'], relpath=relativepath, ctype=comps_type)
hi = rhnSQL.prepare("""insert into rhnChannelComps
(id, channel_id, relative_filename, comps_type_id)
(select sequence_nextval('rhn_channelcomps_id_seq'),
:cid,
:relpath,
(select id from rhnCompsType where label = :ctype)
from dual
where not exists (select 1 from rhnChannelComps
where channel_id = :cid
and comps_type_id = (select id from rhnCompsType where label = :ctype)))""")
hi.execute(cid=self.channel['id'], relpath=relativepath, ctype=comps_type)
return abspath
def import_groups(self, plug):
groupsfile = plug.get_groups()
if groupsfile:
abspath = self.copy_metadata_file(groupsfile, 'comps', relative_comps_dir)
plug.groupsfile = abspath
def import_modules(self, plug):
modulesfile = plug.get_modules()
if modulesfile:
self.copy_metadata_file(modulesfile, 'modules', relative_modules_dir)
    def _populate_erratum(self, notice):
        """Build an importLib.Erratum from one updateinfo notice.

        Maps notice fields onto the Erratum structure, merges channels and
        packages from an already-existing erratum with the same advisory name,
        resolves each listed package against this channel in the DB (skipping
        packages with no matching checksum), and collects keywords, bugzilla
        references, CVEs and other references.

        :raises ValueError: when the notice listed packages but none of them
                            made it into the channel (all filtered out)
        """
        advisory = notice['update_id'] + '-' + notice['version']
        existing_errata = self.get_errata(notice['update_id'])
        e = importLib.Erratum()
        e['errata_from'] = notice['from']
        e['advisory'] = advisory
        e['advisory_name'] = notice['update_id']
        e['advisory_rel'] = notice['version']
        e['advisory_type'] = errata_typemap.get(notice['type'], 'Product Enhancement Advisory')
        e['product'] = notice['release'] or 'Unknown'
        e['description'] = notice['description']
        e['synopsis'] = notice['title'] or notice['update_id']
        if notice['type'] == 'security' and 'severity' in notice and notice['severity'].lower() != 'none':
            e['security_impact'] = notice['severity']
        # prefix the synopsis with the severity for security advisories
        if notice['type'] == 'security' and not e['synopsis'].startswith(notice['severity'] + ': '):
            e['synopsis'] = notice['severity'] + ': ' + e['synopsis']
        if 'summary' in notice and not notice['summary'] is None:
            e['topic'] = notice['summary']
        else:
            e['topic'] = ' '
        if 'solution' in notice and not notice['solution'] is None:
            e['solution'] = notice['solution']
        else:
            e['solution'] = ' '
        e['issue_date'] = self._to_db_date(notice['issued'])
        if notice['updated']:
            e['update_date'] = self._to_db_date(notice['updated'])
        else:
            e['update_date'] = self._to_db_date(notice['issued'])
        e['org_id'] = self.org_id
        e['notes'] = ''
        e['channels'] = []
        e['packages'] = []
        e['files'] = []
        if existing_errata:
            # keep channel/package associations of the already-imported erratum
            e['channels'] = existing_errata['channels']
            e['packages'] = existing_errata['packages']
        e['channels'].append({'label': self.channel_label})
        for pkg in notice['pkglist'][0]['packages']:
            param_dict = {
                'name': pkg['name'],
                'version': pkg['version'],
                'release': pkg['release'],
                'arch': pkg['arch'],
                'channel_id': int(self.channel['id']),
            }
            # epoch matching: '0' matches NULL or '0'; empty matches NULL only
            if pkg['epoch'] == '0':
                epochStatement = "(pevr.epoch is NULL or pevr.epoch = '0')"
            elif pkg['epoch'] is None or pkg['epoch'] == '':
                epochStatement = "pevr.epoch is NULL"
            else:
                epochStatement = "pevr.epoch = :epoch"
                param_dict['epoch'] = pkg['epoch']
            if self.org_id:
                param_dict['org_id'] = self.org_id
                orgStatement = "= :org_id"
            else:
                orgStatement = "is NULL"
            h = rhnSQL.prepare("""
                select p.id, pevr.epoch, c.checksum, c.checksum_type
                  from rhnPackage p
                  join rhnPackagename pn on p.name_id = pn.id
                  join rhnpackageevr pevr on p.evr_id = pevr.id
                  join rhnpackagearch pa on p.package_arch_id = pa.id
                  join rhnArchType at on pa.arch_type_id = at.id
                  join rhnChecksumView c on p.checksum_id = c.id
                  join rhnChannelPackage cp on p.id = cp.package_id
                 where pn.name = :name
                   and p.org_id %s
                   and pevr.version = :version
                   and pevr.release = :release
                   and pa.label = :arch
                   and %s
                   and at.label = 'rpm'
                   and cp.channel_id = :channel_id
            """ % (orgStatement, epochStatement))
            h.execute(**param_dict)
            cs = h.fetchone_dict() or None
            if not cs:
                if 'epoch' in param_dict:
                    epoch = str(param_dict['epoch']) + ":"
                else:
                    epoch = ""
                log(2, "No checksum found for %s-%s%s-%s.%s."
                       " Skipping Package" % (param_dict['name'],
                                              epoch,
                                              param_dict['version'],
                                              param_dict['release'],
                                              param_dict['arch']))
                continue
            # replace any existing entry with the same package_id
            newpkgs = []
            for oldpkg in e['packages']:
                if oldpkg['package_id'] != cs['id']:
                    newpkgs.append(oldpkg)
            package = importLib.IncompletePackage().populate(pkg)
            package['epoch'] = cs['epoch']
            package['org_id'] = self.org_id
            package['checksums'] = {cs['checksum_type']: cs['checksum']}
            package['checksum_type'] = cs['checksum_type']
            package['checksum'] = cs['checksum']
            package['package_id'] = cs['id']
            newpkgs.append(package)
            e['packages'] = newpkgs
        # Empty package list in original metadata
        if not e['packages'] and not notice['pkglist'][0]['packages']:
            log(2, "Advisory %s has empty package list." % e['advisory_name'])
        elif not e['packages']:
            raise ValueError("Advisory %s skipped because of empty package list (filtered)." % e['advisory_name'])
        e['keywords'] = []
        if notice['reboot_suggested']:
            kw = importLib.Keyword()
            kw.populate({'keyword': 'reboot_suggested'})
            e['keywords'].append(kw)
        if notice['restart_suggested']:
            kw = importLib.Keyword()
            kw.populate({'keyword': 'restart_suggested'})
            e['keywords'].append(kw)
        e['bugs'] = []
        e['cve'] = []
        if notice['references']:
            bzs = [r for r in notice['references'] if r['type'] == 'bugzilla']
            if bzs:
                tmp = {}
                for bz in bzs:
                    try:
                        bz_id = int(bz['id'])
                    # This can happen in some incorrectly generated updateinfo, let's be smart
                    except ValueError:
                        log(2, "Bugzilla assigned to advisory %s has invalid id: %s, trying to get it from URL..."
                            % (e['advisory_name'], bz['id']))
                        bz_id = int(re.search(r"\d+$", bz['href']).group(0))
                    if bz_id not in tmp:
                        bug = importLib.Bug()
                        bug.populate({'bug_id': bz_id, 'summary': bz['title'], 'href': bz['href']})
                        e['bugs'].append(bug)
                        tmp[bz_id] = None
            cves = [r for r in notice['references'] if r['type'] == 'cve']
            if cves:
                tmp = {}
                for cve in cves:
                    if cve['id'] not in tmp:
                        e['cve'].append(cve['id'])
                        tmp[cve['id']] = None
            others = [r for r in notice['references'] if not r['type'] == 'bugzilla' and not r['type'] == 'cve']
            if others:
                refers_to = ""
                for other in others:
                    if refers_to:
                        refers_to += "\n"
                    refers_to += other['href']
                e['refers_to'] = refers_to
        e['locally_modified'] = None
        return e
    def upload_updates(self, notices):
        """Convert updateinfo notices to errata and import new ones into the DB.

        Notices whose advisory name is already linked to the channel are
        skipped unless force_all_errata is set.  Individual conversion
        failures are logged and skipped (re-raised when self.fail is set).
        """
        batch = []
        channel_advisory_names = self.list_errata()
        for notice in notices:
            notice = self.fix_notice(notice)
            # Save advisory names from all repositories
            self.all_errata.add(notice['update_id'])
            if not self.force_all_errata and notice['update_id'] in channel_advisory_names:
                continue
            # pylint: disable=W0703
            try:
                erratum = self._populate_erratum(notice)
                batch.append(erratum)
            except Exception:
                e = "Skipped %s - %s" % (notice['update_id'], sys.exc_info()[1])
                log2(1, 1, e, stream=sys.stderr)
                if self.fail:
                    raise
        if batch:
            log(0, "  Syncing %s new errata to channel." % len(batch))
            backend = SQLBackend()
            importer = ErrataImport(batch, backend)
            importer.run()
            self.regen = True
        elif notices:
            log(0, "  No new errata to sync.")
    def import_packages(self, plug, source_id, url):
        """Download and import this repository's packages into the channel.

        Phases: (1) list packages through the plugin (applying DB or
        constructor filters), (2) decide per package whether it must be
        downloaded and/or linked based on existing DB/filesystem state,
        (3) download missing RPMs, (4) import them into the DB in batches of
        self.import_batch_size, (5) disassociate replaced NVREA-duplicates and
        link new packages to the channel.

        :returns: number of packages that failed to import
        """
        failed_packages = 0
        if (not self.filters) and source_id:
            # no explicit filters given -> use per-source filters from the DB
            h = rhnSQL.prepare("""
                select flag, filter
                  from rhnContentSourceFilter
                 where source_id = :source_id
                 order by sort_order """)
            h.execute(source_id=source_id)
            filter_data = h.fetchall_dict() or []
            filters = [(row['flag'], [v.strip() for v in row['filter'].split(',') if v.strip()])
                       for row in filter_data]
        else:
            filters = self.filters
        packages = plug.list_packages(filters, self.latest)
        # (checksum_type, checksum) -> True means: unlink from channel unless a
        # matching package gets downloaded later in this run
        to_disassociate = {}
        to_process = []
        num_passed = len(packages)
        log(0, "  Packages in repo: %5d" % plug.num_packages)
        if plug.num_excluded:
            log(0, "  Packages passed filter rules: %5d" % num_passed)
        channel_id = int(self.channel['id'])
        for pack in packages:
            db_pack = rhnPackage.get_info_for_package(
                [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
                channel_id, self.org_id)
            to_download = True
            to_link = True
            # Package exists in DB
            if db_pack:
                # Path in filesystem is defined
                if db_pack['path']:
                    pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
                else:
                    pack.path = ""
                if self.metadata_only or self.match_package_checksum(db_pack['path'], pack.path,
                                                                     pack.checksum_type, pack.checksum):
                    # package is already on disk or not required
                    to_download = False
                    if db_pack['channel_id'] == channel_id:
                        # package is already in the channel
                        to_link = False
                    # just pass data from DB, they will be used in strict channel
                    # linking if there is no new RPM downloaded
                    pack.checksum = db_pack['checksum']
                    pack.checksum_type = db_pack['checksum_type']
                    pack.epoch = db_pack['epoch']
                    self.all_packages.add((pack.checksum_type, pack.checksum))
                elif db_pack['channel_id'] == channel_id:
                    # different package with SAME NVREA
                    # disassociate from channel if it doesn't match package which will be downloaded
                    to_disassociate[(db_pack['checksum_type'], db_pack['checksum'])] = True
            if to_download or to_link:
                to_process.append((pack, to_download, to_link))
        num_to_process = len(to_process)
        if num_to_process == 0:
            log(0, "  No new packages to sync.")
            # If we are just appending, we can exit
            if not self.strict:
                return failed_packages
        else:
            log(0, "  Packages already synced: %5d" % (num_passed - num_to_process))
            log(0, "  Packages to sync: %5d" % num_to_process)
        # downloaded staging files are removed afterwards only for remote repos
        is_non_local_repo = (url.find("file:/") < 0)
        downloader = ThreadedDownloader()
        to_download_count = 0
        for what in to_process:
            pack, to_download, to_link = what
            if to_download:
                target_file = os.path.join(plug.repo.pkgdir, os.path.basename(pack.unique_id.relativepath))
                pack.path = target_file
                params = {}
                checksum_type = pack.checksum_type
                checksum = pack.checksum
                plug.set_download_parameters(params, pack.unique_id.relativepath, target_file,
                                             checksum_type=checksum_type, checksum_value=checksum)
                downloader.add(params)
                to_download_count += 1
        if num_to_process != 0:
            log(0, "  New packages to download: %5d" % to_download_count)
            log2(0, 0, "  Downloading packages:")
        logger = TextLogger(None, to_download_count)
        downloader.set_log_obj(logger)
        downloader.run()
        log2background(0, "Importing packages started.")
        log(0, '')
        log(0, '  Importing packages to DB:')
        progress_bar = ProgressBarLogger("  Importing packages: ", to_download_count)
        # Prepare SQL statements
        h_delete_package_queue = rhnSQL.prepare("""delete from rhnPackageFileDeleteQueue where path = :path""")
        backend = SQLBackend()
        mpm_bin_batch = importLib.Collection()
        mpm_src_batch = importLib.Collection()
        affected_channels = []
        upload_caller = "server.app.uploadPackage"
        import_count = 0
        for (index, what) in enumerate(to_process):
            pack, to_download, to_link = what
            if not to_download:
                continue
            import_count += 1
            stage_path = pack.path
            # pylint: disable=W0703
            try:
                # check if package was downloaded
                if not os.path.exists(stage_path):
                    raise Exception
                pack.load_checksum_from_header()
                if not self.metadata_only:
                    rel_package_path = rhnPackageUpload.relative_path_from_header(pack.a_pkg.header, self.org_id,
                                                                                  pack.a_pkg.checksum_type,
                                                                                  pack.a_pkg.checksum)
                else:
                    rel_package_path = None
                if rel_package_path:
                    # Save uploaded package to cache with repository checksum type
                    self.checksum_cache[rel_package_path] = {pack.checksum_type: pack.checksum}
                    # First write the package to the filesystem to final location
                    # pylint: disable=W0703
                    try:
                        importLib.move_package(pack.a_pkg.payload_stream.name, basedir=CFG.MOUNT_POINT,
                                               relpath=rel_package_path,
                                               checksum_type=pack.a_pkg.checksum_type,
                                               checksum=pack.a_pkg.checksum, force=1)
                    except OSError:
                        e = sys.exc_info()[1]
                        raise_with_tb(rhnFault(50, "Package upload failed: %s" % e), sys.exc_info()[2])
                    except importLib.FileConflictError:
                        raise_with_tb(rhnFault(50, "File already exists"), sys.exc_info()[2])
                    except Exception:
                        raise_with_tb(rhnFault(50, "File error"), sys.exc_info()[2])
                    # Remove any pending scheduled file deletion for this package
                    h_delete_package_queue.execute(path=rel_package_path)
                pkg = mpmSource.create_package(pack.a_pkg.header, size=pack.a_pkg.payload_size,
                                               checksum_type=pack.a_pkg.checksum_type, checksum=pack.a_pkg.checksum,
                                               relpath=rel_package_path, org_id=self.org_id,
                                               header_start=pack.a_pkg.header_start,
                                               header_end=pack.a_pkg.header_end, channels=[])
                if pack.a_pkg.header.is_source:
                    mpm_src_batch.append(pkg)
                else:
                    mpm_bin_batch.append(pkg)
                # we do not want to keep a whole 'a_pkg' object for every package in memory,
                # because we need only checksum. see BZ 1397417
                pack.checksum = pack.a_pkg.checksum
                pack.checksum_type = pack.a_pkg.checksum_type
                pack.epoch = pack.a_pkg.header['epoch']
                pack.a_pkg = None
                self.all_packages.add((pack.checksum_type, pack.checksum))
                # Downloaded pkg checksum matches with pkg already in channel, no need to disassociate from channel
                if (pack.checksum_type, pack.checksum) in to_disassociate:
                    to_disassociate[(pack.checksum_type, pack.checksum)] = False
                    # Set to_link to False, no need to link again
                    to_process[index] = (pack, True, False)
                # importing packages by batch or if the current packages is the last
                if mpm_bin_batch and (import_count == to_download_count
                                      or len(mpm_bin_batch) % self.import_batch_size == 0):
                    importer = packageImport.PackageImport(mpm_bin_batch, backend, caller=upload_caller)
                    importer.setUploadForce(1)
                    importer.run()
                    rhnSQL.commit()
                    del importer.batch
                    affected_channels.extend(importer.affected_channels)
                    del mpm_bin_batch
                    mpm_bin_batch = importLib.Collection()
                if mpm_src_batch and (import_count == to_download_count
                                      or len(mpm_src_batch) % self.import_batch_size == 0):
                    src_importer = packageImport.SourcePackageImport(mpm_src_batch, backend, caller=upload_caller)
                    src_importer.setUploadForce(1)
                    src_importer.run()
                    rhnSQL.commit()
                    del mpm_src_batch
                    mpm_src_batch = importLib.Collection()
                progress_bar.log(True, None)
            except KeyboardInterrupt:
                raise
            except rhnSQL.SQLError:
                raise
            except Exception:
                failed_packages += 1
                e = str(sys.exc_info()[1])
                if e:
                    log2(0, 1, e, stream=sys.stderr)
                if self.fail:
                    raise
                to_process[index] = (pack, False, False)
                progress_bar.log(False, None)
            finally:
                # always drop the staging copy for remote repos
                if is_non_local_repo and stage_path and os.path.exists(stage_path):
                    os.remove(stage_path)
        if affected_channels:
            errataCache.schedule_errata_cache_update(affected_channels)
        log2background(0, "Importing packages finished.")
        # Disassociate packages
        for (checksum_type, checksum) in to_disassociate:
            if to_disassociate[(checksum_type, checksum)]:
                self.disassociate_package(checksum_type, checksum)
        # Do not re-link if nothing was marked to link
        if any([to_link for (pack, to_download, to_link) in to_process]):
            log(0, '')
            log(0, "  Linking packages to the channel.")
            # Packages to append to channel
            import_batch = [self.associate_package(pack) for (pack, to_download, to_link) in to_process if to_link]
            backend = SQLBackend()
            caller = "server.app.yumreposync"
            importer = ChannelPackageSubscription(import_batch,
                                                  backend, caller=caller, repogen=False)
            importer.run()
            backend.commit()
            self.regen = True
        return failed_packages
    def show_packages(self, plug, source_id):
        """Dry-run listing: print each repo package with its sync status.

        '+' marks packages that would be downloaded on the next sync, '.'
        marks packages already present on the filesystem.  Nothing is
        downloaded or written to the DB.
        """
        if (not self.filters) and source_id:
            h = rhnSQL.prepare("""
                select flag, filter
                  from rhnContentSourceFilter
                 where source_id = :source_id
                 order by sort_order """)
            h.execute(source_id=source_id)
            filter_data = h.fetchall_dict() or []
            filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                       for row in filter_data]
        else:
            filters = self.filters
        packages = plug.raw_list_packages(filters)
        num_passed = len(packages)
        log(0, "  Packages in repo: %5d" % plug.num_packages)
        if plug.num_excluded:
            log(0, "  Packages passed filter rules: %5d" % num_passed)
        log(0, "  Package marked with '+' will be downloaded next channel synchronization")
        log(0, "  Package marked with '.' is already presented on filesystem")
        channel_id = int(self.channel['id'])
        for pack in packages:
            db_pack = rhnPackage.get_info_for_package(
                [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
                channel_id, self.org_id)
            pack_status = " + "  # need to be downloaded by default
            pack_full_name = "%-60s\t" % (pack.name + "-" + pack.version + "-" + pack.release + "." +
                                          pack.arch + ".rpm")
            pack_size = "%11d bytes\t" % pack.packagesize
            # sha512 digests are longer, give them a wider column
            if pack.checksum_type == 'sha512':
                pack_hash_info = "%-140s" % (pack.checksum_type + ' ' + pack.checksum)
            else:
                pack_hash_info = "%-80s " % (pack.checksum_type + ' ' + pack.checksum)
            # Package exists in DB
            if db_pack:
                # Path in filesystem is defined
                if db_pack['path']:
                    pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
                else:
                    pack.path = ""
                if self.match_package_checksum(db_pack['path'], pack.path, pack.checksum_type, pack.checksum):
                    # package is already on disk
                    pack_status = ' . '
            log(0, "  " + pack_status + pack_full_name + pack_size + pack_hash_info)
def match_package_checksum(self, relpath, abspath, checksum_type, checksum):
if os.path.exists(abspath):
if relpath not in self.checksum_cache:
self.checksum_cache[relpath] = {}
cached_checksums = self.checksum_cache[relpath]
if checksum_type not in cached_checksums:
checksum_disk = getFileChecksum(checksum_type, filename=abspath)
cached_checksums[checksum_type] = checksum_disk
else:
checksum_disk = cached_checksums[checksum_type]
if checksum_disk == checksum:
return 1
elif relpath in self.checksum_cache:
# Remove path from cache if not exists
del self.checksum_cache[relpath]
return 0
def associate_package(self, pack):
package = {}
package['name'] = pack.name
package['version'] = pack.version
package['release'] = pack.release
package['arch'] = pack.arch
if pack.a_pkg:
package['checksum'] = pack.a_pkg.checksum
package['checksum_type'] = pack.a_pkg.checksum_type
# use epoch from file header because createrepo puts epoch="0" to
# primary.xml even for packages with epoch=''
package['epoch'] = pack.a_pkg.header['epoch']
else:
# RPM not available but package metadata are in DB, reuse these values
package['checksum'] = pack.checksum
package['checksum_type'] = pack.checksum_type
package['epoch'] = pack.epoch
package['channels'] = [{'label': self.channel_label,
'id': self.channel['id']}]
package['org_id'] = self.org_id
return importLib.IncompletePackage().populate(package)
    def disassociate_package(self, checksum_type, checksum):
        """Unlink the package identified by its checksum from this channel.

        Deletes the rhnChannelPackage row(s); the package itself remains in
        the DB.  Not committed here -- the caller commits.
        """
        log(3, "Disassociating package with checksum: %s (%s)" % (checksum, checksum_type))
        h = rhnSQL.prepare("""
            delete from rhnChannelPackage cp
             where cp.channel_id = :channel_id
               and cp.package_id in (select p.id
                                       from rhnPackage p
                                       join rhnChecksumView c
                                         on p.checksum_id = c.id
                                      where c.checksum = :checksum
                                        and c.checksum_type = :checksum_type
                                    )
        """)
        h.execute(channel_id=self.channel['id'],
                  checksum_type=checksum_type, checksum=checksum)
    def disassociate_erratum(self, advisory_name):
        """Unlink the erratum with the given advisory name from this channel.

        Deletes the rhnChannelErrata row(s); the erratum itself remains in
        the DB.  Not committed here -- the caller commits.
        """
        log(3, "Disassociating erratum: %s" % advisory_name)
        h = rhnSQL.prepare("""
            delete from rhnChannelErrata ce
             where ce.channel_id = :channel_id
               and ce.errata_id in (select e.id
                                      from rhnErrata e
                                     where e.advisory_name = :advisory_name
                                   )
        """)
        h.execute(channel_id=self.channel['id'], advisory_name=advisory_name)
    def load_channel(self):
        """Fetch channel metadata for self.channel_label via rhnChannel.channel_info."""
        return rhnChannel.channel_info(self.channel_label)
@staticmethod
def _to_db_date(date):
ret = ""
if date.isdigit():
ret = datetime.fromtimestamp(float(date)).isoformat(' ')
else:
# we expect to get ISO formated date
ret = date
return ret[:19] # return 1st 19 letters of date, therefore preventing ORA-01830 caused by fractions of seconds
@staticmethod
def fix_notice(notice):
# pylint: disable=W0212
if "." in notice['version']:
new_version = 0
for n in notice['version'].split('.'):
new_version = (new_version + int(n)) * 100
notice['version'] = str(new_version / 100)
return notice
    def get_errata(self, update_id):
        """Look up an existing erratum by advisory name within self.org_id.

        :returns: None when not found, otherwise a dict with the erratum's
                  id/advisory columns plus 'channels' (list of {'label': ...})
                  and 'packages' (list of IncompletePackage).
        """
        h = rhnSQL.prepare("""select
            e.id, e.advisory, e.advisory_name, e.advisory_rel
            from rhnerrata e
            where e.advisory_name = :name
              and (e.org_id = :org_id or (e.org_id is null and :org_id is null))
        """)
        h.execute(name=update_id, org_id=self.org_id)
        ret = h.fetchone_dict() or None
        if not ret:
            return None
        # channels the erratum is already linked to
        h = rhnSQL.prepare("""select distinct c.label
                                from rhnchannelerrata ce
                                join rhnchannel c on c.id = ce.channel_id
                               where ce.errata_id = :eid
        """)
        h.execute(eid=ret['id'])
        channels = h.fetchall_dict() or []
        ret['channels'] = channels
        ret['packages'] = []
        h = rhnSQL.prepare("""
            select p.id as package_id,
                   pn.name,
                   pevr.epoch,
                   pevr.version,
                   pevr.release,
                   pa.label as arch,
                   p.org_id,
                   cv.checksum,
                   cv.checksum_type
              from rhnerratapackage ep
              join rhnpackage p on p.id = ep.package_id
              join rhnpackagename pn on pn.id = p.name_id
              join rhnpackageevr pevr on pevr.id = p.evr_id
              join rhnpackagearch pa on pa.id = p.package_arch_id
              join rhnchecksumview cv on cv.id = p.checksum_id
             where ep.errata_id = :eid
        """)
        h.execute(eid=ret['id'])
        packages = h.fetchall_dict() or []
        for pkg in packages:
            ipackage = importLib.IncompletePackage().populate(pkg)
            # epoch may be NULL in the DB; normalize missing values to ''
            ipackage['epoch'] = pkg.get('epoch', '')
            ipackage['checksums'] = {ipackage['checksum_type']: ipackage['checksum']}
            ret['packages'].append(ipackage)
        return ret
    def list_errata(self):
        """List advisory names present in channel"""
        # Returns a plain list of advisory_name strings for self.channel.
        h = rhnSQL.prepare("""select e.advisory_name
            from rhnChannelErrata ce
            inner join rhnErrata e on e.id = ce.errata_id
            where ce.channel_id = :cid
        """)
        h.execute(cid=self.channel['id'])
        advisories = [row['advisory_name'] for row in h.fetchall_dict() or []]
        return advisories
    def import_kickstart(self, plug, repo_label):
        """Sync the repository's kickstartable tree (if any) into this channel.

        Detects a treeinfo/.treeinfo file through the plugin; when found,
        registers (or updates) a rhnKickstartableTree row, crawls the repo's
        directory listing plus any addon repos' primary.xml for files, and
        downloads everything missing (or everything, with force_kickstart)
        under CFG.MOUNT_POINT.  Per-file rows are kept in rhnKSTreeFile.
        Commits at the end.
        """
        log(0, '')
        log(0, '  Importing kickstarts.')
        ks_path = 'rhn/kickstart/'
        # sanitize the repo label into a tree label; pad short labels
        ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
        if len(ks_tree_label) < 4:
            ks_tree_label += "_repo"
        # construct ks_path and check we already have this KS tree synced
        id_request = """
                select id
                from rhnKickstartableTree
                where channel_id = :channel_id and label = :label
                """
        if self.org_id:
            ks_path += str(self.org_id) + '/' + ks_tree_label
            # Trees synced from external repositories are expected to have full path it database
            db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
            row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id", channel_id=self.channel['id'],
                                       label=ks_tree_label, org_id=self.org_id)
        else:
            ks_path += ks_tree_label
            db_path = ks_path
            row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL", channel_id=self.channel['id'],
                                       label=ks_tree_label)
        treeinfo_path = ['treeinfo', '.treeinfo']
        treeinfo_parser = None
        for path in treeinfo_path:
            log(1, "Trying " + path)
            treeinfo = plug.get_file(path, os.path.join(plug.repo.basecachedir, plug.name))
            if treeinfo:
                try:
                    treeinfo_parser = TreeInfoParser(treeinfo)
                    break
                except TreeInfoError:
                    pass
        if not treeinfo_parser:
            log(0, "  Kickstartable tree not detected (no valid treeinfo file)")
            return
        if self.ks_install_type is None:
            # derive the install type from the treeinfo family
            family = treeinfo_parser.get_family()
            if family == 'Fedora':
                self.ks_install_type = 'fedora18'
            elif family == 'CentOS':
                self.ks_install_type = 'rhel_' + treeinfo_parser.get_major_version()
            else:
                self.ks_install_type = 'generic_rpm'
        fileutils.createPath(os.path.join(CFG.MOUNT_POINT, ks_path))
        # Make sure images are included
        to_download = set()
        for repo_path in treeinfo_parser.get_images():
            local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
            # TODO: better check
            if not os.path.exists(local_path) or self.force_kickstart:
                to_download.add(repo_path)
        if row:
            log(0, "  Kickstartable tree %s already synced. Updating content..." % ks_tree_label)
            ks_id = row['id']
        else:
            row = rhnSQL.fetchone_dict("""
                select sequence_nextval('rhn_kstree_id_seq') as id from dual
                """)
            ks_id = row['id']
            rhnSQL.execute("""
                insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id, kstree_type,
                                                  install_type, last_modified, created, modified)
                values (:id, :org_id, :label, :base_path, :channel_id,
                        ( select id from rhnKSTreeType where label = :ks_tree_type),
                        ( select id from rhnKSInstallType where label = :ks_install_type),
                        current_timestamp, current_timestamp, current_timestamp)""", id=ks_id,
                           org_id=self.org_id, label=ks_tree_label, base_path=db_path,
                           channel_id=self.channel['id'], ks_tree_type=self.ks_tree_type,
                           ks_install_type=self.ks_install_type)
            log(0, "  Added new kickstartable tree %s. Downloading content..." % ks_tree_label)
        insert_h = rhnSQL.prepare("""
                insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size, last_modified, created,
                modified) values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
        """)
        delete_h = rhnSQL.prepare("""
                delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
        """)
        # Downloading/Updating content of KS Tree
        # breadth-first crawl of the repo's HTML directory listings
        dirs_queue = ['']
        log(0, "  Gathering all files in kickstart repository...")
        while dirs_queue:
            cur_dir_name = dirs_queue.pop(0)
            cur_dir_html = plug.get_file(cur_dir_name)
            if cur_dir_html is None:
                continue
            parser = KSDirParser(cur_dir_html)
            for ks_file in parser.get_content():
                repo_path = cur_dir_name + ks_file['name']
                if ks_file['type'] == 'DIR':
                    dirs_queue.append(repo_path)
                    continue
                if not os.path.exists(os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)) or self.force_kickstart:
                    to_download.add(repo_path)
        for addon_dir in treeinfo_parser.get_addons():
            repomd_url = str(addon_dir + '/repodata/repomd.xml')
            repomd_file = plug.get_file(repomd_url, os.path.join(plug.repo.basecachedir, plug.name))
            if repomd_file:
                # find location of primary.xml
                repomd_xml = minidom.parse(repomd_file)
                for i in repomd_xml.getElementsByTagName('data'):
                    if i.attributes['type'].value == 'primary':
                        primary_url = str(addon_dir + '/' +
                                          i.getElementsByTagName('location')[0].attributes['href'].value)
                        break
                primary_zip = plug.get_file(primary_url, os.path.join(plug.repo.basecachedir, plug.name))
                if primary_zip:
                    primary_xml = gzip.open(primary_zip, 'r')
                    xmldoc = minidom.parse(primary_xml)
                    for i in xmldoc.getElementsByTagName('package'):
                        package = i.getElementsByTagName('location')[0].attributes['href'].value
                        repo_path = str(os.path.normpath(os.path.join(addon_dir, package)))
                        if not os.path.exists(os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)) \
                                or self.force_kickstart:
                            to_download.add(repo_path)
        if to_download:
            log(0, "  Downloading %d kickstart files." % len(to_download))
            progress_bar = ProgressBarLogger("  Downloading kickstarts:", len(to_download))
            downloader = ThreadedDownloader(force=self.force_kickstart)
            for item in to_download:
                params = {}
                plug.set_download_parameters(params, item, os.path.join(CFG.MOUNT_POINT, ks_path, item))
                downloader.add(params)
            downloader.set_log_obj(progress_bar)
            downloader.run()
            log2background(0, "Download finished.")
            for item in to_download:
                st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
                # update entity about current file in a database
                delete_h.execute(id=ks_id, path=item)
                insert_h.execute(id=ks_id, path=item,
                                 checksum=getFileChecksum('sha256', os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                                 st_size=st.st_size, st_time=st.st_mtime)
        else:
            log(0, "No new kickstart files to download.")
        rhnSQL.commit()
|
renner/spacewalk
|
backend/satellite_tools/reposync.py
|
Python
|
gpl-2.0
| 62,750 | 0.002343 |
# -*- coding:utf_8 -*-
import urllib2
from bs4 import BeautifulSoup
from Bangumi import Bangumi
class Youku(Bangumi):
    link = "http://comic.youku.com"
    name = u'优酷'

    def getBangumi(self):
        """Scrape the Youku comic schedule and register every listed show.

        For each weekday tab on the portal page, extract each entry's
        title, update status and link, and hand them to ``self.add``.
        """
        # Download the raw schedule HTML from the Youku comic portal.
        page = urllib2.urlopen(urllib2.Request(self.link)).read()
        # Parse with the builtin parser.
        # TODO: Change the parser to lxml for better performance
        soup = BeautifulSoup(page, "html.parser")
        # One tab per weekday; the tab numbered 7 holds weekday 0's list.
        for weekday in range(7):
            tab_id = "tab_100895_7" if weekday == 0 else "tab_100895_{}".format(weekday)
            day_tab = soup.find(id=tab_id)
            for entry in day_tab.find_all("div", class_="v-meta va"):
                status = entry.find("span", class_="v-status").string
                title_node = entry.find(class_="v-meta-title")
                anchor = title_node.find("a")
                self.add(weekday, anchor.string, status, anchor['href'])
|
MrWhoami/WhoamiBangumi
|
Youku.py
|
Python
|
mit
| 1,140 | 0.00088 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-12 16:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 1.9) schema migration: re-declares the
    # ``description`` column of the ``gender`` model as a CharField
    # capped at 15 characters.

    # Must run after the migration that introduced Gender.description.
    dependencies = [
        ('tocayoapp', '0007_gender_description'),
    ]
    operations = [
        migrations.AlterField(
            model_name='gender',
            name='description',
            field=models.CharField(max_length=15),
        ),
    ]
|
philpot/tocayo
|
tocayoproj/tocayoapp/migrations/0008_auto_20151212_1607.py
|
Python
|
apache-2.0
| 455 | 0 |
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_ARB_texture_swizzle'
def _f( function ):
    # Bind *function* as a GL entry point for this extension; the platform
    # loader resolves the symbol and attaches the standard error checker.
    return _p.createFunction( function,_p.PLATFORM.GL,'GL_ARB_texture_swizzle',error_checker=_errors._error_checker)
GL_TEXTURE_SWIZZLE_A=_C('GL_TEXTURE_SWIZZLE_A',0x8E45)
GL_TEXTURE_SWIZZLE_B=_C('GL_TEXTURE_SWIZZLE_B',0x8E44)
GL_TEXTURE_SWIZZLE_G=_C('GL_TEXTURE_SWIZZLE_G',0x8E43)
GL_TEXTURE_SWIZZLE_R=_C('GL_TEXTURE_SWIZZLE_R',0x8E42)
GL_TEXTURE_SWIZZLE_RGBA=_C('GL_TEXTURE_SWIZZLE_RGBA',0x8E46)
|
stack-of-tasks/rbdlpy
|
tutorial/lib/python2.7/site-packages/OpenGL/raw/GL/ARB/texture_swizzle.py
|
Python
|
lgpl-3.0
| 781 | 0.025608 |
"""This module provides REST services for Scenario"""
import werkzeug.exceptions as WEXC
from LmCommon.common.lmconstants import HTTPStatus
from LmWebServer.common.lmconstants import HTTPMethod
from LmWebServer.services.api.v2.base import LmService
from LmWebServer.services.common.access_control import check_user_permission
from LmWebServer.services.cp_tools.lm_format import lm_formatter
# .............................................................................
class ScenarioService(LmService):
    """Read-only web services (count / get / list) for scenarios.

    All responses are formatted for the web API by the ``lm_formatter``
    decorator.  (A commented-out legacy ``GET`` dispatcher was removed;
    see version control history if it is ever needed again.)
    """

    # ................................
    @lm_formatter
    def count_scenarios(
            self, user_id, after_time=None, before_time=None, alt_pred_code=None, date_code=None,
            gcm_code=None, epsg_code=None):
        """Count the scenarios matching the specified criteria.

        Args:
            user_id: user whose scenarios are considered.
            after_time / before_time: optional modification-time bounds.
            alt_pred_code, date_code, gcm_code: optional climate filters.
            epsg_code: optional map-projection filter.

        Returns:
            dict: ``{'count': <int>}``
        """
        scen_count = self.scribe.count_scenarios(
            user_id=user_id, before_time=before_time, after_time=after_time,
            epsg=epsg_code, gcm_code=gcm_code, alt_pred_code=alt_pred_code,
            date_code=date_code)
        return {'count': scen_count}

    # ................................
    @lm_formatter
    def get_scenario(self, user_id, scenario_id):
        """Return one scenario, with its layers filled in.

        Raises:
            werkzeug NotFound: when no scenario has this id.
            werkzeug Forbidden: when *user_id* may not read the scenario.
        """
        scn = self.scribe.get_scenario(int(scenario_id), fill_layers=True)
        if scn is None:
            raise WEXC.NotFound('Scenario {} not found'.format(scenario_id))
        # Guard clause: reject unauthorized users before returning data.
        if not check_user_permission(user_id, scn, HTTPMethod.GET):
            raise WEXC.Forbidden('User {} does not have permission to get scenario {}'.format(
                user_id, scenario_id))
        return scn

    # ................................
    @lm_formatter
    def list_scenarios(
            self, user_id, after_time=None, before_time=None, alt_pred_code=None, date_code=None,
            gcm_code=None, epsg_code=None, limit=100, offset=0):
        """Return a page (``offset``/``limit``) of scenario atoms matching the criteria."""
        scn_atoms = self.scribe.list_scenarios(
            offset, limit, user_id=user_id, before_time=before_time, after_time=after_time,
            epsg=epsg_code, gcm_code=gcm_code, alt_pred_code=alt_pred_code, date_code=date_code)
        return scn_atoms
|
lifemapper/core
|
LmWebServer/flask_app/scenario.py
|
Python
|
gpl-3.0
| 3,342 | 0.00389 |
# coding=utf-8
import logging
from urllib import urlencode
import datetime
import mimetypes
import cgi
from ckan.common import config
from paste.deploy.converters import asbool
import paste.fileapp
import ckan.logic as logic
import ckan.lib.base as base
import ckan.lib.i18n as i18n
import ckan.lib.maintain as maintain
import ckan.lib.navl.dictization_functions as dict_fns
import ckan.lib.helpers as h
import ckan.model as model
import ckan.lib.datapreview as datapreview
import ckan.lib.plugins
import ckan.lib.uploader as uploader
import ckan.plugins as p
import ckan.lib.render
from ckan.common import OrderedDict, _, json, request, c, response
#from home import CACHE_PARAMETERS
from ckan.controllers.package import PackageController
log = logging.getLogger(__name__)
render = base.render
abort = base.abort
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
ValidationError = logic.ValidationError
check_access = logic.check_access
get_action = logic.get_action
tuplize_dict = logic.tuplize_dict
clean_dict = logic.clean_dict
parse_params = logic.parse_params
flatten_to_string_key = logic.flatten_to_string_key
lookup_package_plugin = ckan.lib.plugins.lookup_package_plugin
def _encode_params(params):
    # UTF-8 encode text values; stringify everything else (ints, bools...).
    encoded = []
    for key, value in params:
        if isinstance(value, basestring):
            encoded.append((key, value.encode('utf-8')))
        else:
            encoded.append((key, str(value)))
    return encoded
def url_with_params(url, params):
    # Build "<url>?<encoded query>" from a list of (key, value) pairs.
    query = urlencode(_encode_params(params))
    return u'?'.join([url, query])
def search_url(params, package_type=None):
    # Datasets use the default package search route; every other package
    # type has its own named '<type>_search' route.
    if package_type and package_type != 'dataset':
        base_url = h.url_for('{0}_search'.format(package_type))
    else:
        base_url = h.url_for(controller='package', action='search')
    return url_with_params(base_url, params)
class SchemingPagesController(PackageController):
    """Package controller specialised for ckanext-scheming package types.

    Overrides ``search`` to add per-type facet sets and a WordPress-hosted
    intro text, and renders per-type templates under ``scheming/<type>/``.
    Python 2 only (uses ``except Exc, name`` syntax).
    """

    def search(self):
        """Render the search page for the guessed package type.

        Mirrors ``PackageController.search`` but chooses the facet list per
        package type and fetches page content from WordPress into ``c.wp_page``.
        """
        from ckan.lib.search import SearchError, SearchQueryError
        # Get package type name
        # NOTE(review): _guess_package_type() apparently returns a plural
        # route name; [:-1] strips the trailing 's' -- confirm with caller.
        package_type = self._guess_package_type()[:-1]
        c.package_type = package_type
        # Get page content from Wordpress
        # =========================================
        import ckanext.dadosgovbr.helpers.wordpress as wp
        wp_page_slug = 'scheming_'+package_type+'s'
        # Placeholder object so templates can always read c.wp_page.content.rendered.
        c.wp_page = type('Nothing', (object,), {})
        c.wp_page.content = type('Nothing', (object,), {})
        c.wp_page.content.rendered = "Conteudo da pagina nao encontrado..."
        try:
            c.wp_page = wp.page(wp_page_slug)
        except:
            # NOTE(review): bare except silently keeps the placeholder page
            # on ANY failure (network, parse, ...); consider narrowing.
            pass
        # DEBUG
        # from pprint import pprint
        # pprint(c.concursos)
        # Package type facets (filters)
        # =========================================
        package_type_facets = u'organization groups tags res_format license_id'
        if(package_type == 'inventario'):
            package_type_facets = u'organization situacao_base informacoes_sigilosas_base informacoes_publicas_base atualizacoes_base dados_abertos_base'
        if(package_type == 'concurso'):
            package_type_facets = u'organization datasets_used'
        if(package_type == 'aplicativo'):
            package_type_facets = u'organization groups tags res_format license_id'
        try:
            context = {'model': model, 'user': c.user,
                       'auth_user_obj': c.userobj}
            check_access('site_read', context)
        except NotAuthorized:
            abort(403, _('Not authorized to see this page'))
        # unicode format (decoded from utf8)
        q = c.q = request.params.get('q', u'')
        c.query_error = False
        page = h.get_page_number(request.params)
        limit = int(config.get('ckan.datasets_per_page', 20))
        # most search operations should reset the page counter:
        params_nopage = [(k, v) for k, v in request.params.items()
                         if k != 'page']
        def drill_down_url(alternative_url=None, **by):
            # Template helper: add facet params to the current search URL.
            return h.add_url_param(alternative_url=alternative_url,
                                   controller='package', action='search',
                                   new_params=by)
        c.drill_down_url = drill_down_url
        def remove_field(key, value=None, replace=None):
            # Template helper: drop (or replace) a facet param from the URL.
            return h.remove_url_param(key, value=value, replace=replace,
                                      controller='package', action='search')
        c.remove_field = remove_field
        sort_by = request.params.get('sort', None)
        params_nosort = [(k, v) for k, v in params_nopage if k != 'sort']
        def _sort_by(fields):
            """
            Sort by the given list of fields.
            Each entry in the list is a 2-tuple: (fieldname, sort_order)
            eg - [('metadata_modified', 'desc'), ('name', 'asc')]
            If fields is empty, then the default ordering is used.
            """
            params = params_nosort[:]
            if fields:
                sort_string = ', '.join('%s %s' % f for f in fields)
                params.append(('sort', sort_string))
            return search_url(params, package_type)
        c.sort_by = _sort_by
        if not sort_by:
            c.sort_by_fields = []
        else:
            c.sort_by_fields = [field.split()[0]
                                for field in sort_by.split(',')]
        def pager_url(q=None, page=None):
            # Pagination helper used by h.Page below.
            params = list(params_nopage)
            params.append(('page', page))
            return search_url(params, package_type)
        c.search_url_params = urlencode(_encode_params(params_nopage))
        try:
            c.fields = []
            # c.fields_grouped will contain a dict of params containing
            # a list of values eg {'tags':['tag1', 'tag2']}
            c.fields_grouped = {}
            search_extras = {}
            fq = ''
            # Turn every non-reserved request param into a Solr fq clause;
            # 'ext_*' params are passed through as search extras instead.
            for (param, value) in request.params.items():
                if param not in ['q', 'page', 'sort'] \
                        and len(value) and not param.startswith('_'):
                    if not param.startswith('ext_'):
                        c.fields.append((param, value))
                        fq += ' %s:"%s"' % (param, value)
                        if param not in c.fields_grouped:
                            c.fields_grouped[param] = [value]
                        else:
                            c.fields_grouped[param].append(value)
                    else:
                        search_extras[param] = value
            context = {'model': model, 'session': model.Session,
                       'user': c.user, 'for_view': True,
                       'auth_user_obj': c.userobj}
            if package_type and package_type != 'dataset':
                # Only show datasets of this particular type
                fq += ' +dataset_type:{type}'.format(type=package_type)
            else:
                # Unless changed via config options, don't show non standard
                # dataset types on the default search page
                if not asbool(
                        config.get('ckan.search.show_all_types', 'False')):
                    fq += ' +dataset_type:dataset'
            facets = OrderedDict()
            # NOTE(review): 'tags' appears twice in this literal; the second
            # entry (u'Tags') silently overrides the first.
            default_facet_titles = {
                # Default package
                'organization': _('Organizations'),
                'groups': _('Groups'),
                'tags': _('Tags'),
                'res_format': _('Formats'),
                'license_id': _('Licenses'),
                # Inventário package
                'situacao_base': _(u'Situação da base'),
                'informacoes_sigilosas_base': _(u'Base possui informações sigilosas?'),
                'vocab_sim': _(u'Sim'),
                'vocab_nao': _(u'Não'),
                'informacoes_publicas_base': _(u'Base possui informações públicas?'),
                'informacoes_publicas_base_publico': _(u'Público'),
                'atualizacoes_base': _(u'Período de atualização dos dados'),
                'dados_abertos_base': _(u'Exporta para dados abertos?'),
                # Concurso package
                'datasets_used': _(u'Dados utilizados'),
                'tags': _(u'Tags'),
                'date': _(u'Data de início'),
                'end_date': _(u'Data final'),
                'publico': _(u'Público'),
                'sim': _(u'Sim'),
                'nao': _(u'Não'),
            }
            for facet in config.get(u'search.facets', package_type_facets.split()):
                if facet in default_facet_titles:
                    facets[facet] = default_facet_titles[facet]
                else:
                    facets[facet] = facet
            # Facet titles
            for plugin in p.PluginImplementations(p.IFacets):
                facets = plugin.dataset_facets(facets, package_type)
            c.facet_titles = facets
            data_dict = {
                'q': q,
                'fq': fq.strip(),
                'facet.field': facets.keys(),
                'rows': limit,
                'start': (page - 1) * limit,
                'sort': sort_by,
                'extras': search_extras,
                'include_private': asbool(config.get(
                    'ckan.search.default_include_private', True)),
            }
            query = get_action('package_search')(context, data_dict)
            c.sort_by_selected = query['sort']
            c.page = h.Page(
                collection=query['results'],
                page=page,
                url=pager_url,
                item_count=query['count'],
                items_per_page=limit
            )
            c.search_facets = query['search_facets']
            c.page.items = query['results']
        except SearchQueryError, se:
            # User's search parameters are invalid, in such a way that is not
            # achievable with the web interface, so return a proper error to
            # discourage spiders which are the main cause of this.
            log.info('Dataset search query rejected: %r', se.args)
            abort(400, _('Invalid search query: {error_message}')
                  .format(error_message=str(se)))
        except SearchError, se:
            # May be bad input from the user, but may also be more serious like
            # bad code causing a SOLR syntax error, or a problem connecting to
            # SOLR
            log.error('Dataset search error: %r', se.args)
            c.query_error = True
            c.search_facets = {}
            c.page = h.Page(collection=[])
        # Per-facet display limits, overridable via '_<facet>_limit' params.
        c.search_facets_limits = {}
        for facet in c.search_facets.keys():
            try:
                limit = int(request.params.get('_%s_limit' % facet,
                                               int(config.get('search.facets.default', 10))))
            except ValueError:
                abort(400, _('Parameter "{parameter_name}" is not '
                             'an integer').format(
                                 parameter_name='_%s_limit' % facet))
            c.search_facets_limits[facet] = limit
        self._setup_template_variables(context, {},
                                       package_type=package_type)
        return render('scheming/'+package_type+'/search.html',
                      extra_vars={'dataset_type': package_type})

    def resources(self, id):
        """Render the resources tab for dataset *id* (editors only).

        Aborts 403/404 when the user cannot update the dataset or the
        dataset does not exist.
        """
        context = {'model': model, 'session': model.Session,
                   'user': c.user, 'for_view': True,
                   'auth_user_obj': c.userobj}
        data_dict = {'id': id, 'include_tracking': True}
        try:
            check_access('package_update', context, data_dict)
        except NotFound:
            abort(404, _('Dataset not found'))
        except NotAuthorized:
            abort(403, _('User %r not authorized to edit %s') % (c.user, id))
        # check if package exists
        try:
            c.pkg_dict = get_action('package_show')(context, data_dict)
            c.pkg = context['package']
        except (NotFound, NotAuthorized):
            abort(404, _('Dataset not found'))
        package_type = c.pkg_dict['type'] or 'dataset'
        self._setup_template_variables(context, {'id': id},
                                       package_type=package_type)
        return render('package/resources.html',
                      extra_vars={'dataset_type': package_type})

    def _read_template(self, package_type):
        # Per-type dataset read page under the scheming template tree.
        return 'scheming/'+package_type+'/read.html'
|
dadosgovbr/ckanext-dadosabertos
|
ckanext/dadosgovbr/controllers/scheming.py
|
Python
|
agpl-3.0
| 12,430 | 0.001853 |
import os
import re
import stat
import mimetypes
try:
from io import UnsupportedOperation
except ImportError:
UnsupportedOperation = object()
import cherrypy
from cherrypy._cpcompat import ntob, unquote
from cherrypy.lib import cptools, httputil, file_generator_limited
mimetypes.init()
mimetypes.types_map['.dwg'] = 'image/x-dwg'
mimetypes.types_map['.ico'] = 'image/x-icon'
mimetypes.types_map['.bz2'] = 'application/x-bzip2'
mimetypes.types_map['.gz'] = 'application/x-gzip'
def serve_file(path, content_type=None, disposition=None, name=None,
               debug=False):
    """Set status, headers, and body in order to serve the given path.

    The Content-Type header will be set to the content_type arg, if provided.
    If not provided, the Content-Type will be guessed by the file extension
    of the 'path' argument.

    If disposition is not None, the Content-Disposition header will be set
    to "<disposition>; filename=<name>". If name is None, it will be set
    to the basename of path. If disposition is None, no Content-Disposition
    header will be written.

    Raises ValueError for a relative path and cherrypy.NotFound when the
    path does not exist or is a directory.
    """
    response = cherrypy.serving.response
    # If path is relative, users should fix it by making path absolute.
    # That is, CherryPy should not guess where the application root is.
    # It certainly should *not* use cwd (since CP may be invoked from a
    # variety of paths). If using tools.staticdir, you can make your relative
    # paths become absolute by supplying a value for "tools.staticdir.root".
    if not os.path.isabs(path):
        msg = "'%s' is not an absolute path." % path
        if debug:
            cherrypy.log(msg, 'TOOLS.STATICFILE')
        raise ValueError(msg)
    try:
        st = os.stat(path)
    except OSError:
        if debug:
            cherrypy.log('os.stat(%r) failed' % path, 'TOOLS.STATIC')
        raise cherrypy.NotFound()
    # Check if path is a directory.
    if stat.S_ISDIR(st.st_mode):
        # Let the caller deal with it as they like.
        if debug:
            cherrypy.log('%r is a directory' % path, 'TOOLS.STATIC')
        raise cherrypy.NotFound()
    # Set the Last-Modified response header, so that
    # modified-since validation code can work.
    response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime)
    # NOTE(review): validate_since() may short-circuit the request
    # (conditional GET handling) -- behavior defined in cptools.
    cptools.validate_since()
    if content_type is None:
        # Set content-type based on filename extension
        ext = ""
        i = path.rfind('.')
        if i != -1:
            ext = path[i:].lower()
        content_type = mimetypes.types_map.get(ext, None)
    if content_type is not None:
        response.headers['Content-Type'] = content_type
    if debug:
        cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC')
    cd = None
    if disposition is not None:
        if name is None:
            name = os.path.basename(path)
        cd = '%s; filename="%s"' % (disposition, name)
        response.headers["Content-Disposition"] = cd
    if debug:
        cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')
    # Set Content-Length and use an iterable (file object)
    # this way CP won't load the whole file in memory
    content_length = st.st_size
    fileobj = open(path, 'rb')
    return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
def serve_fileobj(fileobj, content_type=None, disposition=None, name=None,
                  debug=False):
    """Set status, headers, and body in order to serve the given file object.

    The Content-Type header will be set to the content_type arg, if provided.

    If disposition is not None, the Content-Disposition header will be set
    to "<disposition>; filename=<name>". If name is None, 'filename' will
    not be set. If disposition is None, no Content-Disposition header will
    be written.

    CAUTION: If the request contains a 'Range' header, one or more seek()s will
    be performed on the file object. This may cause undesired behavior if
    the file object is not seekable. It could also produce undesired results
    if the caller set the read position of the file object prior to calling
    serve_fileobj(), expecting that the data would be served starting from that
    position.
    """
    response = cherrypy.serving.response
    # Try to learn size/mtime from the underlying OS file descriptor; file-like
    # objects without a real fd (StringIO, sockets) fall back to no
    # Content-Length, which disables range support in _serve_fileobj.
    try:
        st = os.fstat(fileobj.fileno())
    except AttributeError:
        if debug:
            cherrypy.log('os has no fstat attribute', 'TOOLS.STATIC')
        content_length = None
    except UnsupportedOperation:
        content_length = None
    else:
        # Set the Last-Modified response header, so that
        # modified-since validation code can work.
        response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime)
        cptools.validate_since()
        content_length = st.st_size
    if content_type is not None:
        response.headers['Content-Type'] = content_type
    if debug:
        cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC')
    cd = None
    if disposition is not None:
        # Unlike serve_file, no basename fallback: without a name the
        # disposition is sent bare.
        if name is None:
            cd = disposition
        else:
            cd = '%s; filename="%s"' % (disposition, name)
        response.headers["Content-Disposition"] = cd
    if debug:
        cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')
    return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
def _serve_fileobj(fileobj, content_type, content_length, debug=False):
    """Internal. Set response.body to the given file object, perhaps ranged.

    When the client sent a Range header (HTTP/1.1+) and content_length is
    known, responds 206 with either a single range or a
    multipart/byteranges body; otherwise streams the whole file object.
    """
    response = cherrypy.serving.response
    # HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code
    request = cherrypy.serving.request
    if request.protocol >= (1, 1):
        response.headers["Accept-Ranges"] = "bytes"
        r = httputil.get_ranges(request.headers.get('Range'), content_length)
        # An empty list (as opposed to None) means the Range header was
        # present but unsatisfiable: RFC-mandated 416.
        if r == []:
            response.headers['Content-Range'] = "bytes */%s" % content_length
            message = ("Invalid Range (first-byte-pos greater than "
                       "Content-Length)")
            if debug:
                cherrypy.log(message, 'TOOLS.STATIC')
            raise cherrypy.HTTPError(416, message)
        if r:
            if len(r) == 1:
                # Return a single-part response.
                start, stop = r[0]
                if stop > content_length:
                    stop = content_length
                r_len = stop - start
                if debug:
                    cherrypy.log(
                        'Single part; start: %r, stop: %r' % (start, stop),
                        'TOOLS.STATIC')
                response.status = "206 Partial Content"
                response.headers['Content-Range'] = (
                    "bytes %s-%s/%s" % (start, stop - 1, content_length))
                response.headers['Content-Length'] = r_len
                fileobj.seek(start)
                response.body = file_generator_limited(fileobj, r_len)
            else:
                # Return a multipart/byteranges response.
                response.status = "206 Partial Content"
                try:
                    # Python 3
                    from email.generator import _make_boundary as make_boundary
                except ImportError:
                    # Python 2
                    from mimetools import choose_boundary as make_boundary
                boundary = make_boundary()
                ct = "multipart/byteranges; boundary=%s" % boundary
                response.headers['Content-Type'] = ct
                if "Content-Length" in response.headers:
                    # Delete Content-Length header so finalize() recalcs it.
                    del response.headers["Content-Length"]
                def file_ranges():
                    # Generator yielding each range as its own MIME part.
                    # Apache compatibility:
                    yield ntob("\r\n")
                    for start, stop in r:
                        if debug:
                            cherrypy.log(
                                'Multipart; start: %r, stop: %r' % (
                                    start, stop),
                                'TOOLS.STATIC')
                        yield ntob("--" + boundary, 'ascii')
                        yield ntob("\r\nContent-type: %s" % content_type,
                                   'ascii')
                        yield ntob(
                            "\r\nContent-range: bytes %s-%s/%s\r\n\r\n" % (
                                start, stop - 1, content_length),
                            'ascii')
                        fileobj.seek(start)
                        gen = file_generator_limited(fileobj, stop - start)
                        for chunk in gen:
                            yield chunk
                        yield ntob("\r\n")
                    # Final boundary
                    yield ntob("--" + boundary + "--", 'ascii')
                    # Apache compatibility:
                    yield ntob("\r\n")
                response.body = file_ranges()
            return response.body
        else:
            if debug:
                cherrypy.log('No byteranges requested', 'TOOLS.STATIC')
    # Set Content-Length and use an iterable (file object)
    # this way CP won't load the whole file in memory
    response.headers['Content-Length'] = content_length
    response.body = fileobj
    return response.body
def serve_download(path, name=None):
    """Serve 'path' as an application/x-download attachment."""
    # Fixed download content type plus an attachment disposition makes
    # browsers prompt to save rather than render inline.
    return serve_file(path, content_type="application/x-download",
                      disposition="attachment", name=name)
def _attempt(filename, content_types, debug=False):
    """Try to serve *filename*; return True on success, False if missing.

    NotFound is swallowed so the caller can fall back to a dynamic handler.
    """
    if debug:
        cherrypy.log('Attempting %r (content_types %r)' %
                     (filename, content_types), 'TOOLS.STATICDIR')
    # you can set the content types for a
    # complete directory per extension
    content_type = None
    if content_types:
        ext = os.path.splitext(filename)[1]
        content_type = content_types.get(ext[1:], None)
    try:
        serve_file(filename, content_type=content_type, debug=debug)
    except cherrypy.NotFound:
        # If we didn't find the static file, continue handling the
        # request. We might find a dynamic handler instead.
        if debug:
            cherrypy.log('NotFound', 'TOOLS.STATICFILE')
        return False
    return True
def staticdir(section, dir, root="", match="", content_types=None, index="",
              debug=False):
    """Serve a static resource from the given (root +) dir.

    match
        If given, request.path_info will be searched for the given
        regular expression before attempting to serve static content.

    content_types
        If given, it should be a Python dictionary of
        {file-extension: content-type} pairs, where 'file-extension' is
        a string (e.g. "gif") and 'content-type' is the value to write
        out in the Content-Type response header (e.g. "image/gif").

    index
        If provided, it should be the (relative) name of a file to
        serve for directory requests. For example, if the dir argument is
        '/home/me', the Request-URI is 'myapp', and the index arg is
        'index.html', the file '/home/me/myapp/index.html' will be sought.

    Returns True when a static file was served, False otherwise.
    """
    request = cherrypy.serving.request
    if request.method not in ('GET', 'HEAD'):
        if debug:
            cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICDIR')
        return False
    if match and not re.search(match, request.path_info):
        if debug:
            cherrypy.log('request.path_info %r does not match pattern %r' %
                         (request.path_info, match), 'TOOLS.STATICDIR')
        return False
    # Allow the use of '~' to refer to a user's home directory.
    dir = os.path.expanduser(dir)
    # If dir is relative, make absolute using "root".
    if not os.path.isabs(dir):
        if not root:
            msg = "Static dir requires an absolute dir (or root)."
            if debug:
                cherrypy.log(msg, 'TOOLS.STATICDIR')
            raise ValueError(msg)
        dir = os.path.join(root, dir)
    # Determine where we are in the object tree relative to 'section'
    # (where the static tool was defined).
    if section == 'global':
        section = "/"
    section = section.rstrip(r"\/")
    branch = request.path_info[len(section) + 1:]
    branch = unquote(branch.lstrip(r"\/"))
    # If branch is "", filename will end in a slash
    filename = os.path.join(dir, branch)
    if debug:
        cherrypy.log('Checking file %r to fulfill %r' %
                     (filename, request.path_info), 'TOOLS.STATICDIR')
    # There's a chance that the branch pulled from the URL might
    # have ".." or similar uplevel attacks in it. Check that the final
    # filename is a child of dir.
    if not os.path.normpath(filename).startswith(os.path.normpath(dir)):
        raise cherrypy.HTTPError(403)  # Forbidden
    # Fix: forward the debug flag so _attempt logs consistently with
    # staticfile (it was previously dropped here).
    handled = _attempt(filename, content_types, debug=debug)
    if not handled:
        # Check for an index file if a folder was requested.
        if index:
            handled = _attempt(os.path.join(filename, index), content_types,
                               debug=debug)
            if handled:
                request.is_index = filename[-1] in (r"\/")
    return handled
def staticfile(filename, root=None, match="", content_types=None, debug=False):
    """Serve a static resource from the given (root +) filename.

    match
        If given, request.path_info will be searched for the given
        regular expression before attempting to serve static content.

    content_types
        If given, it should be a Python dictionary of
        {file-extension: content-type} pairs, where 'file-extension' is
        a string (e.g. "gif") and 'content-type' is the value to write
        out in the Content-Type response header (e.g. "image/gif").

    Returns True when the file was served, False otherwise.
    """
    request = cherrypy.serving.request
    # Static serving only makes sense for safe, body-less request methods.
    if request.method not in ('GET', 'HEAD'):
        if debug:
            cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICFILE')
        return False
    if match and not re.search(match, request.path_info):
        if debug:
            cherrypy.log('request.path_info %r does not match pattern %r' %
                         (request.path_info, match), 'TOOLS.STATICFILE')
        return False
    # If filename is relative, make absolute using "root".
    if not os.path.isabs(filename):
        if not root:
            msg = "Static tool requires an absolute filename (got '%s')." % (
                filename,)
            if debug:
                cherrypy.log(msg, 'TOOLS.STATICFILE')
            raise ValueError(msg)
        filename = os.path.join(root, filename)
    return _attempt(filename, content_types, debug=debug)
|
deadRaccoons/TestAirlines
|
tabo/cherrypy/cherrypy/lib/static.py
|
Python
|
gpl-2.0
| 14,778 | 0 |
# ############################################################################
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
# ############################################################################
from unittest import mock
from django.test import SimpleTestCase
from program_management.ddd import command
from program_management.ddd.domain.service.identity_search import ProgramTreeVersionIdentitySearch
from program_management.ddd.repositories.program_tree_version import ProgramTreeVersionRepository
from program_management.ddd.service.read import get_program_tree_version_from_node_service
class TestGetProgramTreeVersionFromNodeService(SimpleTestCase):
    def test_domain_service_is_called(self):
        """The read service must consult both the identity search and the repository."""
        cmd = command.GetProgramTreeVersionFromNodeCommand(code="LDROI1200", year=2018)
        with mock.patch.object(ProgramTreeVersionIdentitySearch,
                               'get_from_node_identity') as identity_mock, \
                mock.patch.object(ProgramTreeVersionRepository, 'get') as repo_mock:
            get_program_tree_version_from_node_service.get_program_tree_version_from_node(cmd)
        self.assertTrue(identity_mock.called)
        self.assertTrue(repo_mock.called)
|
uclouvain/OSIS-Louvain
|
program_management/tests/ddd/service/read/test_get_program_tree_version_from_node_service.py
|
Python
|
agpl-3.0
| 2,219 | 0.003607 |
# -*- test-case-name: twisted.test.test_sip -*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""Session Initialization Protocol.
Documented in RFC 2543.
[Superceded by 3261]
This module contains a deprecated implementation of HTTP Digest authentication.
See L{twisted.cred.credentials} and L{twisted.cred._digest} for its new home.
"""
# system imports
import socket, time, sys, random, warnings
from zope.interface import implements, Interface
# twisted imports
from twisted.python import log, util
from twisted.python.deprecate import deprecated
from twisted.python.versions import Version
from twisted.python.hashlib import md5
from twisted.internet import protocol, defer, reactor
from twisted import cred
import twisted.cred.error
from twisted.cred.credentials import UsernameHashedPassword, UsernamePassword
# sibling imports
from twisted.protocols import basic
PORT = 5060
# SIP headers have short forms
shortHeaders = {"call-id": "i",
"contact": "m",
"content-encoding": "e",
"content-length": "l",
"content-type": "c",
"from": "f",
"subject": "s",
"to": "t",
"via": "v",
}
longHeaders = {}
for k, v in shortHeaders.items():
longHeaders[v] = k
del k, v
statusCodes = {
100: "Trying",
180: "Ringing",
181: "Call Is Being Forwarded",
182: "Queued",
183: "Session Progress",
200: "OK",
300: "Multiple Choices",
301: "Moved Permanently",
302: "Moved Temporarily",
303: "See Other",
305: "Use Proxy",
380: "Alternative Service",
400: "Bad Request",
401: "Unauthorized",
402: "Payment Required",
403: "Forbidden",
404: "Not Found",
405: "Method Not Allowed",
406: "Not Acceptable",
407: "Proxy Authentication Required",
408: "Request Timeout",
409: "Conflict", # Not in RFC3261
410: "Gone",
411: "Length Required", # Not in RFC3261
413: "Request Entity Too Large",
414: "Request-URI Too Large",
415: "Unsupported Media Type",
416: "Unsupported URI Scheme",
420: "Bad Extension",
421: "Extension Required",
423: "Interval Too Brief",
480: "Temporarily Unavailable",
481: "Call/Transaction Does Not Exist",
482: "Loop Detected",
483: "Too Many Hops",
484: "Address Incomplete",
485: "Ambiguous",
486: "Busy Here",
487: "Request Terminated",
488: "Not Acceptable Here",
491: "Request Pending",
493: "Undecipherable",
500: "Internal Server Error",
501: "Not Implemented",
502: "Bad Gateway", # no donut
503: "Service Unavailable",
504: "Server Time-out",
505: "SIP Version not supported",
513: "Message Too Large",
600: "Busy Everywhere",
603: "Decline",
604: "Does not exist anywhere",
606: "Not Acceptable",
}
# Header names whose canonical capitalization cannot be produced by
# dashCapitalize() (mixed-case or fully uppercase forms).
specialCases = {
    'cseq': 'CSeq',
    'call-id': 'Call-ID',
    'www-authenticate': 'WWW-Authenticate',
    }
def dashCapitalize(s):
    """Capitalize each dash-separated word of *s*, preserving the dashes."""
    return '-'.join(word.capitalize() for word in s.split('-'))
def unq(s):
    """
    Strip a matching pair of surrounding double quotes from *s*.

    Returns *s* unchanged when it is not quoted.  An empty string is
    returned as-is (the previous implementation raised IndexError on
    empty input when indexing s[0]).
    """
    if s and s[0] == s[-1] == '"':
        return s[1:-1]
    return s
def DigestCalcHA1(
    pszAlg,
    pszUserName,
    pszRealm,
    pszPassword,
    pszNonce,
    pszCNonce,
    ):
    """
    Compute the HA1 value for HTTP Digest authentication (RFC 2617):
    MD5(username:realm:password), further mixed with the nonce and client
    nonce for the "md5-sess" algorithm.

    @return: The hex-encoded HA1 digest.  NOTE: uses the Python 2-only
        str.encode('hex'); binascii.hexlify is the portable equivalent.
    """
    m = md5()
    m.update(pszUserName)
    m.update(":")
    m.update(pszRealm)
    m.update(":")
    m.update(pszPassword)
    HA1 = m.digest()
    if pszAlg == "md5-sess":
        # md5-sess: HA1 = MD5(MD5(user:realm:pass):nonce:cnonce)
        m = md5()
        m.update(HA1)
        m.update(":")
        m.update(pszNonce)
        m.update(":")
        m.update(pszCNonce)
        HA1 = m.digest()
    return HA1.encode('hex')
# Deprecated as a public API since Twisted 9.0.
DigestCalcHA1 = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcHA1)
def DigestCalcResponse(
    HA1,
    pszNonce,
    pszNonceCount,
    pszCNonce,
    pszQop,
    pszMethod,
    pszDigestUri,
    pszHEntity,
    ):
    """
    Compute the digest 'response' value (RFC 2617) from a hex-encoded HA1
    and the challenge parameters.

    When both a nonce count and a client nonce are present the qop-style
    response is produced; otherwise the older RFC 2069 form is used.

    @return: The hex-encoded response digest (Python 2-only
        str.encode('hex')).
    """
    # HA2 = MD5(method:digest-uri[:H(entity-body) for auth-int])
    m = md5()
    m.update(pszMethod)
    m.update(":")
    m.update(pszDigestUri)
    if pszQop == "auth-int":
        m.update(":")
        m.update(pszHEntity)
    HA2 = m.digest().encode('hex')
    # response = MD5(HA1:nonce[:nc:cnonce:qop]:HA2)
    m = md5()
    m.update(HA1)
    m.update(":")
    m.update(pszNonce)
    m.update(":")
    if pszNonceCount and pszCNonce: # pszQop:
        m.update(pszNonceCount)
        m.update(":")
        m.update(pszCNonce)
        m.update(":")
        m.update(pszQop)
        m.update(":")
    m.update(HA2)
    # NOTE: 'hash' shadows the builtin of the same name.
    hash = m.digest().encode('hex')
    return hash
# Deprecated as a public API since Twisted 9.0.
DigestCalcResponse = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcResponse)
# Sentinel distinguishing "no rport argument supplied" from rport=None.
_absent = object()
class Via(object):
    """
    A L{Via} is a SIP Via header, representing a segment of the path taken by
    the request.
    See RFC 3261, sections 8.1.1.7, 18.2.2, and 20.42.
    @ivar transport: Network protocol used for this leg. (Probably either "TCP"
    or "UDP".)
    @type transport: C{str}
    @ivar branch: Unique identifier for this request.
    @type branch: C{str}
    @ivar host: Hostname or IP for this leg.
    @type host: C{str}
    @ivar port: Port used for this leg.
    @type port C{int}, or None.
    @ivar rportRequested: Whether to request RFC 3581 client processing or not.
    @type rportRequested: C{bool}
    @ivar rportValue: Servers wishing to honor requests for RFC 3581 processing
    should set this parameter to the source port the request was received
    from.
    @type rportValue: C{int}, or None.
    @ivar ttl: Time-to-live for requests on multicast paths.
    @type ttl: C{int}, or None.
    @ivar maddr: The destination multicast address, if any.
    @type maddr: C{str}, or None.
    @ivar hidden: Obsolete in SIP 2.0.
    @type hidden: C{bool}
    @ivar otherParams: Any other parameters in the header.
    @type otherParams: C{dict}
    """
    def __init__(self, host, port=PORT, transport="UDP", ttl=None,
                 hidden=False, received=None, rport=_absent, branch=None,
                 maddr=None, **kw):
        """
        Set parameters of this Via header. All arguments correspond to
        attributes of the same name.
        To maintain compatibility with old SIP
        code, the 'rport' argument is used to determine the values of
        C{rportRequested} and C{rportValue}. If None, C{rportRequested} is set
        to True. (The deprecated method for doing this is to pass True.) If an
        integer, C{rportValue} is set to the given value.
        Any arguments not explicitly named here are collected into the
        C{otherParams} dict.
        """
        self.transport = transport
        self.host = host
        self.port = port
        self.ttl = ttl
        self.hidden = hidden
        self.received = received
        if rport is True:
            # Deprecated spelling of "request RFC 3581 processing".
            warnings.warn(
                "rport=True is deprecated since Twisted 9.0.",
                DeprecationWarning,
                stacklevel=2)
            self.rportValue = None
            self.rportRequested = True
        elif rport is None:
            # Current spelling of "request RFC 3581 processing".
            self.rportValue = None
            self.rportRequested = True
        elif rport is _absent:
            # No rport at all: neither requested nor carrying a value.
            self.rportValue = None
            self.rportRequested = False
        else:
            # An integer: the value a server filled in for the source port.
            self.rportValue = rport
            self.rportRequested = False
        self.branch = branch
        self.maddr = maddr
        self.otherParams = kw
    def _getrport(self):
        """
        Returns the rport value expected by the old SIP code.
        """
        if self.rportRequested == True:
            return True
        elif self.rportValue is not None:
            return self.rportValue
        else:
            return None
    def _setrport(self, newRPort):
        """
        L{Base._fixupNAT} sets C{rport} directly, so this method sets
        C{rportValue} based on that.
        @param newRPort: The new rport value.
        @type newRPort: C{int}
        """
        self.rportValue = newRPort
        self.rportRequested = False
    # Legacy accessor: True when rport was requested, an int when a value
    # was filled in, None otherwise.
    rport = property(_getrport, _setrport)
    def toString(self):
        """
        Serialize this header for use in a request or response.
        """
        s = "SIP/2.0/%s %s:%s" % (self.transport, self.host, self.port)
        if self.hidden:
            s += ";hidden"
        for n in "ttl", "branch", "maddr", "received":
            value = getattr(self, n)
            if value is not None:
                s += ";%s=%s" % (n, value)
        if self.rportRequested:
            # A bare ";rport" asks the server for RFC 3581 processing.
            s += ";rport"
        elif self.rportValue is not None:
            s += ";rport=%s" % (self.rport,)
        # Remaining parameters are emitted in sorted order (Python 2:
        # dict.items() returns a list, sorted in place).
        etc = self.otherParams.items()
        etc.sort()
        for k, v in etc:
            if v is None:
                # Valueless parameter.
                s += ";" + k
            else:
                s += ";%s=%s" % (k, v)
        return s
def parseViaHeader(value):
    """
    Parse a Via header.

    @param value: The field value of a Via header, e.g.
        'SIP/2.0/UDP host:port;branch=...;rport'.
    @raise ValueError: If the protocol is not SIP/2.0.
    @return: The parsed version of this header.
    @rtype: L{Via}
    """
    parts = value.split(";")
    sent, params = parts[0], parts[1:]
    protocolinfo, by = sent.split(" ", 1)
    by = by.strip()
    result = {}
    pname, pversion, transport = protocolinfo.split("/")
    if pname != "SIP" or pversion != "2.0":
        # raise-as-call form works on both Python 2 and 3, unlike the
        # old "raise ValueError, msg" statement.
        raise ValueError("wrong protocol or version: %r" % value)
    result["transport"] = transport
    if ":" in by:
        host, port = by.split(":")
        result["port"] = int(port)
        result["host"] = host
    else:
        result["host"] = by
    for p in params:
        # it's the comment-stripping dance!
        p = p.strip().split(" ", 1)
        if len(p) == 1:
            p, comment = p[0], ""
        else:
            p, comment = p
        if p == "hidden":
            result["hidden"] = True
            continue
        parts = p.split("=", 1)
        if len(parts) == 1:
            name, value = parts[0], None
        else:
            name, value = parts
        if name in ("rport", "ttl") and value is not None:
            # A bare ";rport" (an RFC 3581 request) has no value; leave it
            # as None so Via() records the request, instead of crashing on
            # int(None) as the previous code did.
            value = int(value)
        result[name] = value
    return Via(**result)
class URL:
    """
    A SIP URL.

    C{other} is a list of unparsed ';name=value' parameter strings and
    C{headers} maps header names to values; both default to fresh,
    per-instance containers.
    """
    def __init__(self, host, username=None, password=None, port=None,
                 transport=None, usertype=None, method=None,
                 ttl=None, maddr=None, tag=None, other=None, headers=None):
        self.username = username
        self.host = host
        self.password = password
        self.port = port
        self.transport = transport
        self.usertype = usertype
        self.method = method
        self.tag = tag
        self.ttl = ttl
        self.maddr = maddr
        # Fresh mutable containers per instance; identity comparison with
        # None (rather than ==) is the correct idiom here.
        if other is None:
            self.other = []
        else:
            self.other = other
        if headers is None:
            self.headers = {}
        else:
            self.headers = headers
    def toString(self):
        """Serialize this URL to its 'sip:' string form."""
        l = []; w = l.append
        w("sip:")
        if self.username is not None:
            w(self.username)
            if self.password is not None:
                w(":%s" % self.password)
            w("@")
        w(self.host)
        if self.port is not None:
            w(":%d" % self.port)
        if self.usertype is not None:
            w(";user=%s" % self.usertype)
        for n in ("transport", "ttl", "maddr", "method", "tag"):
            v = getattr(self, n)
            if v is not None:
                w(";%s=%s" % (n, v))
        for v in self.other:
            # Unparsed parameters are emitted verbatim.
            w(";%s" % v)
        if self.headers:
            # Headers become a trailing query string, using the canonical
            # capitalization for known special cases.
            w("?")
            w("&".join([("%s=%s" % (specialCases.get(h) or dashCapitalize(h), v)) for (h, v) in self.headers.items()]))
        return "".join(l)
    def __str__(self):
        return self.toString()
    def __repr__(self):
        return '<URL %s:%s@%s:%r/%s>' % (self.username, self.password, self.host, self.port, self.transport)
def parseURL(url, host=None, port=None):
    """
    Parse a string into a L{URL} object.

    URIs are of the form 'sip:user@example.com'.

    @param url: The 'sip:' URI string.
    @param host: If not None, overrides the parsed host.
    @param port: If not None, overrides the parsed port.
    @raise ValueError: If the URI does not use the 'sip:' scheme.
    @return: The parsed L{URL}.
    """
    d = {}
    if not url.startswith("sip:"):
        raise ValueError("unsupported scheme: " + url[:4])
    parts = url[4:].split(";")
    userdomain, params = parts[0], parts[1:]
    # Split optional 'user[:password]@' from 'host[:port]'.
    udparts = userdomain.split("@", 1)
    if len(udparts) == 2:
        userpass, hostport = udparts
        upparts = userpass.split(":", 1)
        if len(upparts) == 1:
            d["username"] = upparts[0]
        else:
            d["username"] = upparts[0]
            d["password"] = upparts[1]
    else:
        hostport = udparts[0]
    hpparts = hostport.split(":", 1)
    if len(hpparts) == 1:
        d["host"] = hpparts[0]
    else:
        d["host"] = hpparts[0]
        d["port"] = int(hpparts[1])
    if host is not None:
        d["host"] = host
    if port is not None:
        d["port"] = port
    for p in params:
        # Only the last parameter may carry trailing '?name=value' headers.
        if p == params[-1] and "?" in p:
            d["headers"] = h = {}
            p, headers = p.split("?", 1)
            for header in headers.split("&"):
                k, v = header.split("=")
                h[k] = v
        nv = p.split("=", 1)
        if len(nv) == 1:
            # Valueless parameters are collected verbatim in 'other'.
            d.setdefault("other", []).append(p)
            continue
        name, value = nv
        if name == "user":
            d["usertype"] = value
        elif name in ("transport", "ttl", "maddr", "method", "tag"):
            if name == "ttl":
                value = int(value)
            d[name] = value
        else:
            d.setdefault("other", []).append(p)
    return URL(**d)
def cleanRequestURL(url):
    """Strip fields from a URL that do not belong in a Request line."""
    for attribute in ("transport", "maddr", "ttl"):
        setattr(url, attribute, None)
    url.headers = {}
def parseAddress(address, host=None, port=None, clean=0):
    """Return (name, uri, params) for From/To/Contact header.
    @param address: The raw field value, either a bare 'sip:' URI or a
        '"Display Name" <sip:uri>;param=value' form.
    @param host: If given, overrides the host parsed from the URI.
    @param port: If given, overrides the port parsed from the URI.
    @param clean: remove unnecessary info, usually for From and To headers.
    @return: A (display-name, L{URL}, parameter-dict) triple.
    """
    address = address.strip()
    # simple 'sip:foo' case
    if address.startswith("sip:"):
        return "", parseURL(address, host=host, port=port), {}
    params = {}
    name, url = address.split("<", 1)
    name = name.strip()
    # Strip surrounding quotes from the display name, if any.
    # NOTE(review): a quoted display name containing '<' or ';' would be
    # split incorrectly above -- confirm callers never pass such values.
    if name.startswith('"'):
        name = name[1:]
    if name.endswith('"'):
        name = name[:-1]
    url, paramstring = url.split(">", 1)
    url = parseURL(url, host=host, port=port)
    paramstring = paramstring.strip()
    if paramstring:
        for l in paramstring.split(";"):
            if not l:
                continue
            k, v = l.split("=")
            params[k] = v
    if clean:
        # rfc 2543 6.21
        url.ttl = None
        url.headers = {}
        url.transport = None
        url.maddr = None
    return name, url, params
class SIPError(Exception):
    """An error carrying a SIP status code and its reason phrase."""
    def __init__(self, code, phrase=None):
        # Without an explicit phrase, fall back to the standard reason
        # phrase for this status code.
        if phrase is None:
            phrase = statusCodes[code]
        self.code = code
        self.phrase = phrase
        Exception.__init__(self, "SIP error (%d): %s" % (code, phrase))
class RegistrationError(SIPError):
    """Registration was not possible.
    Raised/failed by IRegistry implementations; carries a SIP status code
    like its base class.
    """
class Message:
    """
    A SIP message: a start line, ordered headers, and an optional body.

    @ivar headers: Maps lower-cased long header names to lists of values,
        preserving insertion order.
    @ivar body: The body data accumulated so far.
    @ivar finished: Set to 1 once creationFinished() has validated the body.
    """
    # Value of the Content-Length header, once one has been seen.
    length = None
    def __init__(self):
        self.headers = util.OrderedDict() # map name to list of values
        self.body = ""
        self.finished = 0
    def addHeader(self, name, value):
        """Add a header value, normalizing short-form names to long form."""
        name = name.lower()
        name = longHeaders.get(name, name)
        if name == "content-length":
            self.length = int(value)
        self.headers.setdefault(name,[]).append(value)
    def bodyDataReceived(self, data):
        """Append a chunk of body data."""
        self.body += data
    def creationFinished(self):
        """
        Mark the message complete.

        @raise ValueError: If a Content-Length was declared and the body
            length does not match it.
        """
        # 'is not None' instead of '!= None'; raise-as-call instead of the
        # Python 2-only raise statement.
        if (self.length is not None) and (self.length != len(self.body)):
            raise ValueError("wrong body length")
        self.finished = 1
    def toString(self):
        """Serialize: start line, headers, blank line, then the body."""
        s = "%s\r\n" % self._getHeaderLine()
        for n, vs in self.headers.items():
            for v in vs:
                s += "%s: %s\r\n" % (specialCases.get(n) or dashCapitalize(n), v)
        s += "\r\n"
        s += self.body
        return s
    def _getHeaderLine(self):
        """Return the start line; implemented by Request and Response."""
        raise NotImplementedError
class Request(Message):
    """A Request for a URI"""
    def __init__(self, method, uri, version="SIP/2.0"):
        # 'version' is accepted for symmetry with Response but is not
        # stored; serialization always emits SIP/2.0.
        Message.__init__(self)
        self.method = method
        if isinstance(uri, URL):
            self.uri = uri
        else:
            # Parse a string URI and strip fields not allowed in a
            # Request-URI.
            self.uri = parseURL(uri)
            cleanRequestURL(self.uri)
    def __repr__(self):
        return "<SIP Request %d:%s %s>" % (id(self), self.method, self.uri.toString())
    def _getHeaderLine(self):
        # Request start line: METHOD Request-URI SIP/2.0
        return "%s %s SIP/2.0" % (self.method, self.uri.toString())
class Response(Message):
    """A Response to a URI Request"""
    def __init__(self, code, phrase=None, version="SIP/2.0"):
        """
        @param code: Numeric SIP status code.
        @param phrase: Reason phrase; defaults to the standard phrase for
            the given code (from statusCodes).
        @param version: Accepted for symmetry; serialization always emits
            SIP/2.0.
        """
        Message.__init__(self)
        self.code = code
        # Identity comparison with None is the correct idiom.
        if phrase is None:
            phrase = statusCodes[code]
        self.phrase = phrase
    def __repr__(self):
        return "<SIP Response %d:%s>" % (id(self), self.code)
    def _getHeaderLine(self):
        # Status line: SIP/2.0 code phrase
        return "SIP/2.0 %s %s" % (self.code, self.phrase)
class MessagesParser(basic.LineReceiver):
    """A SIP messages parser.
    Expects dataReceived, dataDone repeatedly,
    in that order. Shouldn't be connected to actual transport.
    """
    version = "SIP/2.0"
    acceptResponses = 1
    acceptRequests = 1
    state = "firstline" # or "headers", "body" or "invalid"
    debug = 0
    def __init__(self, messageReceivedCallback):
        # Callback invoked with each completely parsed Message.
        self.messageReceived = messageReceivedCallback
        self.reset()
    def reset(self, remainingData=""):
        # Return to the initial state, optionally reprocessing leftover
        # data as the start of the next message.
        self.state = "firstline"
        self.length = None # body length
        self.bodyReceived = 0 # how much of the body we received
        self.message = None
        self.setLineMode(remainingData)
    def invalidMessage(self):
        # Terminal state: raw data is discarded until the next dataDone.
        self.state = "invalid"
        self.setRawMode()
    def dataDone(self):
        # Called when the current datagram is exhausted.
        # clear out any buffered data that may be hanging around
        self.clearLineBuffer()
        if self.state == "firstline":
            return
        if self.state != "body":
            self.reset()
            return
        if self.length == None:
            # no content-length header, so end of data signals message done
            self.messageDone()
        elif self.length < self.bodyReceived:
            # aborted in the middle
            # NOTE(review): an aborted message has bodyReceived < length,
            # so this comparison looks inverted -- the RuntimeError below
            # would fire instead.  Preserved as-is; confirm intent.
            self.reset()
        else:
            # we have enough data and message wasn't finished? something is wrong
            raise RuntimeError, "this should never happen"
    def dataReceived(self, data):
        try:
            basic.LineReceiver.dataReceived(self, data)
        except:
            # Any parse failure invalidates the rest of this datagram.
            log.err()
            self.invalidMessage()
    def handleFirstLine(self, line):
        """Expected to create self.message."""
        raise NotImplementedError
    def lineLengthExceeded(self, line):
        self.invalidMessage()
    def lineReceived(self, line):
        if self.state == "firstline":
            # Tolerate stray CR/LF noise before the start line.
            while line.startswith("\n") or line.startswith("\r"):
                line = line[1:]
            if not line:
                return
            try:
                a, b, c = line.split(" ", 2)
            except ValueError:
                self.invalidMessage()
                return
            if a == "SIP/2.0" and self.acceptResponses:
                # response
                try:
                    code = int(b)
                except ValueError:
                    self.invalidMessage()
                    return
                self.message = Response(code, c)
            elif c == "SIP/2.0" and self.acceptRequests:
                self.message = Request(a, b)
            else:
                self.invalidMessage()
                return
            self.state = "headers"
            return
        else:
            assert self.state == "headers"
        if line:
            # XXX support multi-line headers
            try:
                name, value = line.split(":", 1)
            except ValueError:
                self.invalidMessage()
                return
            self.message.addHeader(name, value.lstrip())
            if name.lower() == "content-length":
                try:
                    self.length = int(value.lstrip())
                except ValueError:
                    self.invalidMessage()
                    return
        else:
            # CRLF, we now have message body until self.length bytes,
            # or if no length was given, until there is no more data
            # from the connection sending us data.
            self.state = "body"
            if self.length == 0:
                self.messageDone()
                return
            self.setRawMode()
    def messageDone(self, remainingData=""):
        # Finish the current message, hand it to the callback, and start
        # over on any surplus data.
        assert self.state == "body"
        self.message.creationFinished()
        self.messageReceived(self.message)
        self.reset(remainingData)
    def rawDataReceived(self, data):
        assert self.state in ("body", "invalid")
        if self.state == "invalid":
            return
        if self.length == None:
            # Unknown length: accumulate until dataDone.
            self.message.bodyDataReceived(data)
        else:
            dataLen = len(data)
            expectedLen = self.length - self.bodyReceived
            if dataLen > expectedLen:
                # More data than the body needs: finish this message and
                # reprocess the surplus as the start of the next one.
                self.message.bodyDataReceived(data[:expectedLen])
                self.messageDone(data[expectedLen:])
                return
            else:
                self.bodyReceived += dataLen
                self.message.bodyDataReceived(data)
                if self.bodyReceived == self.length:
                    self.messageDone()
class Base(protocol.DatagramProtocol):
    """Base class for SIP clients and servers."""
    PORT = PORT
    debug = False
    def __init__(self):
        # Messages queued by the parser callback, drained per datagram.
        self.messages = []
        self.parser = MessagesParser(self.addMessage)
    def addMessage(self, msg):
        # Parser callback: queue for datagramReceived to dispatch.
        self.messages.append(msg)
    def datagramReceived(self, data, addr):
        # Parse the whole datagram, then dispatch each complete message.
        self.parser.dataReceived(data)
        self.parser.dataDone()
        for m in self.messages:
            self._fixupNAT(m, addr)
            if self.debug:
                log.msg("Received %r from %r" % (m.toString(), addr))
            if isinstance(m, Request):
                self.handle_request(m, addr)
            else:
                self.handle_response(m, addr)
        self.messages[:] = []
    def _fixupNAT(self, message, (srcHost, srcPort)):
        # RFC 2543 6.40.2,
        # Rewrite the topmost Via so responses are routed back to the
        # address the request actually arrived from (NAT traversal /
        # RFC 3581 rport handling).
        senderVia = parseViaHeader(message.headers["via"][0])
        if senderVia.host != srcHost:
            senderVia.received = srcHost
            if senderVia.port != srcPort:
                senderVia.rport = srcPort
            message.headers["via"][0] = senderVia.toString()
        elif senderVia.rport == True:
            senderVia.received = srcHost
            senderVia.rport = srcPort
            message.headers["via"][0] = senderVia.toString()
    def deliverResponse(self, responseMessage):
        """Deliver response.
        Destination is based on topmost Via header."""
        destVia = parseViaHeader(responseMessage.headers["via"][0])
        # XXX we don't do multicast yet
        host = destVia.received or destVia.host
        port = destVia.rport or destVia.port or self.PORT
        destAddr = URL(host=host, port=port)
        self.sendMessage(destAddr, responseMessage)
    def responseFromRequest(self, code, request):
        """Create a response to a request message."""
        response = Response(code)
        # Copy (don't alias) the header value lists from the request.
        for name in ("via", "to", "from", "call-id", "cseq"):
            response.headers[name] = request.headers.get(name, [])[:]
        return response
    def sendMessage(self, destURL, message):
        """Send a message.
        @param destURL: C{URL}. This should be a *physical* URL, not a logical one.
        @param message: The message to send.
        """
        if destURL.transport not in ("udp", None):
            raise RuntimeError, "only UDP currently supported"
        if self.debug:
            log.msg("Sending %r to %r" % (message.toString(), destURL))
        self.transport.write(message.toString(), (destURL.host, destURL.port or self.PORT))
    def handle_request(self, message, addr):
        """Override to define behavior for requests received
        @type message: C{Message}
        @type addr: C{tuple}
        """
        raise NotImplementedError
    def handle_response(self, message, addr):
        """Override to define behavior for responses received.
        @type message: C{Message}
        @type addr: C{tuple}
        """
        raise NotImplementedError
class IContact(Interface):
    """A user of a registrar or proxy.
    Marker interface: it is the avatar interface requested from the portal
    in RegisterProxy.login.
    """
class Registration:
    """The outcome of a registry operation: how many seconds the binding
    lasts and the contact URL it maps to."""
    def __init__(self, secondsToExpiry, contactURL):
        self.secondsToExpiry = secondsToExpiry
        self.contactURL = contactURL
class IRegistry(Interface):
    """Allows registration of logical->physical URL mapping."""
    def registerAddress(domainURL, logicalURL, physicalURL):
        """Register the physical address of a logical URL.
        @return: Deferred of C{Registration} or failure with RegistrationError.
        """
    def unregisterAddress(domainURL, logicalURL, physicalURL):
        """Unregister the physical address of a logical URL.
        @return: Deferred of C{Registration} or failure with RegistrationError.
        """
    def getRegistrationInfo(logicalURL):
        """Get registration info for logical URL.
        @return: Deferred of C{Registration} object or failure of LookupError.
        """
class ILocator(Interface):
    """Allow looking up physical address for logical URL."""
    def getAddress(logicalURL):
        """Return physical URL of server for logical URL of user.
        @param logicalURL: a logical C{URL}.
        @return: Deferred which becomes URL or fails with LookupError.
        """
class Proxy(Base):
    """SIP proxy."""
    PORT = PORT
    locator = None # object implementing ILocator
    def __init__(self, host=None, port=PORT):
        """Create new instance.
        @param host: our hostname/IP as set in Via headers.
        @param port: our port as set in Via headers.
        """
        self.host = host or socket.getfqdn()
        self.port = port
        Base.__init__(self)
    def getVia(self):
        """Return value of Via header for this proxy."""
        return Via(host=self.host, port=self.port)
    def handle_request(self, message, addr):
        # send immediate 100/trying message before processing
        #self.deliverResponse(self.responseFromRequest(100, message))
        # Dispatch to handle_<METHOD>_request if defined, else the default.
        f = getattr(self, "handle_%s_request" % message.method, None)
        if f is None:
            f = self.handle_request_default
        try:
            d = f(message, addr)
        except SIPError, e:
            self.deliverResponse(self.responseFromRequest(e.code, message))
        except:
            # Unexpected failure: log it and answer 500.
            log.err()
            self.deliverResponse(self.responseFromRequest(500, message))
        else:
            # Handlers may return a Deferred whose SIPError failures are
            # turned into responses.
            if d is not None:
                d.addErrback(lambda e:
                    self.deliverResponse(self.responseFromRequest(e.code, message))
                )
    def handle_request_default(self, message, (srcHost, srcPort)):
        """Default request handler.
        Default behaviour for OPTIONS and unknown methods for proxies
        is to forward message on to the client.
        Since at the moment we are stateless proxy, thats basically
        everything.
        """
        def _mungContactHeader(uri, message):
            message.headers['contact'][0] = uri.toString()
            return self.sendMessage(uri, message)
        viaHeader = self.getVia()
        if viaHeader.toString() in message.headers["via"]:
            # must be a loop, so drop message
            log.msg("Dropping looped message.")
            return
        # Prepend ourselves to the Via path before forwarding.
        message.headers["via"].insert(0, viaHeader.toString())
        name, uri, tags = parseAddress(message.headers["to"][0], clean=1)
        # this is broken and needs refactoring to use cred
        d = self.locator.getAddress(uri)
        d.addCallback(self.sendMessage, message)
        d.addErrback(self._cantForwardRequest, message)
    def _cantForwardRequest(self, error, message):
        error.trap(LookupError)
        del message.headers["via"][0] # this'll be us
        self.deliverResponse(self.responseFromRequest(404, message))
    def deliverResponse(self, responseMessage):
        """Deliver response.
        Destination is based on topmost Via header.
        NOTE(review): byte-identical to Base.deliverResponse; this override
        is redundant and could be deleted.
        """
        destVia = parseViaHeader(responseMessage.headers["via"][0])
        # XXX we don't do multicast yet
        host = destVia.received or destVia.host
        port = destVia.rport or destVia.port or self.PORT
        destAddr = URL(host=host, port=port)
        self.sendMessage(destAddr, responseMessage)
    def responseFromRequest(self, code, request):
        """Create a response to a request message.
        NOTE(review): byte-identical to Base.responseFromRequest; this
        override is redundant and could be deleted.
        """
        response = Response(code)
        for name in ("via", "to", "from", "call-id", "cseq"):
            response.headers[name] = request.headers.get(name, [])[:]
        return response
    def handle_response(self, message, addr):
        """Default response handler."""
        v = parseViaHeader(message.headers["via"][0])
        if (v.host, v.port) != (self.host, self.port):
            # we got a message not intended for us?
            # XXX note this check breaks if we have multiple external IPs
            # yay for suck protocols
            log.msg("Dropping incorrectly addressed message")
            return
        del message.headers["via"][0]
        if not message.headers["via"]:
            # this message is addressed to us
            self.gotResponse(message, addr)
            return
        # Still more Via hops: keep forwarding downstream.
        self.deliverResponse(message)
    def gotResponse(self, message, addr):
        """Called with responses that are addressed at this server."""
        pass
class IAuthorizer(Interface):
    """Generates challenges for, and decodes credentials from, one SIP
    authentication scheme."""
    def getChallenge(peer):
        """Generate a challenge the client may respond to.
        @type peer: C{tuple}
        @param peer: The client's address
        @rtype: C{str}
        @return: The challenge string, or None for schemes with no
            challenge.
        """
    def decode(response):
        """Create a credentials object from the given response.
        @type response: C{str}
        """
class BasicAuthorizer:
    """Authorizer for insecure Basic (base64-encoded plaintext) authentication.
    This form of authentication is broken and insecure. Do not use it.
    """
    implements(IAuthorizer)
    def __init__(self):
        """
        This method exists solely to issue a deprecation warning.
        """
        warnings.warn(
            "twisted.protocols.sip.BasicAuthorizer was deprecated "
            "in Twisted 9.0.0",
            category=DeprecationWarning,
            stacklevel=2)
    def getChallenge(self, peer):
        # Basic authentication has no challenge.
        return None
    def decode(self, response):
        # At least one SIP client improperly pads its Base64 encoded
        # messages, so retry with up to two extra '=' padding characters.
        # (str.decode('base64') is Python 2 only.)
        for i in range(3):
            try:
                creds = (response + ('=' * i)).decode('base64')
            except:
                # NOTE(review): bare except; binascii.Error is the expected
                # failure here.
                pass
            else:
                break
        else:
            # Totally bogus
            raise SIPError(400)
        p = creds.split(':', 1)
        if len(p) == 2:
            return UsernamePassword(*p)
        raise SIPError(400)
class DigestedCredentials(UsernameHashedPassword):
    """Yet Another Simple Digest-MD5 authentication scheme"""
    def __init__(self, username, fields, challenges):
        # 'fields' holds the parsed Authorization header parameters;
        # 'challenges' maps outstanding opaque values to nonces (shared
        # with the issuing DigestAuthorizer).
        warnings.warn(
            "twisted.protocols.sip.DigestedCredentials was deprecated "
            "in Twisted 9.0.0",
            category=DeprecationWarning,
            stacklevel=2)
        self.username = username
        self.fields = fields
        self.challenges = challenges
    def checkPassword(self, password):
        """Recompute the expected digest for REGISTER and compare it with
        the client's response field."""
        method = 'REGISTER'
        response = self.fields.get('response')
        uri = self.fields.get('uri')
        nonce = self.fields.get('nonce')
        cnonce = self.fields.get('cnonce')
        nc = self.fields.get('nc')
        algo = self.fields.get('algorithm', 'MD5')
        qop = self.fields.get('qop-options', 'auth')
        opaque = self.fields.get('opaque')
        # Each issued challenge may be used at most once.
        if opaque not in self.challenges:
            return False
        del self.challenges[opaque]
        user, domain = self.username.split('@', 1)
        if uri is None:
            uri = 'sip:' + domain
        expected = DigestCalcResponse(
            DigestCalcHA1(algo, user, domain, password, nonce, cnonce),
            nonce, nc, cnonce, qop, method, uri, None,
        )
        return expected == response
class DigestAuthorizer:
    # Deprecated digest-auth challenge generator and credentials decoder.
    # NOTE(review): CHALLENGE_LIFETIME is never referenced in this class;
    # confirm whether expiry was ever implemented.
    CHALLENGE_LIFETIME = 15
    implements(IAuthorizer)
    def __init__(self):
        warnings.warn(
            "twisted.protocols.sip.DigestAuthorizer was deprecated "
            "in Twisted 9.0.0",
            category=DeprecationWarning,
            stacklevel=2)
        # Maps issued opaque values to their nonces.
        self.outstanding = {}
    def generateNonce(self):
        # SECURITY NOTE(review): uses the non-cryptographic 'random'
        # module (and Python 2's sys.maxint) for nonce generation; a
        # cryptographically strong source would be preferable.
        c = tuple([random.randrange(sys.maxint) for _ in range(3)])
        c = '%d%d%d' % c
        return c
    def generateOpaque(self):
        return str(random.randrange(sys.maxint))
    def getChallenge(self, peer):
        # Issue a fresh nonce/opaque pair and remember it for validation.
        c = self.generateNonce()
        o = self.generateOpaque()
        self.outstanding[o] = c
        return ','.join((
            'nonce="%s"' % c,
            'opaque="%s"' % o,
            'qop-options="auth"',
            'algorithm="MD5"',
        ))
    def decode(self, response):
        # Parse 'k="v", k2="v2", ...' into a dict, unquoting values.
        response = ' '.join(response.splitlines())
        parts = response.split(',')
        auth = dict([(k.strip(), unq(v.strip())) for (k, v) in [p.split('=', 1) for p in parts]])
        try:
            username = auth['username']
        except KeyError:
            raise SIPError(401)
        try:
            return DigestedCredentials(username, auth, self.outstanding)
        except:
            # NOTE(review): bare except around a constructor that is not
            # expected to raise; likely dead code.
            raise SIPError(400)
class RegisterProxy(Proxy):
    """A proxy that allows registration for a specific domain.
    Unregistered users won't be handled.
    """
    portal = None
    registry = None # should implement IRegistry
    # Authentication scheme name -> IAuthorizer, shared by all instances.
    authorizers = {
        'digest': DigestAuthorizer(),
    }
    def __init__(self, *args, **kw):
        Proxy.__init__(self, *args, **kw)
        self.liveChallenges = {}
    def handle_ACK_request(self, message, (host, port)):
        # XXX
        # ACKs are a client's way of indicating they got the last message
        # Responding to them is not a good idea.
        # However, we should keep track of terminal messages and re-transmit
        # if no ACK is received.
        pass
    def handle_REGISTER_request(self, message, (host, port)):
        """Handle a registration request.
        Currently registration is not proxied.
        """
        if self.portal is None:
            # There is no portal. Let anyone in.
            self.register(message, host, port)
        else:
            # There is a portal. Check for credentials.
            if not message.headers.has_key("authorization"):
                return self.unauthorized(message, host, port)
            else:
                return self.login(message, host, port)
    def unauthorized(self, message, host, port):
        # Reply 401 with one WWW-Authenticate challenge per scheme.
        m = self.responseFromRequest(401, message)
        for (scheme, auth) in self.authorizers.iteritems():
            chal = auth.getChallenge((host, port))
            if chal is None:
                value = '%s realm="%s"' % (scheme.title(), self.host)
            else:
                value = '%s %s,realm="%s"' % (scheme.title(), chal, self.host)
            m.headers.setdefault('www-authenticate', []).append(value)
        self.deliverResponse(m)
    def login(self, message, host, port):
        # Decode the Authorization header with the matching authorizer,
        # then authenticate through the portal.
        parts = message.headers['authorization'][0].split(None, 1)
        a = self.authorizers.get(parts[0].lower())
        if a:
            try:
                c = a.decode(parts[1])
            except SIPError:
                raise
            except:
                # Malformed credentials: log and answer 500.
                log.err()
                self.deliverResponse(self.responseFromRequest(500, message))
            else:
                c.username += '@' + self.host
                self.portal.login(c, None, IContact
                    ).addCallback(self._cbLogin, message, host, port
                    ).addErrback(self._ebLogin, message, host, port
                    ).addErrback(log.err
                    )
        else:
            # Unknown authentication scheme.
            self.deliverResponse(self.responseFromRequest(501, message))
    def _cbLogin(self, (i, a, l), message, host, port):
        # It's stateless, matey.  What a joke.
        self.register(message, host, port)
    def _ebLogin(self, failure, message, host, port):
        failure.trap(cred.error.UnauthorizedLogin)
        self.unauthorized(message, host, port)
    def register(self, message, host, port):
        """Allow all users to register"""
        name, toURL, params = parseAddress(message.headers["to"][0], clean=1)
        contact = None
        if message.headers.has_key("contact"):
            contact = message.headers["contact"][0]
        # Expires: 0 means "unregister".
        if message.headers.get("expires", [None])[0] == "0":
            self.unregister(message, toURL, contact)
        else:
            # XXX Check expires on appropriate URL, and pass it to registry
            # instead of having registry hardcode it.
            if contact is not None:
                name, contactURL, params = parseAddress(contact, host=host, port=port)
                d = self.registry.registerAddress(message.uri, toURL, contactURL)
            else:
                # No Contact header: query the current registration instead.
                d = self.registry.getRegistrationInfo(toURL)
            d.addCallbacks(self._cbRegister, self._ebRegister,
                callbackArgs=(message,),
                errbackArgs=(message,)
            )
    def _cbRegister(self, registration, message):
        # Answer 200 with the registered contact and remaining lifetime.
        response = self.responseFromRequest(200, message)
        if registration.contactURL != None:
            response.addHeader("contact", registration.contactURL.toString())
            response.addHeader("expires", "%d" % registration.secondsToExpiry)
        response.addHeader("content-length", "0")
        self.deliverResponse(response)
    def _ebRegister(self, error, message):
        error.trap(RegistrationError, LookupError)
        # XXX return error message, and alter tests to deal with
        # this, currently tests assume no message sent on failure
    def unregister(self, message, toURL, contact):
        try:
            expires = int(message.headers["expires"][0])
        except ValueError:
            self.deliverResponse(self.responseFromRequest(400, message))
        else:
            if expires == 0:
                # Contact "*" unbinds all contacts for this user.
                if contact == "*":
                    contactURL = "*"
                else:
                    name, contactURL, params = parseAddress(contact)
                d = self.registry.unregisterAddress(message.uri, toURL, contactURL)
                d.addCallback(self._cbUnregister, message
                    ).addErrback(self._ebUnregister, message
                    )
    def _cbUnregister(self, registration, message):
        msg = self.responseFromRequest(200, message)
        msg.headers.setdefault('contact', []).append(registration.contactURL.toString())
        msg.addHeader("expires", "0")
        self.deliverResponse(msg)
    def _ebUnregister(self, registration, message):
        # Failures to unregister are silently ignored.
        pass
class InMemoryRegistry:
    """A simplistic registry for a specific domain.

    Bindings live only in memory (lost on restart) and expire after a
    hard-coded 3600 seconds.
    """
    implements(IRegistry, ILocator)
    def __init__(self, domain):
        self.domain = domain # the domain we handle registration for
        self.users = {} # map username to (IDelayedCall for expiry, address URI)
    def getAddress(self, userURI):
        """Return a Deferred of the physical URL bound to userURI."""
        if userURI.host != self.domain:
            return defer.fail(LookupError("unknown domain"))
        # 'in' instead of the Python 2-only dict.has_key().
        if userURI.username in self.users:
            dc, url = self.users[userURI.username]
            return defer.succeed(url)
        else:
            return defer.fail(LookupError("no such user"))
    def getRegistrationInfo(self, userURI):
        """Return a Deferred Registration describing the user's binding."""
        if userURI.host != self.domain:
            return defer.fail(LookupError("unknown domain"))
        if userURI.username in self.users:
            dc, url = self.users[userURI.username]
            # Remaining lifetime comes from the pending expiry call.
            return defer.succeed(Registration(int(dc.getTime() - time.time()), url))
        else:
            return defer.fail(LookupError("no such user"))
    def _expireRegistration(self, username):
        """Drop the user's binding, cancelling its pending expiry call."""
        try:
            dc, url = self.users[username]
        except KeyError:
            return defer.fail(LookupError("no such user"))
        else:
            dc.cancel()
            del self.users[username]
            return defer.succeed(Registration(0, url))
    def registerAddress(self, domainURL, logicalURL, physicalURL):
        """Bind logicalURL to physicalURL for one hour, refreshing the
        expiry timer of any existing binding."""
        if domainURL.host != self.domain:
            log.msg("Registration for domain we don't handle.")
            return defer.fail(RegistrationError(404))
        if logicalURL.host != self.domain:
            log.msg("Registration for domain we don't handle.")
            return defer.fail(RegistrationError(404))
        if logicalURL.username in self.users:
            # Refresh the existing timer rather than scheduling a second one.
            dc, old = self.users[logicalURL.username]
            dc.reset(3600)
        else:
            dc = reactor.callLater(3600, self._expireRegistration, logicalURL.username)
        log.msg("Registered %s at %s" % (logicalURL.toString(), physicalURL.toString()))
        self.users[logicalURL.username] = (dc, physicalURL)
        return defer.succeed(Registration(int(dc.getTime() - time.time()), physicalURL))
    def unregisterAddress(self, domainURL, logicalURL, physicalURL):
        """Remove the binding for logicalURL (physicalURL is ignored)."""
        return self._expireRegistration(logicalURL.username)
|
Donkyhotay/MoonPy
|
twisted/protocols/sip.py
|
Python
|
gpl-3.0
| 41,973 | 0.003502 |
from django.db import models
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _
from jsonfield import JSONField
from model_utils import Choices
from model_utils.models import TimeStampedModel
from crate.web.packages.models import Package, Release, ReleaseFile
class Event(TimeStampedModel):
    """
    A history record of a packaging action (package/release/file events).
    Timestamps (created/modified) come from TimeStampedModel.
    NOTE(review): ACTIONS declares file_add, but no visible signal handler
    ever creates it -- confirm whether it is emitted elsewhere.
    """
    ACTIONS = Choices(
        ("package_create", _("Package Created")),
        ("package_delete", _("Package Deleted")),
        ("release_create", _("Release Created")),
        ("release_delete", _("Release Deleted")),
        ("file_add", _("File Added")),
        ("file_remove", _("File Removed")),
    )
    # Package name stored as a slug, not a ForeignKey, so history outlives
    # package deletion.
    package = models.SlugField(max_length=150)
    version = models.CharField(max_length=512, blank=True)
    action = models.CharField(max_length=25, choices=ACTIONS)
    # Optional free-form payload (e.g. file details for file_remove events).
    data = JSONField(null=True, blank=True)
@receiver(post_save, sender=Package)
def history_package_create(instance, created, **kwargs):
    """Record a package_create event when a Package row is first saved."""
    if not created:
        return
    Event.objects.create(
        package=instance.name,
        action=Event.ACTIONS.package_create,
    )
@receiver(post_delete, sender=Package)
def history_package_delete(instance, **kwargs):
    """Record a package_delete event whenever a Package row is deleted."""
    Event.objects.create(
        package=instance.name,
        action=Event.ACTIONS.package_delete
    )
@receiver(post_save, sender=Release)
def history_release_update(instance, created, **kwargs):
    """
    Record history events for a Release.

    A newly created Release logs release_create.  A change to the
    ``hidden`` flag logs release_delete (hidden) or release_create
    (visible again).
    """
    def _log(action):
        # All events for this release share the same package/version; the
        # repeated Event.objects.create blocks are collapsed into this
        # helper.
        Event.objects.create(
            package=instance.package.name,
            version=instance.version,
            action=action,
        )
    if created:
        _log(Event.ACTIONS.release_create)
    if instance.has_changed("hidden"):
        if instance.hidden:
            _log(Event.ACTIONS.release_delete)
        else:
            _log(Event.ACTIONS.release_create)
@receiver(post_save, sender=ReleaseFile)
def history_releasefile_update(instance, created, **kwargs):
    """Record a file_remove event when a ReleaseFile becomes hidden.
    NOTE(review): only the hidden transition is logged; nothing emits
    file_add on creation or un-hiding -- confirm that is intended.
    """
    e = None
    if instance.has_changed("hidden"):
        if instance.hidden:
            e = Event.objects.create(
                package=instance.release.package.name,
                version=instance.release.version,
                action=Event.ACTIONS.file_remove
            )
    if e is not None:
        # Attach file details to the event.  get_absolute_url can raise
        # ValueError -- presumably when the file has no storage path, TODO
        # confirm -- in which case the event is kept without data.
        try:
            e.data = {
                "filename": instance.filename,
                "digest": instance.digest,
                "uri": instance.get_absolute_url(),
            }
        except ValueError:
            pass
        else:
            e.save()
|
crateio/crate.web
|
crate/web/history/models.py
|
Python
|
bsd-2-clause
| 2,810 | 0 |
"""
Testing for enumerate_param, enumerate_params, and enumerate_keyed_param
"""
import unittest
import mws
# pylint: disable=invalid-name
class TestParamsRaiseExceptions(unittest.TestCase):
    """
    Assert that improper input to `utils.enumerate_keyed_param`
    raises ValueError.
    """
    def test_keyed_param_fails_without_dict(self):
        """
        Non-dict entries in `values` must raise ValueError.
        """
        bad_values = ["this is not a dict like it should be!"]
        self.assertRaises(ValueError, mws.utils.enumerate_keyed_param,
                          "something", bad_values)
def test_single_param_default():
    """
    Each enumeration helper should return an empty dict for empty input.
    """
    expected = {}
    # Keyed
    assert mws.utils.enumerate_keyed_param("acorn", []) == expected
    # Single
    assert mws.utils.enumerate_param("something", []) == expected
    # Multi
    assert mws.utils.enumerate_params("antler") == expected
    assert mws.utils.enumerate_params() == expected
def test_single_param_not_dotted_list_values():
    """
    A param string with no dot at the end and a sequence of ints.
    Values must be enumerated 1-based, in order, with the dot inserted.
    """
    values = (123, 765, 3512, 756437, 3125)
    result = mws.utils.enumerate_param("SomethingOrOther", values)
    expected = {
        "SomethingOrOther.%d" % (index + 1): value
        for index, value in enumerate(values)
    }
    assert result == expected
def test_single_param_dotted_single_value():
    """
    A param string with a dot at the end and a single string value.
    Values that are not list, tuple, or set should coerce to a list and provide a single output.
    """
    param = "FooBar."
    values = "eleven"
    result = mws.utils.enumerate_param(param, values)
    # The existing trailing dot is not doubled; the value gets index 1.
    assert result == {
        "FooBar.1": "eleven",
    }
def test_multi_params():
    """
    A series of params sent as a list of dicts to enumerate_params.
    Each param should generate a unique set of keys and values.
    Final result should be a flat dict.
    """
    # Tuple of values, no trailing dot on the param.
    param1 = "Summat."
    values1 = ("colorful", "cheery", "turkey")
    # Single (non-sequence) value.
    param2 = "FooBaz.what"
    values2 = "singular"
    # List of values.
    param3 = "hot_dog"
    values3 = ["something", "or", "other"]
    # We could test with values as a set, but we cannot be 100% of the order of the output,
    # and I don't feel it necessary to flesh this out enough to account for it.
    result = mws.utils.enumerate_params({
        param1: values1,
        param2: values2,
        param3: values3,
    })
    assert result == {
        "Summat.1": "colorful",
        "Summat.2": "cheery",
        "Summat.3": "turkey",
        "FooBaz.what.1": "singular",
        "hot_dog.1": "something",
        "hot_dog.2": "or",
        "hot_dog.3": "other",
    }
def test_keyed_params():
    """
    Asserting the result through enumerate_keyed_param is as expected.
    """
    # Example:
    # param = "InboundShipmentPlanRequestItems.member"
    # values = [
    #     {'SellerSKU': 'Football2415',
    #      'Quantity': 3},
    #     {'SellerSKU': 'TeeballBall3251',
    #      'Quantity': 5},
    #     ...
    # ]
    # Returns:
    # {
    #     'InboundShipmentPlanRequestItems.member.1.SellerSKU': 'Football2415',
    #     'InboundShipmentPlanRequestItems.member.1.Quantity': 3,
    #     'InboundShipmentPlanRequestItems.member.2.SellerSKU': 'TeeballBall3251',
    #     'InboundShipmentPlanRequestItems.member.2.Quantity': 5,
    #     ...
    # }
    param = "AthingToKeyUp.member"
    # Dicts need not share keys; each contributes its own sub-keys
    # under its 1-based item index.
    item1 = {
        "thing": "stuff",
        "foo": "baz",
    }
    item2 = {
        "thing": 123,
        "foo": 908,
        "bar": "hello",
    }
    item3 = {
        "stuff": "foobarbazmatazz",
        "stuff2": "foobarbazmatazz5",
    }
    result = mws.utils.enumerate_keyed_param(param, [item1, item2, item3])
    assert result == {
        "AthingToKeyUp.member.1.thing": "stuff",
        "AthingToKeyUp.member.1.foo": "baz",
        "AthingToKeyUp.member.2.thing": 123,
        "AthingToKeyUp.member.2.foo": 908,
        "AthingToKeyUp.member.2.bar": "hello",
        "AthingToKeyUp.member.3.stuff": "foobarbazmatazz",
        "AthingToKeyUp.member.3.stuff2": "foobarbazmatazz5",
    }
|
Bobspadger/python-amazon-mws
|
tests/test_param_methods.py
|
Python
|
unlicense
| 4,345 | 0.000921 |
from main import Board
import collections
import unittest
class ClassicBoardTests(unittest.TestCase):
    def test_tile_iterator(self):
        """A non-randomized board exposes the classic number sequence and
        the classic terrain mix through its tile iterator."""
        board = Board({
            'randomize_production': False,
            'randomize_ports': False})
        self.assertEqual([tile.value for tile in board.tiles if tile.value],
                         board._numbers)
        terrain_counts = collections.Counter(
            tile.terrain for tile in board.tiles)
        for terrain, expected in (('F', 4), ('P', 4), ('H', 4),
                                  ('M', 3), ('C', 3), ('D', 1)):
            self.assertEqual(terrain_counts[terrain], expected)
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
|
fruitnuke/catan
|
tests.py
|
Python
|
gpl-3.0
| 714 | 0.001401 |
#! /usr/python
'''
///////////////////////////////////////////////////////////
// Permission is hereby granted, free of charge,
// to any person obtaining a copy of
// this software and associated documentation files
// (the "Software"), to deal in the Software without
// restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute,
// sublicense, and/or sell copies of the Software, and
// to permit persons to whom the Software is furnished
// to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice
// shall be included in all copies or substantial portions
// of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
'''
__author__ = 'RobertIan'
__version__ = '0.2.5'
import argparse
import pygame
import picamera
import time
import datetime
import netifaces
import RPi.GPIO as GPIO
import os.path
import sys
import select
import os
class Trial:
    """One stimulus-presentation trial for an individual fish.

    Drives the fullscreen stimulus display, the optional Pi camera
    recording, and the GPIO-controlled feeder for a single session.
    """

    def __init__(self, stim, starttime, feedornot):
        """Set up display, stimulus, timing and feeder GPIO.

        stim      -- stimulus filename (resolved in the src folder)
        starttime -- epoch time at which the trial should begin
        feedornot -- 'feed' to drive the real feeder, 'notfeed' for the
                     dummy feeder pins; anything else aborts the trial
        """
        ## initialize fullscreen display, hide the mouse cursor
        pygame.display.init()
        pygame.mouse.set_visible(False)
        self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
        ## assign stimulus
        self.stimulus = stim
        ## timing
        self.start = float(starttime)
        self.tLength = 4 * 60  # four minute trial
        self.feedDelay = 30  # thirty second delay before feeding allowed
        ## GPIO setup (Broadcom pin numbering)
        # NOTE(review): the pin numbers below were marked '????' by the
        # original author -- verify them against the actual wiring.
        GPIO.setmode(GPIO.BCM)
        self.feeder = 17
        self.notfeeder = 5
        self.feederin = 26
        self.notfeederin = 25
        if feedornot == 'feed':
            # real feeder: input pin 26 (default) / output pin 17
            self.feederout = self.feeder
        elif feedornot == 'notfeed':
            # dummy feeder: input pin 25 / output pin 5
            self.feederin = self.notfeederin
            self.feederout = self.notfeeder
        else:
            ## currently a print, should be changed to send a message to
            ## the client
            print('feeder not assigned')
            self.safeQuit()
        GPIO.setup(self.feederin, GPIO.IN)
        GPIO.add_event_detect(self.feederin, GPIO.RISING)
        GPIO.setup(self.feederout, GPIO.OUT)
        GPIO.output(self.feederout, True)  # feeder idles high (off)

    def checkPiIP(self):
        """Query this Pi's eth0 IPv4 address and store it in self.ip."""
        self.ip = netifaces.ifaddresses('eth0')[2][0]['addr']

    def whatStimulus(self):
        """Locate the stimulus in the src folder and load it as an image."""
        self.stim, extension = os.path.splitext(self.stimulus)
        if extension in ('.png', '.PNG', '.jpg', '.JPG'):
            ## still image
            # NOTE(review): the path below is hard-coded to 10.png and
            # ignores self.stimulus -- confirm whether that is intended.
            try:
                self.image = pygame.image.load(
                    '/home/pi/ethoStim/individualtesting/src/10.png')
            except IOError:
                ## currently a print, should be changed to send a message to
                ## the client
                print('are you sure this file exists? check the src folder '
                      'only jpg/JPG, png/PNG formats')
                self.safeQuit()

    def cameraInit(self):
        """Create and configure the Pi camera (adjust settings here)."""
        self.camera = picamera.PiCamera()
        self.camera.resolution = (1920, 1080)
        self.camera.framerate = 30
        self.camera.autofocus = False
        self.camera.awb_mode = 'fluorescent'

    def videoFileName(self, species, tround, sl, sex, fishid, day, session,
                      conditionside):
        """Build the output video path; adjust naming convention here."""
        self.vidout = ('data/' + str(self.ip) + '/' + (str(species) + '_' +
                       str(tround) + '_' + str(sl) + '_' + str(sex) + '_' +
                       str(fishid) + '_' + str(day) + '_' + str(session) +
                       '_' + str(self.stim) + '_' + str(conditionside)))

    def startRecording(self):
        """Begin recording to the file chosen by videoFileName()."""
        self.camera.start_recording(self.vidout + '.h264')  # output video

    def stopRecording(self):
        """Stop the camera recording."""
        self.camera.stop_recording()

    def cameraQuit(self):
        """Release the camera."""
        self.camera.close()

    def safeQuit(self):
        """Reset feeder outputs, release GPIO and pygame, and exit."""
        GPIO.output(self.feeder, True)  # reset feeder
        GPIO.output(self.notfeeder, True)  # reset dummy feeder
        GPIO.cleanup()  # reset all GPIOs
        pygame.quit()
        exit()

    def mainLoop(self, camera):
        """Run the trial.

        Waits until the assigned start time, optionally starts recording
        (camera == 'record'), then shows the stimulus for tLength seconds
        while allowing a single feeder activation after feedDelay.
        """
        ## hang until assigned start time
        while time.time() < self.start:
            print(time.time() - self.start)
        ## start timer
        self.startT = time.time()
        fed = False  # becomes True once the feeder has fired
        ## start recording
        if camera == 'record':
            # BUG FIX: was 'selft.startRecording()' (NameError at runtime)
            self.startRecording()
        elif camera == 'notrecord':
            pass
        ## display stimulus / main loop
        while (time.time() - self.startT) < self.tLength:
            pygame.display.flip()
            self.screen.blit(self.image, (250, 100))  # stimulus position
            ## after the initial delay, fire the feeder at most once when
            ## a rising edge is detected on the input pin
            try:
                if (time.time() - self.startT) > self.feedDelay:
                    if fed:
                        pass
                    elif GPIO.event_detected(self.feederin):
                        time.sleep(1.0)
                        GPIO.output(self.feederout, True)
                        fed = True
                    else:
                        GPIO.output(self.feederout, False)
            except KeyboardInterrupt:
                self.safeQuit()
if __name__ == '__main__':
    ## load in command line arguments
    ap = argparse.ArgumentParser()
    ap.add_argument("-f", "--fish", help="ID of fish in tank")
    ap.add_argument("-ts", "--trainedStim", help="numerosity stimulus the individual is being trained to, e.g. 12")
    ap.add_argument("-ps", "--presentedStim", help="stimulus being presented with this raspberry pi")
    ap.add_argument("-d", "--day", help="experiment day, e.g. 1-7")
    ap.add_argument("-s", "--session", help="trial session, e.g. 1-4")
    ap.add_argument("-fs", "--fedSide", help="side(self.ip feed on/conditioned side")
    ap.add_argument("-x", "--sex", help="fish sex")
    ap.add_argument("-p", "--proportion", help="training ratio")
    ap.add_argument("-sp", "--species", help="species name")
    ap.add_argument("-sl", "--fishstandardlength", help="standard length of the")
    ap.add_argument("-r", "--round", help="training round")
    ap.add_argument("-fd", "--feed", help="feed with this stimulus", action="store_true")
    ap.add_argument("-c", "--camera", help="do you want to record using this pi?", action="store_true")
    # NOTE(review): short flag was "-m:" in the original -- kept as-is to
    # preserve the CLI, but it is probably a typo for "-m".
    ap.add_argument("-m:", "--startTime", help="time since epoch that you want to start your trial")
    args = vars(ap.parse_args())
    ## parse trial details and pass them to the Trial class
    # BUG FIX: 'args.["feed"]' and a stray closing paren were syntax errors.
    if args["feed"]:
        T = Trial(args["presentedStim"], args["startTime"], 'feed')
    else:
        T = Trial(args["presentedStim"], args["startTime"], 'notfeed')
    T.checkPiIP()
    T.whatStimulus()
    T.videoFileName(args["species"], args["round"], args["fishstandardlength"],
                    args["sex"], args["fish"], args["day"], args["session"],
                    args["fedSide"])
    ## initialize camera IF attached to Pi
    if args["camera"]:
        T.cameraInit()
    ## run the trial, recording IF a camera is attached
    if args["camera"]:
        T.mainLoop('record')
    else:
        T.mainLoop('notrecord')
    ## stop recording and release the camera IF attached
    if args["camera"]:
        T.stopRecording()
        T.cameraQuit()
    ## cleanup remaining processes and exit
    T.safeQuit()
|
RobertIan/ethoStim
|
individualtesting/trial.py
|
Python
|
mit
| 7,935 | 0.008696 |
import os
# Read the SBML model document that sits next to this module.
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'MODEL1310160000.xml')
with open(sbmlFilePath,'r') as f:
    sbmlString = f.read()  # full SBML XML source as a string
def module_exists(module_name):
    """Return True if *module_name* can be imported, False otherwise."""
    try:
        __import__(module_name)
    except ImportError:
        return False
    return True
# Parse the model only when libsbml is installed; otherwise the module
# still imports and `sbml` is simply left undefined.
if module_exists('libsbml'):
    import libsbml
    sbml = libsbml.readSBMLFromString(sbmlString)
|
biomodels/MODEL1310160000
|
MODEL1310160000/model.py
|
Python
|
cc0-1.0
| 427 | 0.009368 |
def extractImpatientmtlreader533234643WordpressCom(item):
    '''
    Parser for 'impatientmtlreader533234643.wordpress.com'
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # No chapter/volume info, or an explicit preview -> not a release.
    if not (chp or vol) or "preview" in item['title'].lower():
        return None
    # (tag to look for, release name, translation type)
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'),
                                   ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)
    return False
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractImpatientmtlreader533234643WordpressCom.py
|
Python
|
bsd-3-clause
| 594 | 0.031987 |
#
# cocos2d
# http://python.cocos2d.org
#
from __future__ import division, print_function, unicode_literals
# This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
import cocos
from cocos.director import director
from cocos.actions import *
from cocos.layer import *
from cocos.sprite import Sprite
class TestLayer(cocos.layer.Layer):
    """Layer with three grossini sprites spinning in place."""
    def __init__(self):
        super(TestLayer, self).__init__()
        width, height = director.get_window_size()
        grossini = Sprite('grossini.png')
        sister1 = Sprite('grossinis_sister1.png')
        sister2 = Sprite('grossinis_sister2.png')
        grossini.position = (width // 2, height // 2)
        sister1.position = (width // 4, height // 2)
        sister2.position = (3 * width / 4.0, height // 2)
        # z-order: sister1 below grossini below sister2
        for sprite in (sister1, grossini, sister2):
            self.add(sprite)
        # 16 full turns, one second each; grossini spins the other way.
        grossini.do(RotateBy(360, 1) * 16)
        sister1.do(RotateBy(-360, 1) * 16)
        sister2.do(RotateBy(-360, 1) * 16)
if __name__ == "__main__":
director.init(resizable=True)
main_scene = cocos.scene.Scene()
main_scene.transform_anchor = (320, 240)
child1_scene = cocos.scene.Scene()
child2_scene = cocos.scene.Scene()
child3_scene = cocos.scene.Scene()
child4_scene = cocos.scene.Scene()
sprites = TestLayer()
sprites.transform_anchor = 320, 240
child1_scene.add(ColorLayer(0, 0, 255, 255))
child1_scene.add(sprites)
child1_scene.scale = 1.5
child1_scene.position = (-160, -120)
child1_scene.transform_anchor = (320, 240)
child2_scene.add(ColorLayer(0, 255, 0, 255))
child2_scene.add(sprites)
child2_scene.scale = 1.5
child2_scene.position = (160, 120)
child2_scene.transform_anchor = (320, 240)
child3_scene.add(ColorLayer(255, 0, 0, 255))
child3_scene.add(sprites)
child3_scene.scale = 1.5
child3_scene.position = (-160, 120)
child3_scene.transform_anchor = (320, 240)
child4_scene.add(ColorLayer(255, 255, 255, 255))
child4_scene.add(sprites)
child4_scene.scale = 1.5
child4_scene.position = (160, -120)
child4_scene.transform_anchor = (320, 240)
main_scene.add(child1_scene)
main_scene.add(child2_scene)
main_scene.add(child3_scene)
main_scene.add(child4_scene)
rot = RotateBy(-360, 2)
rot2 = RotateBy(360, 4)
sleep = Delay(2)
sleep2 = Delay(2)
sc1 = ScaleTo(0.5, 0.5) + Delay(1.5)
sc2 = Delay(0.5) + ScaleTo(0.5, 0.5) + Delay(1.0)
sc3 = Delay(1.0) + ScaleTo(0.5, 0.5) + Delay(0.5)
sc4 = Delay(1.5) + ScaleTo(0.5, 0.5)
child1_scene.do(sc4 + sleep + rot + sleep + rot + rot)
child2_scene.do(sc3 + sleep + rot + sleep + rot + Reverse(rot))
child3_scene.do(sc2 + sleep + rot + sleep + rot + Reverse(rot))
child4_scene.do(sc1 + sleep + rot + sleep + rot + rot)
main_scene.do(sleep + Reverse(rot) * 2 + rot * 2 + sleep)
sprites.do(Delay(4) + rot2 * 3)
director.run(main_scene)
|
dangillet/cocos
|
samples/demo_multiple_scenes.py
|
Python
|
bsd-3-clause
| 3,011 | 0.001993 |
#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
def run(rql):
    """Execute a RethinkDB query, returning None on runtime errors."""
    try:
        result = rql.run()
    except r.RqlRuntimeError:
        return None
    return result
def main(port, include_deleted):
    # Report top-level data directories whose name does not match their
    # project's name.  Rows owned by the delete@ account are skipped
    # unless include_deleted is True.
    conn = r.connect('localhost', port, db='materialscommons')
    # Join project->datadir links with both tables; the merge preserves
    # the datadir name as 'name2' before zip() flattens (zip would
    # otherwise let the project 'name' clobber it).
    cursor = r.table('project2datadir') \
        .eq_join('datadir_id', r.table('datadirs')) \
        .merge({
            'right': {
                'name2': r.row['right']['name']
            }
        }).zip() \
        .eq_join('project_id', r.table('projects')).zip() \
        .run(conn)
    for doc in cursor:
        project_name = doc['name']
        dir_name = doc['name2']
        owner = doc['owner']
        if (owner == 'delete@materialscommons.org') and not include_deleted:
            continue
        # No '/' in the name means this is a top-level directory.
        if len(dir_name.split('/')) == 1:
            if not project_name == dir_name:
                print("Project '{}'({})".format(project_name, doc['project_id']))
                print(" -> dir '{}'({})".format(dir_name, doc['datadir_id']))
                print(" project owner = {}".format(owner))
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int", help="rethinkdb port", default=30815)
parser.add_option("-I", "--include-deleted", dest="incd", action="store_true", help="include deleted files", default=False)
(options, args) = parser.parse_args()
include_deleted = options.incd
port = options.port
print("Using database port = {}".format(port))
if include_deleted:
print("Including deleted files in search")
else:
print("Excluding deleted files from search")
main(port, include_deleted)
|
materials-commons/materialscommons.org
|
backend/scripts/admin/check_for_top_dir.py
|
Python
|
mit
| 1,695 | 0.00295 |
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
from oslo_config import cfg
import webob
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import block_device_mapping as \
block_device_mapping_v21
from nova.api.openstack.compute.plugins.v3 import multiple_create as \
multiple_create_v21
from nova.api.openstack.compute.plugins.v3 import servers as servers_v21
from nova.api.openstack.compute import servers as servers_v20
from nova.api.openstack import extensions as extensions_v20
from nova.compute import api as compute_api
from nova.compute import flavors
from nova import db
from nova import exception
from nova.network import manager
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
from nova.tests.unit.image import fake
CONF = cfg.CONF
FAKE_UUID = fakes.FAKE_UUID
def fake_gen_uuid():
    # Stub for uuid.uuid4: always return the canned test UUID.
    return FAKE_UUID
def return_security_group(context, instance_id, security_group_id):
    # Stub for db.instance_add_security_group: deliberately a no-op.
    pass
class MultiCreateExtensionTestV21(test.TestCase):
    """Tests for the os-multiple-create server-create extension (v2.1 API):
    min_count/max_count validation and reservation-id behavior."""
    # Exception type raised on schema-validation failures (differs in v2.0).
    validation_error = exception.ValidationError
    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(MultiCreateExtensionTestV21, self).setUp()
        self.flags(verbose=True,
                   enable_instance_password=True)
        self.instance_cache_num = 0
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers_v21.ServersController(
            extension_info=ext_info)
        # Second controller with os-multiple-create blacklisted, to test
        # behavior when the extension is disabled.
        CONF.set_override('extensions_blacklist', 'os-multiple-create',
                          'osapi_v3')
        self.no_mult_create_controller = servers_v21.ServersController(
            extension_info=ext_info)
        # --- DB-layer stubs backing the fake instance store below ---
        def instance_create(context, inst):
            inst_type = flavors.get_flavor_by_flavor_id(3)
            image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
            def_image_ref = 'http://localhost/images/%s' % image_uuid
            self.instance_cache_num += 1
            instance = fake_instance.fake_db_instance(**{
                'id': self.instance_cache_num,
                'display_name': inst['display_name'] or 'test',
                'uuid': FAKE_UUID,
                'instance_type': inst_type,
                'access_ip_v4': '1.2.3.4',
                'access_ip_v6': 'fead::1234',
                'image_ref': inst.get('image_ref', def_image_ref),
                'user_id': 'fake',
                'project_id': 'fake',
                'reservation_id': inst['reservation_id'],
                "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
                "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
                "progress": 0,
                "fixed_ips": [],
                "task_state": "",
                "vm_state": "",
                "security_groups": inst['security_groups'],
            })
            self.instance_cache_by_id[instance['id']] = instance
            self.instance_cache_by_uuid[instance['uuid']] = instance
            return instance
        def instance_get(context, instance_id):
            """Stub for compute/api create() pulling in instance after
            scheduling
            """
            return self.instance_cache_by_id[instance_id]
        def instance_update(context, uuid, values):
            instance = self.instance_cache_by_uuid[uuid]
            instance.update(values)
            return instance
        def server_update(context, instance_uuid, params, update_cells=True,
                          columns_to_join=None):
            inst = self.instance_cache_by_uuid[instance_uuid]
            inst.update(params)
            return (inst, inst)
        def fake_method(*args, **kwargs):
            pass
        def project_get_networks(context, user_id):
            return dict(id='1', host='localhost')
        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self.stubs)
        fakes.stub_out_nw_api(self.stubs)
        self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
        self.stubs.Set(db, 'instance_add_security_group',
                       return_security_group)
        self.stubs.Set(db, 'project_get_networks',
                       project_get_networks)
        self.stubs.Set(db, 'instance_create', instance_create)
        self.stubs.Set(db, 'instance_system_metadata_update',
                       fake_method)
        self.stubs.Set(db, 'instance_get', instance_get)
        self.stubs.Set(db, 'instance_update', instance_update)
        self.stubs.Set(db, 'instance_update_and_get_original',
                       server_update)
        self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
                       fake_method)
        self.req = fakes.HTTPRequest.blank('')
    def _test_create_extra(self, params, no_image=False,
                           override_controller=None):
        # Helper: build a minimal create-server body, merge in `params`,
        # and POST it through the chosen controller.
        image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
        server = dict(name='server_test', imageRef=image_uuid, flavorRef=2)
        if no_image:
            server.pop('imageRef', None)
        server.update(params)
        body = dict(server=server)
        if override_controller:
            server = override_controller.create(self.req,
                                                body=body).obj['server']
        else:
            server = self.controller.create(self.req,
                                            body=body).obj['server']
    def _check_multiple_create_extension_disabled(self, **kwargs):
        # NOTE: on v2.1 API, "create a server" API doesn't add the following
        # attributes into kwargs when non-loading multiple_create extension.
        # However, v2.0 API adds them as values "1" instead. So we need to
        # define checking methods for each API here.
        self.assertNotIn('min_count', kwargs)
        self.assertNotIn('max_count', kwargs)
    def test_create_instance_with_multiple_create_disabled(self):
        min_count = 2
        max_count = 3
        params = {
            multiple_create_v21.MIN_ATTRIBUTE_NAME: min_count,
            multiple_create_v21.MAX_ATTRIBUTE_NAME: max_count,
        }
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self._check_multiple_create_extension_disabled(**kwargs)
            return old_create(*args, **kwargs)
        self.stubs.Set(compute_api.API, 'create', create)
        self._test_create_extra(
            params,
            override_controller=self.no_mult_create_controller)
    def test_multiple_create_with_string_type_min_and_max(self):
        # String counts must be coerced to int by the API layer.
        min_count = '2'
        max_count = '3'
        params = {
            multiple_create_v21.MIN_ATTRIBUTE_NAME: min_count,
            multiple_create_v21.MAX_ATTRIBUTE_NAME: max_count,
        }
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertIsInstance(kwargs['min_count'], int)
            self.assertIsInstance(kwargs['max_count'], int)
            self.assertEqual(kwargs['min_count'], 2)
            self.assertEqual(kwargs['max_count'], 3)
            return old_create(*args, **kwargs)
        self.stubs.Set(compute_api.API, 'create', create)
        self._test_create_extra(params)
    def test_create_instance_with_multiple_create_enabled(self):
        min_count = 2
        max_count = 3
        params = {
            multiple_create_v21.MIN_ATTRIBUTE_NAME: min_count,
            multiple_create_v21.MAX_ATTRIBUTE_NAME: max_count,
        }
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['min_count'], 2)
            self.assertEqual(kwargs['max_count'], 3)
            return old_create(*args, **kwargs)
        self.stubs.Set(compute_api.API, 'create', create)
        self._test_create_extra(params)
    def test_create_instance_invalid_negative_min(self):
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MIN_ATTRIBUTE_NAME: -1,
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
            }
        }
        self.assertRaises(self.validation_error,
                          self.controller.create,
                          self.req,
                          body=body)
    def test_create_instance_invalid_negative_max(self):
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MAX_ATTRIBUTE_NAME: -1,
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
            }
        }
        self.assertRaises(self.validation_error,
                          self.controller.create,
                          self.req,
                          body=body)
    def test_create_instance_with_blank_min(self):
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MIN_ATTRIBUTE_NAME: '',
                'name': 'server_test',
                'image_ref': image_href,
                'flavor_ref': flavor_ref,
            }
        }
        self.assertRaises(self.validation_error,
                          self.controller.create,
                          self.req,
                          body=body)
    def test_create_instance_with_blank_max(self):
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MAX_ATTRIBUTE_NAME: '',
                'name': 'server_test',
                'image_ref': image_href,
                'flavor_ref': flavor_ref,
            }
        }
        self.assertRaises(self.validation_error,
                          self.controller.create,
                          self.req,
                          body=body)
    def test_create_instance_invalid_min_greater_than_max(self):
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MIN_ATTRIBUTE_NAME: 4,
                multiple_create_v21.MAX_ATTRIBUTE_NAME: 2,
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
            }
        }
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req,
                          body=body)
    def test_create_instance_invalid_alpha_min(self):
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MIN_ATTRIBUTE_NAME: 'abcd',
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
            }
        }
        self.assertRaises(self.validation_error,
                          self.controller.create,
                          self.req,
                          body=body)
    def test_create_instance_invalid_alpha_max(self):
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MAX_ATTRIBUTE_NAME: 'abcd',
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
            }
        }
        self.assertRaises(self.validation_error,
                          self.controller.create,
                          self.req,
                          body=body)
    def test_create_multiple_instances(self):
        """Test creating multiple instances but not asking for
        reservation_id
        """
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MIN_ATTRIBUTE_NAME: 2,
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
                'metadata': {'hello': 'world',
                             'open': 'stack'},
            }
        }
        res = self.controller.create(self.req, body=body).obj
        self.assertEqual(FAKE_UUID, res["server"]["id"])
        self._check_admin_password_len(res["server"])
    def test_create_multiple_instances_pass_disabled(self):
        """Test creating multiple instances but not asking for
        reservation_id
        """
        self.flags(enable_instance_password=False)
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MIN_ATTRIBUTE_NAME: 2,
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
                'metadata': {'hello': 'world',
                             'open': 'stack'},
            }
        }
        res = self.controller.create(self.req, body=body).obj
        self.assertEqual(FAKE_UUID, res["server"]["id"])
        self._check_admin_password_missing(res["server"])
    def _check_admin_password_len(self, server_dict):
        """utility function - check server_dict for admin_password length."""
        self.assertEqual(CONF.password_length,
                         len(server_dict["adminPass"]))
    def _check_admin_password_missing(self, server_dict):
        """utility function - check server_dict for admin_password absence."""
        self.assertNotIn("admin_password", server_dict)
    def _create_multiple_instances_resv_id_return(self, resv_id_return):
        """Test creating multiple instances with asking for
        reservation_id
        """
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MIN_ATTRIBUTE_NAME: 2,
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
                'metadata': {'hello': 'world',
                             'open': 'stack'},
                multiple_create_v21.RRID_ATTRIBUTE_NAME: resv_id_return
            }
        }
        res = self.controller.create(self.req, body=body)
        reservation_id = res.obj['reservation_id']
        self.assertNotEqual(reservation_id, "")
        self.assertIsNotNone(reservation_id)
        self.assertTrue(len(reservation_id) > 1)
    def test_create_multiple_instances_with_resv_id_return(self):
        self._create_multiple_instances_resv_id_return(True)
    def test_create_multiple_instances_with_string_resv_id_return(self):
        self._create_multiple_instances_resv_id_return("True")
    def test_create_multiple_instances_with_multiple_volume_bdm(self):
        """Test that a BadRequest is raised if multiple instances
        are requested with a list of block device mappings for volumes.
        """
        min_count = 2
        bdm = [{'source_type': 'volume', 'uuid': 'vol-xxxx'},
               {'source_type': 'volume', 'uuid': 'vol-yyyy'}
               ]
        params = {
                  block_device_mapping_v21.ATTRIBUTE_NAME: bdm,
                  multiple_create_v21.MIN_ATTRIBUTE_NAME: min_count
                  }
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['min_count'], 2)
            self.assertEqual(len(kwargs['block_device_mapping']), 2)
            return old_create(*args, **kwargs)
        self.stubs.Set(compute_api.API, 'create', create)
        exc = self.assertRaises(webob.exc.HTTPBadRequest,
                                self._test_create_extra, params, no_image=True)
        self.assertEqual("Cannot attach one or more volumes to multiple "
                         "instances", exc.explanation)
    def test_create_multiple_instances_with_single_volume_bdm(self):
        """Test that a BadRequest is raised if multiple instances
        are requested to boot from a single volume.
        """
        min_count = 2
        bdm = [{'source_type': 'volume', 'uuid': 'vol-xxxx'}]
        params = {
                 block_device_mapping_v21.ATTRIBUTE_NAME: bdm,
                 multiple_create_v21.MIN_ATTRIBUTE_NAME: min_count
                 }
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['min_count'], 2)
            self.assertEqual(kwargs['block_device_mapping'][0]['volume_id'],
                            'vol-xxxx')
            return old_create(*args, **kwargs)
        self.stubs.Set(compute_api.API, 'create', create)
        exc = self.assertRaises(webob.exc.HTTPBadRequest,
                                self._test_create_extra, params, no_image=True)
        self.assertEqual("Cannot attach one or more volumes to multiple "
                         "instances", exc.explanation)
    def test_create_multiple_instance_with_non_integer_max_count(self):
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MAX_ATTRIBUTE_NAME: 2.5,
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
                'metadata': {'hello': 'world',
                             'open': 'stack'},
            }
        }
        self.assertRaises(self.validation_error,
                          self.controller.create, self.req, body=body)
    def test_create_multiple_instance_with_non_integer_min_count(self):
        image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                multiple_create_v21.MIN_ATTRIBUTE_NAME: 2.5,
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
                'metadata': {'hello': 'world',
                             'open': 'stack'},
            }
        }
        self.assertRaises(self.validation_error,
                          self.controller.create, self.req, body=body)
class MultiCreateExtensionTestV2(MultiCreateExtensionTestV21):
    # The v2 API reports validation failures as HTTPBadRequest rather than
    # a jsonschema validation error.
    validation_error = webob.exc.HTTPBadRequest

    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(MultiCreateExtensionTestV2, self).setUp()
        self.flags(verbose=True, enable_instance_password=True)
        self.instance_cache_num = 0
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}
        fakes.stub_out_nw_api(self.stubs)
        # Controller with the multiple-create extension enabled...
        self.ext_mgr = extensions_v20.ExtensionManager()
        self.ext_mgr.extensions = {
            'os-volumes': 'fake',
            'os-multiple-create': 'fake',
            'os-block-device-mapping-v2-boot': 'fake'
        }
        self.controller = servers_v20.Controller(self.ext_mgr)
        # ...and one without it, for the "extension disabled" checks.
        no_mult_ext_mgr = extensions_v20.ExtensionManager()
        no_mult_ext_mgr.extensions = {
            'os-volumes': 'fake',
            'os-block-device-mapping-v2-boot': 'fake'
        }
        self.no_mult_create_controller = servers_v20.Controller(
            no_mult_ext_mgr)

        def instance_create(context, inst):
            # Fake db.instance_create: build a plausible DB record and cache
            # it so the instance_get stub below can serve it back.
            inst_type = flavors.get_flavor_by_flavor_id(3)
            image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
            def_image_ref = 'http://localhost/images/%s' % image_uuid
            self.instance_cache_num += 1
            db_record = fake_instance.fake_db_instance(**{
                'id': self.instance_cache_num,
                'display_name': inst['display_name'] or 'test',
                'uuid': FAKE_UUID,
                'instance_type': inst_type,
                'access_ip_v4': '1.2.3.4',
                'access_ip_v6': 'fead::1234',
                'image_ref': inst.get('image_ref', def_image_ref),
                'user_id': 'fake',
                'project_id': 'fake',
                'reservation_id': inst['reservation_id'],
                "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
                "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
                "config_drive": None,
                "progress": 0,
                "fixed_ips": [],
                "task_state": "",
                "vm_state": "",
                "root_device_name": inst.get('root_device_name', 'vda'),
                "security_groups": inst['security_groups'],
            })
            self.instance_cache_by_id[db_record['id']] = db_record
            self.instance_cache_by_uuid[db_record['uuid']] = db_record
            return db_record

        def instance_get(context, instance_id):
            """Stub for compute/api create() pulling in instance after
            scheduling
            """
            return self.instance_cache_by_id[instance_id]

        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self.stubs)
        self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
        self.stubs.Set(db, 'instance_create', instance_create)
        self.stubs.Set(db, 'instance_get', instance_get)

    def _check_multiple_create_extension_disabled(self, **kwargs):
        # Without os-multiple-create, min/max_count must collapse to 1.
        self.assertEqual(kwargs['min_count'], 1)
        self.assertEqual(kwargs['max_count'], 1)
|
petrutlucian94/nova
|
nova/tests/unit/api/openstack/compute/contrib/test_multiple_create.py
|
Python
|
apache-2.0
| 22,783 | 0.000088 |
#!/usr/bin/env python
# Copyright (C) 2012,2013,2015(H),2016
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import argparse
import math
import re
def convertTable(gro_in_file, esp_out_file, sigma=1.0, epsilon=1.0, c6=1.0, c12=1.0):
    """Convert GROMACS tabulated file into ESPResSo++ tabulated file (new file
    is created). First column of input file can be either distance or angle.
    For non-bonded files, c6 and c12 can be provided. Default value for sigma, epsilon,
    c6 and c12 is 1.0. Electrostatics are not taken into account (f and fd columns).

    Keyword arguments:
    gro_in_file -- the GROMACS tabulated file name (bonded, nonbonded, angle
    or dihedral).
    esp_out_file -- filename of the ESPResSo++ tabulated file to be written.
    sigma -- optional, depending on whether you want to convert units or not.
    epsilon -- optional, depending on whether you want to convert units or not.
    c6 -- optional
    c12 -- optional
    """
    # Determine the table type from the GROMACS file-name convention:
    # *_b<N>* -> bonded, *_a<N>* -> angle, *_d<N>* -> dihedral,
    # anything else -> non-bonded.
    bonded, angle, dihedral = False, False, False
    if re.match(r'.*_b[0-9]+.*', gro_in_file):
        bonded = True
    elif re.match(r'.*_a[0-9]+.*', gro_in_file):
        angle = True
        bonded = True
    elif re.match(r'.*_d[0-9]+.*', gro_in_file):
        dihedral = True
        bonded = True

    # FIX: use context managers so both handles are closed even when a
    # malformed line raises (the original leaked open file objects then).
    with open(gro_in_file, 'r') as fin, open(esp_out_file, 'w') as fout:
        if bonded:  # bonded tables have 3 columns: r, energy, force
            for line in fin:
                if line[0] == "#":  # skip comment lines
                    continue
                columns = line.split()
                r = float(columns[0])
                energy = float(columns[1])
                force = float(columns[2])
                # convert units
                if angle or dihedral:  # degrees to radians
                    r = math.radians(r)
                    force = force * 180 / math.pi
                else:
                    r = r / sigma
                e = energy / epsilon
                f = force * sigma / epsilon
                # Keep only points in the valid domain of each table type.
                if (not angle and not dihedral and r != 0) or \
                   (angle and r <= math.pi and r > 0) or \
                   (dihedral and r >= -math.pi and r <= math.pi):
                    fout.write("%15.8g %15.8g %15.8g\n" % (r, e, f))
        else:  # non-bonded tables have 7 columns
            for line in fin:
                if line.startswith('#'):  # skip comment lines
                    continue
                columns = line.split()
                r = float(columns[0])
                g = float(columns[3])   # dispersion (energy)
                gd = float(columns[4])  # dispersion (force)
                h = float(columns[5])   # repulsion (energy)
                hd = float(columns[6])  # repulsion (force)
                e = c6 * g + c12 * h
                f = c6 * gd + c12 * hd
                # convert units
                r = r / sigma
                e = e / epsilon
                f = f * sigma / epsilon
                if r != 0:  # skip the singular r == 0 entry
                    fout.write("%15.8g %15.8g %15.8g\n" % (r, e, f))
def _args():
parser = argparse.ArgumentParser()
parser.add_argument('in_file')
parser.add_argument('out_file')
return parser
def main():
    """Entry point: parse CLI arguments and convert the table."""
    args = _args().parse_args()
    # sigma/epsilon/c6/c12 keep their 1.0 defaults, i.e. no unit conversion.
    convertTable(args.in_file, args.out_file)
if __name__ == '__main__':
    main()
|
cgchemlab/chemlab
|
tools/convert_gromacs2espp.py
|
Python
|
gpl-3.0
| 4,036 | 0.005699 |
"""
The symbols and rules for the CFG of C. I generated these myself by hand, so
they're probably not perfectly correct.
"""
from rules_obj import *
from lexer import *
import tokens
### Symbols ###
# Most symbols are either self-explanatory, or best understood by examining the
# rules below to see how they're used.
# Grammar symbols.  Most are self-explanatory, or best understood by
# looking at the rules below that produce/consume them.
S = Symbol("S")                    # a whole program
main_setup = Symbol("main_setup")  # TODO: is this necessary?
statements = Symbol("statements")  # a sequence of `statement`s
statement = Symbol("statement")    # one C statement, semicolon included
E = Symbol("E")                    # a generic expression
declare_separator = Symbol("declare_separator")
declare_type = Symbol("declare_type")
declare_expression = Symbol("declare_expression")
arr_start = Symbol("arr_start")
arr_end = Symbol("arr_end")
arr_list = Symbol("arr_list")
if_start = Symbol("if_start")
if_statement = Symbol("if_statement")
else_statement = Symbol("else_statement")
while_start = Symbol("while_start")
while_statement = Symbol("while_statement")  # NOTE(review): appears unused below
for_start = Symbol("for_start")
for1 = Symbol("for1")
for2 = Symbol("for2")
for3 = Symbol("for3")                        # NOTE(review): appears unused below
for_expr = Symbol("for_expr")
arg_start = Symbol("arg_start")
func_dec = Symbol("func_dec")
func_def = Symbol("func_def")
func_call_start = Symbol("func_call_start")
### Rules ###
# After adding a rule, remember to register it in the `rules` list at the
# bottom of this module.  The trailing integer argument to Rule() is a
# priority; per the original notes, 49 (just below the 50 of the assignment
# tokens) makes assignment right-associative.

# A program followed by another function definition/declaration is still a
# program; a lone definition/declaration also stands alone as a program.
main_func_dec_cont = Rule(S, [S, func_dec])
main_func_def_cont = Rule(S, [S, func_def])
main_func_dec = Rule(S, [func_dec])
main_func_def = Rule(S, [func_def])

# Statement sequencing.
statements_cont = Rule(statements, [statements, statement])
statements_end = Rule(statements, [statement])

# `return E;` and the non-standard `print E;` convenience statement (added
# because this compiler cannot include stdio.h).
return_form = Rule(statement, [tokens.return_command, E, tokens.semicolon])
print_form = Rule(statement, [tokens.print_command, E, tokens.semicolon])

# Declarations: `int;`, `int a;`, `int a, b = 0;`, pointers, and arrays.
useless_declaration = Rule(statement, [Token("type"), tokens.semicolon])
real_declaration = Rule(statement, [declare_expression, tokens.semicolon])
declare_type_base = Rule(declare_type, [Token("type")])
declare_type_cont = Rule(declare_type, [declare_type, tokens.aster])
# Separators between declared names: ",", ",*", ", **", ...
declare_separator_base = Rule(declare_separator, [tokens.comma])
declare_separator_cont = Rule(declare_separator, [declare_separator, tokens.aster])
base_declare = Rule(declare_expression, [declare_type, Token("name")])
assign_declare = Rule(declare_expression, [declare_expression, tokens.equal, E], 49)
# Shape mismatches such as `int hi = {1, 3}` are caught at code generation,
# not by the parser.
arr_assign_declare = Rule(declare_expression, [declare_expression, tokens.equal, arr_list], 49)
cont_declare = Rule(declare_expression, [declare_expression, declare_separator, Token("name")])
array_num_declare = Rule(declare_expression, [declare_expression, tokens.open_sq_bracket, E, tokens.close_sq_bracket])
array_nonum_declare = Rule(declare_expression, [declare_expression, tokens.open_sq_bracket, tokens.close_sq_bracket])

# Expressions, with C-like precedence levels.
E_num = Rule(E, [Token("integer")])
E_parens = Rule(E, [tokens.open_paren, E, tokens.close_paren])
E_add = Rule(E, [E, Token("addop"), E], 85)         # binary + and -
E_mult = Rule(E, [E, tokens.aster, E], 90)
E_div = Rule(E, [E, tokens.slash, E], 90)
E_mod = Rule(E, [E, tokens.percent, E], 90)
E_boolean_and = Rule(E, [E, tokens.logic_and, E], 65)
E_boolean_or = Rule(E, [E, tokens.logic_or, E], 60)
E_eq_compare = Rule(E, [E, Token("eq_compare"), E], 70)
E_compare = Rule(E, [E, Token("compare"), E], 75)
E_neg = Rule(E, [Token("addop"), E], 95)            # unary + and -
E_equal = Rule(E, [E, Token("assignment"), E], 49)  # =, *=, /=, ... right-assoc
E_boolean_not = Rule(E, [tokens.logic_not, E], 95)
E_inc_after = Rule(E, [E, Token("crement")], 100)   # a++ / a--
E_inc_before = Rule(E, [Token("crement"), E], 95)   # ++a / --a
E_point = Rule(E, [tokens.aster, E], 95)
E_deref = Rule(E, [tokens.amper, E], 95)

# Function calls: `f()`, then `f(1`, `f(1, 2`, ..., closed by `)`.
E_func_noarg = Rule(E, [E, tokens.open_paren, tokens.close_paren])
E_func_call_start = Rule(func_call_start, [E, tokens.open_paren, E], 0)
E_func_call_cont = Rule(func_call_start, [func_call_start, tokens.comma, E], 0)
E_func_call_end = Rule(E, [func_call_start, tokens.close_paren])
E_array = Rule(E, [E, tokens.open_sq_bracket, E, tokens.close_sq_bracket], 100)
E_var = Rule(E, [Token("name")])
E_form = Rule(statement, [E, tokens.semicolon])

# if/else.  The start is split out so `(E)` does not reduce to E inside
# `if(E)`.  `statements` in the one-line forms is fine because
# statement -> statements applies immediately.
if_start_form = Rule(if_start, [tokens.if_keyword, tokens.open_paren])
if_form_brackets = Rule(if_statement, [if_start, E, tokens.close_paren, tokens.open_bracket, tokens.close_bracket])
if_form_oneline = Rule(if_statement, [if_start, E, tokens.close_paren, statements])
if_form_main = Rule(if_statement, [if_start, E, tokens.close_paren, tokens.open_bracket, statements, tokens.close_bracket])
else_form_brackets = Rule(else_statement, [tokens.else_keyword, tokens.open_bracket, tokens.close_bracket])
else_form_oneline = Rule(else_statement, [tokens.else_keyword, statements])
else_form_main = Rule(else_statement, [tokens.else_keyword, tokens.open_bracket, statements, tokens.close_bracket])
# Priority 200 keeps the parser from applying if_form_general when an
# `else` follows (ifelse_form_general must win then).
if_form_general = Rule(statement, [if_statement], 200)
ifelse_form_general = Rule(statement, [if_statement, else_statement])

break_form = Rule(statement, [tokens.break_keyword, tokens.semicolon])
cont_form = Rule(statement, [tokens.cont_keyword, tokens.semicolon])

# while loops (start split out for the same reason as if_start).
while_start_form = Rule(while_start, [tokens.while_keyword, tokens.open_paren])
while_form_brackets = Rule(statement, [while_start, E, tokens.close_paren, tokens.open_bracket, tokens.close_bracket])
while_form_oneline = Rule(statement, [while_start, E, tokens.close_paren, statements])
while_form_main = Rule(statement, [while_start, E, tokens.close_paren, tokens.open_bracket, statements, tokens.close_bracket])

# for loops: for1/for2 accumulate the init and condition clauses.  Code
# generation checks each clause is really a single `E ;` statement.
for_start_form = Rule(for_start, [tokens.for_keyword, tokens.open_paren])
for1_form = Rule(for1, [for_start, statements])
for2_form = Rule(for2, [for1, statements])
for_expr_form = Rule(for_expr, [for2, E, tokens.close_paren])
for_expr_form_empty = Rule(for_expr, [for2, tokens.close_paren])
for_form_empty = Rule(statement, [for_expr, tokens.semicolon])
for_form_brackets = Rule(statement, [for_expr, tokens.open_bracket, tokens.close_bracket])
for_form_oneline = Rule(statement, [for_expr, statements])
for_form_main = Rule(statement, [for_expr, tokens.open_bracket, statements, tokens.close_bracket])

# Array initializers: `{}`, `{1}`, and `{1, 2,` ... `3}`.
arr_list_one = Rule(arr_list, [tokens.open_bracket, E, tokens.close_bracket])
arr_list_none = Rule(arr_list, [tokens.open_bracket, tokens.close_bracket])
arr_list_start = Rule(arr_start, [tokens.open_bracket, E, tokens.comma])
arr_list_cont = Rule(arr_start, [arr_start, E, tokens.comma])
arr_list_total = Rule(arr_list, [arr_start, arr_end])
arr_list_end = Rule(arr_end, [E, tokens.close_bracket])

# Function declarations/definitions.  The leading declare_expression is
# expected to have children [declare_type, name].
base_arg_form = Rule(arg_start, [declare_expression, tokens.open_paren, declare_expression])
cont_arg_form = Rule(arg_start, [arg_start, tokens.comma, declare_expression])
func_dec_form = Rule(func_dec, [arg_start, tokens.close_paren, tokens.semicolon])
func_def_form = Rule(func_def, [arg_start, tokens.close_paren, tokens.open_bracket, statements, tokens.close_bracket])
noarg_func_dec_form = Rule(func_dec, [declare_expression, tokens.open_paren, tokens.close_paren, tokens.semicolon])
noarg_func_def_form = Rule(func_def, [declare_expression, tokens.open_paren, tokens.close_paren, tokens.open_bracket, statements, tokens.close_bracket])

semicolon_form = Rule(statement, [tokens.semicolon])
# All rules, applied in this order.  Keep the listing in the same order as
# the definitions above when adding new rules.
# NOTE(review): for_form_empty is defined above but not registered here —
# confirm whether that omission is intentional.
rules = [
    main_func_def_cont, main_func_dec_cont, main_func_def, main_func_dec,
    statements_cont, statements_end,
    return_form, print_form,
    useless_declaration, real_declaration,
    declare_type_base, declare_type_cont,
    declare_separator_base, declare_separator_cont,
    base_declare, assign_declare, arr_assign_declare, cont_declare,
    array_num_declare, array_nonum_declare,
    E_num, E_parens, E_add, E_mult, E_div, E_mod,
    E_boolean_and, E_boolean_or, E_eq_compare, E_compare,
    E_neg, E_equal, E_boolean_not, E_inc_after, E_inc_before,
    E_point, E_deref,
    E_func_noarg, E_func_call_start, E_func_call_cont, E_func_call_end,
    E_array, E_var, E_form,
    if_start_form, if_form_brackets, if_form_oneline, if_form_main,
    if_form_general,
    else_form_brackets, else_form_oneline, else_form_main,
    ifelse_form_general,
    break_form, cont_form,
    while_start_form, while_form_brackets, while_form_oneline,
    while_form_main,
    for_start_form, for1_form, for2_form, for_expr_form,
    for_expr_form_empty,
    for_form_brackets, for_form_oneline, for_form_main,
    arr_list_one, arr_list_none, arr_list_start, arr_list_cont,
    arr_list_total, arr_list_end,
    base_arg_form, cont_arg_form,
    func_dec_form, func_def_form,
    noarg_func_dec_form, noarg_func_def_form,
    semicolon_form,
]
|
ShivamSarodia/ShivC
|
rules.py
|
Python
|
gpl-2.0
| 15,247 | 0.00164 |
#!/usr/bin/env python3
"""
* Copyright (c) 2015 BEEVC - Electronic Systems This file is part of BEESOFT
* software: you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation, either
* version 3 of the License, or (at your option) any later version. BEESOFT is
* distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details. You
* should have received a copy of the GNU General Public License along with
* BEESOFT. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = "Marcos Gomes"
__license__ = "MIT"
import FileFinder
import pygame
import Loaders.WaitForConnectionLoader
from beedriver import connection
import time
import FileFinder
class WaitScreen():
    """Splash screen shown while waiting for a USB connection to a BEE printer.

    Polls for a printer every 0.5 s (up to 10 attempts), flashing or
    switching firmware as needed, until the printer reports Firmware mode.

    @var connected: status of USB connection to the BTF
    """
    connected = False
    screen = None
    exit = False
    lblTop = None
    lblBottom = None
    bgImage = None
    loader = None
    nextPullTime = None
    # BEEConnect vars
    beeCon = None
    beeCmd = None
    mode = None
    displayWidth = 480
    displayHeight = 320

    def __init__(self, screen, dispWidth = 480, dispHeight = 320, shutdownCallback=None):
        """Draw the wait screen, then block until a printer connects, the
        user quits, or the retry budget runs out.

        screen -- pygame surface to draw on
        dispWidth/dispHeight -- display resolution in pixels
        shutdownCallback -- forwarded to the beedriver connection
        """
        self.displayWidth = dispWidth
        self.displayHeight = dispHeight
        self.connected = False
        print("Printer Connection: {0}".format(self.connected))
        self.exit = False
        self.screen = screen
        self.currentScreen = 'WaitConnection'
        self.loader = Loaders.WaitForConnectionLoader.WaitForConnectionLoader(self.displayWidth, self.displayHeight)
        # Render the static labels described by the loader.
        lblText = self.loader.GetLblsText()
        lblX = self.loader.GetLblsXPos()
        lblY = self.loader.GetLblsYPos()
        lblFont = self.loader.GetLblsFont()
        lblFontColor = self.loader.GetLblsFontColor()
        for i in range(0, len(lblText)):
            lbl = lblFont[i].render(lblText[i], 1, lblFontColor[i])
            self.screen.blit(lbl, (lblX[i], lblY[i]))
        # Draw the background image and flush everything to the display.
        self.bgImage = pygame.image.load(self.loader.GetImagePath())
        imgX = self.loader.GetImageX()
        imgY = self.loader.GetImageY()
        self.screen.blit(self.bgImage, (imgX, imgY))
        pygame.display.update()
        self.nextPullTime = time.time() + 0.5
        tries = 10
        while (not self.connected) and (not self.exit) and (tries > 0):
            # Keep the UI responsive while we wait between polls.
            self.handle_events()
            t = time.time()
            if t > self.nextPullTime:
                self.beeCon = connection.Conn(shutdownCallback)
                # Connect to first Printer
                self.beeCon.connectToFirstPrinter()
                printerDict = self.beeCon.connectedPrinter
                if self.beeCon.isConnected() == True:
                    self.beeCmd = self.beeCon.getCommandIntf()
                    self.mode = self.beeCmd.getPrinterMode()
                    fwVersion = self.beeCmd.getFirmwareVersion()
                    #resp = self.beeCmd.startPrinter()
                    if 'Firmware' in self.mode:
                        # Any firmware other than 10.4.7 is sent back to the
                        # bootloader (unless mid-print) to be reflashed.
                        if '10.4.7' not in fwVersion and not self.beeCmd.isPrinting():
                            self.beeCmd.goToBootloader()
                            self.beeCon.close()
                            self.beeCon = None
                        else:
                            self.connected = self.beeCon.connected
                    elif 'Bootloader' in self.mode:
                        # Select the firmware image matching this printer's
                        # USB vendor/product id.
                        printerVID = printerDict['VendorID']
                        printerPID = printerDict['ProductID']
                        fwName = ''
                        fwString = ''
                        if printerVID == '65535' and printerPID == '334':
                            # Old Bootloader Printer
                            fwString = 'BEEVC-BEETHEFIRST0-10.4.8'
                            fwName = '/Firmware/BEEVC-BEETHEFIRST0-Firmware-10.4.8.BIN'
                        elif printerVID == '10697':
                            # New Bootloader Printers
                            if printerPID == '1':
                                # BEETHEFIRST
                                fwString = 'BEEVC-BEETHEFIRST-10.4.8'
                                fwName = '/Firmware/BEEVC-BEETHEFIRST-Firmware-10.4.8.BIN'
                            elif printerPID == '2':
                                # BEETHEFIRST+
                                fwString = 'BEEVC-BEETHEFIRST_PLUS-10.4.8'
                                fwName = '/Firmware/BEEVC-BEETHEFIRST_PLUS-Firmware-10.4.8.BIN'
                            elif printerPID == '3':
                                # BEEME
                                fwString = 'BEEVC-BEEME-10.4.8'
                                fwName = '/Firmware/BEEVC-BEEME-Firmware-10.4.8.BIN'
                            elif printerPID == '4':
                                # BEEINSCHOOL
                                fwString = 'BEEVC-BEEINSCHOOL-10.4.8'
                                fwName = '/Firmware/BEEVC-BEEINSCHOOL-Firmware-10.4.8.BIN'
                            elif printerPID == '5':
                                # BEETHEFIRST_PLUS_A
                                fwString = 'BEEVC-BEETHEFIRST_PLUS_A-10.4.8'
                                fwName = '/Firmware/BEEVC-BEETHEFIRST_PLUS_A-Firmware-10.4.8.BIN'
                        if '10.4.8' not in fwVersion:
                            print('Falshing new Firmare')
                            ff = FileFinder.FileFinder()
                            fwPath = ff.GetAbsPath(fwName)
                            self.beeCmd.flashFirmware(fwPath, fwString)
                            # Block until the transfer finishes, then force a
                            # reconnect on the next poll.
                            while self.beeCmd.getTransferCompletionState() is not None:
                                time.sleep(0.5)
                            self.beeCon.close()
                            self.beeCon = None
                        else:
                            print("Changing to firmware")
                            self.beeCmd.goToFirmware()
                            self.mode = self.beeCmd.getPrinterMode()
                            if 'Firmware' not in self.mode:
                                self.beeCon = None
                            else:
                                self.connected = self.beeCon.connected
                else:
                    # USB buffer needs cleaning before the next attempt.
                    # NOTE(review): self.beeCmd may still be None here on the
                    # first failed poll — confirm cleanBuffer is reachable.
                    print('Printer not responding... cleaning buffer\n')
                    self.beeCmd.cleanBuffer()
                    self.beeCon.close()
                    self.beeCon = None
                self.nextPullTime = time.time() + 0.5
                # FIX: only consume a retry when a poll actually happened.
                # Previously this decrement sat at loop level, so the busy
                # loop exhausted all 10 tries in microseconds, before the
                # first 0.5 s poll deadline was ever reached.
                tries -= 1
        if tries <= 0:
            print('Printer not found')
            # FIX: __init__ must return None; the original `return False`
            # raises TypeError in Python 3.
            return
        else:
            # Clear a stale shutdown flag left over from a previous session.
            status = self.beeCmd.getStatus()
            if status is not None:
                if 'Shutdown' in status:
                    self.beeCmd.clearShutdownFlag()
        return

    def handle_events(self):
        """Handle pending pygame events; a QUIT request aborts the wait loop."""
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                self.exit = True
                return

    def KillAll(self):
        """Drop references to UI resources so they can be garbage collected."""
        self.bgImage = None
        self.lblTop = None
        self.lblBottom = None
        self.loader = None
        self.nextPullTime = None
        return
|
beeverycreative/beeconnect
|
WaitForConnection.py
|
Python
|
gpl-2.0
| 8,838 | 0.009391 |
""" tokenizer for tweets! might be appropriate for other social media dialects too.
general philosophy is to throw as little out as possible.
development philosophy: every time you change a rule, do a diff of this
program's output on ~100k tweets. if you iterate through many possible rules
and only accept the ones that seeem to result in good diffs, it's a sort of
statistical learning with in-the-loop human evaluation :)
"""
__author__="brendan o'connor (anyall.org)"
import re,sys
import emoticons
# Compile with re.UNICODE so character classes also match non-ASCII text.
mycompile = lambda pat: re.compile(pat, re.UNICODE)

def regex_or(*items):
    """Join alternatives into one capturing group: ('a', 'b') -> '(a|b)'."""
    return '(' + '|'.join(items) + ')'

def pos_lookahead(r):
    """Wrap a pattern in a positive lookahead assertion."""
    return '(?=%s)' % r

def neg_lookahead(r):
    """Wrap a pattern in a negative lookahead assertion."""
    return '(?!%s)' % r

def optional(r):
    """Make a pattern optional (grouped, then '?')."""
    return '(%s)?' % r
# Basic punctuation and HTML-entity building blocks.
PunctChars = r'''['“".?!,:;]'''
Punct = '%s+' % PunctChars
Entity = '&(amp|lt|gt|quot);'

# URL recognition.  A one-liner (https?://\S+) would do, but this more
# complex grammar also catches bare "www.foo.com" links and keeps trailing
# punctuation out of the match.
UrlStart1 = regex_or('https?://', r'www\.')
CommonTLDs = regex_or('com', 'co\\.uk', 'org', 'net', 'info', 'ca')
UrlStart2 = r'[a-z0-9\.-]+?' + r'\.' + CommonTLDs + pos_lookahead(r'[/ \W\b]')
UrlBody = r'[^ \t\r\n<>]*?'  # * not +: in "go to bla.com." the period must stay out
UrlExtraCrapBeforeEnd = '%s+?' % regex_or(PunctChars, Entity)
UrlEnd = regex_or(r'\.\.+', r'[<>]', r'\s', '$')
Url = (r'\b'
       + regex_or(UrlStart1, UrlStart2)
       + UrlBody
       + pos_lookahead(optional(UrlExtraCrapBeforeEnd) + UrlEnd))
Url_RE = re.compile("(%s)" % Url, re.U | re.I)

# Clock times (12:30), decimals (3.14), and comma-grouped numbers (1,000).
Timelike = r'\d+:\d+'
NumNum = r'\d+\.\d+'
NumberWithCommas = r'(\d+,)+?\d{3}' + pos_lookahead(regex_or('[^,]', '$'))
# Common abbreviations written with dots (a.m., U.S.A., i.e., ...).
Abbrevs1 = ['am','pm','us','usa','ie','eg']

def regexify_abbrev(a):
    """Turn 'am' into a pattern matching 'a.m.' case-insensitively."""
    return "".join(r'[%s%s]\.' % (c, c.upper()) for c in a)

Abbrevs = [regexify_abbrev(a) for a in Abbrevs1]
# Arbitrary dotted abbreviations (I.B.M. / I.B.M), bounded by something
# that is not another dot.
BoundaryNotDot = regex_or(r'\s', '[“"?!,:;]', Entity)
aa1 = r'''([A-Za-z]\.){2,}''' + pos_lookahead(BoundaryNotDot)
aa2 = r'''([A-Za-z]\.){1,}[A-Za-z]''' + pos_lookahead(BoundaryNotDot)
ArbitraryAbbrev = regex_or(aa1, aa2)

# Dash-like separators; the assert guards against the em-dash being
# silently normalized to a plain hyphen by an editor.
assert '-' != '―'
Separators = regex_or('--+', '―')
Decorations = r' [ ♫ ]+ '.replace(' ','')
EmbeddedApostrophe = r"\S+'\S+"

# Token shapes that must survive tokenization intact (never split).
ProtectThese = [
    emoticons.Emoticon,
    Url,
    Entity,
    Timelike,
    NumNum,
    NumberWithCommas,
    Punct,
    ArbitraryAbbrev,
    Separators,
    Decorations,
    EmbeddedApostrophe,
]
Protect_RE = mycompile(regex_or(*ProtectThese))
class Tokenization(list):
    """A list of token strings, plus the source text and per-token offsets."""

    def __init__(self):
        self.alignments = []  # character offset of each token in self.text
        self.text = ""        # the untokenized source string

    def subset(self, tok_inds):
        """Return a new Tokenization holding only the tokens at tok_inds."""
        picked = Tokenization()
        for i in tok_inds:
            picked.append(self[i])
            picked.alignments.append(self.alignments[i])
        picked.text = self.text
        return picked
def assert_consistent(t):
    """Sanity-check that every token matches the text at its recorded offset."""
    assert len(t) == len(t.alignments)
    for tok, off in zip(t, t.alignments):
        assert t.text[off:off + len(tok)] == tok
def align(toks, orig):
    """Map each token to its character offset in `orig`.

    Scans left to right, assuming the tokens occur in `orig` in order
    (possibly separated by other characters).  Raises AlignmentFailed when
    a token cannot be located at or after the current scan position.
    """
    s_i = 0  # current scan position in orig
    alignments = [None]*len(toks)
    for tok_i in range(len(toks)):
        while True:
            length = len(toks[tok_i])
            if orig[s_i:(s_i+length)] == toks[tok_i]:
                alignments[tok_i] = s_i
                s_i += length
                break
            # No match here: advance one character and retry.
            s_i += 1
            if s_i >= len(orig):
                raise AlignmentFailed((orig, toks, alignments))
        #if orig[s_i] != ' ': raise AlignmentFailed("nonspace advance: %s" % ((s_i,orig),))
    # Defensive: every slot must have been filled by the loop above.
    if any(a is None for a in alignments):
        raise AlignmentFailed((orig, toks, alignments))
    return alignments
class AlignmentFailed(Exception):
    """Raised when tokens cannot be re-anchored to offsets in the source text."""
    pass
def unicodify(s, encoding='utf8', *args):
    """Coerce s to str.

    The encoding/args parameters are remnants of the Python 2 decode path
    and are ignored; they are kept for caller compatibility.
    """
    return str(s)
def tokenize(tweet):
    """Tokenize a tweet into a Tokenization with text and alignments filled in."""
    text = squeeze_whitespace(tweet)
    result = Tokenization()
    result.extend(simple_tokenize(text))
    result.text = text
    result.alignments = align(result, text)
    return result
def simple_tokenize(text):
    """Split text into tokens, keeping Protect_RE matches intact.

    The string is partitioned into a strictly alternating sequence of
    "good" spans (freely splittable) and "bad" spans (protected matches):
    good bad good bad ... good — the first and last spans are always goods.
    """
    s = text
    s = edge_punct_munge(s)
    # Collect (start, end) index pairs for each span.
    goods = []
    bads = []
    i = 0
    if Protect_RE.search(s):
        for m in Protect_RE.finditer(s):
            goods.append( (i, m.start()) )
            bads.append(m.span())
            i = m.end()
        # Trailing good span after the last protected match (may be empty).
        goods.append( (m.end(), len(s)) )
    else:
        goods = [ (0, len(s)) ]
    # Alternation invariant: exactly one more good span than bad spans.
    assert len(bads)+1 == len(goods)
    # Materialize the spans as substrings.
    goods = [s[i:j] for i,j in goods]
    bads = [s[i:j] for i,j in bads]
    #print goods
    #print bads
    # Whitespace-split only the unprotected spans.
    goods = [unprotected_tokenize(x) for x in goods]
    # Re-interleave: good tokens, protected span, good tokens, ...
    res = []
    for i in range(len(bads)):
        res += goods[i]
        res.append(bads[i])
    res += goods[-1]
    res = post_process(res)
    return res
# Possessive marker: capture a token plus its trailing "'s".
AposS = re.compile(r"(\S+)('s)$", re.UNICODE)

def post_process(pre_toks):
    """Further split certain tokens: detach possessive 's endings."""
    out = []
    for tok in pre_toks:
        match = AposS.search(tok)
        out.extend(match.groups() if match else [tok])
    return out
# Any run of whitespace collapses to a single space.
WS_RE = re.compile(r'\s+', re.UNICODE)

def squeeze_whitespace(s):
    """Collapse internal whitespace runs to single spaces and strip the ends."""
    return WS_RE.sub(" ", s).strip()
# fun: copy and paste outta http://en.wikipedia.org/wiki/Smart_quotes
EdgePunct = r"""[ ' " “ ” ‘ ’ < > « » { } ( \) [ \] ]""".replace(' ','')
#NotEdgePunct = r"""[^'"([\)\]]""" # alignment failures?
NotEdgePunct = r"""[a-zA-Z0-9]"""
EdgePunctLeft = r"""(\s|^)(%s+)(%s)""" % (EdgePunct, NotEdgePunct)
EdgePunctRight = r"""(%s)(%s+)(\s|$)""" % (NotEdgePunct, EdgePunct)
EdgePunctLeft_RE = mycompile(EdgePunctLeft)
EdgePunctRight_RE= mycompile(EdgePunctRight)
def edge_punct_munge(s):
    """Insert a space between edge punctuation (quotes/brackets) and a word,
    e.g. '"hi"' -> '" hi "', so the punctuation becomes its own token."""
    s = EdgePunctLeft_RE.sub( r"\1\2 \3", s)
    s = EdgePunctRight_RE.sub(r"\1 \2\3", s)
    return s
def unprotected_tokenize(s):
    """Plain whitespace split, for spans containing no protected tokens."""
    return s.split()
if __name__=='__main__':
    # Demo: tokenize tweets.txt line by line (strip the trailing newline).
    for line in open('tweets.txt'):
        print(" ".join(tokenize(line[:-1])))
    #for line in sys.stdin:
    #print u" ".join(tokenize(line[:-1])).encode('utf-8')
    #print "CUR\t" + " ".join(tokenize(line[:-1]))
    #print "WS\t" + " ".join(line[:-1].split())
    #print ansi.color(line.strip(),'red')
    #print ansi.color(" ".join(tokenize(line.strip())),'blue','bold')
|
ewan-klein/nltk_twitter
|
twokenise.py
|
Python
|
apache-2.0
| 6,480 | 0.013622 |
import numpy as np
import cv2
import math
# Calculates rotation matrix to euler angles
# The result is the same as MATLAB except the order
# of the euler angles ( x and z are swapped ).
def rot_vec_to_euler(r):
    """Convert an OpenCV rotation vector into (x, y, z) Euler angles.

    The rotation matrix is first composed with a 180-degree flip about the
    x axis so that a forward-facing pose maps to angles (0, 0, 0).
    Matches MATLAB's convention except x and z are swapped.
    """
    flip_x = np.array([[1, 0, 0],
                       [0, -1, 0],
                       [0, 0, -1]])
    R = np.dot(flip_x, np.array(cv2.Rodrigues(r)[0]))
    # Near-zero sy means gimbal lock (pitch at +/-90 degrees).
    sy = math.sqrt(R[0, 0] * R[0, 0] + R[1, 0] * R[1, 0])
    if sy < 1e-6:
        # Singular case: roll and yaw are coupled; fix yaw to zero.
        x = math.atan2(-R[1, 2], R[1, 1])
        z = 0
    else:
        x = math.atan2(R[2, 1], R[2, 2])
        z = math.atan2(R[1, 0], R[0, 0])
    y = math.atan2(-R[2, 0], sy)
    return np.array([x, y, z])
# Calculates Rotation Matrix given euler angles.
def euler_to_rot_vec(theta):
    """Convert (x, y, z) Euler angles back into an OpenCV rotation vector.

    Inverse of rot_vec_to_euler: composes R_z * R_y * R_x and applies the
    same 180-degree x-axis flip before calling cv2.Rodrigues.
    """
    cx, sx = math.cos(theta[0]), math.sin(theta[0])
    cy, sy = math.cos(theta[1]), math.sin(theta[1])
    cz, sz = math.cos(theta[2]), math.sin(theta[2])
    r_x = np.array([[1, 0, 0],
                    [0, cx, -sx],
                    [0, sx, cx]])
    r_y = np.array([[cy, 0, sy],
                    [0, 1, 0],
                    [-sy, 0, cy]])
    r_z = np.array([[cz, -sz, 0],
                    [sz, cz, 0],
                    [0, 0, 1]])
    flip_x = np.array([[1, 0, 0],
                       [0, -1, 0],
                       [0, 0, -1]])
    combined = np.dot(flip_x, np.dot(r_z, np.dot(r_y, r_x)))
    return np.array(cv2.Rodrigues(combined)[0])
class poseExtractor:
    """Estimates 3D head pose from 2D facial landmarks via cv2.solvePnP."""

    def __init__(self):
        # Landmark indices used for PnP.
        # NOTE(review): ordering must match model_points below — confirm
        # against the landmark detector in use (looks like the 68-point set).
        self.image_points = np.array(
            [30, 29, 28, 27, 33, 32, 34, 31, 35,
             36, 45, 39, 42,
             21, 22, 20, 23, 19, 24, 18, 25],
            dtype=np.intp)
        # 3D reference model.  All-zero rows are left-side points that get
        # filled in by mirroring the right-side row that follows them.
        self.model_points = np.array([
            (0.0, 0.0, 0.0),                # Nose tip
            (0.0, 0.40412, -0.35702),       # Nose 1
            (0.0, 0.87034, -0.65485),       # Nose 2
            (0, 1.33462, -0.92843),         # Nose 3
            (0, -0.63441, -0.65887),        # Under Nose #0
            (0, 0, 0),                      # Under Nose #1, L (mirrored)
            (0.25466, -0.59679, -0.80215),  # Under Nose #1, R
            (0, 0, 0),                      # Under Nose #2, L (mirrored)
            (0.49277, -0.56169, -0.96709),  # Under Nose #2, R
            (0, 0, 0),                      # Left eye outer corner (mirrored)
            (1.60745, 1.21855, -1.9585),    # Right eye outer corner
            (0, 0, 0),                      # Left eye inner corner (mirrored)
            (0.53823, 1.15389, -1.37273),   # Right eye inner corner
            (0, 0, 0),                      # Eyebrow #0, L (mirrored)
            (0.34309, 1.67208, -0.96486),   # Eyebrow #0, R
            (0, 0, 0),                      # Eyebrow #1, L (mirrored)
            (0.65806, 1.85405, -1.04975),   # Eyebrow #1, R
            (0, 0, 0),                      # Eyebrow #2, L (mirrored)
            (0.96421, 1.95277, -1.23015),   # Eyebrow #2, R
            (0, 0, 0),                      # Eyebrow #3, L (mirrored)
            (1.32075, 1.95305, -1.48482),   # Eyebrow #3, R
        ])
        # Mirror right-side rows onto the left-side row before each:
        # negate x, copy y and z.
        for left in range(5, self.model_points.shape[0], 2):
            self.model_points[left, 0] = -self.model_points[left + 1, 0]
            self.model_points[left, 1:3] = self.model_points[left + 1, 1:3]
        self.camera_matrix = None  # kept as an attribute so it can be printed later
        self.dist_coeffs = np.zeros((4, 1))  # assuming no lens distortion
        self.rvec = None
        self.tvec = None

    def get_head_rotation(self, landmarks, img_size):
        """Run solvePnP; on success self.rvec/self.tvec hold the head pose."""
        # Approximate camera intrinsics: focal length ~ image width,
        # principal point at the image centre.
        focal_length = img_size[1]
        center = (img_size[1] / 2, img_size[0] / 2)
        self.camera_matrix = np.array(
            [[focal_length, 0, center[0]],
             [0, focal_length, center[1]],
             [0, 0, 1]], dtype="double")
        points_2d = landmarks[self.image_points[:, np.newaxis], :]
        if self.rvec is None:
            # First frame: solve from scratch.
            (success, self.rvec, self.tvec) = cv2.solvePnP(
                self.model_points, points_2d,
                self.camera_matrix, self.dist_coeffs, flags=cv2.SOLVEPNP_EPNP)
        else:
            # Subsequent frames: seed the solver with the previous pose.
            (success, self.rvec, self.tvec) = cv2.solvePnP(
                self.model_points, points_2d,
                self.camera_matrix, self.dist_coeffs, flags=cv2.SOLVEPNP_EPNP,
                rvec=self.rvec, tvec=self.tvec, useExtrinsicGuess=True)
        return success

    def get_positional_features(self, landmarks, img_size):
        """Return (translation vector, Euler angles), or None if PnP fails."""
        rotation_success = self.get_head_rotation(landmarks, img_size)
        if not rotation_success:
            return None
        return self.tvec, rot_vec_to_euler(self.rvec)
def get_position_by_average(landmarks, img_size):
    """Summarize a landmark set as (normalized centroid..., normalized size).

    The size is twice the mean Euclidean distance of the landmarks from
    their centroid; both centroid and size are divided by img_size[0].
    """
    centroid = landmarks.mean(axis=0)
    distances = np.linalg.norm(landmarks - centroid, ord=2, axis=1)
    spread = 2 * distances.mean()
    normalizer = img_size[0]
    return np.append(centroid / normalizer, spread / normalizer)
|
JustusSchwan/MasterThesis
|
trash/utility_positional.py
|
Python
|
mit
| 4,965 | 0.000604 |
import fileinput
def str_to_int(s):
    """Parse a whitespace-separated string of integers into a list of ints."""
    return list(map(int, s.split()))
# args = [ 'line 1', 'line 2', ... ]
def proc_input(args):
    """Parse the raw input lines into problem data (skeleton stub: returns None)."""
    pass
def solve(args, verbose=False):
    """Solve one problem instance from its raw input lines (skeleton stub).

    NOTE(review): the parsed result 'r' and the 'verbose' flag are currently
    unused; fill in per problem.
    """
    r = proc_input(args)
def test():
    """Minimal self-check for the parsing helpers; run via 'python skeleton.py test'."""
    assert(str_to_int('1 2 3') == [ 1, 2, 3 ])
if __name__ == '__main__':
    from sys import argv
    # The *last* CLI argument selects the mode: 'test' runs the self-checks,
    # anything else is treated as input files/stdin read via fileinput.
    if argv.pop() == 'test':
        test()
    else:
        solve(list(fileinput.input()), verbose=True)
|
cripplet/practice
|
hackerrank/quora/skeleton.py
|
Python
|
mit
| 393 | 0.045802 |
def extractAtarutranslationBlogspotCom(item):
    """Parser for 'atarutranslation.blogspot.com'."""
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip items with neither chapter nor volume info, and preview posts.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    # Tag on the post -> (release name, translation type).
    tag_to_release = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tag_to_release.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractAtarutranslationBlogspotCom.py
|
Python
|
bsd-3-clause
| 570 | 0.033333 |
from invoke import task, Collection
@task
def toplevel(ctx):
    # Fixture task exposed at the root of the namespace; body intentionally empty.
    pass

@task
def subtask(ctx):
    # Fixture task reused inside both nested collections below.
    pass

# Namespace tree under test: 'toplevel' at the root, plus collection 'a'
# holding 'subtask' and a nested collection 'nother' with its own 'subtask'.
ns = Collection(
    toplevel,
    Collection('a', subtask,
        Collection('nother', subtask)
    )
)
|
mkusz/invoke
|
tests/_support/deeper_ns_list.py
|
Python
|
bsd-2-clause
| 212 | 0.023585 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: redefines Patient.photo as an optional
    # (blank=True) image field stored under 'photos', with no default kept.

    dependencies = [
        ('tfm', '0004_auto_20151124_1307'),
    ]

    operations = [
        migrations.AlterField(
            model_name='patient',
            name='photo',
            field=models.ImageField(default=None, upload_to=b'photos', blank=True),
            preserve_default=False,
        ),
    ]
|
potray/TFM-Web
|
tfm/migrations/0005_auto_20151124_1311.py
|
Python
|
gpl-2.0
| 467 | 0.002141 |
"""autogenerated by genpy from arm/cartesian_moves.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import arm.msg
import genpy
import std_msgs.msg
class cartesian_moves(genpy.Message):
    # Autogenerated genpy message class for arm/cartesian_moves ("Do not
    # edit", per the module header): regenerate with the ROS message tooling
    # so the code stays in sync with _md5sum/_full_text.
    _md5sum = "56c11a250225b8cc4f58b0e6670caaa1"
    _type = "arm/cartesian_moves"
    _has_header = True #flag to mark the presence of a Header object
    _full_text = """# Cartesian movement sequence message
Header header
time end
cartesian_move[] moves
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: arm/cartesian_move
# Cartesian movement message
Header header
float32[7] positions
int8[7] speeds
"""
    __slots__ = ['header','end','moves']
    _slot_types = ['std_msgs/Header','time','arm/cartesian_move[]']

    def __init__(self, *args, **kwds):
        """
        Constructor. Any message fields that are implicitly/explicitly
        set to None will be assigned a default value. The recommend
        use is keyword arguments as this is more robust to future message
        changes. You cannot mix in-order arguments and keyword arguments.
        The available fields are:
        header,end,moves
        :param args: complete set of field values, in .msg order
        :param kwds: use keyword arguments corresponding to message field names
        to set specific fields.
        """
        if args or kwds:
            super(cartesian_moves, self).__init__(*args, **kwds)
            #message fields cannot be None, assign default values for those that are
            if self.header is None:
                self.header = std_msgs.msg.Header()
            if self.end is None:
                self.end = genpy.Time()
            if self.moves is None:
                self.moves = []
        else:
            self.header = std_msgs.msg.Header()
            self.end = genpy.Time()
            self.moves = []

    def _get_types(self):
        """
        internal API method
        """
        return self._slot_types

    def serialize(self, buff):
        """
        serialize message into buffer
        :param buff: buffer, ``StringIO``
        """
        try:
            # Fixed-size prefix: header.seq, header.stamp.secs, header.stamp.nsecs.
            _x = self
            buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
            _x = self.header.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.pack('<I%ss'%length, length, _x))
            _x = self
            buff.write(_struct_2I.pack(_x.end.secs, _x.end.nsecs))
            # Variable-length array: length prefix, then one record per move.
            length = len(self.moves)
            buff.write(_struct_I.pack(length))
            for val1 in self.moves:
                _v1 = val1.header
                buff.write(_struct_I.pack(_v1.seq))
                _v2 = _v1.stamp
                _x = _v2
                buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
                _x = _v1.frame_id
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.pack('<I%ss'%length, length, _x))
                buff.write(_struct_7f.pack(*val1.positions))
                buff.write(_struct_7b.pack(*val1.speeds))
        except struct.error as se: self._check_types(se)
        except TypeError as te: self._check_types(te)

    def deserialize(self, str):
        """
        unpack serialized message in str into this message instance
        :param str: byte array of serialized message, ``str``
        """
        try:
            if self.header is None:
                self.header = std_msgs.msg.Header()
            if self.end is None:
                self.end = genpy.Time()
            if self.moves is None:
                self.moves = None
            end = 0
            _x = self
            start = end
            end += 12
            (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.header.frame_id = str[start:end].decode('utf-8')
            else:
                self.header.frame_id = str[start:end]
            _x = self
            start = end
            end += 8
            (_x.end.secs, _x.end.nsecs,) = _struct_2I.unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            self.moves = []
            for i in range(0, length):
                val1 = arm.msg.cartesian_move()
                _v3 = val1.header
                start = end
                end += 4
                (_v3.seq,) = _struct_I.unpack(str[start:end])
                _v4 = _v3.stamp
                _x = _v4
                start = end
                end += 8
                (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
                start = end
                end += 4
                (length,) = _struct_I.unpack(str[start:end])
                start = end
                end += length
                if python3:
                    _v3.frame_id = str[start:end].decode('utf-8')
                else:
                    _v3.frame_id = str[start:end]
                start = end
                end += 28
                val1.positions = _struct_7f.unpack(str[start:end])
                start = end
                end += 7
                val1.speeds = _struct_7b.unpack(str[start:end])
                self.moves.append(val1)
            self.end.canon()
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e) #most likely buffer underfill

    def serialize_numpy(self, buff, numpy):
        """
        serialize message with numpy array types into buffer
        :param buff: buffer, ``StringIO``
        :param numpy: numpy python module
        """
        try:
            _x = self
            buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
            _x = self.header.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.pack('<I%ss'%length, length, _x))
            _x = self
            buff.write(_struct_2I.pack(_x.end.secs, _x.end.nsecs))
            length = len(self.moves)
            buff.write(_struct_I.pack(length))
            for val1 in self.moves:
                _v5 = val1.header
                buff.write(_struct_I.pack(_v5.seq))
                _v6 = _v5.stamp
                _x = _v6
                buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
                _x = _v5.frame_id
                length = len(_x)
                if python3 or type(_x) == unicode:
                    _x = _x.encode('utf-8')
                    length = len(_x)
                buff.write(struct.pack('<I%ss'%length, length, _x))
                # numpy variant: arrays are written via tostring() instead of struct.pack.
                buff.write(val1.positions.tostring())
                buff.write(val1.speeds.tostring())
        except struct.error as se: self._check_types(se)
        except TypeError as te: self._check_types(te)

    def deserialize_numpy(self, str, numpy):
        """
        unpack serialized message in str into this message instance using numpy for array types
        :param str: byte array of serialized message, ``str``
        :param numpy: numpy python module
        """
        try:
            if self.header is None:
                self.header = std_msgs.msg.Header()
            if self.end is None:
                self.end = genpy.Time()
            if self.moves is None:
                self.moves = None
            end = 0
            _x = self
            start = end
            end += 12
            (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.header.frame_id = str[start:end].decode('utf-8')
            else:
                self.header.frame_id = str[start:end]
            _x = self
            start = end
            end += 8
            (_x.end.secs, _x.end.nsecs,) = _struct_2I.unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            self.moves = []
            for i in range(0, length):
                val1 = arm.msg.cartesian_move()
                _v7 = val1.header
                start = end
                end += 4
                (_v7.seq,) = _struct_I.unpack(str[start:end])
                _v8 = _v7.stamp
                _x = _v8
                start = end
                end += 8
                (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
                start = end
                end += 4
                (length,) = _struct_I.unpack(str[start:end])
                start = end
                end += length
                if python3:
                    _v7.frame_id = str[start:end].decode('utf-8')
                else:
                    _v7.frame_id = str[start:end]
                start = end
                end += 28
                val1.positions = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=7)
                start = end
                end += 7
                val1.speeds = numpy.frombuffer(str[start:end], dtype=numpy.int8, count=7)
                self.moves.append(val1)
            self.end.canon()
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e) #most likely buffer underfill
# Pre-compiled struct formats shared by the (de)serialization methods above
# (little-endian: I = uint32, 7f = seven float32, 7b = seven int8).
_struct_I = genpy.struct_I
_struct_7f = struct.Struct("<7f")
_struct_3I = struct.Struct("<3I")
_struct_2I = struct.Struct("<2I")
_struct_7b = struct.Struct("<7b")
|
uml-robotics/manus_arm
|
arm/src/arm/msg/_cartesian_moves.py
|
Python
|
bsd-2-clause
| 9,235 | 0.015268 |
# Ivysalt's sentry module. It keeps track of people who join and leave a chat.
# LICENSE: This single module is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License.
# @category Tools
# @copyright Copyright (c) 2018 dpc
# @version 1.1
# @author dpc
import asyncio
import json
import os
from discord.ext import commands
from cogs.utils import checks
from cogs.utils.dataIO import fileIO
ban_message = "``Omae wa mou shindeiru.``"
joinleave_path = 'data/sentry/joinleave.json'
bans_path = 'data/sentry/bans.json'
def is_int(s):
    """Return True if *s* parses as a base-10 integer, else False.

    Fix: the old implementation additionally checked ``float(s) % 1 == 0``,
    but ``int(s)`` succeeding already implies an integral value, so that
    branch was dead — and worse, for very long integer strings ``float(s)``
    overflows to ``inf``, making ``inf % 1`` a NaN and misclassifying a
    valid integer as False.  A single int() attempt is both correct and
    cheaper.
    """
    try:
        int(s)
    except ValueError:
        return False
    return True
def check_folders():
    """Create the data folders used by this cog if they are missing."""
    required = ["data/sentry"]
    for path in required:
        if os.path.exists(path):
            continue
        print("Creating " + path + " folder...")
        os.makedirs(path)
def check_files():
    """Create the JSON data files used by this cog if they are missing."""
    default = {}
    # fileIO comes from cogs.utils.dataIO (project helper); "save" writes JSON.
    if not os.path.isfile(joinleave_path):
        print("Creating joinleave.json")
        fileIO(joinleave_path, "save", default)
    if not os.path.isfile(bans_path):
        print("Creating bans.json")
        fileIO(bans_path, "save", default)
# validating data
check_folders()
check_files()

# Module-level state, loaded once at import time; the command handlers below
# mutate these dicts in place and persist them with save().
with open(joinleave_path) as joinleave_file:
    joinleave_data = json.load(joinleave_file)
with open(bans_path) as sentry_file:
    sentry_bans = json.load(sentry_file)
def save(path, data):
    """Persist *data* to *path* as indented JSON, replacing any existing file."""
    serialized = json.dumps(data, indent=4)
    with open(path, "w") as handle:
        handle.write(serialized)
class Sentry:
    """Adds various sentry commands.
    This module was written specifically for a few servers."""
    # Old-style discord.py cog: commands are generator-based coroutines
    # (@asyncio.coroutine + yield from).  The command docstrings are shown by
    # Discord's help command, so they are left untouched here.

    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True, no_pm=True)
    @checks.admin_or_permissions(ban_members=True)
    @asyncio.coroutine
    def preban(self, ctx, user_id: str):
        """Users added with this command will be banned on sight.
        Only admins may use this command."""
        # adding user id to the ban list
        if is_int(user_id):
            if (ctx.message.server.id in sentry_bans):
                if (user_id in sentry_bans[ctx.message.server.id]):
                    yield from self.bot.say("That user is already pre-banned from this server.")
                else:
                    sentry_bans[ctx.message.server.id].append(user_id)
                    save(bans_path, sentry_bans)
                    yield from self.bot.say("User has been pre-banned from this server.")
            else:
                # First pre-ban for this server: create its list.
                sentry_bans[ctx.message.server.id] = [user_id]
                save(bans_path, sentry_bans)
                yield from self.bot.say("User has been pre-banned from this server.")
        else:
            yield from self.bot.say("Improper command usage.")
        # checking if user's already in the server, and banning them if they are
        # NOTE(review): this sweep runs even when the id was rejected above,
        # and indexes sentry_bans[member.server.id] without a membership
        # check — raises KeyError if the server has no pre-ban list yet.
        for member in ctx.message.server.members:
            if (member.id in sentry_bans[member.server.id]):
                #yield from self.bot.send_message(member, ban_message)
                yield from (asyncio.sleep(2))
                yield from self.bot.ban(member, 7)
                print("Banning user {0}#{2} with id {3} from {1}...".format(member.name, member.server.name, member.discriminator, member.id))

    @commands.command(pass_context=True, no_pm=True, description=
                      "Note: users that have been already banned will not be unbanned.")
    @checks.admin_or_permissions(ban_members=True)
    @asyncio.coroutine
    def unpreban(self, ctx, user_id: str):
        """Users removed with this command will not be banned on sight.
        Only admins may use this command."""
        if (ctx.message.server.id in sentry_bans):
            if (user_id in sentry_bans[ctx.message.server.id]):
                sentry_bans[ctx.message.server.id].remove(user_id)
                save(bans_path, sentry_bans)
                yield from self.bot.say("User removed from pre-ban list on this server.")
            else:
                yield from self.bot.say("User is not pre-banned on this server.")
        else:
            yield from self.bot.say("User is not pre-banned on this server.")

    @commands.command(pass_context=True, no_pm=True, description=
                      "Note: users that have been already banned will not be unbanned.")
    @checks.admin_or_permissions(ban_members=True)
    @asyncio.coroutine
    def listpreban(self, ctx):
        """Users removed with this command will not be banned on sight.
        Only admins may use this command."""
        # NOTE(review): the docstring above looks copy-pasted from unpreban;
        # this command actually DMs the caller the server's pre-ban list.
        if (ctx.message.server.id in sentry_bans):
            if len(sentry_bans[ctx.message.server.id]) > 0:
                user_id_list = "```\n=== Prebans in server {} ===\n".format(ctx.message.server.name)
                for user_id in sentry_bans[ctx.message.server.id]:
                    user_id_list += user_id
                    user_id_list += "\n"
                user_id_list += "```"
                yield from self.bot.send_message(ctx.message.author, user_id_list)
            else:
                yield from self.bot.say("No pre-bans on this server.")
        else:
            yield from self.bot.say("No pre-bans on this server.")

    @commands.command(pass_context=True, no_pm=True)
    @checks.admin_or_permissions(ban_members=True)
    @asyncio.coroutine
    def unban(self, ctx, *, uid: str = None):
        """Removes a ban from the server.
        Only admins may use this command."""
        user = yield from self.bot.get_user_info(uid)
        yield from self.bot.unban(ctx.message.server, user)
        yield from self.bot.say('User {} unbanned.'.format(user.name))

    @commands.command(pass_context=True, no_pm=True)
    @checks.admin_or_permissions(ban_members=True)
    @asyncio.coroutine
    def setannounce(self, ctx, channel: str = "current"):
        """Sets the channel to announce server's arrivals and parts.\n\nOnly admins may use this command."""
        # parses the input as a channel id
        if (len(ctx.message.channel_mentions) == 1):
            channel_id = ctx.message.channel_mentions[0].id
        elif is_int(channel):
            channel_id = channel
        elif channel == "current":
            # NOTE(review): this branch stores the Channel object itself,
            # not its id, unlike the two branches above — verify intent.
            channel_id = ctx.message.channel
        else:
            yield from self.bot.say("Sorry, I don't know what channel that is.")
            return
        #checks if channel is in server
        channel_object = ctx.message.server.get_channel(channel_id)
        if channel_object is None:
            yield from self.bot.say("Sorry, I can't tell what channel that is.")
            return
        # assigns the announce channel
        if (ctx.message.server.id in joinleave_data):
            joinleave_data[ctx.message.server.id]["announce_channel"] = channel_id
            save(joinleave_path, joinleave_data)
            yield from self.bot.say("Saved announce channel {}.".format(channel_object.mention))
        else:
            joinleave_data[ctx.message.server.id] = {"announce_channel": channel_id, "autoassign_role": "", "join_announce": False, "leave_announce": True}
            save(joinleave_path, joinleave_data)
            yield from self.bot.say("Saved announce channel {}.".format(channel_object.mention))

    @commands.command(pass_context=True, no_pm=True)
    @checks.admin_or_permissions(ban_members=True)
    @asyncio.coroutine
    def delannounce(self, ctx):
        """Removes the bot announcements in this server.\n\nOnly admins may use this command."""
        # assigns the announce channel
        # NOTE(review): unlike the other commands, this never calls save(),
        # so the removal is lost on restart — possibly an oversight.
        if (ctx.message.server.id in joinleave_data):
            joinleave_data[ctx.message.server.id]["announce_channel"] = ""
            yield from self.bot.say("Removed announce channel for this server.")
        else:
            joinleave_data[ctx.message.server.id] = {"announce_channel": "", "autoassign_role": "", "join_announce": False, "leave_announce": True}
            yield from self.bot.say("There was no announce channel for this server.")

    @commands.command(pass_context=True, no_pm=True)
    @checks.admin_or_permissions(ban_members=True)
    @asyncio.coroutine
    def announcejoin(self, ctx, join: bool = False):
        """Sets the bot to announce server's new arrivals.\n\nOnly admins may use this command."""
        # assigns the announce channel
        if (ctx.message.server.id in joinleave_data):
            joinleave_data[ctx.message.server.id]["join_announce"] = join
            save(joinleave_path, joinleave_data)
            yield from self.bot.say("Setting for join announcement set to ``{}``.".format(join))
        else:
            yield from self.bot.say("Server data not found. Set an announcement channel with ``?setannounce`` first.")

    @commands.command(pass_context=True, no_pm=True)
    @checks.admin_or_permissions(ban_members=True)
    @asyncio.coroutine
    def announceleave(self, ctx, leave: bool = True):
        """Sets the bot to announce server's new arrivals.\n\nOnly admins may use this command."""
        # assigns the announce channel
        if (ctx.message.server.id in joinleave_data):
            joinleave_data[ctx.message.server.id]["leave_announce"] = leave
            save(joinleave_path, joinleave_data)
            yield from self.bot.say("Setting for leave announcement set to ``{}``.".format(leave))
        else:
            yield from self.bot.say("Server data not found. Set an announcement channel with ``?setannounce`` first.")

    @asyncio.coroutine
    def on_member_join(self, member):
        # Ban-on-sight for pre-banned ids; otherwise optionally announce joins.
        if (member.server.id in sentry_bans):
            if (member.id in sentry_bans[member.server.id]):
                #yield from self.bot.send_message(member, ban_message)
                yield from (asyncio.sleep(2))
                yield from self.bot.ban(member, 7)
                print("Banning user {0}#{2} with ID {3} from {1}...".format(member.name, member.server.name, member.discriminator, member.id))
                if (member.server.id in joinleave_data):
                    yield from self.bot.send_message(member.server.get_channel(joinleave_data[member.server.id]["announce_channel"]), "Intruder **{0}#{2}** with ID ``{3}`` sighted! Banning from {1}.".format(member.name, member.server.name, member.discriminator, member.id))
        if (member.server.id in joinleave_data) and (joinleave_data[member.server.id]["join_announce"] == True):
            yield from self.bot.send_message(member.server.get_channel(joinleave_data[member.server.id]["announce_channel"]),"**{0}#{1}**, with user ID {2}, just joined **{3}**!".format(member.name, member.discriminator, member.id, member.server.name))

    @asyncio.coroutine
    def on_member_remove(self, member):
        # Leave announcements default to on ("!= False" keeps missing keys truthy-ish).
        if (member.server.id in joinleave_data) and (joinleave_data[member.server.id]["leave_announce"] != False):
            yield from self.bot.send_message(member.server.get_channel(joinleave_data[member.server.id]["announce_channel"]),"**{0}#{1}**, with user ID {2}, just left **{3}**!".format(member.name, member.discriminator, member.id, member.server.name))

    @asyncio.coroutine
    def on_ready(self):
        # Sweep every server for already-present pre-banned users at startup.
        for server in self.bot.servers:
            if (server.id in sentry_bans):
                for member in server.members:
                    if (member.id in sentry_bans[server.id]):
                        #yield from self.bot.send_message(member, ban_message)
                        yield from (asyncio.sleep(2))
                        yield from self.bot.ban(member, 7)
                        print("Banning user {0}#{2} with ID {3} from {1}...".format(member.name, server.name, member.discriminator, member.id))
def setup(bot):
    """Cog entry point: ensure the data files exist, then register the cog."""
    check_folders()
    check_files()
    bot.add_cog(Sentry(bot))
|
retrodpc/Bulbaspot-Cogs
|
sentry/sentry.py
|
Python
|
apache-2.0
| 11,849 | 0.005233 |
# -*- mode: python; indent-tabs-mode: nil; tab-width: 3 -*-
# vim: set tabstop=3 shiftwidth=3 expandtab:
#
# Copyright (C) 2001-2005 Ichiro Fujinaga, Michael Droettboom,
# and Karl MacMillan
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# TODO: These are fixed values. We need an intelligent way to vary them.
# This whole approach to fuzziness is syntactically convenient, but maybe
# not very efficient.
FUDGE_AMOUNT = 3
FUDGE_AMOUNT_2 = 6
from gamera.core import Rect, Point, Dim
# This is a factory function that looks like a constructor
def Fudge(o, amount=FUDGE_AMOUNT):
    """Return a 'fuzzy' stand-in for *o* whose comparisons tolerate +/- *amount*.

    Rect  -> a new Rect grown by *amount* on every side.
    int   -> FudgeInt; float -> FudgeFloat (see below).
    Any other type falls through and yields None implicitly.
    """
    # For rectangles, just return a new rectangle that is slightly larger
    if isinstance(o, Rect):
        return Rect(Point(int(o.ul_x - amount), int(o.ul_y - amount)), Dim(int(o.ncols + amount * 2), int(o.nrows + amount * 2)))
    # For integers, return one of our "fudge number proxies"
    elif isinstance(o, int):
        return FudgeInt(o, amount)
    elif isinstance(o, float):
        return FudgeFloat(o, amount)

# Short alias for the factory.
F = Fudge
class FudgeNumber(object):
    """Mixin giving numbers fuzzy comparisons via self.below/self.above bounds.

    A FudgeNumber compares 'equal' to anything inside the inclusive window
    [below, above]; the ordering operators compare against the nearer bound.
    """

    def __lt__(self, other):
        return self.below < other

    def __le__(self, other):
        return self.below <= other

    def __eq__(self, other):
        # Equal when other falls inside the tolerance window.
        return self.below <= other and self.above >= other

    def __ne__(self, other):
        # BUG FIX: this previously used 'and', which can never be true
        # (a value cannot be both below the window and above it), so !=
        # always returned False.  != must be the negation of ==: the value
        # lies outside the window on either side.
        return other < self.below or other > self.above

    def __gt__(self, other):
        return self.above > other

    def __ge__(self, other):
        return self.above >= other
class FudgeInt(FudgeNumber, int):
    """An int whose comparisons allow +/- *amount* slack (see FudgeNumber)."""

    def __new__(cls, value, amount=FUDGE_AMOUNT):
        # BUG FIX: without an overriding __new__, the extra 'amount'
        # argument was forwarded to int.__new__ (interpreted as a base),
        # raising TypeError on every construction in Python 3.  int is
        # immutable, so the numeric value must be set here, not in __init__.
        return super(FudgeInt, cls).__new__(cls, value)

    def __init__(self, value, amount=FUDGE_AMOUNT):
        # Tolerance window used by the FudgeNumber comparison operators.
        self.below = int(value - amount)
        self.above = int(value + amount)
class FudgeFloat(FudgeNumber, float):
    """A float whose comparisons allow +/- *amount* slack (see FudgeNumber)."""

    def __new__(cls, value, amount=FUDGE_AMOUNT):
        # BUG FIX: float.__new__ accepts at most one argument, so passing
        # 'amount' through raised TypeError on every construction.  float is
        # immutable, so the numeric value must be set here, not in __init__.
        return super(FudgeFloat, cls).__new__(cls, value)

    def __init__(self, value, amount=FUDGE_AMOUNT):
        # BUG FIX: previously called int.__init__ (copy-paste leftover from
        # FudgeInt); only the tolerance window needs initializing.
        self.below = float(value - amount)
        self.above = float(value + amount)
|
DDMAL/Gamera
|
gamera/fudge.py
|
Python
|
gpl-2.0
| 2,585 | 0.001161 |
"""Module for helper functions."""
import os
import tempfile
def tmpfile(suffix='', prefix='tmp', directory=None):
    """Create a new temporary file and return its path.

    Thin wrapper around tempfile.mkstemp that immediately closes the
    OS-level file descriptor so callers only deal with the path.
    """
    descriptor, path = tempfile.mkstemp(suffix, prefix, directory)
    os.close(descriptor)
    return path
def expandpath(path):
    """Expand environment variables and a leading '~' in *path*."""
    return os.path.expanduser(os.path.expandvars(path))
|
c-w/GettyArt
|
getty_art/util.py
|
Python
|
mit
| 488 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import shutil
from unittest import TestCase, TestSuite, TestLoader, TextTestRunner
from babelfish import Language
from subliminal import list_subtitles, download_subtitles, save_subtitles, download_best_subtitles, scan_video
from subliminal.tests.common import MOVIES, EPISODES
TEST_DIR = 'test_data'
class ApiTestCase(TestCase):
    # Integration tests for the top-level subliminal API (list / download /
    # save / download_best).  These contact real subtitle providers over the
    # network, so results depend on provider availability.

    def setUp(self):
        os.mkdir(TEST_DIR)

    def tearDown(self):
        shutil.rmtree(TEST_DIR)

    def test_list_subtitles_movie_0(self):
        videos = [MOVIES[0]]
        languages = {Language('eng')}
        subtitles = list_subtitles(videos, languages)
        self.assertEqual(len(subtitles), len(videos))
        self.assertGreater(len(subtitles[videos[0]]), 0)

    def test_list_subtitles_movie_0_por_br(self):
        videos = [MOVIES[0]]
        languages = {Language('por', 'BR')}
        subtitles = list_subtitles(videos, languages)
        self.assertEqual(len(subtitles), len(videos))
        self.assertGreater(len(subtitles[videos[0]]), 0)

    def test_list_subtitles_episodes(self):
        videos = [EPISODES[0], EPISODES[1]]
        languages = {Language('eng'), Language('fra')}
        subtitles = list_subtitles(videos, languages)
        self.assertEqual(len(subtitles), len(videos))
        self.assertGreater(len(subtitles[videos[0]]), 0)

    def test_download_subtitles(self):
        videos = [EPISODES[0]]
        # Point the video names into the scratch dir so saves stay sandboxed.
        for video in videos:
            video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
        languages = {Language('eng')}
        subtitles = list_subtitles(videos, languages)
        download_subtitles(subtitles[videos[0]][:5])
        # Allow one of the five downloads to fail (flaky providers).
        self.assertGreaterEqual(len([s for s in subtitles[videos[0]] if s.content is not None]), 4)

    def test_download_best_subtitles(self):
        videos = [EPISODES[0], EPISODES[1]]
        for video in videos:
            video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
        languages = {Language('eng'), Language('fra')}
        subtitles = download_best_subtitles(videos, languages)
        for video in videos:
            self.assertIn(video, subtitles)
            self.assertEqual(len(subtitles[video]), 2)

    def test_save_subtitles(self):
        videos = [EPISODES[0], EPISODES[1]]
        for video in videos:
            video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
        languages = {Language('eng'), Language('fra')}
        subtitles = list_subtitles(videos, languages)
        # make a list of subtitles to download (one per language per video)
        subtitles_to_download = []
        for video, video_subtitles in subtitles.items():
            video_subtitle_languages = set()
            for video_subtitle in video_subtitles:
                if video_subtitle.language in video_subtitle_languages:
                    continue
                subtitles_to_download.append(video_subtitle)
                video_subtitle_languages.add(video_subtitle.language)
                if video_subtitle_languages == languages:
                    break
        self.assertEqual(len(subtitles_to_download), 4)
        # download
        download_subtitles(subtitles_to_download)
        save_subtitles(subtitles)
        # One .<lang>.srt per language must have been written per video.
        for video in videos:
            self.assertTrue(os.path.exists(os.path.splitext(video.name)[0] + '.en.srt'))
            self.assertTrue(os.path.exists(os.path.splitext(video.name)[0] + '.fr.srt'))

    def test_save_subtitles_single(self):
        videos = [EPISODES[0], EPISODES[1]]
        for video in videos:
            video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
        languages = {Language('eng'), Language('fra')}
        subtitles = download_best_subtitles(videos, languages)
        save_subtitles(subtitles, single=True)
        # single=True collapses to one language-less .srt per video.
        for video in videos:
            self.assertIn(video, subtitles)
            self.assertEqual(len(subtitles[video]), 2)
            self.assertTrue(os.path.exists(os.path.splitext(video.name)[0] + '.srt'))

    def test_download_best_subtitles_min_score(self):
        videos = [MOVIES[0]]
        for video in videos:
            video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
        languages = {Language('eng'), Language('fra')}
        # An impossibly high min_score must filter everything out.
        subtitles = download_best_subtitles(videos, languages, min_score=1000)
        self.assertEqual(len(subtitles), 0)

    def test_download_best_subtitles_hearing_impaired(self):
        videos = [MOVIES[0]]
        for video in videos:
            video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
        languages = {Language('eng')}
        subtitles = download_best_subtitles(videos, languages, hearing_impaired=True)
        self.assertTrue(subtitles[videos[0]][0].hearing_impaired)
class VideoTestCase(TestCase):
    # Tests for scan_video(): metadata guessed from the filename and
    # subtitle-language detection from sibling .srt files.

    def setUp(self):
        os.mkdir(TEST_DIR)
        # Create an empty stand-in file for every known movie/episode name.
        for video in MOVIES + EPISODES:
            open(os.path.join(TEST_DIR, os.path.split(video.name)[1]), 'w').close()

    def tearDown(self):
        shutil.rmtree(TEST_DIR)

    def test_scan_video_movie(self):
        video = MOVIES[0]
        scanned_video = scan_video(os.path.join(TEST_DIR, os.path.split(video.name)[1]))
        self.assertEqual(scanned_video.name, os.path.join(TEST_DIR, os.path.split(video.name)[1]))
        self.assertEqual(scanned_video.title.lower(), video.title.lower())
        self.assertEqual(scanned_video.year, video.year)
        self.assertEqual(scanned_video.video_codec, video.video_codec)
        self.assertEqual(scanned_video.format, video.format)
        self.assertEqual(scanned_video.resolution, video.resolution)
        self.assertEqual(scanned_video.release_group, video.release_group)
        self.assertEqual(scanned_video.subtitle_languages, set())
        # The fixture file is empty, so no hashes and zero size are expected.
        self.assertEqual(scanned_video.hashes, {})
        self.assertIsNone(scanned_video.audio_codec)
        self.assertIsNone(scanned_video.imdb_id)
        self.assertEqual(scanned_video.size, 0)

    def test_scan_video_episode(self):
        video = EPISODES[0]
        scanned_video = scan_video(os.path.join(TEST_DIR, os.path.split(video.name)[1]))
        self.assertEqual(scanned_video.name, os.path.join(TEST_DIR, os.path.split(video.name)[1]))
        self.assertEqual(scanned_video.series, video.series)
        self.assertEqual(scanned_video.season, video.season)
        self.assertEqual(scanned_video.episode, video.episode)
        self.assertEqual(scanned_video.video_codec, video.video_codec)
        self.assertEqual(scanned_video.format, video.format)
        self.assertEqual(scanned_video.resolution, video.resolution)
        self.assertEqual(scanned_video.release_group, video.release_group)
        self.assertEqual(scanned_video.subtitle_languages, set())
        self.assertEqual(scanned_video.hashes, {})
        self.assertIsNone(scanned_video.title)
        self.assertIsNone(scanned_video.tvdb_id)
        self.assertIsNone(scanned_video.imdb_id)
        self.assertIsNone(scanned_video.audio_codec)
        self.assertEqual(scanned_video.size, 0)

    def test_scan_video_subtitle_language_und(self):
        video = EPISODES[0]
        # A bare .srt with no language suffix is reported as 'und' (undetermined).
        open(os.path.join(TEST_DIR, os.path.splitext(os.path.split(video.name)[1])[0]) + '.srt', 'w').close()
        scanned_video = scan_video(os.path.join(TEST_DIR, os.path.split(video.name)[1]))
        self.assertEqual(scanned_video.subtitle_languages, {Language('und')})

    def test_scan_video_subtitles_language_eng(self):
        video = EPISODES[0]
        open(os.path.join(TEST_DIR, os.path.splitext(os.path.split(video.name)[1])[0]) + '.en.srt', 'w').close()
        scanned_video = scan_video(os.path.join(TEST_DIR, os.path.split(video.name)[1]))
        self.assertEqual(scanned_video.subtitle_languages, {Language('eng')})

    def test_scan_video_subtitles_languages(self):
        video = EPISODES[0]
        open(os.path.join(TEST_DIR, os.path.splitext(os.path.split(video.name)[1])[0]) + '.en.srt', 'w').close()
        open(os.path.join(TEST_DIR, os.path.splitext(os.path.split(video.name)[1])[0]) + '.fr.srt', 'w').close()
        open(os.path.join(TEST_DIR, os.path.splitext(os.path.split(video.name)[1])[0]) + '.srt', 'w').close()
        scanned_video = scan_video(os.path.join(TEST_DIR, os.path.split(video.name)[1]))
        self.assertEqual(scanned_video.subtitle_languages, {Language('eng'), Language('fra'), Language('und')})
def suite():
    """Aggregate all test cases in this module into one unittest TestSuite."""
    suite = TestSuite()
    suite.addTest(TestLoader().loadTestsFromTestCase(ApiTestCase))
    suite.addTest(TestLoader().loadTestsFromTestCase(VideoTestCase))
    return suite

if __name__ == '__main__':
    TextTestRunner().run(suite())
|
ravselj/subliminal
|
subliminal/tests/test_subliminal.py
|
Python
|
mit
| 8,711 | 0.002296 |
from BOX.box_lib import requests
import os
import configparser
import traceback
import functools
import threading
configfile = os.path.join(os.path.dirname(__file__), 'EpicRace.ini')
config = configparser.ConfigParser()
config.read(configfile)
# NOTE(review): 'async' has been a fully reserved keyword since Python 3.7,
# so this module fails to parse on modern interpreters.  The decorator (and
# its commented-out '#@async' call sites) should be renamed, e.g. run_async —
# kept as-is here because renaming changes the public name.
def async(func):
    """Decorator: run the wrapped function in a daemon background thread.

    Returns the started Thread object instead of the function's result.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        t = threading.Thread(target=func, args=args, kwargs=kwargs)
        t.daemon = True
        t.start()
        return t
    return wrapper
def log(log):
    """Overwrite the mod's log file with a single 'update: ...' entry."""
    message = 'update: ' + str(log)
    with open("apps\\python\\EpicRace\\log.txt", 'w') as handle:
        handle.write(message)
#@async
def update():
    """Check GitHub for a newer EpicRace revision and download it if found.

    Compares the locally stored commit SHA (sha.txt) against the tip of
    the configured branch.  Returns a status code:
    0 = updated, 2 = already up to date, 3 = error (traceback logged).
    """
    with open("apps\\python\\EpicRace\\sha.txt", 'r') as g:
        sha = g.read()
        g.close()  # redundant: the with-block already closes the file
    try:
        branch = config['SETTINGS']['branch']
        check_link = "https://api.github.com/repos/Marocco2/EpicRace/commits/" + branch
        # This Accept header makes the GitHub API return only the bare SHA.
        headers = {'Accept': 'application/vnd.github.VERSION.sha'}
        r = requests.get(check_link, headers=headers)
        if r.text != sha: # Check if server version and client version is the same
            # Record the new SHA before fetching the files.
            with open("apps\\python\\EpicRace\\sha.txt", 'w') as j:
                j.write(r.text)
                j.close()  # redundant: the with-block already closes the file
            download_link_epicrace = "https://raw.githubusercontent.com/Marocco2/EpicRace/" + branch + "/EpicRace.py"
            download_link_update = "https://raw.githubusercontent.com/Marocco2/EpicRace/" + branch + "/update.py"
            download_link_ini = "https://raw.githubusercontent.com/Marocco2/EpicRace/" + branch + "/EpicRace.ini"
            get_file(download_link_epicrace, "apps\\python\\EpicRace\\EpicRace.py")
            get_file(download_link_ini, "apps\\python\\EpicRace\\EpicRace.ini")
            get_file(download_link_update, "apps\\python\\EpicRace\\update.py")
            update_status = 0  # ok
            log(update_status)
            return update_status
        else:
            # "No new update"
            update_status = 2
            log(update_status)
            return update_status
    except:
        # NOTE(review): bare except swallows everything, including
        # KeyboardInterrupt; the traceback is at least written to the log.
        log(traceback.format_exc())
        update_status = 3
        return update_status
#@async
def get_file(link, filed):
    """Download *link* and write its text body to the path *filed*."""
    response = requests.get(link)
    with open(filed, 'w') as out:
        out.write(response.text)
|
Marocco2/EpicRace
|
update.py
|
Python
|
lgpl-3.0
| 2,289 | 0.004806 |
"""
Test objconfig.writer.AbstractWriter
"""
import pytest
from objconfig.exception import RuntimeException
from objconfig.writer import WriterInterface
from objconfig.writer import AbstractWriter
from objconfig import Config
import os
def test_methods_abstractwriter():
    """AbstractWriter satisfies WriterInterface but refuses to serialize."""
    writer = AbstractWriter()
    empty_config = Config({})
    target = os.path.join(os.path.dirname(os.path.realpath(__file__)), "test")
    assert isinstance(writer, WriterInterface), "AbstractWriter not instance of WriterInterface"
    with pytest.raises(RuntimeException):
        writer.toFile(target, empty_config)
    with pytest.raises(RuntimeException):
        writer.toString(empty_config)
    os.remove(target)
|
asherwunk/objconfig
|
tests/writer/test_abstractwriter.py
|
Python
|
mit
| 728 | 0.008242 |
"""
Tests for student enrollment.
"""
from mock import patch, Mock
import ddt
from django.core.cache import cache
from nose.tools import raises
import unittest
from django.test import TestCase
from django.test.utils import override_settings
from django.conf import settings
from course_modes.models import CourseMode
from enrollment import api
from enrollment.errors import EnrollmentApiLoadError, EnrollmentNotFoundError, CourseModeNotFoundError
from enrollment.tests import fake_data_api
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase
@ddt.ddt
@override_settings(ENROLLMENT_DATA_API="enrollment.tests.fake_data_api")
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class EnrollmentTest(CacheIsolationTestCase):
"""
Test student enrollment, especially with different course modes.
"""
USERNAME = "Bob"
COURSE_ID = "some/great/course"
ENABLED_CACHES = ['default']
def setUp(self):
super(EnrollmentTest, self).setUp()
fake_data_api.reset()
@ddt.data(
# Default (no course modes in the database)
# Expect automatically being enrolled as "honor".
([], 'honor'),
# Audit / Verified / Honor
# We should always go to the "choose your course" page.
# We should also be enrolled as "honor" by default.
(['honor', 'verified', 'audit'], 'honor'),
# Check for professional ed happy path.
(['professional'], 'professional'),
(['no-id-professional'], 'no-id-professional')
)
@ddt.unpack
def test_enroll(self, course_modes, mode):
# Add a fake course enrollment information to the fake data API
fake_data_api.add_course(self.COURSE_ID, course_modes=course_modes)
# Enroll in the course and verify the URL we get sent to
result = api.add_enrollment(self.USERNAME, self.COURSE_ID, mode=mode)
self.assertIsNotNone(result)
self.assertEquals(result['student'], self.USERNAME)
self.assertEquals(result['course']['course_id'], self.COURSE_ID)
self.assertEquals(result['mode'], mode)
get_result = api.get_enrollment(self.USERNAME, self.COURSE_ID)
self.assertEquals(result, get_result)
@ddt.data(
([CourseMode.DEFAULT_MODE_SLUG, 'verified', 'credit'], CourseMode.DEFAULT_MODE_SLUG),
(['audit', 'verified', 'credit'], 'audit'),
(['honor', 'verified', 'credit'], 'honor'),
)
@ddt.unpack
def test_enroll_no_mode_success(self, course_modes, expected_mode):
# Add a fake course enrollment information to the fake data API
fake_data_api.add_course(self.COURSE_ID, course_modes=course_modes)
with patch('enrollment.api.CourseMode.modes_for_course') as mock_modes_for_course:
mock_course_modes = [Mock(slug=mode) for mode in course_modes]
mock_modes_for_course.return_value = mock_course_modes
# Enroll in the course and verify the URL we get sent to
result = api.add_enrollment(self.USERNAME, self.COURSE_ID)
self.assertIsNotNone(result)
self.assertEquals(result['student'], self.USERNAME)
self.assertEquals(result['course']['course_id'], self.COURSE_ID)
self.assertEquals(result['mode'], expected_mode)
@ddt.data(
['professional'],
['verified'],
['verified', 'professional'],
)
@raises(CourseModeNotFoundError)
def test_enroll_no_mode_error(self, course_modes):
# Add a fake course enrollment information to the fake data API
fake_data_api.add_course(self.COURSE_ID, course_modes=course_modes)
# Enroll in the course and verify that we raise CourseModeNotFoundError
api.add_enrollment(self.USERNAME, self.COURSE_ID)
@raises(CourseModeNotFoundError)
def test_prof_ed_enroll(self):
# Add a fake course enrollment information to the fake data API
fake_data_api.add_course(self.COURSE_ID, course_modes=['professional'])
# Enroll in the course and verify the URL we get sent to
api.add_enrollment(self.USERNAME, self.COURSE_ID, mode='verified')
@ddt.data(
# Default (no course modes in the database)
# Expect that users are automatically enrolled as "honor".
([], 'honor'),
# Audit / Verified / Honor
# We should always go to the "choose your course" page.
# We should also be enrolled as "honor" by default.
(['honor', 'verified', 'audit'], 'honor'),
# Check for professional ed happy path.
(['professional'], 'professional'),
(['no-id-professional'], 'no-id-professional')
)
@ddt.unpack
def test_unenroll(self, course_modes, mode):
# Add a fake course enrollment information to the fake data API
fake_data_api.add_course(self.COURSE_ID, course_modes=course_modes)
# Enroll in the course and verify the URL we get sent to
result = api.add_enrollment(self.USERNAME, self.COURSE_ID, mode=mode)
self.assertIsNotNone(result)
self.assertEquals(result['student'], self.USERNAME)
self.assertEquals(result['course']['course_id'], self.COURSE_ID)
self.assertEquals(result['mode'], mode)
self.assertTrue(result['is_active'])
result = api.update_enrollment(self.USERNAME, self.COURSE_ID, mode=mode, is_active=False)
self.assertIsNotNone(result)
self.assertEquals(result['student'], self.USERNAME)
self.assertEquals(result['course']['course_id'], self.COURSE_ID)
self.assertEquals(result['mode'], mode)
self.assertFalse(result['is_active'])
@raises(EnrollmentNotFoundError)
def test_unenroll_not_enrolled_in_course(self):
# Add a fake course enrollment information to the fake data API
fake_data_api.add_course(self.COURSE_ID, course_modes=['honor'])
api.update_enrollment(self.USERNAME, self.COURSE_ID, mode='honor', is_active=False)
@ddt.data(
# Simple test of honor and verified.
([
{'course_id': 'the/first/course', 'course_modes': [], 'mode': 'honor'},
{'course_id': 'the/second/course', 'course_modes': ['honor', 'verified'], 'mode': 'verified'}
]),
# No enrollments
([]),
# One Enrollment
([
{'course_id': 'the/third/course', 'course_modes': ['honor', 'verified', 'audit'], 'mode': 'audit'}
]),
)
def test_get_all_enrollments(self, enrollments):
for enrollment in enrollments:
fake_data_api.add_course(enrollment['course_id'], course_modes=enrollment['course_modes'])
api.add_enrollment(self.USERNAME, enrollment['course_id'], enrollment['mode'])
result = api.get_enrollments(self.USERNAME)
self.assertEqual(len(enrollments), len(result))
for result_enrollment in result:
self.assertIn(
result_enrollment['course']['course_id'],
[enrollment['course_id'] for enrollment in enrollments]
)
def test_update_enrollment(self):
# Add fake course enrollment information to the fake data API
fake_data_api.add_course(self.COURSE_ID, course_modes=['honor', 'verified', 'audit'])
# Enroll in the course and verify the URL we get sent to
result = api.add_enrollment(self.USERNAME, self.COURSE_ID, mode='audit')
get_result = api.get_enrollment(self.USERNAME, self.COURSE_ID)
self.assertEquals(result, get_result)
result = api.update_enrollment(self.USERNAME, self.COURSE_ID, mode='honor')
self.assertEquals('honor', result['mode'])
result = api.update_enrollment(self.USERNAME, self.COURSE_ID, mode='verified')
self.assertEquals('verified', result['mode'])
def test_update_enrollment_attributes(self):
# Add fake course enrollment information to the fake data API
fake_data_api.add_course(self.COURSE_ID, course_modes=['honor', 'verified', 'audit', 'credit'])
# Enroll in the course and verify the URL we get sent to
result = api.add_enrollment(self.USERNAME, self.COURSE_ID, mode='audit')
get_result = api.get_enrollment(self.USERNAME, self.COURSE_ID)
self.assertEquals(result, get_result)
enrollment_attributes = [
{
"namespace": "credit",
"name": "provider_id",
"value": "hogwarts",
}
]
result = api.update_enrollment(
self.USERNAME, self.COURSE_ID, mode='credit', enrollment_attributes=enrollment_attributes
)
self.assertEquals('credit', result['mode'])
attributes = api.get_enrollment_attributes(self.USERNAME, self.COURSE_ID)
self.assertEquals(enrollment_attributes[0], attributes[0])
def test_get_course_details(self):
# Add a fake course enrollment information to the fake data API
fake_data_api.add_course(self.COURSE_ID, course_modes=['honor', 'verified', 'audit'])
result = api.get_course_enrollment_details(self.COURSE_ID)
self.assertEquals(result['course_id'], self.COURSE_ID)
self.assertEquals(3, len(result['course_modes']))
@override_settings(ENROLLMENT_DATA_API='foo.bar.biz.baz')
@raises(EnrollmentApiLoadError)
def test_data_api_config_error(self):
# Enroll in the course and verify the URL we get sent to
api.add_enrollment(self.USERNAME, self.COURSE_ID, mode='audit')
def test_caching(self):
# Add fake course enrollment information to the fake data API
fake_data_api.add_course(self.COURSE_ID, course_modes=['honor', 'verified', 'audit'])
# Hit the fake data API.
details = api.get_course_enrollment_details(self.COURSE_ID)
# Reset the fake data API, should rely on the cache.
fake_data_api.reset()
cached_details = api.get_course_enrollment_details(self.COURSE_ID)
# The data matches
self.assertEqual(len(details['course_modes']), 3)
self.assertEqual(details, cached_details)
|
10clouds/edx-platform
|
common/djangoapps/enrollment/tests/test_api.py
|
Python
|
agpl-3.0
| 10,138 | 0.002071 |
import unittest
import os
import sys
import copy
import nose
from numpy.testing import *
from numpy import array, alltrue, ndarray, asarray, can_cast,zeros, dtype
from numpy.core.multiarray import typeinfo
import util
wrap = None
def setup():
    """
    Build the required testing extension module
    """
    global wrap
    # Check compiler availability first
    if not util.has_c_compiler():
        raise nose.SkipTest("No C compiler available")
    # Build only once per test run; `wrap` caches the compiled module.
    if wrap is None:
        config_code = """
        config.add_extension('test_array_from_pyobj_ext',
                             sources=['wrapmodule.c', 'fortranobject.c'],
                             define_macros=[])
        """
        d = os.path.dirname(__file__)
        src = [os.path.join(d, 'src', 'array_from_pyobj', 'wrapmodule.c'),
               os.path.join(d, '..', 'src', 'fortranobject.c'),
               os.path.join(d, '..', 'src', 'fortranobject.h')]
        wrap = util.build_module_distutils(src, config_code,
                                          'test_array_from_pyobj_ext')
def flags_info(arr):
    """Return the list of flag names set on *arr* (flags word is attr #6)."""
    return flags2names(wrap.array_attrs(arr)[6])
def flags2names(flags):
    """Translate a numeric flags bitmask into the matching flag-name list."""
    candidates = ('CONTIGUOUS', 'FORTRAN', 'OWNDATA', 'ENSURECOPY',
                  'ENSUREARRAY', 'ALIGNED', 'NOTSWAPPED', 'WRITEABLE',
                  'UPDATEIFCOPY', 'BEHAVED', 'BEHAVED_RO',
                  'CARRAY', 'FARRAY')
    # Names missing from the wrap module contribute 0 and never match.
    return [name for name in candidates
            if abs(flags) & getattr(wrap, name, 0)]
class Intent(object):
    """Builder for f2py intent-flag combinations.

    Attribute access chains intent names onto a fresh copy, e.g.
    ``intent.in_.out`` -> Intent(['in', 'out']).  The combined bitmask of
    ``wrap.F2PY_*`` constants is kept in ``self.flags``.
    """
    def __init__(self,intent_list=[]):
        # Mutable default is safe here: the list is copied and never mutated.
        self.intent_list = intent_list[:]
        flags = 0
        for i in intent_list:
            if i=='optional':
                flags |= wrap.F2PY_OPTIONAL
            else:
                # e.g. 'in' -> wrap.F2PY_INTENT_IN
                flags |= getattr(wrap,'F2PY_INTENT_'+i.upper())
        self.flags = flags
    def __getattr__(self,name):
        # Any unknown attribute extends the chain with that intent name.
        name = name.lower()
        if name=='in_': name='in'  # 'in' is a Python keyword, hence 'in_'
        return self.__class__(self.intent_list+[name])
    def __str__(self):
        return 'intent(%s)' % (','.join(self.intent_list))
    def __repr__(self):
        return 'Intent(%r)' % (self.intent_list)
    def is_intent(self,*names):
        # Subset test: True if every given name is among the stored intents.
        for name in names:
            if name not in self.intent_list:
                return False
        return True
    def is_intent_exact(self,*names):
        # Exact-set test: same names and same count.
        return len(self.intent_list)==len(names) and self.is_intent(*names)

# Module-level singleton used as the root of all intent chains.
intent = Intent()
class Type(object):
_type_names = ['BOOL','BYTE','UBYTE','SHORT','USHORT','INT','UINT',
'LONG','ULONG','LONGLONG','ULONGLONG',
'FLOAT','DOUBLE','LONGDOUBLE','CFLOAT','CDOUBLE',
'CLONGDOUBLE']
_type_cache = {}
_cast_dict = {'BOOL':['BOOL']}
_cast_dict['BYTE'] = _cast_dict['BOOL'] + ['BYTE']
_cast_dict['UBYTE'] = _cast_dict['BOOL'] + ['UBYTE']
_cast_dict['BYTE'] = ['BYTE']
_cast_dict['UBYTE'] = ['UBYTE']
_cast_dict['SHORT'] = _cast_dict['BYTE'] + ['UBYTE','SHORT']
_cast_dict['USHORT'] = _cast_dict['UBYTE'] + ['BYTE','USHORT']
_cast_dict['INT'] = _cast_dict['SHORT'] + ['USHORT','INT']
_cast_dict['UINT'] = _cast_dict['USHORT'] + ['SHORT','UINT']
_cast_dict['LONG'] = _cast_dict['INT'] + ['LONG']
_cast_dict['ULONG'] = _cast_dict['UINT'] + ['ULONG']
_cast_dict['LONGLONG'] = _cast_dict['LONG'] + ['LONGLONG']
_cast_dict['ULONGLONG'] = _cast_dict['ULONG'] + ['ULONGLONG']
_cast_dict['FLOAT'] = _cast_dict['SHORT'] + ['USHORT','FLOAT']
_cast_dict['DOUBLE'] = _cast_dict['INT'] + ['UINT','FLOAT','DOUBLE']
_cast_dict['LONGDOUBLE'] = _cast_dict['LONG'] + ['ULONG','FLOAT','DOUBLE','LONGDOUBLE']
_cast_dict['CFLOAT'] = _cast_dict['FLOAT'] + ['CFLOAT']
_cast_dict['CDOUBLE'] = _cast_dict['DOUBLE'] + ['CFLOAT','CDOUBLE']
_cast_dict['CLONGDOUBLE'] = _cast_dict['LONGDOUBLE'] + ['CFLOAT','CDOUBLE','CLONGDOUBLE']
def __new__(cls,name):
if isinstance(name,dtype):
dtype0 = name
name = None
for n,i in typeinfo.items():
if isinstance(i,tuple) and dtype0.type is i[-1]:
name = n
break
obj = cls._type_cache.get(name.upper(),None)
if obj is not None:
return obj
obj = object.__new__(cls)
obj._init(name)
cls._type_cache[name.upper()] = obj
return obj
def _init(self,name):
self.NAME = name.upper()
self.type_num = getattr(wrap,'NPY_'+self.NAME)
assert_equal(self.type_num,typeinfo[self.NAME][1])
self.dtype = typeinfo[self.NAME][-1]
self.elsize = typeinfo[self.NAME][2] / 8
self.dtypechar = typeinfo[self.NAME][0]
def cast_types(self):
return map(self.__class__,self._cast_dict[self.NAME])
def all_types(self):
return map(self.__class__,self._type_names)
def smaller_types(self):
bits = typeinfo[self.NAME][3]
types = []
for name in self._type_names:
if typeinfo[name][3]<bits:
types.append(Type(name))
return types
def equal_types(self):
bits = typeinfo[self.NAME][3]
types = []
for name in self._type_names:
if name==self.NAME: continue
if typeinfo[name][3]==bits:
types.append(Type(name))
return types
def larger_types(self):
bits = typeinfo[self.NAME][3]
types = []
for name in self._type_names:
if typeinfo[name][3]>bits:
types.append(Type(name))
return types
class Array(object):
def __init__(self,typ,dims,intent,obj):
self.type = typ
self.dims = dims
self.intent = intent
self.obj_copy = copy.deepcopy(obj)
self.obj = obj
# arr.dtypechar may be different from typ.dtypechar
self.arr = wrap.call(typ.type_num,dims,intent.flags,obj)
assert_(isinstance(self.arr, ndarray),`type(self.arr)`)
self.arr_attr = wrap.array_attrs(self.arr)
if len(dims)>1:
if self.intent.is_intent('c'):
assert_(intent.flags & wrap.F2PY_INTENT_C)
assert_(not self.arr.flags['FORTRAN'],`self.arr.flags,getattr(obj,'flags',None)`)
assert_(self.arr.flags['CONTIGUOUS'])
assert_(not self.arr_attr[6] & wrap.FORTRAN)
else:
assert_(not intent.flags & wrap.F2PY_INTENT_C)
assert_(self.arr.flags['FORTRAN'])
assert_(not self.arr.flags['CONTIGUOUS'])
assert_(self.arr_attr[6] & wrap.FORTRAN)
if obj is None:
self.pyarr = None
self.pyarr_attr = None
return
if intent.is_intent('cache'):
assert_(isinstance(obj,ndarray),`type(obj)`)
self.pyarr = array(obj).reshape(*dims).copy()
else:
self.pyarr = array(array(obj,
dtype = typ.dtypechar).reshape(*dims),
order=self.intent.is_intent('c') and 'C' or 'F')
assert_(self.pyarr.dtype == typ, \
`self.pyarr.dtype,typ`)
assert_(self.pyarr.flags['OWNDATA'], (obj, intent))
self.pyarr_attr = wrap.array_attrs(self.pyarr)
if len(dims)>1:
if self.intent.is_intent('c'):
assert_(not self.pyarr.flags['FORTRAN'])
assert_(self.pyarr.flags['CONTIGUOUS'])
assert_(not self.pyarr_attr[6] & wrap.FORTRAN)
else:
assert_(self.pyarr.flags['FORTRAN'])
assert_(not self.pyarr.flags['CONTIGUOUS'])
assert_(self.pyarr_attr[6] & wrap.FORTRAN)
assert_(self.arr_attr[1]==self.pyarr_attr[1]) # nd
assert_(self.arr_attr[2]==self.pyarr_attr[2]) # dimensions
if self.arr_attr[1]<=1:
assert_(self.arr_attr[3]==self.pyarr_attr[3],\
`self.arr_attr[3],self.pyarr_attr[3],self.arr.tostring(),self.pyarr.tostring()`) # strides
assert_(self.arr_attr[5][-2:]==self.pyarr_attr[5][-2:],\
`self.arr_attr[5],self.pyarr_attr[5]`) # descr
assert_(self.arr_attr[6]==self.pyarr_attr[6],\
`self.arr_attr[6],self.pyarr_attr[6],flags2names(0*self.arr_attr[6]-self.pyarr_attr[6]),flags2names(self.arr_attr[6]),intent`) # flags
if intent.is_intent('cache'):
assert_(self.arr_attr[5][3]>=self.type.elsize,\
`self.arr_attr[5][3],self.type.elsize`)
else:
assert_(self.arr_attr[5][3]==self.type.elsize,\
`self.arr_attr[5][3],self.type.elsize`)
assert_(self.arr_equal(self.pyarr,self.arr))
if isinstance(self.obj,ndarray):
if typ.elsize==Type(obj.dtype).elsize:
if not intent.is_intent('copy') and self.arr_attr[1]<=1:
assert_(self.has_shared_memory())
def arr_equal(self,arr1,arr2):
if arr1.shape != arr2.shape:
return False
s = arr1==arr2
return alltrue(s.flatten())
def __str__(self):
return str(self.arr)
def has_shared_memory(self):
"""Check that created array shares data with input array.
"""
if self.obj is self.arr:
return True
if not isinstance(self.obj,ndarray):
return False
obj_attr = wrap.array_attrs(self.obj)
return obj_attr[0]==self.arr_attr[0]
##################################################

class test_intent(unittest.TestCase):
    """Sanity checks for the Intent helper's chaining and query API."""
    def test_in_out(self):
        # 'in_' maps to 'in' because 'in' is a Python keyword.
        assert_equal(str(intent.in_.out),'intent(in,out)')
        assert_(intent.in_.c.is_intent('c'))
        assert_(not intent.in_.c.is_intent_exact('c'))
        assert_(intent.in_.c.is_intent_exact('c','in'))
        assert_(intent.in_.c.is_intent_exact('in','c'))
        assert_(not intent.in_.is_intent('c'))
class _test_shared_memory:
num2seq = [1,2]
num23seq = [[1,2,3],[4,5,6]]
def test_in_from_2seq(self):
a = self.array([2],intent.in_,self.num2seq)
assert_(not a.has_shared_memory())
def test_in_from_2casttype(self):
for t in self.type.cast_types():
obj = array(self.num2seq,dtype=t.dtype)
a = self.array([len(self.num2seq)],intent.in_,obj)
if t.elsize==self.type.elsize:
assert_(a.has_shared_memory(),`self.type.dtype,t.dtype`)
else:
assert_(not a.has_shared_memory(),`t.dtype`)
def test_inout_2seq(self):
obj = array(self.num2seq,dtype=self.type.dtype)
a = self.array([len(self.num2seq)],intent.inout,obj)
assert_(a.has_shared_memory())
try:
a = self.array([2],intent.in_.inout,self.num2seq)
except TypeError,msg:
if not str(msg).startswith('failed to initialize intent(inout|inplace|cache) array'):
raise
else:
raise SystemError('intent(inout) should have failed on sequence')
def test_f_inout_23seq(self):
obj = array(self.num23seq,dtype=self.type.dtype,order='F')
shape = (len(self.num23seq),len(self.num23seq[0]))
a = self.array(shape,intent.in_.inout,obj)
assert_(a.has_shared_memory())
obj = array(self.num23seq,dtype=self.type.dtype,order='C')
shape = (len(self.num23seq),len(self.num23seq[0]))
try:
a = self.array(shape,intent.in_.inout,obj)
except ValueError,msg:
if not str(msg).startswith('failed to initialize intent(inout) array'):
raise
else:
raise SystemError('intent(inout) should have failed on improper array')
def test_c_inout_23seq(self):
obj = array(self.num23seq,dtype=self.type.dtype)
shape = (len(self.num23seq),len(self.num23seq[0]))
a = self.array(shape,intent.in_.c.inout,obj)
assert_(a.has_shared_memory())
def test_in_copy_from_2casttype(self):
for t in self.type.cast_types():
obj = array(self.num2seq,dtype=t.dtype)
a = self.array([len(self.num2seq)],intent.in_.copy,obj)
assert_(not a.has_shared_memory(),`t.dtype`)
def test_c_in_from_23seq(self):
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_,self.num23seq)
assert_(not a.has_shared_memory())
def test_in_from_23casttype(self):
for t in self.type.cast_types():
obj = array(self.num23seq,dtype=t.dtype)
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_,obj)
assert_(not a.has_shared_memory(),`t.dtype`)
def test_f_in_from_23casttype(self):
for t in self.type.cast_types():
obj = array(self.num23seq,dtype=t.dtype,order='F')
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_,obj)
if t.elsize==self.type.elsize:
assert_(a.has_shared_memory(),`t.dtype`)
else:
assert_(not a.has_shared_memory(),`t.dtype`)
def test_c_in_from_23casttype(self):
for t in self.type.cast_types():
obj = array(self.num23seq,dtype=t.dtype)
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_.c,obj)
if t.elsize==self.type.elsize:
assert_(a.has_shared_memory(),`t.dtype`)
else:
assert_(not a.has_shared_memory(),`t.dtype`)
def test_f_copy_in_from_23casttype(self):
for t in self.type.cast_types():
obj = array(self.num23seq,dtype=t.dtype,order='F')
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_.copy,obj)
assert_(not a.has_shared_memory(),`t.dtype`)
def test_c_copy_in_from_23casttype(self):
for t in self.type.cast_types():
obj = array(self.num23seq,dtype=t.dtype)
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_.c.copy,obj)
assert_(not a.has_shared_memory(),`t.dtype`)
def test_in_cache_from_2casttype(self):
for t in self.type.all_types():
if t.elsize != self.type.elsize:
continue
obj = array(self.num2seq,dtype=t.dtype)
shape = (len(self.num2seq),)
a = self.array(shape,intent.in_.c.cache,obj)
assert_(a.has_shared_memory(),`t.dtype`)
a = self.array(shape,intent.in_.cache,obj)
assert_(a.has_shared_memory(),`t.dtype`)
obj = array(self.num2seq,dtype=t.dtype,order='F')
a = self.array(shape,intent.in_.c.cache,obj)
assert_(a.has_shared_memory(),`t.dtype`)
a = self.array(shape,intent.in_.cache,obj)
assert_(a.has_shared_memory(),`t.dtype`)
try:
a = self.array(shape,intent.in_.cache,obj[::-1])
except ValueError,msg:
if not str(msg).startswith('failed to initialize intent(cache) array'):
raise
else:
raise SystemError('intent(cache) should have failed on multisegmented array')
def test_in_cache_from_2casttype_failure(self):
for t in self.type.all_types():
if t.elsize >= self.type.elsize:
continue
obj = array(self.num2seq,dtype=t.dtype)
shape = (len(self.num2seq),)
try:
a = self.array(shape,intent.in_.cache,obj)
except ValueError,msg:
if not str(msg).startswith('failed to initialize intent(cache) array'):
raise
else:
raise SystemError('intent(cache) should have failed on smaller array')
def test_cache_hidden(self):
shape = (2,)
a = self.array(shape,intent.cache.hide,None)
assert_(a.arr.shape==shape)
shape = (2,3)
a = self.array(shape,intent.cache.hide,None)
assert_(a.arr.shape==shape)
shape = (-1,3)
try:
a = self.array(shape,intent.cache.hide,None)
except ValueError,msg:
if not str(msg).startswith('failed to create intent(cache|hide)|optional array'):
raise
else:
raise SystemError('intent(cache) should have failed on undefined dimensions')
def test_hidden(self):
shape = (2,)
a = self.array(shape,intent.hide,None)
assert_(a.arr.shape==shape)
assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
shape = (2,3)
a = self.array(shape,intent.hide,None)
assert_(a.arr.shape==shape)
assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
assert_(a.arr.flags['FORTRAN'] and not a.arr.flags['CONTIGUOUS'])
shape = (2,3)
a = self.array(shape,intent.c.hide,None)
assert_(a.arr.shape==shape)
assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
assert_(not a.arr.flags['FORTRAN'] and a.arr.flags['CONTIGUOUS'])
shape = (-1,3)
try:
a = self.array(shape,intent.hide,None)
except ValueError,msg:
if not str(msg).startswith('failed to create intent(cache|hide)|optional array'):
raise
else:
raise SystemError('intent(hide) should have failed on undefined dimensions')
def test_optional_none(self):
shape = (2,)
a = self.array(shape,intent.optional,None)
assert_(a.arr.shape==shape)
assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
shape = (2,3)
a = self.array(shape,intent.optional,None)
assert_(a.arr.shape==shape)
assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
assert_(a.arr.flags['FORTRAN'] and not a.arr.flags['CONTIGUOUS'])
shape = (2,3)
a = self.array(shape,intent.c.optional,None)
assert_(a.arr.shape==shape)
assert_(a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype)))
assert_(not a.arr.flags['FORTRAN'] and a.arr.flags['CONTIGUOUS'])
def test_optional_from_2seq(self):
obj = self.num2seq
shape = (len(obj),)
a = self.array(shape,intent.optional,obj)
assert_(a.arr.shape==shape)
assert_(not a.has_shared_memory())
def test_optional_from_23seq(self):
obj = self.num23seq
shape = (len(obj),len(obj[0]))
a = self.array(shape,intent.optional,obj)
assert_(a.arr.shape==shape)
assert_(not a.has_shared_memory())
a = self.array(shape,intent.optional.c,obj)
assert_(a.arr.shape==shape)
assert_(not a.has_shared_memory())
def test_inplace(self):
obj = array(self.num23seq,dtype=self.type.dtype)
assert_(not obj.flags['FORTRAN'] and obj.flags['CONTIGUOUS'])
shape = obj.shape
a = self.array(shape,intent.inplace,obj)
assert_(obj[1][2]==a.arr[1][2],`obj,a.arr`)
a.arr[1][2]=54
assert_(obj[1][2]==a.arr[1][2]==array(54,dtype=self.type.dtype),`obj,a.arr`)
assert_(a.arr is obj)
assert_(obj.flags['FORTRAN']) # obj attributes are changed inplace!
assert_(not obj.flags['CONTIGUOUS'])
def test_inplace_from_casttype(self):
for t in self.type.cast_types():
if t is self.type:
continue
obj = array(self.num23seq,dtype=t.dtype)
assert_(obj.dtype.type==t.dtype)
assert_(obj.dtype.type is not self.type.dtype)
assert_(not obj.flags['FORTRAN'] and obj.flags['CONTIGUOUS'])
shape = obj.shape
a = self.array(shape,intent.inplace,obj)
assert_(obj[1][2]==a.arr[1][2],`obj,a.arr`)
a.arr[1][2]=54
assert_(obj[1][2]==a.arr[1][2]==array(54,dtype=self.type.dtype),`obj,a.arr`)
assert_(a.arr is obj)
assert_(obj.flags['FORTRAN']) # obj attributes are changed inplace!
assert_(not obj.flags['CONTIGUOUS'])
assert_(obj.dtype.type is self.type.dtype) # obj type is changed inplace!
# Dynamically generate one concrete TestCase per supported scalar type,
# mixing the shared-memory checks into each (Python 2 'exec' statement).
for t in Type._type_names:
    exec '''\
class test_%s_gen(unittest.TestCase,
        _test_shared_memory
        ):
    def setUp(self):
        self.type = Type(%r)
    array = lambda self,dims,intent,obj: Array(Type(%r),dims,intent,obj)
''' % (t,t,t)
# Direct invocation: build the extension module, then run via nose.
if __name__ == "__main__":
    setup()
    import nose
    nose.runmodule()
|
beiko-lab/gengis
|
bin/Lib/site-packages/numpy/f2py/tests/test_array_from_pyobj.py
|
Python
|
gpl-3.0
| 21,255 | 0.017549 |
#!/usr/bin/python3
#
# Copyright (c) 2014-2022 The Voxie Authors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import numpy as np
import voxie
import dbus
# Parse the Voxie-supplied command line and connect to the running instance.
args = voxie.parser.parse_args()
context = voxie.VoxieContext(args)
instance = context.createInstance()

# This script is only valid when launched as a Voxie "RunTool" action.
if args.voxie_action != 'RunTool':
    raise Exception('Invalid operation: ' + args.voxie_action)

inputObjectPath = args.voxie_script_target_object
# Resolve the target DBus object and cast it down to its data accessor.
inputObject = context.makeObject(context.bus, context.busName, inputObjectPath, [
    'de.uni_stuttgart.Voxie.Object']).CastTo('de.uni_stuttgart.Voxie.DataObject')
inputData = inputObject.Data.CastTo('de.uni_stuttgart.Voxie.TomographyRawData2DAccessor')

# print('Number of images: %d' % (inputData.NumberOfImages,)) # Not implemented
print('Current version: %s' % (inputData.CurrentVersionString,))
voxie-viewer/voxie
|
ext/RawDataTestScript.py
|
Python
|
mit
| 1,854 | 0.001618 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import re
from collections import OrderedDict
from uuid import uuid4
from warnings import warn
from numpy import object as np_object
from numpy import array, inf, isinf
from six import string_types
from cobra.core import Metabolite, Model, Reaction
from cobra.util import create_stoichiometric_matrix
from cobra.util.solver import set_objective
try:
import scipy.sparse as scipy_sparse
import scipy.io as scipy_io
except ImportError:
scipy_sparse = None
scipy_io = None
# precompiled regular expressions
_bracket_re = re.compile("r\[[a-z]\]$")
_underscore_re = re.compile(r"_[a-z]$")
def _get_id_compartment(id):
"""extract the compartment from the id string"""
bracket_search = _bracket_re.findall(id)
if len(bracket_search) == 1:
return bracket_search[0][1]
underscore_search = _underscore_re.findall(id)
if len(underscore_search) == 1:
return underscore_search[0][1]
return None
def _cell(x):
"""translate an array x into a MATLAB cell array"""
x_no_none = [i if i is not None else "" for i in x]
return array(x_no_none, dtype=np_object)
def load_matlab_model(infile_path, variable_name=None, inf=inf):
    """Load a cobra model stored as a .mat file

    Parameters
    ----------
    infile_path: str
        path to the file to to read
    variable_name: str, optional
        The variable name of the model in the .mat file. If this is not
        specified, then the first MATLAB variable which looks like a COBRA
        model will be used
    inf: value
        The value to use for infinite bounds. Some solvers do not handle
        infinite values so for using those, set this to a high numeric value.

    Returns
    -------
    cobra.core.Model.Model:
        The resulting cobra model

    Raises
    ------
    ImportError if scipy is unavailable, IOError if no MATLAB variable in
    the file parses as a COBRA model.
    """
    if not scipy_io:
        raise ImportError('load_matlab_model requires scipy')
    data = scipy_io.loadmat(infile_path)
    possible_names = []
    if variable_name is None:
        # skip meta variables
        meta_vars = {"__globals__", "__header__", "__version__"}
        possible_names = sorted(i for i in data if i not in meta_vars)
        if len(possible_names) == 1:
            variable_name = possible_names[0]
    if variable_name is not None:
        # Either caller-specified or the single unambiguous candidate.
        return from_mat_struct(data[variable_name], model_id=variable_name,
                               inf=inf)
    # Several candidates: try each until one parses as a COBRA model.
    for possible_name in possible_names:
        try:
            return from_mat_struct(data[possible_name], model_id=possible_name,
                                   inf=inf)
        except ValueError:
            pass
    # If code here is executed, then no model was found.
    raise IOError("no COBRA model found")
def save_matlab_model(model, file_name, varname=None):
    """Save the cobra model as a .mat file.

    This .mat file can be used directly in the MATLAB version of COBRA.

    Parameters
    ----------
    model : cobra.core.Model.Model object
        The model to save
    file_name : str or file-like object
        The file to save to
    varname : string
        The name of the variable within the MATLAB workspace
    """
    if not scipy_io:
        # Bug fix: the original message wrongly named load_matlab_model.
        raise ImportError('save_matlab_model requires scipy')
    if varname is None:
        # Fall back to the model id, or a generic name when it is empty.
        varname = str(model.id) \
            if model.id is not None and len(model.id) > 0 \
            else "exported_model"
    mat = create_mat_dict(model)
    scipy_io.savemat(file_name, {varname: mat},
                     appendmat=True, oned_as="column")
def create_mat_metabolite_id(model):
    """Yield a MATLAB-style id for each metabolite in *model*.

    When the metabolite id does not already encode a compartment suffix
    and a compartment is set, append it as ``id[c]``.
    """
    for metabolite in model.metabolites:
        encoded = _get_id_compartment(metabolite.id)
        if not encoded and metabolite.compartment:
            tag = model.compartments[metabolite.compartment].lower()
            yield '{}[{}]'.format(metabolite.id, tag)
        else:
            yield metabolite.id
def create_mat_dict(model):
    """create a dict mapping model attributes to arrays

    Returns an OrderedDict whose keys follow the MATLAB COBRA toolbox
    field names (mets, rxns, S, lb, ub, ...), suitable for
    scipy.io.savemat.
    """
    rxns = model.reactions
    mets = model.metabolites
    mat = OrderedDict()
    mat["mets"] = _cell([met_id for met_id in create_mat_metabolite_id(model)])
    mat["metNames"] = _cell(mets.list_attr("name"))
    mat["metFormulas"] = _cell([str(m.formula) for m in mets])
    try:
        mat["metCharge"] = array(mets.list_attr("charge")) * 1.
    except TypeError:
        # can't have any None entries for charge, or this will fail
        # (the field is simply omitted in that case)
        pass
    mat["genes"] = _cell(model.genes.list_attr("id"))
    # make a matrix for rxnGeneMat
    # reactions are rows, genes are columns
    rxn_gene = scipy_sparse.dok_matrix((len(model.reactions),
                                        len(model.genes)))
    if min(rxn_gene.shape) > 0:
        for i, reaction in enumerate(model.reactions):
            for gene in reaction.genes:
                rxn_gene[i, model.genes.index(gene)] = 1
        mat["rxnGeneMat"] = rxn_gene
    mat["grRules"] = _cell(rxns.list_attr("gene_reaction_rule"))
    mat["rxns"] = _cell(rxns.list_attr("id"))
    mat["rxnNames"] = _cell(rxns.list_attr("name"))
    mat["subSystems"] = _cell(rxns.list_attr("subsystem"))
    # one constraint-sense character per metabolite (e.g. 'E'/'L'/'G')
    mat["csense"] = "".join((
        met._constraint_sense for met in model.metabolites))
    stoich_mat = create_stoichiometric_matrix(model)
    mat["S"] = stoich_mat if stoich_mat is not None else [[]]
    # multiply by 1 to convert to float, working around scipy bug
    # https://github.com/scipy/scipy/issues/4537
    mat["lb"] = array(rxns.list_attr("lower_bound")) * 1.
    mat["ub"] = array(rxns.list_attr("upper_bound")) * 1.
    mat["b"] = array(mets.list_attr("_bound")) * 1.
    mat["c"] = array(rxns.list_attr("objective_coefficient")) * 1.
    mat["rev"] = array(rxns.list_attr("reversibility")) * 1
    mat["description"] = str(model.id)
    return mat
def from_mat_struct(mat_struct, model_id=None, inf=inf):
    """create a model from the COBRA toolbox struct

    The struct will be a dict read in by scipy.io.loadmat

    Raises ValueError when *mat_struct* is not a record array carrying at
    least the mandatory COBRA fields (rxns, mets, S, lb, ub).
    """
    m = mat_struct
    # loadmat wraps structs in a numpy record array; no field names means
    # this variable is not a struct at all
    if m.dtype.names is None:
        raise ValueError("not a valid mat struct")
    if not {"rxns", "mets", "S", "lb", "ub"} <= set(m.dtype.names):
        raise ValueError("not a valid mat struct")
    if "c" in m.dtype.names:
        c_vec = m["c"][0, 0]
    else:
        c_vec = None
        warn("objective vector 'c' not found")
    model = Model()
    if model_id is not None:
        model.id = model_id
    elif "description" in m.dtype.names:
        description = m["description"][0, 0][0]
        if not isinstance(description, string_types) and len(description) > 1:
            model.id = description[0]
            warn("Several IDs detected, only using the first.")
        else:
            model.id = description
    else:
        model.id = "imported_model"
    # NOTE: the repeated [0, 0] / [i][0] indexing unwraps the nested
    # object arrays produced by scipy.io.loadmat for MATLAB cell arrays
    for i, name in enumerate(m["mets"][0, 0]):
        new_metabolite = Metabolite()
        new_metabolite.id = str(name[0][0])
        if all(var in m.dtype.names for var in
               ['metComps', 'comps', 'compNames']):
            # MATLAB indices are 1-based, hence the -1
            comp_index = m["metComps"][0, 0][i][0] - 1
            new_metabolite.compartment = m['comps'][0, 0][comp_index][0][0]
            if new_metabolite.compartment not in model.compartments:
                comp_name = m['compNames'][0, 0][comp_index][0][0]
                model.compartments[new_metabolite.compartment] = comp_name
        else:
            # fall back to the compartment encoded in the metabolite id
            new_metabolite.compartment = _get_id_compartment(new_metabolite.id)
            if new_metabolite.compartment not in model.compartments:
                model.compartments[
                    new_metabolite.compartment] = new_metabolite.compartment
        try:
            new_metabolite.name = str(m["metNames"][0, 0][i][0][0])
        except (IndexError, ValueError):
            pass
        try:
            new_metabolite.formula = str(m["metFormulas"][0][0][i][0][0])
        except (IndexError, ValueError):
            pass
        try:
            new_metabolite.charge = float(m["metCharge"][0, 0][i][0])
            # store integral charges as int for cleaner round-tripping
            int_charge = int(new_metabolite.charge)
            if new_metabolite.charge == int_charge:
                new_metabolite.charge = int_charge
        except (IndexError, ValueError):
            pass
        model.add_metabolites([new_metabolite])
    new_reactions = []
    coefficients = {}
    for i, name in enumerate(m["rxns"][0, 0]):
        new_reaction = Reaction()
        new_reaction.id = str(name[0][0])
        new_reaction.lower_bound = float(m["lb"][0, 0][i][0])
        new_reaction.upper_bound = float(m["ub"][0, 0][i][0])
        # normalize any infinite bound to the caller-supplied inf value
        if isinf(new_reaction.lower_bound) and new_reaction.lower_bound < 0:
            new_reaction.lower_bound = -inf
        if isinf(new_reaction.upper_bound) and new_reaction.upper_bound > 0:
            new_reaction.upper_bound = inf
        if c_vec is not None:
            coefficients[new_reaction] = float(c_vec[i][0])
        try:
            new_reaction.gene_reaction_rule = str(m['grRules'][0, 0][i][0][0])
        except (IndexError, ValueError):
            pass
        try:
            new_reaction.name = str(m["rxnNames"][0, 0][i][0][0])
        except (IndexError, ValueError):
            pass
        try:
            new_reaction.subsystem = str(m['subSystems'][0, 0][i][0][0])
        except (IndexError, ValueError):
            pass
        new_reactions.append(new_reaction)
    model.add_reactions(new_reactions)
    set_objective(model, coefficients)
    # populate the stoichiometric matrix reaction by reaction
    coo = scipy_sparse.coo_matrix(m["S"][0, 0])
    for i, j, v in zip(coo.row, coo.col, coo.data):
        model.reactions[j].add_metabolites({model.metabolites[i]: v})
    return model
def _check(result):
"""ensure success of a pymatbridge operation"""
if result["success"] is not True:
raise RuntimeError(result["content"]["stdout"])
def model_to_pymatbridge(model, variable_name="model", matlab=None):
    """send the model to a MATLAB workspace through pymatbridge

    This model can then be manipulated through the COBRA toolbox

    Parameters
    ----------
    variable_name : str
        The variable name to which the model will be assigned in the
        MATLAB workspace
    matlab : None or pymatbridge.Matlab instance
        The MATLAB workspace to which the variable will be sent. If
        this is None, then this will be sent to the same environment
        used in IPython magics.

    Raises
    ------
    ImportError
        when scipy (or, with matlab=None, IPython) is unavailable
    RuntimeError
        when any pymatbridge call reports failure (via _check)
    """
    if scipy_sparse is None:
        raise ImportError("`model_to_pymatbridge` requires scipy!")
    if matlab is None:  # assumed to be running an IPython magic
        from IPython import get_ipython
        matlab = get_ipython().magics_manager.registry["MatlabMagics"].Matlab
    model_info = create_mat_dict(model)
    # the sparse S matrix is sent separately under a temporary name and
    # attached to the struct on the MATLAB side
    S = model_info["S"].todok()
    model_info["S"] = 0
    temp_S_name = "cobra_pymatbridge_temp_" + uuid4().hex
    _check(matlab.set_variable(variable_name, model_info))
    _check(matlab.set_variable(temp_S_name, S))
    _check(matlab.run_code("%s.S = %s;" % (variable_name, temp_S_name)))
    # all vectors need to be transposed
    for i in model_info.keys():
        if i == "S":
            continue
        _check(matlab.run_code("{0}.{1} = {0}.{1}';".format(variable_name, i)))
    _check(matlab.run_code("clear %s;" % temp_S_name))
|
zakandrewking/cobrapy
|
cobra/io/mat.py
|
Python
|
lgpl-2.1
| 11,174 | 0.000179 |
from catkin_pkg.python_setup import generate_distutils_setup
from setuptools import setup
# generate_distutils_setup pulls name/version/maintainer metadata from this
# package's package.xml so setup.py stays in sync with the ROS manifest.
d = generate_distutils_setup(
    packages=['catkin'],
    package_dir={'': 'python'},
    # command-line entry points shipped with catkin
    scripts=[
        'bin/catkin_find',
        'bin/catkin_init_workspace',
        'bin/catkin_make',
        'bin/catkin_make_isolated',
        'bin/catkin_test_results',
        'bin/catkin_topological_order',
    ],
)
setup(**d)
|
ros/catkin
|
setup.py
|
Python
|
bsd-3-clause
| 416 | 0 |
"""Command Line Interface"""
import os
import re
import codecs
import logging
from holland.core.exceptions import BackupError
from holland.lib.compression import open_stream, lookup_compression
from holland.lib.mysql import MySQLSchema, connect, MySQLError
from holland.lib.mysql import include_glob, exclude_glob, \
include_glob_qualified, \
exclude_glob_qualified
from holland.lib.mysql import DatabaseIterator, MetadataTableIterator, \
SimpleTableIterator
from holland.backup.mysqldump.base import start
from holland.backup.mysqldump.util import INIConfig, update_config
from holland.backup.mysqldump.util.ini import OptionLine, CommentLine
from holland.lib.mysql.option import load_options, \
write_options, \
build_mysql_config
from holland.backup.mysqldump.command import MySQLDump, MySQLDumpError, \
MyOptionError
from holland.backup.mysqldump.mock import MockEnvironment
LOG = logging.getLogger(__name__)
# We validate our config against the following spec
# (configobj-style validation spec consumed in MySQLDumpPlugin.__init__;
# each line declares the type, allowed values and default of one option)
CONFIGSPEC = """
[mysqldump]
extra-defaults = boolean(default=no)
mysql-binpath = force_list(default=list())
lock-method = option('flush-lock', 'lock-tables', 'single-transaction', 'auto-detect', 'none', default='auto-detect')
databases = force_list(default=list('*'))
exclude-databases = force_list(default=list())
tables = force_list(default=list("*"))
exclude-tables = force_list(default=list())
engines = force_list(default=list("*"))
exclude-engines = force_list(default=list())
exclude-invalid-views = boolean(default=no)
flush-logs = boolean(default=no)
flush-privileges = boolean(default=yes)
dump-routines = boolean(default=yes)
dump-events = boolean(default=yes)
stop-slave = boolean(default=no)
max-allowed-packet = string(default=128M)
bin-log-position = boolean(default=no)
file-per-database = boolean(default=yes)
additional-options = force_list(default=list())
estimate-method = string(default='plugin')
[compression]
method = option('none', 'gzip', 'gzip-rsyncable', 'pigz', 'bzip2', 'pbzip2', 'lzma', 'lzop', 'gpg', default='gzip')
options = string(default="")
inline = boolean(default=yes)
level = integer(min=0, max=9, default=1)
[mysql:client]
defaults-extra-file = force_list(default=list('~/.my.cnf'))
user = string(default=None)
password = string(default=None)
socket = string(default=None)
host = string(default=None)
port = integer(min=0, default=None)
""".splitlines()
class MySQLDumpPlugin(object):
    """MySQLDump Backup Plugin interface for Holland

    Provides size estimation, schema discovery/filtering and the actual
    mysqldump run, writing output under a target directory.
    """
    CONFIGSPEC = CONFIGSPEC
    def __init__(self, name, config, target_directory, dry_run=False):
        # name: backupset name; config: holland config object (validated
        # against CONFIGSPEC below); target_directory: output directory
        self.name = name
        self.config = config
        self.target_directory = target_directory
        self.dry_run = dry_run
        self.config.validate_config(self.CONFIGSPEC) # -> ValidationError
        # Setup a discovery shell to find schema items
        # This will iterate over items during the estimate
        # or backup phase, which will call schema.refresh()
        self.schema = MySQLSchema()
        config = self.config['mysqldump']
        self.schema.add_database_filter(include_glob(*config['databases']))
        self.schema.add_database_filter(
            exclude_glob(*config['exclude-databases'])
        )
        self.schema.add_table_filter(include_glob_qualified(*config['tables']))
        self.schema.add_table_filter(exclude_glob_qualified(*config['exclude-tables']))
        self.schema.add_engine_filter(include_glob(*config['engines']))
        self.schema.add_engine_filter(exclude_glob(*config['exclude-engines']))
        self.mysql_config = build_mysql_config(self.config['mysql:client'])
        self.client = connect(self.mysql_config['client'])
    def estimate_backup_size(self):
        """Estimate the size of the backup this plugin will generate

        Honors estimate-method: 'const:<size>' returns a fixed size,
        'plugin' sums the sizes of all non-excluded databases.
        Raises BackupError on invalid config or MySQL failure.
        """
        LOG.info("Estimating size of mysqldump backup")
        estimate_method = self.config['mysqldump']['estimate-method']
        if estimate_method.startswith('const:'):
            try:
                return parse_size(estimate_method[6:])
            except ValueError, exc:
                raise BackupError(str(exc))
        if estimate_method != 'plugin':
            raise BackupError("Invalid estimate-method '%s'" % estimate_method)
        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = MetadataTableIterator(self.client)
            try:
                self.client.connect()
                self.schema.refresh(db_iter=db_iter, tbl_iter=tbl_iter)
            except MySQLError, exc:
                LOG.error("Failed to estimate backup size")
                LOG.error("[%d] %s", *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
            return sum([db.size for db in self.schema.databases])
        finally:
            self.client.disconnect()
    def _fast_refresh_schema(self):
        """Refresh the schema, skipping table metadata where possible."""
        # determine if we can skip expensive table metadata lookups entirely
        # and just worry about finding database names
        # However, with lock-method=auto-detect we must look at table engines
        # to determine what lock method to use
        config = self.config['mysqldump']
        fast_iterate = config['lock-method'] != 'auto-detect' and \
            not config['exclude-invalid-views']
        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = SimpleTableIterator(self.client, record_engines=True)
            try:
                self.client.connect()
                self.schema.refresh(db_iter=db_iter,
                                    tbl_iter=tbl_iter,
                                    fast_iterate=fast_iterate)
            except MySQLError, exc:
                LOG.debug("MySQLdb error [%d] %s", exc_info=True, *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
        finally:
            self.client.disconnect()
    def backup(self):
        """Run a MySQL backup

        Handles dry-run mocking and optional stop-slave/start-slave
        bracketing around the real _backup() work.
        """
        if self.schema.timestamp is None:
            self._fast_refresh_schema()
        mock_env = None
        if self.dry_run:
            mock_env = MockEnvironment()
            mock_env.replace_environment()
            LOG.info("Running in dry-run mode.")
        try:
            if self.config['mysqldump']['stop-slave']:
                self.client = connect(self.mysql_config['client'])
                if self.client.show_status('Slave_running', session=None) != 'ON':
                    raise BackupError("stop-slave enabled, but replication is "
                                      "either not configured or the slave is not "
                                      "running.")
                self.config.setdefault('mysql:replication', {})
                _stop_slave(self.client, self.config['mysql:replication'])
            self._backup()
        finally:
            # always restart the slave if we stopped it, even on failure
            if self.config['mysqldump']['stop-slave'] and \
                'mysql:replication' in self.config:
                _start_slave(self.client, self.config['mysql:replication'])
            if mock_env:
                mock_env.restore_environment()
    def _backup(self):
        """Real backup method. May raise BackupError exceptions"""
        config = self.config['mysqldump']
        # setup defaults_file with ignore-table exclusions
        defaults_file = os.path.join(self.target_directory, 'my.cnf')
        write_options(self.mysql_config, defaults_file)
        if config['exclude-invalid-views']:
            LOG.info("* Finding and excluding invalid views...")
            definitions_path = os.path.join(self.target_directory,
                                            'invalid_views.sql')
            exclude_invalid_views(self.schema, self.client, definitions_path)
        add_exclusions(self.schema, defaults_file)
        # find the path to the mysqldump command
        mysqldump_bin = find_mysqldump(path=config['mysql-binpath'])
        LOG.info("Using mysqldump executable: %s", mysqldump_bin)
        # setup the mysqldump environment
        extra_defaults = config['extra-defaults']
        try:
            mysqldump = MySQLDump(defaults_file,
                                  mysqldump_bin,
                                  extra_defaults=extra_defaults)
        except MySQLDumpError, exc:
            raise BackupError(str(exc))
        LOG.info("mysqldump version %s", '.'.join([str(digit)
                 for digit in mysqldump.version]))
        options = collect_mysqldump_options(config, mysqldump, self.client)
        validate_mysqldump_options(mysqldump, options)
        os.mkdir(os.path.join(self.target_directory, 'backup_data'))
        if self.config['compression']['method'] != 'none' and \
            self.config['compression']['level'] > 0:
            try:
                cmd, ext = lookup_compression(self.config['compression']['method'])
            except OSError, exc:
                raise BackupError("Unable to load compression method '%s': %s" %
                                  (self.config['compression']['method'], exc))
            LOG.info("Using %s compression level %d with args %s",
                     self.config['compression']['method'],
                     self.config['compression']['level'],
                     self.config['compression']['options'])
        else:
            LOG.info("Not compressing mysqldump output")
            cmd = ''
            ext = ''
        try:
            start(mysqldump=mysqldump,
                  schema=self.schema,
                  lock_method=config['lock-method'],
                  file_per_database=config['file-per-database'],
                  open_stream=self._open_stream,
                  compression_ext=ext)
        except MySQLDumpError, exc:
            raise BackupError(str(exc))
    def _open_stream(self, path, mode, method=None):
        """Open a stream through the holland compression api, relative to
        this instance's target directory
        """
        path = os.path.join(self.target_directory, 'backup_data', path)
        compression_method = method or self.config['compression']['method']
        compression_level = self.config['compression']['level']
        compression_options = self.config['compression']['options']
        stream = open_stream(path,
                             mode,
                             compression_method,
                             compression_level,
                             extra_args=compression_options)
        return stream
    def info(self):
        """Summarize information about this backup"""
        import textwrap
        # NOTE: the template text below is runtime output; keep it verbatim
        return textwrap.dedent("""
        lock-method = %s
        file-per-database = %s
        Options used:
        flush-logs = %s
        flush-privileges = %s
        routines = %s
        events = %s
        Schema Filters:
        databases = %s
        exclude-databases = %s
        tables = %s
        exclude-tables = %s
        """).strip() % (
            self.config['mysqldump']['lock-method'],
            self.config['mysqldump']['file-per-database'] and 'yes' or 'no',
            self.config['mysqldump']['flush-logs'],
            self.config['mysqldump']['flush-privileges'],
            self.config['mysqldump']['dump-routines'],
            self.config['mysqldump']['dump-events'],
            ','.join(self.config['mysqldump']['databases']),
            ','.join(self.config['mysqldump']['exclude-databases']),
            ','.join(self.config['mysqldump']['tables']),
            ','.join(self.config['mysqldump']['exclude-tables'])
        )
def find_mysqldump(path=None):
    """Find a usable mysqldump binary in path or ENV[PATH]

    :param path: optional list of directories (or direct executable paths)
        to search before falling back to the PATH environment variable
    :returns: real path to the located mysqldump binary
    :raises BackupError: when no mysqldump binary can be found
    """
    # BUGFIX: ':'.join(None) raised TypeError when called with the default
    # path=None; treat None like an empty list so the ENV[PATH] fallback
    # documented above actually works.
    search_path = ':'.join(path or []) or os.environ.get('PATH', '')
    for _path in search_path.split(':'):
        # an entry may point directly at an executable file...
        if os.path.isfile(_path):
            return os.path.realpath(_path)
        # ...or at a directory containing 'mysqldump'
        if os.path.exists(os.path.join(_path, 'mysqldump')):
            return os.path.realpath(os.path.join(_path, 'mysqldump'))
    raise BackupError("Failed to find mysqldump in %s" % search_path)
def collect_mysqldump_options(config, mysqldump, client):
    """Do intelligent collection of mysqldump options from the config
    and add any additional options for further validation

    Options unsupported by the detected mysqldump version are skipped
    with a warning rather than added.
    """
    args = []
    if config['flush-logs']:
        args.append('--flush-logs')
    if config['flush-privileges']:
        if mysqldump.version >= (5, 0, 26):
            args.append('--flush-privileges')
        else:
            LOG.warning("--flush privileges is available only for mysqldump "
                        "in 5.0.26+")
    if config['dump-routines']:
        if mysqldump.version < (5, 0, 13):
            LOG.warning("--routines is not available before mysqldump 5.0.13+")
        else:
            if mysqldump.version < (5, 0, 20):
                LOG.warning("mysqldump will not dump DEFINER values before "
                            "version 5.0.20. You are running mysqldump from "
                            "version %s", mysqldump.version_str)
            args.append('--routines')
    if config['dump-events']:
        if mysqldump.version >= (5, 1, 8):
            args.append('--events')
        else:
            LOG.warning("--events only available for mysqldump 5.1.8+. skipping")
    if config['max-allowed-packet']:
        args.append('--max-allowed-packet=%s' % config['max-allowed-packet'])
    if config['bin-log-position']:
        if client.show_variable('log_bin') != 'ON':
            raise BackupError("bin-log-position requested but "
                              "bin-log on server not active")
        args.append('--master-data=2')
    args.extend(config['additional-options'])
    return args
def validate_mysqldump_options(mysqldump, options):
"""Validate and add the requested options to the mysqldump instance"""
error = False
options = [opt for opt in options if opt]
for option in options:
try:
mysqldump.add_option(option)
LOG.info("Using mysqldump option %s", option)
except MyOptionError, exc:
LOG.warning(str(exc))
def _stop_slave(client, config=None):
    """Stop MySQL replication

    Stops only the SQL thread, then (when *config* is given) records the
    current slave and master coordinates into it so _start_slave can
    sanity-check them later. Raises BackupError on MySQL failure.
    """
    try:
        client.stop_slave(sql_thread_only=True)
        LOG.info("Stopped slave")
    except MySQLError, exc:
        raise BackupError("Failed to stop slave[%d]: %s" % exc.args)
    if config is not None:
        try:
            slave_info = client.show_slave_status()
            if slave_info:
                # update config with replication info
                config['slave_master_log_pos'] = slave_info['exec_master_log_pos']
                config['slave_master_log_file'] = slave_info['relay_master_log_file']
        except MySQLError, exc:
            raise BackupError("Failed to acquire slave status[%d]: %s" % \
                              exc.args)
        try:
            master_info = client.show_master_status()
            if master_info:
                # record the local binary log coordinates too
                config['master_log_file'] = master_info['file']
                config['master_log_pos'] = master_info['position']
        except MySQLError, exc:
            raise BackupError("Failed to acquire master status [%d] %s" % exc.args)
    LOG.info("MySQL Replication has been stopped.")
def _start_slave(client, config=None):
    """Start MySQL replication

    Before restarting, compares the current slave/master coordinates with
    those recorded by _stop_slave and logs loud warnings if they moved
    during the backup. Sanity-check failures only warn; a failure to
    actually restart the slave raises BackupError.
    """
    try:
        slave_info = client.show_slave_status()
        if slave_info and slave_info['exec_master_log_pos'] != config['slave_master_log_pos']:
            LOG.warning("Sanity check on slave status failed.  "
                        "Previously recorded %s:%s but currently found %s:%s",
                        config['slave_master_log_file'], config['slave_master_log_pos'],
                        slave_info['relay_master_log_file'], slave_info['exec_master_log_pos'])
            LOG.warning("ALERT! Slave position changed during backup!")
    except MySQLError, exc:
        LOG.warning("Failed to sanity check replication[%d]: %s",
                    *exc.args)
    try:
        master_info = client.show_master_status()
        if master_info and master_info['position'] != config['master_log_pos']:
            LOG.warning("Sanity check on master status failed.  "
                        "Previously recorded %s:%s but currently found %s:%s",
                        config['master_log_file'], config['master_log_pos'],
                        master_info['file'], master_info['position'])
            LOG.warning("ALERT! Binary log position changed during backup!")
    except MySQLError, exc:
        LOG.warning("Failed to sanity check master status. [%d] %s", *exc.args)
    try:
        client.start_slave()
        LOG.info("Restarted slave")
    except MySQLError, exc:
        raise BackupError("Failed to restart slave [%d] %s" % exc.args)
def exclude_invalid_views(schema, client, definitions_file):
    """Flag invalid MySQL views as excluded to skip them during a mysqldump

    Probes each non-excluded view with SHOW FIELDS; views that fail with a
    known "invalid view" error code are marked excluded and their CREATE
    definitions are written to *definitions_file* so they can be restored
    manually. Raises BackupError on unexpected MySQL errors.
    """
    sqlf = open(definitions_file, 'w')
    LOG.info("* Invalid and excluded views will be saved to %s",
             definitions_file)
    cursor = client.cursor()
    try:
        print >>sqlf, "--"
        print >>sqlf, "-- DDL of Invalid Views"
        print >>sqlf, "-- Created automatically by Holland"
        print >>sqlf, "--"
        print >>sqlf
        for db in schema.databases:
            if db.excluded:
                continue
            for table in db.tables:
                if table.excluded:
                    continue
                if table.engine != 'view':
                    continue
                LOG.debug("Testing view %s.%s", db.name, table.name)
                invalid_view = False
                try:
                    cursor.execute('SHOW FIELDS FROM `%s`.`%s`' %
                                   (db.name, table.name))
                    # check for missing definers that would bork
                    # lock-tables
                    for _, error_code, msg in client.show_warnings():
                        if error_code == 1449: # ER_NO_SUCH_USER
                            raise MySQLError(error_code, msg)
                except MySQLError, exc:
                    # 1356 = View references invalid table(s)...
                    if exc.args[0] in (1356, 1142, 1143, 1449, 1267, 1271):
                        invalid_view = True
                    else:
                        LOG.error("Unexpected error when checking invalid "
                                  "view %s.%s: [%d] %s",
                                  db.name,
                                  table.name,
                                  *exc.args)
                        raise BackupError("[%d] %s" % exc.args)
                if invalid_view:
                    # NOTE: 'exc' is only bound when an exception fired,
                    # which is the only way invalid_view becomes True
                    LOG.warning("* Excluding invalid view `%s`.`%s`: [%d] %s",
                                db.name, table.name, *exc.args)
                    table.excluded = True
                    view_definition = client.show_create_view(db.name,
                                                              table.name,
                                                              use_information_schema=True)
                    if view_definition is None:
                        LOG.error("!!! Failed to retrieve view definition for "
                                  "`%s`.`%s`", db.name, table.name)
                        LOG.warning("!!! View definition for `%s`.`%s` will "
                                    "not be included in this backup", db.name,
                                    table.name)
                        continue
                    LOG.info("* Saving view definition for "
                             "`%s`.`%s`",
                             db.name, table.name)
                    print >>sqlf, "--"
                    print >>sqlf, "-- Current View: `%s`.`%s`" % \
                        (db.name, table.name)
                    print >>sqlf, "--"
                    print >>sqlf
                    print >>sqlf, view_definition + ';'
                    print >>sqlf
    finally:
        sqlf.close()
def add_exclusions(schema, config):
    """Given a MySQLSchema add --ignore-table options in a [mysqldump]
    section for any excluded tables.

    *config* is the path to a my.cnf-style defaults file; the section is
    appended only when at least one table is excluded. IOError while
    writing is logged and re-raised.
    """
    exclusions = []
    for db in schema.databases:
        if db.excluded:
            # whole database already filtered out; no per-table entries needed
            continue
        for table in db.tables:
            if table.excluded:
                LOG.info("Excluding table %s.%s", table.database, table.name)
                exclusions.append("ignore-table = " + table.database + '.' + table.name)
    if not exclusions:
        return
    try:
        my_cnf = codecs.open(config, 'a', 'utf8')
        print >>my_cnf
        print >>my_cnf, "[mysqldump]"
        for excl in exclusions:
            print >>my_cnf, excl
        my_cnf.close()
    except IOError, exc:
        LOG.error("Failed to write ignore-table exclusions to %s", config)
        raise
def parse_size(units_string):
    """Parse a MySQL-like size string into bytes

    >>> parse_size('4G')
    4294967296

    :raises ValueError: when the string is not <number><unit>
    """
    text = str(units_string)
    # unit letters accepted in either case: kilo through exa
    match = re.match(r'^(\d+(?:[.]\d+)?)([kKmMgGtTpPeE])$', text)
    if not match:
        raise ValueError("Invalid constant size syntax %r" % text)
    number, unit = match.groups()
    exponent = "KMGTPE".index(unit.upper())
    return int(float(number) * 1024 ** (exponent + 1))
|
m00dawg/holland
|
plugins/holland.backup.mysqldump/holland/backup/mysqldump/plugin.py
|
Python
|
bsd-3-clause
| 21,830 | 0.002199 |
__author__ = 'Aditya Vikram Agarwal'
import pygame
from random import randint
import random
import player
import princess
import donkey
import block
import fireball
import coin
import ladder
class Board:
    def __init__(self, screen, testmode):
        """Build the whole game board: platforms, ladders, coins, actors.

        screen: pygame display surface drawn to when testmode == 1
        testmode: 1 for game mode (drawing enabled), 0 for test mode
        """
        self.MODE = testmode
        self.blocks = []
        self.ladders = []
        self.coins = []
        self.fireballs = []
        self.castleblocks = []
        self.levellimits = {}
        self.ladderlimits = {}
        self.donkey = None
        self.princess = None
        self.donkey_group = []
        self.princess_group = []
        # start defining constants here
        self.PLAYER_SPEED = 10
        self.PLAYER_CLIMB_SPEED = 5
        self.FULL_LADDER_HEIGHT = 95
        self.LADDER_WIDTH = 30
        self.HALF_LADDER_HEIGHT = 35
        self.PLAYER_HEIGHT = 20
        self.PLAYER_WIDTH = 20
        self.COIN_WIDTH = 20
        self.COIN_HEIGHT = 20
        self.COIN_LEVELS = [470, 390, 310, 230, 150, 70]
        self.FIREBALL_HEIGHT = 25
        self.FIREBALL_WIDTH = 25
        self.FIREBALL_SPEED = 5
        self.JUMP_LIMIT = 30
        self.PLAYER_SPAWN_LEVEL = 480
        self.DONKEY_SPEED = 3
        self.PLAYER_DROP_LEVEL = None
        # End defining constants
        self.block_group = pygame.sprite.RenderPlain(*self.blocks)
        self.ladder_group = pygame.sprite.RenderPlain(*self.ladders)
        self.coin_group = pygame.sprite.RenderPlain(*self.coins)
        self.fireball_group = pygame.sprite.RenderPlain(*self.fireballs)
        # NOTE(review): built from self.blocks, not self.castleblocks;
        # harmless because initcastle() below rebuilds it, but looks odd
        self.castle_block_group = pygame.sprite.RenderPlain(*self.blocks)
        self.initlogs(screen)
        self.initladders(screen)
        self.initcoins(screen)
        self.initdonkey(screen)
        self.initprincess(screen)
        self.initcastle(screen)
        self.plr = [player.Player("Images/player2.png", "Images/player.png", "Images/player3.png", "Images/player4.png",
                                  (0, self.PLAYER_SPAWN_LEVEL), self.PLAYER_WIDTH, self.PLAYER_HEIGHT, 0, 2)]
        self.plr_group = pygame.sprite.RenderPlain(*self.plr)
        if(self.MODE == 1):
            self.plr_group.draw(screen)
        # maps any y coordinate (1..500) to the platform level at/below it
        self.playerparentdict = {}
        self.fireballparentdict = {}
        self.playerparentdict[500] = self.PLAYER_SPAWN_LEVEL
        for i in range(499, 0, -1):  # Player's regular positions in each level
            if i in [480, 400, 320, 240, 160, 80]:
                self.playerparentdict[i] = i
            else:
                self.playerparentdict[i] = self.playerparentdict[i + 1]
        self.fireballparentdict[500] = self.PLAYER_SPAWN_LEVEL
        for i in range(499, 0, -1):  # Fireballs' regular positions in each level
            if i in [480, 400, 320, 240, 160, 80]:
                self.fireballparentdict[i] = i
            else:
                self.fireballparentdict[i] = self.fireballparentdict[i + 1]
def initlogs(self, screen): # Initialize all blocks
self.levellimits = {400: 1, 320: 2, 240: 1, 160: 2, 80: 1, 30: 3}
self.blocks = [block.Block("Images/log.png", "Images/log.png", (0, 0), 1200, 20),
block.Block("Images/log.png", "Images/log.png", (0, 100), 700, 20),
block.Block("Images/log.png", "Images/log.png", (200, 180), 1000, 20),
block.Block("Images/log.png", "Images/log.png", (0, 260), 1000, 20),
block.Block("Images/log.png", "Images/log.png", (200, 340), 1000, 20),
block.Block("Images/log.png", "Images/log.png", (0, 420), 1000, 20),
block.Block("Images/log.png", "Images/log.png", (0, 500), 1200, 20),
]
self.block_group = pygame.sprite.RenderPlain(*self.blocks)
if(self.MODE == 1): #1 implies game mode , 0 implies test mode
self.block_group.draw(screen)
def initdonkey(self, screen): # Initialize donkey
self.donkey = donkey.Donkey("Images/Donkey2.png", "Images/Donkey.png", (20, 50), 40, 50, 0)
self.donkey_group = pygame.sprite.RenderPlain(self.donkey)
if(self.MODE == 1):
self.donkey_group.draw(screen)
def initprincess(self, screen): # Initialize princess
self.princess = princess.Princess("Images/princess2.png", "Images/princess2.png", (120, 20), 20, 30, 0)
self.princess_group = pygame.sprite.RenderPlain(self.princess)
if(self.MODE == 1):
self.princess_group.draw(screen)
    def initladders(self, screen):  # Initialize all ladders
        """Create full, broken and castle ladders and record per-ladder
        climb limits in self.ladderlimits (keyed by top-left position)."""
        self.ladders = [ladder.Ladder("Images/ladder.png", "Images/ladder.png", (800, 419), self.LADDER_WIDTH,
                                      self.FULL_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder.png", "Images/ladder.png", (300, 339), self.LADDER_WIDTH,
                                      self.FULL_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder.png", "Images/ladder.png", (500, 259), self.LADDER_WIDTH,
                                      self.FULL_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder.png", "Images/ladder.png", (900, 179), self.LADDER_WIDTH,
                                      self.FULL_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder.png", "Images/ladder.png", (600, 99), self.LADDER_WIDTH,
                                      self.FULL_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken.png", "Images/ladder_broken.png", (650, 335),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken_down.png", "Images/ladder_broken_down.png", (650, 400),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken.png", "Images/ladder_broken.png", (850, 255),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken_down.png", "Images/ladder_broken_down.png", (850, 320),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken.png", "Images/ladder_broken.png", (300, 95),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken_down.png", "Images/ladder_broken_down.png", (300, 160),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/castleladder.png", "Images/castleladder.png", (220, 45),
                                      self.LADDER_WIDTH, ((self.FULL_LADDER_HEIGHT - 5) * 2) / 3)
                        ]
        # compute the lowest y the player may climb to for each ladder;
        # the offsets distinguish full / castle / broken ladders
        for l in self.ladders:
            x, y = l.getPosition()
            w, h = l.getSize()
            if h == self.FULL_LADDER_HEIGHT:
                self.ladderlimits[l.getPosition()] = y + 1 + 60
            else:
                if h == ((self.FULL_LADDER_HEIGHT - 5) * 2) / 3:
                    # castle ladder (shorter than a full ladder)
                    self.ladderlimits[l.getPosition()] = y + 5 + 30
                elif y % 10 == 0:
                    # lower half of a broken ladder
                    self.ladderlimits[l.getPosition()] = y
                else:
                    # upper half of a broken ladder
                    self.ladderlimits[l.getPosition()] = y + 5 + 60
        self.ladder_group = pygame.sprite.RenderPlain(*self.ladders)
        if(self.MODE == 1):
            self.ladder_group.draw(screen)
def initcoins(self, screen): # Initialize all coins
self.coins = []
x = 0
for i in range(0, 20):
y = self.COIN_LEVELS[randint(0, 5)]
if y == 470:
x = random.randrange(0, 1170, 30)
elif y in [390, 230]:
x = random.randrange(0, 1000, 30)
elif y in [310, 150]:
x = random.randrange(200, 1170, 30)
elif y == 70:
x = random.randrange(350, 700, 30)
self.coins += [coin.Coin("Images/coin.png", "Images/coin.png", (x, y), self.COIN_WIDTH, self.COIN_HEIGHT)]
self.coin_group = pygame.sprite.RenderPlain(*self.coins)
if(self.MODE == 1):
self.coin_group.draw(screen)
def initcastle(self, screen):
self.castleblocks = [block.Block("Images/castle.png", "Images/castle.png", (110, 50), 180, 10),
block.Block("Images/castlepillar.png", "Images/castlepillar.png", (100, 20), 20, 40),
block.Block("Images/castlepillar.png", "Images/castlepillar.png", (280, 20), 20, 40),
]
self.castle_block_group = pygame.sprite.RenderPlain(*self.castleblocks)
if(self.MODE == 1):
self.castle_block_group.draw(screen)
def createfireball(self): # Creating fireballs
donkeyx, donkeyy = self.donkey.getPosition()
self.fireballs += [fireball.Fireball("Images/fireball.png", "Images/fireball.png", (donkeyx + 5, 80),
self.FIREBALL_WIDTH, self.FIREBALL_HEIGHT, randint(1, 2))]
self.fireball_group = pygame.sprite.RenderPlain(*self.fireballs)
def key_pressed(self, event): # Handling a key pressed event
x, y = self.plr[0].getPosition()
if event == 1:
self.plr[0].setState(0)
x += self.PLAYER_SPEED
if event == 2:
self.plr[0].setState(1)
x -= self.PLAYER_SPEED
if event == 3:
y -= self.PLAYER_CLIMB_SPEED
if event == 4:
y += self.PLAYER_SPEED
x = max(x, 0)
y = max(y, 0)
x = min(x, 1170)
y = min(y, self.PLAYER_SPAWN_LEVEL)
self.plr[0].setPosition((x, y))
    def checkMidAir(self):  # Detecting that player should drop beyond block limits
        """Nudge the player downward when standing past a platform's edge.

        levellimits value 1 means the platform is open on the right
        (x > 1000), 2 means open on the left (x < 170); level 80 has an
        extra gap past x == 700.
        """
        x, y = self.plr[0].getPosition()
        if y == 80 and x > 700:
            # top platform ends at x == 700
            y += 0.1 * self.PLAYER_SPEED
        if y in self.levellimits and int(self.levellimits[y]) == 1 and x > 1000:
            y += 0.1 * self.PLAYER_SPEED
        if y in self.levellimits and int(self.levellimits[y]) == 2 and x < 170:
            y += 0.1 * self.PLAYER_SPEED
        self.plr[0].setPosition((x, y))
def update(self, screen): # Update the board
if(self.MODE == 1):
self.coin_group.draw(screen)
self.block_group.draw(screen)
self.castle_block_group.draw(screen)
self.ladder_group.draw(screen)
screen.blit(self.donkey.image, self.donkey.getPosition())
self.fireball_group.draw(screen)
self.princess_group.draw(screen)
self.plr_group.draw(screen)
def getLadderCollisions(self): # Check if player is in touch with any ladder
state = 0
broken_ladders = [(650, 335), (650, 400), (850, 255), (850, 320), (300, 95), (300, 160)]
castleladder = (220, 50)
for s in self.ladder_group.sprites():
rect1 = self.plr[0].rect
rect1.topleft = self.plr[0].getPosition()
playerx, playery = rect1.topleft
rect1.height = self.PLAYER_HEIGHT
rect1.width = self.PLAYER_WIDTH
rect2 = s.rect
ladderx, laddery = s.rect.topleft
rect2.height = self.FULL_LADDER_HEIGHT
rect2.width = self.LADDER_WIDTH
if rect2.topleft == castleladder:
rect2.height = ((self.FULL_LADDER_HEIGHT - 5) * 2) / 3
if rect2.topleft in broken_ladders:
rect2.height = self.HALF_LADDER_HEIGHT
if rect1.colliderect(rect2):
if playery not in self.levellimits and playery != self.PLAYER_SPAWN_LEVEL:
self.plr[0].setPosition((ladderx + 5, playery))
self.plr[0].setState(2)
state = 1
break
if state == 1:
return 1
else:
return 0
def checkfireballcollision(self): # Check if player is dead and respawn
for s in self.fireball_group.sprites():
rect1 = self.plr[0].rect
rect1.topleft = self.plr[0].getPosition()
rect1.height = rect1.width = 20
rect2 = s.rect
rect2.height = self.FIREBALL_HEIGHT
rect2.width = self.FIREBALL_WIDTH
if rect1.colliderect(rect2):
if self.plr[0].getLives() == 0:
return 0
else:
self.respawnPlayer()
self.plr[0].setState(0)
self.plr[0].setLives(self.plr[0].getLives() - 1)
return 1
return -1
def dropplayer(self): # Drop if player is in middle of air
x, y = self.plr[0].getPosition()
levelpos = y
levelpos = min(self.PLAYER_SPAWN_LEVEL, levelpos)
levelpos = self.playerparentdict[levelpos]
if y == levelpos:
return
self.plr[0].setPosition((x, min(y + 10, levelpos)))
def getCoinCollisions(self): # Checking collisions with any coin
for c in self.coin_group.sprites():
rect1 = self.plr[0].rect
rect1.topleft = self.plr[0].getPosition()
rect1.height = self.PLAYER_HEIGHT
rect1.width = self.PLAYER_WIDTH
rect2 = c.rect
rect2.height = self.COIN_HEIGHT
rect2.width = self.COIN_WIDTH
if rect1.colliderect(rect2):
c.kill()
return 1
return 0
def playerjump(self, jumpspeed): # Jumping up function
x, y = self.plr[0].getPosition()
levelpos = y
levelpos = min(self.PLAYER_SPAWN_LEVEL, levelpos)
levelpos = self.playerparentdict[levelpos]
if y == levelpos:
self.PLAYER_DROP_LEVEL = y
if y <= levelpos - self.JUMP_LIMIT:
self.plr[0].setPosition((x, levelpos - self.JUMP_LIMIT))
return 1
else:
self.plr[0].setPosition((x, y - jumpspeed))
return 0
    def playerjumpdown(self, jumpspeed):  # Jumping down function
        """Move the player down by jumpspeed; return 1 once landed, else 0.

        PLAYER_DROP_LEVEL (set by playerjump) remembers the platform the
        jump started from; landing prefers it when it is at or above the
        current position.
        """
        x, y = self.plr[0].getPosition()
        levelpos = y
        if self.PLAYER_DROP_LEVEL:
            # Use the remembered take-off level only when it is not below us,
            # then clear it so the next jump starts fresh.
            if min(levelpos, self.PLAYER_DROP_LEVEL) == self.PLAYER_DROP_LEVEL:
                levelpos = min(levelpos, self.PLAYER_DROP_LEVEL)
            self.PLAYER_DROP_LEVEL = None
        levelpos = self.playerparentdict[levelpos]
        if y >= levelpos:
            self.plr[0].setPosition((x, levelpos))
            return 1
        else:
            self.plr[0].setPosition((x, y + jumpspeed))
            return 0
def checkplayerlevel(self): # checks that player should not fall down beyond ladder through a block
x, y = self.plr[0].getPosition()
for s in self.ladder_group.sprites():
rect1 = self.plr[0].rect
rect1.topleft = self.plr[0].getPosition()
rect1.height = self.PLAYER_HEIGHT
rect1.width = self.PLAYER_WIDTH
rect2 = s.rect
if rect1.colliderect(rect2):
y = min(y, self.ladderlimits[rect2.topleft])
self.plr[0].setPosition((x, y))
break
    def updatefireballs(self):  # Update fireball positions and directions
        """Advance every fireball one tick.

        Fireballs roll along platforms, may randomly descend ladders
        (state 3), bounce off the screen edges and fall past platform
        edges. Spent fireballs parked at the bottom-left corner are not
        re-appended and so are discarded by the trailing del; the sprite
        group is rebuilt at the end.
        """
        i = 0
        for s in self.fireball_group.sprites():
            x, y = s.getPosition()
            if x <= 0 and y == self.PLAYER_SPAWN_LEVEL:
                pass  # spent fireball: skipped, removed by the del below
            else:
                state = s.getState()
                # Bounce off the horizontal screen bounds (1 = moving right, 2 = left).
                if x <= 0:
                    state = 1
                if x >= 1180:
                    state = 2
                if state != 3:  # state 3 means "descending a ladder"
                    if state == 1:
                        x += self.FIREBALL_SPEED
                    else:
                        x -= self.FIREBALL_SPEED
                    collisions = pygame.sprite.spritecollide(s, self.ladder_group, False)
                    if collisions:
                        ly = self.ladderlimits[collisions[0].rect.topleft]
                        ladderx, laddery = collisions[0].rect.topleft
                        if y != ly:
                            # 1-in-10 chance to start sliding down this ladder.
                            val = randint(1, 10)
                            if val == 5:
                                y += 2 * self.FIREBALL_SPEED
                                x = ladderx
                                state = 3
                    # Fall off open platform edges (same edge rules as checkMidAir).
                    if y == 80 and x > 700:
                        y += 2 * self.FIREBALL_SPEED
                        state = 3
                    if y in self.levellimits and int(self.levellimits[y]) == 1 and x > 1000:
                        y += 2 * self.FIREBALL_SPEED
                        state = 3
                    if y in self.levellimits and int(self.levellimits[y]) == 2 and x < 170:
                        y += 2 * self.FIREBALL_SPEED
                        state = 3
                else:
                    # Keep descending until the parent platform level is reached,
                    # then pick a fresh horizontal direction at random.
                    y = min(self.fireballparentdict[y], y + 2 * self.FIREBALL_SPEED)
                    if self.fireballparentdict[y] == y:
                        state = randint(0, 1)
                self.fireballs[i] = fireball.Fireball("Images/fireball.png", "Images/fireball.png", (x, y),
                                                      self.FIREBALL_WIDTH, self.FIREBALL_HEIGHT, state)
                i += 1
        del self.fireballs[i:]
        self.fireball_group = pygame.sprite.RenderPlain(*self.fireballs)
def updatedonkey(self, flipdonkey): # Update donkey position and direction
self.donkey.setState(self.donkey.getState() ^ flipdonkey)
direction = self.donkey.getdirection()
x, y = self.donkey.getPosition()
if x >= 180:
direction = 1
if x <= 0:
direction = 0
if direction == 0:
x += self.DONKEY_SPEED
else:
x -= self.DONKEY_SPEED
x = min(x,180)
x = max(x,0)
self.donkey.setdirection(direction)
self.donkey.setPosition((x, y))
self.donkey_group = pygame.sprite.RenderPlain(self.donkey)
    def getPlayerScore(self):
        """Return the player's current score."""
        return self.plr[0].getScore()
    def setPlayerScore(self, newscore):
        """Set the player's score to newscore."""
        self.plr[0].setScore(newscore)
    def getPlayerLives(self):
        """Return the player's remaining lives."""
        return self.plr[0].getLives()
def checkwin(self): # check if player reached destination
x, y = self.plr[0].getPosition()
if y <= 35:
for b in self.castle_block_group.sprites():
rect1 = self.plr[0].rect
rect1.topleft = self.plr[0].getPosition()
rect1.height = self.PLAYER_HEIGHT
rect1.width = self.PLAYER_WIDTH
rect2 = b.rect
if rect1.colliderect(rect2):
return 1
return 0
    def setPlayerstraight(self):  # Set player straight when not moving
        """Put the player sprite into the idle/straight pose (state 3)."""
        self.plr[0].setState(3)
    def respawnPlayer(self):  # Respawn player at left bottom
        """Clear all fireballs and put the player back at the spawn point."""
        self.killfireballs()
        self.plr[0].setPosition((0, self.PLAYER_SPAWN_LEVEL))
    def setplayerlives(self):
        """Reset the player's remaining lives to 2."""
        self.plr[0].setLives(2)
    def killfireballs(self):  # Kill all fireballs
        """Drop every fireball and rebuild the (now empty) sprite group."""
        self.fireballs = []
        self.fireball_group = pygame.sprite.RenderPlain(*self.fireballs)
    def upgradeplayerlevel(self):
        """Advance the player to the next level."""
        self.plr[0].upgradelevel()
    def getplayerlevel(self):
        """Return the player's current level."""
        return self.plr[0].getlevel()
    def boostfireball(self):  # Increase speed of fireball
        """Raise the global fireball speed by 2 (difficulty ramp)."""
        self.FIREBALL_SPEED += 2
|
adityavagarwal/DonkeyKong
|
board.py
|
Python
|
mpl-2.0
| 19,483 | 0.00272 |
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from djforms.music.theatre.summer_camp import BCC, TO_LIST, REG_FEE
from djforms.processors.models import Contact, Order
from djforms.processors.forms import TrustCommerceForm
from djforms.music.theatre.summer_camp.forms import RegistrationForm
from djtools.utils.mail import send_mail
def registration(request):
    """Music Theatre summer-camp registration view.

    GET: render empty registration and payment forms.
    POST: validate the registration; credit-card payments are charged
    through TrustCommerce, otherwise a 'Pay later' order is stored. On
    success an email goes to TO_LIST (bcc BCC) and the user is redirected
    to the success page; otherwise the forms re-render with the status.
    """
    status = None
    # NOTE(review): msg is never assigned below, so the template always
    # receives msg=None — confirm whether that is intentional.
    msg = None
    if request.POST:
        form_reg = RegistrationForm(request.POST)
        if form_reg.is_valid():
            contact = form_reg.save()
            # credit card payment
            if contact.payment_method == 'Credit Card':
                order = Order(
                    total=REG_FEE,auth='sale',status='In Process',
                    operator='DJMusicTheatreCamp'
                )
                form_proc = TrustCommerceForm(order, contact, request.POST)
                if form_proc.is_valid():
                    # Charge succeeded: persist the transaction details.
                    r = form_proc.processor_response
                    order.status = r.msg['status']
                    order.transid = r.msg['transid']
                    order.cc_name = form_proc.name
                    order.cc_4_digits = form_proc.card[-4:]
                    order.save()
                    contact.order.add(order)
                    order.reg = contact
                    sent = send_mail(
                        request, TO_LIST,
                        'Music Theatre summer camp registration',
                        contact.email,
                        'music/theatre/summer_camp/registration_email.html',
                        order, BCC
                    )
                    order.send_mail = sent
                    order.save()
                    return HttpResponseRedirect(
                        reverse('music_theatre_summer_camp_success')
                    )
                else:
                    # Charge failed or processor form invalid: record what we can
                    # and fall through to re-render with the failure status.
                    r = form_proc.processor_response
                    if r:
                        order.status = r.status
                    else:
                        order.status = 'Form Invalid'
                    order.cc_name = form_proc.name
                    if form_proc.card:
                        order.cc_4_digits = form_proc.card[-4:]
                    order.save()
                    contact.order.add(order)
                    status = order.status
                    order.reg = contact
            else:
                # Pay-later (non-card) registration.
                order = Order(
                    total=REG_FEE,auth='COD',status='Pay later',
                    operator='DJMusicTheatreCamp'
                )
                order.save()
                contact.order.add(order)
                order.reg = contact
                sent = send_mail(
                    request, TO_LIST,
                    'Music Theatre summer camp registration',
                    contact.email,
                    'music/theatre/summer_camp/registration_email.html',
                    order, BCC
                )
                order.send_mail = sent
                order.save()
                return HttpResponseRedirect(
                    reverse('music_theatre_summer_camp_success')
                )
        else:
            # Registration form invalid: still validate the payment form so
            # its errors display alongside the registration errors.
            if request.POST.get('payment_method') == 'Credit Card':
                form_proc = TrustCommerceForm(None, request.POST)
                form_proc.is_valid()
            else:
                form_proc = TrustCommerceForm()
    else:
        form_reg = RegistrationForm()
        form_proc = TrustCommerceForm()
    return render(
        request,
        'music/theatre/summer_camp/registration_form.html',
        {
            'form_reg': form_reg,'form_proc':form_proc,
            'status':status,'msg':msg,
        }
    )
|
carthagecollege/django-djforms
|
djforms/music/theatre/summer_camp/views.py
|
Python
|
unlicense
| 3,884 | 0.002317 |
import facebook
from functools import update_wrapper, wraps
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseBadRequest
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from django.conf import settings
def canvas_only(function=None):
    """
    Decorator ensures that a page is only accessed from within a facebook application.

    The wrapped view is reached only when the POST carries a valid facebook
    ``signed_request`` naming an authorised user; otherwise a 400 response
    or a JavaScript redirect to the auth dialog is returned.
    """
    def _dec(view_func):
        # Bug fix: preserve the wrapped view's metadata (name/docstring).
        # functools.wraps was imported at the top of this file but never
        # applied here, unlike in facebook_required below.
        @wraps(view_func, assigned=available_attrs(view_func))
        def _view(request, *args, **kwargs):
            # Make sure we're receiving a signed_request from facebook
            if not request.POST.get('signed_request'):
                return HttpResponseBadRequest('<h1>400 Bad Request</h1><p>Missing <em>signed_request</em>.</p>')
            # Parse the request and ensure it's valid
            signed_request = request.POST["signed_request"]
            data = facebook.parse_signed_request(signed_request, settings.FACEBOOK_SECRET_KEY)
            if data is False:
                return HttpResponseBadRequest('<h1>400 Bad Request</h1><p>Malformed <em>signed_request</em>.</p>')
            # If the user has not authorised redirect them
            if not data.get('user_id'):
                scope = getattr(settings, 'FACEBOOK_PERMS', None)
                auth_url = facebook.auth_url(settings.FACEBOOK_APP_ID, settings.FACEBOOK_CANVAS_PAGE, scope)
                markup = '<script type="text/javascript">top.location.href="%s"</script>' % auth_url
                return HttpResponse(markup)
            # Success so return the view
            return view_func(request, *args, **kwargs)
        return _view
    return _dec(function)
def facebook_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Decorator for views that checks that the user is logged in, redirecting
    to the log-in page if necessary.
    """
    # Local re-implementation of django.contrib.auth's user_passes_test,
    # except the test receives the request itself (to inspect request.facebook)
    # rather than request.user.
    def _passes_test(test_func, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
        if not login_url:
            from django.conf import settings
            login_url = settings.LOGIN_URL
        def decorator(view_func):
            def _wrapped_view(request, *args, **kwargs):
                if test_func(request):
                    return view_func(request, *args, **kwargs)
                # Not authenticated: bounce to login, preserving the target path.
                path = urlquote(request.get_full_path())
                tup = login_url, redirect_field_name, path
                return HttpResponseRedirect('%s?%s=%s' % tup)
            return wraps(view_func, assigned=available_attrs(view_func))(_wrapped_view)
        return decorator
    actual_decorator = _passes_test(
        lambda r: r.facebook,
        redirect_field_name=redirect_field_name
    )
    # Support both bare @facebook_required and @facebook_required(...) usage.
    if function:
        return actual_decorator(function)
    return actual_decorator
|
srijanmishra/django-facebook
|
django_facebook/decorators.py
|
Python
|
mit
| 2,793 | 0.003222 |
from django.conf.urls import include, url
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.core import urlresolvers
from django.utils.html import format_html
from wagtail.wagtailcore import hooks
from modeladminutils import admin_urls
@hooks.register('register_admin_urls')
def register_admin_urls():
    """Mount the modeladminutils chooser URLs under the Wagtail admin."""
    return [
        url(r'^adminmodel/', include(admin_urls,
            namespace='modeladminutils',
            app_name='modeladminutils')),
    ]
@hooks.register('insert_editor_js')
def editor_js():
    """Inject the admin-model chooser script and its endpoint URL into the
    Wagtail page editor."""
    return format_html(
        """
        <script src="{0}"></script>
        <script>window.chooserUrls.adminmodelChooser = '{1}';</script>
        """,
        static('modeladminutils/js/adminmodel-chooser.js'),
        urlresolvers.reverse('modeladminutils:choose_adminmodel')
    )
|
nott/next.filmfest.by
|
modeladminutils/wagtail_hooks.py
|
Python
|
unlicense
| 891 | 0 |
#!/usr/bin/env python
#******************************************
#collection of handy tools when dealing with fits and search phase
#******************************************
#import stuff
import sys, os, math, ROOT
#******************************************
def simpleFit(fileName, histDir, histName, hmin=1100., hmax=13000., nPar=3, draw=False):
    """Fit a smoothly-falling dijet-style function to a mass histogram.

    Reads histogram ``histName`` (optionally inside directory ``histDir``,
    '' for top level) from the ROOT file ``fileName``, scales it by bin
    width, fits the 3/4/5-parameter dijet function between ``hmin`` and
    ``hmax`` (GeV) and returns the fitted parameter values as a list.
    Set ``draw=True`` to display the histogram and fit interactively.
    """
    ROOT.TH1.SetDefaultSumw2()
    # NOTE(review): StatOverflows() is called with no flag — presumably meant
    # to enable overflow statistics; confirm against the ROOT TH1 docs.
    ROOT.TH1.StatOverflows()
    # Renamed from 'file' so the Python builtin is not shadowed.
    infile = ROOT.TFile(fileName)
    if not infile:
        raise SystemExit('\n***ERROR*** couldn\'t find file: %s'%fileName)
    if histDir != '':
        hist = infile.GetDirectory(histDir).Get(histName)
    else:
        hist = infile.Get(histName)
    if not hist:
        raise SystemExit('\n***ERROR*** couldn\'t find hist: %s'%histName)
    hist.Scale(1.,'width')
    hist.GetXaxis().SetTitle('m [GeV]');
    hist.GetYaxis().SetTitle('entries/GeV');#NOTE it's scaled
    hist.SetMarkerColor(1);
    hist.SetLineColor(1);
    if draw is True:
        c1 = ROOT.TCanvas('c1', 'c1', 100, 50, 800, 600)
        c1.SetLogy(1)
        c1.SetLogx(1)
        hist.Draw();
    # Standard dijet parameterisations, x scaled to sqrt(s) = 13 TeV.
    # Bug fix: the 3-parameter TF1 was mislabeled 'mjj4parfunction', which
    # both misled readers and collided with the real 4-par function in
    # ROOT's global name registry; the 5-par name is normalised to match.
    if nPar == 5:
        func = ROOT.TF1('mjj5parfunction','[0] * pow(1-(x/13e3), [1]) * pow((x/13e3), [2]+[3]*log(x/13e3)+[4]*pow(log(x/13e3), 2))', hmin, hmax); #5 par
    elif nPar == 4:
        func = ROOT.TF1('mjj4parfunction','[0] * pow(1-(x/13e3), [1]) * pow((x/13e3), [2]+[3]*log(x/13e3))', hmin, hmax) #4 par
    else:
        func = ROOT.TF1('mjj3parfunction','[0] * pow(1-(x/13e3), [1]) * pow((x/13e3), [2])', hmin, hmax) #3 par
    func.SetLineColor(2);
    # Reasonable starting values so MINUIT converges.
    func.SetParameter(0,0.000001)
    func.SetParameter(1,0.94)
    func.SetParameter(2,8.7)
    # Bug fix: parameter 3 was only initialised for nPar == 4, so a 5-par
    # fit started with an uninitialised third parameter.
    if nPar >= 4:
        func.SetParameter(3,0.46)
    if nPar == 5:
        func.SetParameter(4,0.)
    # Fit twice: the second pass starts from the first pass's minimum.
    hist.Fit(func,'NMR')
    hist.Fit(func,'NMR')
    if draw is True:
        func.Draw('same')
        c1.Update()
        c1.WaitPrimitive()
    pars=[]
    pars.append(func.GetParameter(0))
    pars.append(func.GetParameter(1))
    pars.append(func.GetParameter(2))
    # Bug fix: for nPar == 5 the original appended parameter 4 but silently
    # dropped parameter 3, returning only four of the five fitted values.
    if nPar >= 4:
        pars.append(func.GetParameter(3))
    if nPar == 5:
        pars.append(func.GetParameter(4))
    return pars
#******************************************
|
guescio/toolbox
|
searchPhaseTools.py
|
Python
|
gpl-2.0
| 2,268 | 0.019841 |
from setuptools import setup, find_packages
# Package metadata: exposes the Scrapy settings entry point for bgmapi.
setup(
    name='project',
    version='1.0',
    packages=find_packages(),
    entry_points={'scrapy': ['settings = bgmapi.settings']},
)
|
wattlebird/Bangumi_Spider
|
setup_bgmapi.py
|
Python
|
bsd-2-clause
| 207 | 0.057971 |
# Read both homework files and split each into a set of whitespace-separated words.
# NOTE(review): absolute Windows paths and default text encoding are hardcoded.
data = [set(open(i).read().split()) for i in ('C:\\Users\\Aliwka\\Desktop\\ДЗ-курсы\\Homework6\\first.txt', 'C:\\Users\\Aliwka\\Desktop\\ДЗ-курсы\\Homework6\\second.txt')]
# Words present in the first file but missing from the second.
diff = data[0].difference(data[1])
if diff:
    print(diff, 'слова которые есть в первом файле, но нет во втором')
# Print both word sets (the output labels are in Russian).
print(data[1],data[0],'слова из обоих файлов')
|
Torkvamedo/smx
|
Homework/lesson 6/second.py
|
Python
|
unlicense
| 414 | 0.01173 |
# encoding: utf-8
from django.apps import AppConfig
class ManagerConfig(AppConfig):
    """Django application configuration for the 'manager' app."""
    name = 'manager'
|
valdergallo/raidmanager
|
manager/apps.py
|
Python
|
mit
| 107 | 0 |
# I'm pretty sure this idea is going to end up being a lot of code so it's going in a separate file
#
# The idea is simple, don't create a new search tab but instead narrow down the library view as we type.
# Each letter typed should narrow down the library view instantly and without causing any interruption to the user.
#
# The way I'm planning on doing this is kind of convoluted but seems to be the best way to have the effect I want.
# The dataset that contains all songs will be static and instead we will make a dictionary tracking the "state" of
# each entry in the dataset, whether it's valid or invalid. Changes to the song name, artist name, or lyrics fields will
# create a new object that we will store in a stack of similar objects. These objects each contain the filter they are
# applying and the resulting dataset that filter creates (created empty). They also have a function that tells the
# object to recompute its filtered dataset based on the new dataset entry provided by the function call.
#
# The idea is that we would have a chain of successive filtering stages that each were being called/recomputed
# less and less as we go. E.g. the very first entry in the chain is going through every single entry in the original
# dataset and forwarding anything that matches to the next object in the filtering chain, which applies its filter to
# that entry and forwards to the next if it matches, until we reach the end of the chain.
#
# Theoretically adding another letter won't slow us down or cause us to have to recalculate everything before again,
# even if we haven't finished the previous filtering calculations. It should be seamless to the user as they type
# that the library view is being narrowed down.
#
# The very first letter will cause the library view to go completely blank as it starts from 0 and adds entries that
# match. Then as the user continues to type many of those entries will be removed from the library view.
#
# Sounds simple lets do it.
from PyQt4 import QtCore
from collections import namedtuple
class FilterOperationObject(object):
    """One stage of the live-filtering chain (skeleton).

    Holds the filter string typed for one field and the subset of the
    dataset that passed this stage; stages are meant to be linked so each
    one forwards matching entries to the next.
    """
    def __init__(self, filterstr, field):
        self.filterstr = filterstr  # substring the user typed
        self.field = field  # which field to match: "song", "artist" or "lyrics"
        self.filtereddataset = {}  # entries that passed this stage (not populated yet)
    def setupConnections(self, otherentry):
        # Single-linked-list-style connection here. This will be linked to the previous FilterOperationObject
        pass
    def filterEntry(self, entry):
        # Here we apply our filter to the entry and if it's good we add it to filtereddataset
        pass
# Immutable record for one library entry: (song title, artist name, lyrics text).
SearchResultItem = namedtuple("SearchResultItem", ["song", "artist", "lyrics"])
class ActiveFilteringFunctions(object):
    """Wires the search dialog's text inputs to the live-filtering chain.

    Every edit to the song/artist/lyrics inputs pushes a new
    FilterOperationObject onto filteropchain; updateFilters() (still a
    stub) is then expected to recompute the narrowed-down library view.
    """
    def __init__(self, searchDialog, lyricsdataset):
        self.lyricsdataset = lyricsdataset  # list of namedtuples identical to SearchResultItem (song, artist, lyrics)
        self.searchDialog = searchDialog
        self.filteropchain = []
        self.setupConnection()
        print("Active filtering ready. Dataset size is %s" % len(lyricsdataset))
    def setupConnection(self):
        # Route all three inputs through entryChanged, tagging which field changed.
        QtCore.QObject.connect(self.searchDialog.songNameInput, QtCore.SIGNAL("textChanged(QString)"), lambda qstr: self.entryChanged(qstr, "song"))
        QtCore.QObject.connect(self.searchDialog.artistNameInput, QtCore.SIGNAL("textChanged(QString)"), lambda qstr: self.entryChanged(qstr, "artist"))
        QtCore.QObject.connect(self.searchDialog.lyricsSearchStringInput, QtCore.SIGNAL("textChanged(QString)"), lambda qstr: self.entryChanged(qstr, "lyrics"))
    def entryChanged(self, newstr, field):
        # Bug fix: this was a Python 2 print *statement*, inconsistent with the
        # print() calls used elsewhere in this class; the call form works on both.
        print("EC: %s %s" % (newstr, field))
        newfilterop = FilterOperationObject(newstr, field)
        self.filteropchain.append(newfilterop)
        self.updateFilters()
    def updateFilters(self):
        # All *Changed functions call this after adding their new
        # FilterOperationObject to filteropchain.
        pass
    # Called when the very first filter object is created. We need to do things
    # differently with it since it iterates over the main library.
    def chainInit(self):
        pass
|
TheRealBanana/bLyrics
|
src/dialogs/logic/active_filtering_search.py
|
Python
|
gpl-2.0
| 4,055 | 0.008878 |
# -*- coding: utf-8 -*-
import time
import mock
import datetime
import unittest
from nose.tools import * # noqa
import httplib as http
import jwe
import jwt
import furl
import itsdangerous
from modularodm import storage, Q
from framework.auth import cas
from framework.auth import signing
from framework.auth.core import Auth
from framework.exceptions import HTTPError
from framework.sessions.model import Session
from framework.mongo import set_up_storage
from tests import factories
from website import settings
from website.files import models
from website.files.models.base import PROVIDER_MAP, StoredFileNode, TrashedFileNode
from website.project.model import MetaSchema, ensure_schemas
from website.util import api_url_for, rubeus
from website.project import new_private_link
from website.project.views.node import _view_project as serialize_node
from website.addons.base import AddonConfig, AddonNodeSettingsBase, views
from tests.base import OsfTestCase, get_default_metaschema
from tests.factories import AuthUserFactory, ProjectFactory
from website.addons.github.exceptions import ApiError
from website.addons.github.tests.factories import GitHubAccountFactory
class TestAddonConfig(unittest.TestCase):
    """Unit tests for the AddonConfig container and addon settings defaults."""

    def setUp(self):
        # Minimal config: node owner only, no categories.
        self.addon_config = AddonConfig(
            short_name='test', full_name='test', owners=['node'],
            added_to={'node': False}, categories=[],
            settings_model=AddonNodeSettingsBase,
        )

    def test_static_url_relative(self):
        # Relative asset paths are namespaced under the addon's static dir.
        url = self.addon_config._static_url('foo')
        assert_equal(
            url,
            '/static/addons/test/foo'
        )

    def test_deleted_defaults_to_false(self):
        class MyAddonSettings(AddonNodeSettingsBase):
            pass

        config = MyAddonSettings()
        assert_is(config.deleted, False)

    def test_static_url_absolute(self):
        # Absolute paths pass through untouched.
        url = self.addon_config._static_url('/foo')
        assert_equal(
            url,
            '/foo'
        )
class SetEnvironMiddleware(object):
    """WSGI middleware that injects fixed key/value pairs into each request's environ."""

    def __init__(self, app, **kwargs):
        self.app = app
        self.kwargs = kwargs

    def __call__(self, environ, start_response):
        for key, value in self.kwargs.items():
            environ[key] = value
        return self.app(environ, start_response)
class TestAddonAuth(OsfTestCase):
    """Tests for the waterbutler get_auth endpoint (JWE/JWT handshake)."""

    def setUp(self):
        super(TestAddonAuth, self).setUp()
        self.user = AuthUserFactory()
        self.auth_obj = Auth(user=self.user)
        self.node = ProjectFactory(creator=self.user)
        self.session = Session(data={'auth_user_id': self.user._id})
        self.session.save()
        self.cookie = itsdangerous.Signer(settings.SECRET_KEY).sign(self.session._id)
        self.configure_addon()
        self.JWE_KEY = jwe.kdf(settings.WATERBUTLER_JWE_SECRET.encode('utf-8'), settings.WATERBUTLER_JWE_SALT.encode('utf-8'))

    def configure_addon(self):
        # Attach a fully configured github addon to both the user and the node.
        self.user.add_addon('github')
        self.user_addon = self.user.get_addon('github')
        self.oauth_settings = GitHubAccountFactory(display_name='john')
        self.oauth_settings.save()
        self.user.external_accounts.append(self.oauth_settings)
        self.user.save()
        self.node.add_addon('github', self.auth_obj)
        self.node_addon = self.node.get_addon('github')
        self.node_addon.user = 'john'
        self.node_addon.repo = 'youre-my-best-friend'
        self.node_addon.user_settings = self.user_addon
        self.node_addon.external_account = self.oauth_settings
        self.node_addon.save()

    def build_url(self, **kwargs):
        # Build a get_auth URL whose payload is a JWT wrapped in JWE, as
        # waterbutler would send it; kwargs override the default claims.
        options = {'payload': jwe.encrypt(jwt.encode({'data': dict(dict(
            action='download',
            nid=self.node._id,
            provider=self.node_addon.config.short_name,
        ), **kwargs),
            'exp': datetime.datetime.utcnow() + datetime.timedelta(seconds=settings.WATERBUTLER_JWT_EXPIRATION),
        }, settings.WATERBUTLER_JWT_SECRET, algorithm=settings.WATERBUTLER_JWT_ALGORITHM), self.JWE_KEY)}
        return api_url_for('get_auth', **options)

    def test_auth_download(self):
        url = self.build_url()
        res = self.app.get(url, auth=self.user.auth)
        data = jwt.decode(jwe.decrypt(res.json['payload'].encode('utf-8'), self.JWE_KEY), settings.WATERBUTLER_JWT_SECRET, algorithm=settings.WATERBUTLER_JWT_ALGORITHM)['data']
        assert_equal(data['auth'], views.make_auth(self.user))
        assert_equal(data['credentials'], self.node_addon.serialize_waterbutler_credentials())
        assert_equal(data['settings'], self.node_addon.serialize_waterbutler_settings())
        expected_url = furl.furl(self.node.api_url_for('create_waterbutler_log', _absolute=True))
        observed_url = furl.furl(data['callback_url'])
        observed_url.port = expected_url.port
        assert_equal(expected_url, observed_url)

    def test_auth_missing_args(self):
        url = self.build_url(cookie=None)
        res = self.app.get(url, expect_errors=True)
        assert_equal(res.status_code, 401)

    # NOTE(review): this test sends the VALID cookie and expects 200, while
    # test_auth_cookie below sends a corrupted (reversed) cookie and expects
    # 401 — the two method names look swapped; confirm and rename.
    def test_auth_bad_cookie(self):
        url = self.build_url(cookie=self.cookie)
        res = self.app.get(url, expect_errors=True)
        assert_equal(res.status_code, 200)
        data = jwt.decode(jwe.decrypt(res.json['payload'].encode('utf-8'), self.JWE_KEY), settings.WATERBUTLER_JWT_SECRET, algorithm=settings.WATERBUTLER_JWT_ALGORITHM)['data']
        assert_equal(data['auth'], views.make_auth(self.user))
        assert_equal(data['credentials'], self.node_addon.serialize_waterbutler_credentials())
        assert_equal(data['settings'], self.node_addon.serialize_waterbutler_settings())
        expected_url = furl.furl(self.node.api_url_for('create_waterbutler_log', _absolute=True))
        observed_url = furl.furl(data['callback_url'])
        observed_url.port = expected_url.port
        assert_equal(expected_url, observed_url)

    def test_auth_cookie(self):
        url = self.build_url(cookie=self.cookie[::-1])
        res = self.app.get(url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_auth_missing_addon(self):
        url = self.build_url(provider='queenhub')
        res = self.app.get(url, expect_errors=True, auth=self.user.auth)
        assert_equal(res.status_code, 400)

    @mock.patch('website.addons.base.views.cas.get_client')
    def test_auth_bad_bearer_token(self, mock_cas_client):
        mock_cas_client.return_value = mock.Mock(profile=mock.Mock(return_value=cas.CasResponse(authenticated=False)))
        url = self.build_url()
        res = self.app.get(url, headers={'Authorization': 'Bearer invalid_access_token'}, expect_errors=True)
        assert_equal(res.status_code, 403)
class TestAddonLogs(OsfTestCase):
    """Tests for the create_waterbutler_log callback endpoint."""

    def setUp(self):
        super(TestAddonLogs, self).setUp()
        self.user = AuthUserFactory()
        self.auth_obj = Auth(user=self.user)
        self.node = ProjectFactory(creator=self.user)
        self.session = Session(data={'auth_user_id': self.user._id})
        self.session.save()
        self.cookie = itsdangerous.Signer(settings.SECRET_KEY).sign(self.session._id)
        self.configure_addon()

    def configure_addon(self):
        # Attach a fully configured github addon to both the user and the node.
        self.user.add_addon('github')
        self.user_addon = self.user.get_addon('github')
        self.oauth_settings = GitHubAccountFactory(display_name='john')
        self.oauth_settings.save()
        self.user.external_accounts.append(self.oauth_settings)
        self.user.save()
        self.node.add_addon('github', self.auth_obj)
        self.node_addon = self.node.get_addon('github')
        self.node_addon.user = 'john'
        self.node_addon.repo = 'youre-my-best-friend'
        self.node_addon.user_settings = self.user_addon
        self.node_addon.external_account = self.oauth_settings
        self.node_addon.save()

    def build_payload(self, metadata, **kwargs):
        """Build a signed waterbutler-log payload; kwargs override the defaults,
        and keys explicitly set to None are stripped so tests can omit fields."""
        options = dict(
            auth={'id': self.user._id},
            action='create',
            provider=self.node_addon.config.short_name,
            metadata=metadata,
            time=time.time() + 1000,
        )
        options.update(kwargs)
        options = {
            key: value
            for key, value in options.iteritems()
            if value is not None
        }
        message, signature = signing.default_signer.sign_payload(options)
        return {
            'payload': message,
            'signature': signature,
        }

    @mock.patch('website.notifications.events.files.FileAdded.perform')
    def test_add_log(self, mock_perform):
        path = 'pizza'
        url = self.node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(metadata={'path': path})
        nlogs = len(self.node.logs)
        self.app.put_json(url, payload, headers={'Content-Type': 'application/json'})
        self.node.reload()
        assert_equal(len(self.node.logs), nlogs + 1)
        # # Mocking form_message and perform so that the payload need not be exact.
        # assert_true(mock_form_message.called, "form_message not called")
        assert_true(mock_perform.called, "perform not called")

    def test_add_log_missing_args(self):
        path = 'pizza'
        url = self.node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(metadata={'path': path}, auth=None)
        nlogs = len(self.node.logs)
        res = self.app.put_json(
            url,
            payload,
            headers={'Content-Type': 'application/json'},
            expect_errors=True,
        )
        assert_equal(res.status_code, 400)
        self.node.reload()
        assert_equal(len(self.node.logs), nlogs)

    def test_add_log_no_user(self):
        path = 'pizza'
        url = self.node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(metadata={'path': path}, auth={'id': None})
        nlogs = len(self.node.logs)
        res = self.app.put_json(
            url,
            payload,
            headers={'Content-Type': 'application/json'},
            expect_errors=True,
        )
        assert_equal(res.status_code, 400)
        self.node.reload()
        assert_equal(len(self.node.logs), nlogs)

    def test_add_log_no_addon(self):
        path = 'pizza'
        node = ProjectFactory(creator=self.user)
        url = node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(metadata={'path': path})
        nlogs = len(node.logs)
        res = self.app.put_json(
            url,
            payload,
            headers={'Content-Type': 'application/json'},
            expect_errors=True,
        )
        assert_equal(res.status_code, 400)
        # Bug fix: the original reloaded self.node but asserted on the local
        # `node`, so the log-count assertion ran against stale data.
        node.reload()
        assert_equal(len(node.logs), nlogs)

    def test_add_log_bad_action(self):
        path = 'pizza'
        url = self.node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(metadata={'path': path}, action='dance')
        nlogs = len(self.node.logs)
        res = self.app.put_json(
            url,
            payload,
            headers={'Content-Type': 'application/json'},
            expect_errors=True,
        )
        assert_equal(res.status_code, 400)
        self.node.reload()
        assert_equal(len(self.node.logs), nlogs)

    def test_action_file_rename(self):
        url = self.node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(
            action='rename',
            metadata={
                'path': 'foo',
            },
            source={
                'materialized': 'foo',
                'provider': 'github',
                'node': {'_id': self.node._id},
                'name': 'new.txt',
                'kind': 'file',
            },
            destination={
                'path': 'foo',
                'materialized': 'foo',
                'provider': 'github',
                'node': {'_id': self.node._id},
                'name': 'old.txt',
                'kind': 'file',
            },
        )
        self.app.put_json(
            url,
            payload,
            headers={'Content-Type': 'application/json'}
        )
        self.node.reload()
        assert_equal(
            self.node.logs[-1].action,
            'github_addon_file_renamed',
        )
class TestCheckAuth(OsfTestCase):
    """Tests for views.check_access permission gating."""

    def setUp(self):
        super(TestCheckAuth, self).setUp()
        self.user = AuthUserFactory()
        self.node = ProjectFactory(creator=self.user)

    def test_has_permission(self):
        res = views.check_access(self.node, Auth(user=self.user), 'upload', None)
        assert_true(res)

    def test_not_has_permission_read_public(self):
        self.node.is_public = True
        self.node.save()
        res = views.check_access(self.node, Auth(), 'download', None)
        # Bug fix: the result was computed but never asserted, so this test
        # could not fail.
        assert_true(res)

    def test_not_has_permission_read_has_link(self):
        link = new_private_link('red-special', self.user, [self.node], anonymous=False)
        res = views.check_access(self.node, Auth(private_key=link.key), 'download', None)
        # Bug fix: the result was computed but never asserted, so this test
        # could not fail.
        assert_true(res)

    def test_not_has_permission_logged_in(self):
        user2 = AuthUserFactory()
        with assert_raises(HTTPError) as exc_info:
            views.check_access(self.node, Auth(user=user2), 'download', None)
        assert_equal(exc_info.exception.code, 403)

    def test_not_has_permission_not_logged_in(self):
        with assert_raises(HTTPError) as exc_info:
            views.check_access(self.node, Auth(), 'download', None)
        assert_equal(exc_info.exception.code, 401)

    def test_has_permission_on_parent_node_copyto_pass_if_registration(self):
        component_admin = AuthUserFactory()
        component = ProjectFactory(creator=component_admin, parent=self.node)
        component.is_registration = True

        assert_false(component.has_permission(self.user, 'write'))
        res = views.check_access(component, Auth(user=self.user), 'copyto', None)
        assert_true(res)

    def test_has_permission_on_parent_node_copyto_fail_if_not_registration(self):
        component_admin = AuthUserFactory()
        component = ProjectFactory(creator=component_admin, parent=self.node)

        assert_false(component.has_permission(self.user, 'write'))
        with assert_raises(HTTPError):
            views.check_access(component, Auth(user=self.user), 'copyto', None)

    def test_has_permission_on_parent_node_copyfrom(self):
        component_admin = AuthUserFactory()
        component = ProjectFactory(creator=component_admin, is_public=False, parent=self.node)

        assert_false(component.has_permission(self.user, 'write'))
        res = views.check_access(component, Auth(user=self.user), 'copyfrom', None)
        assert_true(res)
class TestCheckPreregAuth(OsfTestCase):
    """Tests that Prereg Challenge admins get download-only access to
    draft-registration nodes (and nothing more)."""

    def setUp(self):
        super(TestCheckPreregAuth, self).setUp()
        ensure_schemas()
        # A user flagged as a prereg admin via the system tag.
        self.prereg_challenge_admin_user = AuthUserFactory()
        self.prereg_challenge_admin_user.system_tags.append(settings.PREREG_ADMIN_TAG)
        self.prereg_challenge_admin_user.save()
        prereg_schema = MetaSchema.find_one(
            Q('name', 'eq', 'Prereg Challenge') &
            Q('schema_version', 'eq', 2)
        )

        self.user = AuthUserFactory()
        self.node = factories.ProjectFactory(creator=self.user)

        self.parent = factories.ProjectFactory()
        self.child = factories.NodeFactory(parent=self.parent)

        self.draft_registration = factories.DraftRegistrationFactory(
            initiator=self.user,
            registration_schema=prereg_schema,
            branched_from=self.parent
        )

    def test_has_permission_download_prereg_challenge_admin(self):
        res = views.check_access(self.draft_registration.branched_from,
            Auth(user=self.prereg_challenge_admin_user), 'download', None)
        assert_true(res)

    def test_has_permission_download_on_component_prereg_challenge_admin(self):
        # Access extends to components of the drafted project.
        try:
            res = views.check_access(self.draft_registration.branched_from.nodes[0],
                Auth(user=self.prereg_challenge_admin_user), 'download', None)
        except Exception:
            self.fail()
        assert_true(res)

    def test_has_permission_download_not_prereg_challenge_admin(self):
        new_user = AuthUserFactory()
        with assert_raises(HTTPError) as exc_info:
            views.check_access(self.draft_registration.branched_from,
                Auth(user=new_user), 'download', None)

            assert_equal(exc_info.exception.code, http.FORBIDDEN)

    def test_has_permission_download_prereg_challenge_admin_not_draft(self):
        # A node that is not part of a draft registration stays forbidden.
        with assert_raises(HTTPError) as exc_info:
            views.check_access(self.node,
                Auth(user=self.prereg_challenge_admin_user), 'download', None)

            assert_equal(exc_info.exception.code, http.FORBIDDEN)

    def test_has_permission_write_prereg_challenge_admin(self):
        # Write access is never granted through the prereg-admin path.
        with assert_raises(HTTPError) as exc_info:
            views.check_access(self.draft_registration.branched_from,
                Auth(user=self.prereg_challenge_admin_user), 'write', None)

            assert_equal(exc_info.exception.code, http.FORBIDDEN)
class TestCheckOAuth(OsfTestCase):
    """Tests for OAuth access-token scope enforcement in views.check_access."""

    def setUp(self):
        super(TestCheckOAuth, self).setUp()
        self.user = AuthUserFactory()
        self.node = ProjectFactory(creator=self.user)

    def test_has_permission_private_not_authenticated(self):
        """An unauthenticated CAS response is rejected with 403."""
        admin = AuthUserFactory()
        child = ProjectFactory(creator=admin, is_public=False, parent=self.node)
        cas_response = cas.CasResponse(authenticated=False)

        assert_false(child.has_permission(self.user, 'write'))
        with assert_raises(HTTPError) as err:
            views.check_access(child, Auth(user=self.user), 'download', cas_response)
        assert_equal(err.exception.code, 403)

    def test_has_permission_private_no_scope_forbidden(self):
        """A token with an empty scope set cannot download from a private node."""
        admin = AuthUserFactory()
        child = ProjectFactory(creator=admin, is_public=False, parent=self.node)
        cas_response = cas.CasResponse(
            authenticated=True,
            status=None,
            user=self.user._id,
            attributes={'accessTokenScope': {}},
        )

        assert_false(child.has_permission(self.user, 'write'))
        with assert_raises(HTTPError) as err:
            views.check_access(child, Auth(user=self.user), 'download', cas_response)
        assert_equal(err.exception.code, 403)

    def test_has_permission_public_irrelevant_scope_allowed(self):
        """A public node is downloadable even when the token scope is unrelated."""
        admin = AuthUserFactory()
        child = ProjectFactory(creator=admin, is_public=True, parent=self.node)
        cas_response = cas.CasResponse(
            authenticated=True,
            status=None,
            user=self.user._id,
            attributes={'accessTokenScope': {'osf.users.all_read'}},
        )

        assert_false(child.has_permission(self.user, 'write'))
        assert_true(views.check_access(child, Auth(user=self.user), 'download', cas_response))

    def test_has_permission_private_irrelevant_scope_forbidden(self):
        """An unrelated scope does not unlock a private node."""
        admin = AuthUserFactory()
        child = ProjectFactory(creator=admin, is_public=False, parent=self.node)
        cas_response = cas.CasResponse(
            authenticated=True,
            status=None,
            user=self.user._id,
            attributes={'accessTokenScope': {'osf.users.all_read'}},
        )

        assert_false(child.has_permission(self.user, 'write'))
        with assert_raises(HTTPError) as err:
            views.check_access(child, Auth(user=self.user), 'download', cas_response)
        assert_equal(err.exception.code, 403)

    def test_has_permission_decommissioned_scope_no_error(self):
        """Unknown (decommissioned) scope names are skipped rather than raising."""
        admin = AuthUserFactory()
        child = ProjectFactory(creator=admin, is_public=False, parent=self.node)
        cas_response = cas.CasResponse(
            authenticated=True,
            status=None,
            user=self.user._id,
            attributes={'accessTokenScope': {
                'decommissioned.scope+write',
                'osf.nodes.data_read',
            }},
        )

        assert_false(child.has_permission(self.user, 'write'))
        assert_true(views.check_access(child, Auth(user=self.user), 'download', cas_response))

    def test_has_permission_write_scope_read_action(self):
        """A data_write scope also authorizes read actions like download."""
        admin = AuthUserFactory()
        child = ProjectFactory(creator=admin, is_public=False, parent=self.node)
        cas_response = cas.CasResponse(
            authenticated=True,
            status=None,
            user=self.user._id,
            attributes={'accessTokenScope': {'osf.nodes.data_write'}},
        )

        assert_false(child.has_permission(self.user, 'write'))
        assert_true(views.check_access(child, Auth(user=self.user), 'download', cas_response))

    def test_has_permission_read_scope_write_action_forbidden(self):
        """A read-only scope cannot authorize an upload, even for a contributor."""
        child = ProjectFactory(creator=self.user, is_public=False, parent=self.node)
        cas_response = cas.CasResponse(
            authenticated=True,
            status=None,
            user=self.user._id,
            attributes={'accessTokenScope': {'osf.nodes.data_read'}},
        )

        assert_true(child.has_permission(self.user, 'write'))
        with assert_raises(HTTPError) as err:
            views.check_access(child, Auth(user=self.user), 'upload', cas_response)
        assert_equal(err.exception.code, 403)
def assert_urls_equal(url1, url2):
    """Assert two URLs are equivalent, ignoring scheme/host/port and query-param order."""
    parsed_one = furl.furl(url1)
    parsed_two = furl.furl(url2)
    for parsed in (parsed_one, parsed_two):
        for attr in ('scheme', 'host', 'port'):
            setattr(parsed, attr, None)
    # furl query params are ordered, which would make direct comparison flaky:
    # compare them as dicts first, then compare the param-less URLs.
    assert_equal(dict(parsed_one.args), dict(parsed_two.args))
    parsed_one.args = {}
    parsed_two.args = {}
    assert_equal(parsed_one, parsed_two)
class TestFileNode(models.FileNode):
    """Stub FileNode standing in for a real storage-provider model in tests."""

    provider = 'test_addons'

    def touch(self, bearer, version=None, revision=None, **kwargs):
        """Return the requested stored version, or a fresh FileVersion when none is requested."""
        if not version:
            return models.FileVersion()
        if not self.versions:
            return None
        try:
            # version identifiers are 1-based
            return self.versions[int(version) - 1]
        except (IndexError, ValueError):
            # Out-of-range or non-numeric version identifiers resolve to nothing.
            return None
class TestFile(TestFileNode, models.File):
    """Concrete stub file type combining TestFileNode behaviour with models.File."""
    pass
class TestFolder(TestFileNode, models.Folder):
    """Concrete stub folder type combining TestFileNode behaviour with models.Folder."""
    pass
@mock.patch('website.addons.github.model.GitHubClient.repo', mock.Mock(side_effect=ApiError))
class TestAddonFileViews(OsfTestCase):
    """Tests for the addon file view/download/delete views, using the TestFile/TestFolder
    stubs in place of the real github models (swapped in via PROVIDER_MAP in setUpClass).
    The class-level mock makes every GitHubClient.repo call raise ApiError.
    """

    @classmethod
    def setUpClass(cls):
        super(TestAddonFileViews, cls).setUpClass()
        # Route the 'github' provider at the stub models for this class's lifetime.
        PROVIDER_MAP['github'] = [TestFolder, TestFile, TestFileNode]
        TestFileNode.provider = 'github'

    def setUp(self):
        super(TestAddonFileViews, self).setUp()
        self.user = AuthUserFactory()
        self.project = ProjectFactory(creator=self.user)

        self.user.add_addon('github')
        self.project.add_addon('github', auth=Auth(self.user))

        self.user_addon = self.user.get_addon('github')
        self.node_addon = self.project.get_addon('github')
        self.oauth = GitHubAccountFactory()
        self.oauth.save()

        self.user.external_accounts.append(self.oauth)
        self.user.save()

        # Wire the node addon to the user's credentials and a dummy repo.
        self.node_addon.user_settings = self.user_addon
        self.node_addon.external_account = self.oauth
        self.node_addon.repo = 'Truth'
        self.node_addon.user = 'E'
        self.node_addon.save()

    @classmethod
    def tearDownClass(cls):
        super(TestAddonFileViews, cls).tearDownClass()
        # Restore the real github models and clean up the global state we touched.
        PROVIDER_MAP['github'] = [models.GithubFolder, models.GithubFile, models.GithubFileNode]
        del PROVIDER_MAP['test_addons']
        TrashedFileNode.remove()

    def get_test_file(self):
        # Helper: a saved TestFile on this project with a single version.
        version = models.FileVersion(identifier='1')
        version.save()
        versions = [version]
        ret = TestFile(
            name='Test',
            node=self.project,
            path='/test/Test',
            materialized_path='/test/Test',
            versions=versions
        )
        ret.save()
        return ret

    def get_second_test_file(self):
        # Helper: a second, distinct saved TestFile.
        version = models.FileVersion(identifier='1')
        version.save()
        ret = TestFile(
            name='Test2',
            node=self.project,
            path='/test/Test2',
            materialized_path='/test/Test2',
            versions=[version]
        )
        ret.save()
        return ret

    def get_mako_return(self):
        # Minimal context dict shaped like what addon_view_file must return
        # so the file-view mako template can render.
        ret = serialize_node(self.project, Auth(self.user), primary=True)
        ret.update({
            'error': '',
            'provider': '',
            'file_path': '',
            'sharejs_uuid': '',
            'private': '',
            'urls': {
                'files': '',
                'render': '',
                'sharejs': '',
                'mfr': '',
                'gravatar': '',
                'external': '',
                'archived_from': '',
            },
            'size': '',
            'extra': '',
            'file_name': '',
            'materialized_path': '',
            'file_id': '',
        })
        ret.update(rubeus.collect_addon_assets(self.project))
        return ret

    def test_redirects_to_guid(self):
        # Path-based URLs redirect (302) to the file's GUID URL.
        file_node = self.get_test_file()
        guid = file_node.get_guid(create=True)
        resp = self.app.get(
            self.project.web_url_for(
                'addon_view_or_download_file',
                path=file_node.path.strip('/'),
                provider='github'
            ),
            auth=self.user.auth
        )
        assert_equals(resp.status_code, 302)
        assert_equals(resp.location, 'http://localhost:80/{}/'.format(guid._id))

    def test_action_download_redirects_to_download(self):
        file_node = self.get_test_file()
        guid = file_node.get_guid(create=True)
        resp = self.app.get('/{}/?action=download'.format(guid._id), auth=self.user.auth)
        assert_equals(resp.status_code, 302)
        location = furl.furl(resp.location)
        assert_urls_equal(location.url, file_node.generate_waterbutler_url(action='download', direct=None, version=None))

    def test_action_download_redirects_to_download_with_version(self):
        file_node = self.get_test_file()
        guid = file_node.get_guid(create=True)
        resp = self.app.get('/{}/?action=download&revision=1'.format(guid._id), auth=self.user.auth)
        assert_equals(resp.status_code, 302)
        location = furl.furl(resp.location)
        # Note: version is added by us, but all other url params are forwarded as well
        assert_urls_equal(location.url, file_node.generate_waterbutler_url(action='download', direct=None, revision=1, version=None))

    @mock.patch('website.addons.base.views.addon_view_file')
    def test_action_view_calls_view_file(self, mock_view_file):
        self.user.reload()
        self.project.reload()
        file_node = self.get_test_file()
        guid = file_node.get_guid(create=True)
        mock_view_file.return_value = self.get_mako_return()
        self.app.get('/{}/?action=view'.format(guid._id), auth=self.user.auth)
        # addon_view_file(auth, node, file_node, version) is called positionally.
        args, kwargs = mock_view_file.call_args
        assert_equals(kwargs, {})
        assert_equals(args[0].user._id, self.user._id)
        assert_equals(args[1], self.project)
        assert_equals(args[2], file_node)
        assert_true(isinstance(args[3], file_node.touch(None).__class__))

    @mock.patch('website.addons.base.views.addon_view_file')
    def test_no_action_calls_view_file(self, mock_view_file):
        # With no ?action= param, viewing is the default behaviour.
        self.user.reload()
        self.project.reload()
        file_node = self.get_test_file()
        guid = file_node.get_guid(create=True)
        mock_view_file.return_value = self.get_mako_return()
        self.app.get('/{}/'.format(guid._id), auth=self.user.auth)
        args, kwargs = mock_view_file.call_args
        assert_equals(kwargs, {})
        assert_equals(args[0].user._id, self.user._id)
        assert_equals(args[1], self.project)
        assert_equals(args[2], file_node)
        assert_true(isinstance(args[3], file_node.touch(None).__class__))

    def test_download_create_guid(self):
        # Hitting the view creates a GUID for a file that does not have one yet.
        file_node = self.get_test_file()
        assert_is(file_node.get_guid(), None)
        self.app.get(
            self.project.web_url_for(
                'addon_view_or_download_file',
                path=file_node.path.strip('/'),
                provider='github',
            ),
            auth=self.user.auth
        )
        assert_true(file_node.get_guid())

    def test_view_file_does_not_delete_file_when_requesting_invalid_version(self):
        with mock.patch('website.addons.github.model.GitHubNodeSettings.is_private',
                        new_callable=mock.PropertyMock) as mock_is_private:
            mock_is_private.return_value = False
            file_node = self.get_test_file()
            assert_is(file_node.get_guid(), None)
            url = self.project.web_url_for(
                'addon_view_or_download_file',
                path=file_node.path.strip('/'),
                provider='github',
            )
            # First view generates the GUID
            self.app.get(url, auth=self.user.auth)
            self.app.get(url + '?version=invalid', auth=self.user.auth, expect_errors=True)
            # The stored record must survive the bad request; nothing gets trashed.
            assert_is_not_none(StoredFileNode.load(file_node._id))
            assert_is_none(TrashedFileNode.load(file_node._id))

    def test_unauthorized_addons_raise(self):
        # NOTE(review): duplicate of test_unauth_addons_raise below — consider removing one.
        path = 'cloudfiles'
        self.node_addon.user_settings = None
        self.node_addon.save()
        resp = self.app.get(
            self.project.web_url_for(
                'addon_view_or_download_file',
                path=path,
                provider='github',
                action='download'
            ),
            auth=self.user.auth,
            expect_errors=True
        )
        assert_equals(resp.status_code, 401)

    def test_nonstorage_addons_raise(self):
        # Addons that are not storage providers (e.g. wiki) yield 400.
        resp = self.app.get(
            self.project.web_url_for(
                'addon_view_or_download_file',
                path='sillywiki',
                provider='wiki',
                action='download'
            ),
            auth=self.user.auth,
            expect_errors=True
        )
        assert_equals(resp.status_code, 400)

    def test_head_returns_url(self):
        file_node = self.get_test_file()
        guid = file_node.get_guid(create=True)
        resp = self.app.head('/{}/'.format(guid._id), auth=self.user.auth)
        location = furl.furl(resp.location)
        assert_urls_equal(location.url, file_node.generate_waterbutler_url(direct=None, version=None))

    def test_head_returns_url_with_version(self):
        file_node = self.get_test_file()
        guid = file_node.get_guid(create=True)
        resp = self.app.head('/{}/?revision=1&foo=bar'.format(guid._id), auth=self.user.auth)
        location = furl.furl(resp.location)
        # Note: version is added by us, but all other url params are forwarded as well
        assert_urls_equal(location.url, file_node.generate_waterbutler_url(direct=None, revision=1, version=None, foo='bar'))

    def test_nonexistent_addons_raise(self):
        # Requesting a provider the node no longer has configured yields 400.
        path = 'cloudfiles'
        self.project.delete_addon('github', Auth(self.user))
        self.project.save()
        resp = self.app.get(
            self.project.web_url_for(
                'addon_view_or_download_file',
                path=path,
                provider='github',
                action='download'
            ),
            auth=self.user.auth,
            expect_errors=True
        )
        assert_equals(resp.status_code, 400)

    def test_unauth_addons_raise(self):
        # An addon with no linked user settings yields 401.
        path = 'cloudfiles'
        self.node_addon.user_settings = None
        self.node_addon.save()
        resp = self.app.get(
            self.project.web_url_for(
                'addon_view_or_download_file',
                path=path,
                provider='github',
                action='download'
            ),
            auth=self.user.auth,
            expect_errors=True
        )
        assert_equals(resp.status_code, 401)

    def test_delete_action_creates_trashed_file_node(self):
        file_node = self.get_test_file()
        payload = {
            'provider': file_node.provider,
            'metadata': {
                'path': '/test/Test',
                'materialized': '/test/Test'
            }
        }
        views.addon_delete_file_node(self=None, node=self.project, user=self.user, event_type='file_removed', payload=payload)
        # The stored record is gone and replaced by a trashed one.
        assert_false(StoredFileNode.load(file_node._id))
        assert_true(TrashedFileNode.load(file_node._id))

    def test_delete_action_for_folder_deletes_subfolders_and_creates_trashed_file_nodes(self):
        file_node = self.get_test_file()
        subfolder = TestFolder(
            name='folder',
            node=self.project,
            path='/test/folder/',
            materialized_path='/test/folder/',
            versions=[]
        )
        subfolder.save()
        payload = {
            'provider': file_node.provider,
            'metadata': {
                'path': '/test/',
                'materialized': '/test/'
            }
        }
        views.addon_delete_file_node(self=None, node=self.project, user=self.user, event_type='file_removed', payload=payload)
        # Deleting the parent folder removes its contents recursively.
        assert_false(StoredFileNode.load(file_node._id))
        assert_true(TrashedFileNode.load(file_node._id))
        assert_false(StoredFileNode.load(subfolder._id))

    @mock.patch('website.archiver.tasks.archive')
    def test_archived_from_url(self, mock_archive):
        file_node = self.get_test_file()
        second_file_node = self.get_second_test_file()
        file_node.copied_from = second_file_node
        registered_node = self.project.register_node(
            schema=get_default_metaschema(),
            auth=Auth(self.user),
            data=None,
        )
        archived_from_url = views.get_archived_from_url(registered_node, file_node)
        view_url = self.project.web_url_for('addon_view_or_download_file', provider=file_node.provider, path=file_node.copied_from._id)
        assert_true(archived_from_url)
        assert_urls_equal(archived_from_url, view_url)

    @mock.patch('website.archiver.tasks.archive')
    def test_archived_from_url_without_copied_from(self, mock_archive):
        # Without a copied_from link there is no archived-from URL.
        file_node = self.get_test_file()
        registered_node = self.project.register_node(
            schema=get_default_metaschema(),
            auth=Auth(self.user),
            data=None,
        )
        archived_from_url = views.get_archived_from_url(registered_node, file_node)
        assert_false(archived_from_url)

    @mock.patch('website.archiver.tasks.archive')
    def test_copied_from_id_trashed(self, mock_archive):
        # Deleting the source of a copied_from link clears that link on the trashed node.
        file_node = self.get_test_file()
        second_file_node = self.get_second_test_file()
        file_node.copied_from = second_file_node
        self.project.register_node(
            schema=get_default_metaschema(),
            auth=Auth(self.user),
            data=None,
        )
        trashed_node = second_file_node.delete()
        assert_false(trashed_node.copied_from)
class TestLegacyViews(OsfTestCase):
    """Legacy file-URL routes must 301-redirect to the unified
    addon_view_or_download_file route.
    """

    def setUp(self):
        super(TestLegacyViews, self).setUp()
        self.path = 'mercury.png'
        self.user = AuthUserFactory()
        self.project = ProjectFactory(creator=self.user)
        self.node_addon = self.project.get_addon('osfstorage')
        # Create a real osfstorage record so legacy name-based paths can be
        # resolved to the record's _id (the canonical path).
        file_record = self.node_addon.get_root().append_file(self.path)
        self.expected_path = file_record._id
        self.node_addon.save()
        file_record.save()

    def test_view_file_redirect(self):
        url = '/{0}/osffiles/{1}/'.format(self.project._id, self.path)
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 301)
        expected_url = self.project.web_url_for(
            'addon_view_or_download_file',
            action='view',
            path=self.expected_path,
            provider='osfstorage',
        )
        assert_urls_equal(res.location, expected_url)

    def test_download_file_redirect(self):
        url = '/{0}/osffiles/{1}/download/'.format(self.project._id, self.path)
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 301)
        expected_url = self.project.web_url_for(
            'addon_view_or_download_file',
            path=self.expected_path,
            action='download',
            provider='osfstorage',
        )
        assert_urls_equal(res.location, expected_url)

    def test_download_file_version_redirect(self):
        url = '/{0}/osffiles/{1}/version/3/download/'.format(
            self.project._id,
            self.path,
        )
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 301)
        expected_url = self.project.web_url_for(
            'addon_view_or_download_file',
            version=3,
            path=self.expected_path,
            action='download',
            provider='osfstorage',
        )
        assert_urls_equal(res.location, expected_url)

    def test_api_download_file_redirect(self):
        url = '/api/v1/project/{0}/osffiles/{1}/'.format(self.project._id, self.path)
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 301)
        expected_url = self.project.web_url_for(
            'addon_view_or_download_file',
            path=self.expected_path,
            action='download',
            provider='osfstorage',
        )
        assert_urls_equal(res.location, expected_url)

    def test_api_download_file_version_redirect(self):
        url = '/api/v1/project/{0}/osffiles/{1}/version/3/'.format(
            self.project._id,
            self.path,
        )
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 301)
        expected_url = self.project.web_url_for(
            'addon_view_or_download_file',
            version=3,
            path=self.expected_path,
            action='download',
            provider='osfstorage',
        )
        assert_urls_equal(res.location, expected_url)

    def test_no_provider_name(self):
        # Bare /files/ URLs default to the osfstorage provider.
        url = '/{0}/files/{1}'.format(
            self.project._id,
            self.path,
        )
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 301)
        expected_url = self.project.web_url_for(
            'addon_view_or_download_file',
            action='view',
            path=self.expected_path,
            provider='osfstorage',
        )
        assert_urls_equal(res.location, expected_url)

    def test_action_as_param(self):
        # The ?action= query param survives the redirect as a kwarg.
        url = '/{}/osfstorage/files/{}/?action=download'.format(
            self.project._id,
            self.path,
        )
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 301)
        expected_url = self.project.web_url_for(
            'addon_view_or_download_file',
            path=self.expected_path,
            action='download',
            provider='osfstorage',
        )
        assert_urls_equal(res.location, expected_url)

    def test_other_addon_redirect(self):
        # Non-osfstorage addons keep the original name-based path (no _id lookup).
        url = '/project/{0}/mycooladdon/files/{1}/'.format(
            self.project._id,
            self.path,
        )
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 301)
        expected_url = self.project.web_url_for(
            'addon_view_or_download_file',
            action='view',
            path=self.path,
            provider='mycooladdon',
        )
        assert_urls_equal(res.location, expected_url)

    def test_other_addon_redirect_download(self):
        url = '/project/{0}/mycooladdon/files/{1}/download/'.format(
            self.project._id,
            self.path,
        )
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 301)
        expected_url = self.project.web_url_for(
            'addon_view_or_download_file',
            path=self.path,
            action='download',
            provider='mycooladdon',
        )
        assert_urls_equal(res.location, expected_url)
|
wearpants/osf.io
|
tests/test_addons.py
|
Python
|
apache-2.0
| 40,457 | 0.002027 |
# gizela
#
# Copyright (C) 2010 Michal Seidl, Tomas Kubin
# Author: Tomas Kubin <tomas.kubin@fsv.cvut.cz>
# URL: <http://slon.fsv.cvut.cz/gizela>
#
# $Id$
"""
module with functions for
gama-data-obs.py and gama-data-adj.py scripts
"""
import sys
def read_configuration_file(configFile, localSystem2D, localSystem3D):
"""
reads configuration file
returns: configuration dictionary
localSystem
"""
configDict = []
localSystem = None
if configFile is not None:
from gizela.util.parse_config_file import parse_config_file
try:
configDict = parse_config_file(configFile)
except Exception, e:
print >>sys.stderr, \
"Parsing of configuration file '%s' failed." % configFile
print >>sys.stderr, e
sys.exit(1)
if localSystem2D:
if "localSystem2D" not in configDict:
print >>sys.stderr, \
"No localSystem2D section in config file %s" % configFile
sys.exit(1)
else:
from gizela.util.CoordSystemLocal2D import CoordSystemLocal2D
localSystem = CoordSystemLocal2D()
localSystem.parse_config_dict(configDict)
if localSystem3D:
if "localSystem3D" not in configDict:
print >>sys.stderr, \
"No localSystem3D section in config file %s" % configFile
sys.exit(1)
else:
from gizela.util.CoordSystemLocal3D import CoordSystemLocal3D
localSystem = CoordSystemLocal3D()
localSystem.parse_config_dict(configDict)
return configDict, localSystem
|
gizela/gizela
|
gizela/util/gama_data_fun.py
|
Python
|
gpl-3.0
| 1,739 | 0.00575 |
# coding: utf-8
"""
Trakerr API
Get your application events and errors to Trakerr via the *Trakerr API*.
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class AppEvent(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, api_key=None, classification=None, event_type=None, event_message=None, event_time=None, event_stacktrace=None, event_user=None, event_session=None, context_app_version=None, context_env_name=None, context_env_version=None, context_env_hostname=None, context_app_browser=None, context_app_browser_version=None, context_app_os=None, context_app_os_version=None, context_data_center=None, context_data_center_region=None, custom_properties=None, custom_segments=None):
    """
    AppEvent - a model defined in Swagger

    NOTE: this class is generated by swagger-codegen; change the generator
    templates / spec rather than editing by hand.

    :param dict swaggerTypes: The key is attribute name
                              and the value is attribute type.
    :param dict attributeMap: The key is attribute name
                              and the value is json key in definition.
    """
    # Type registry consumed by the generated ApiClient for (de)serialization.
    self.swagger_types = {
        'api_key': 'str',
        'classification': 'str',
        'event_type': 'str',
        'event_message': 'str',
        'event_time': 'int',
        'event_stacktrace': 'Stacktrace',
        'event_user': 'str',
        'event_session': 'str',
        'context_app_version': 'str',
        'context_env_name': 'str',
        'context_env_version': 'str',
        'context_env_hostname': 'str',
        'context_app_browser': 'str',
        'context_app_browser_version': 'str',
        'context_app_os': 'str',
        'context_app_os_version': 'str',
        'context_data_center': 'str',
        'context_data_center_region': 'str',
        'custom_properties': 'CustomData',
        'custom_segments': 'CustomData'
    }

    # Maps python attribute names to their JSON keys in the wire format.
    self.attribute_map = {
        'api_key': 'apiKey',
        'classification': 'classification',
        'event_type': 'eventType',
        'event_message': 'eventMessage',
        'event_time': 'eventTime',
        'event_stacktrace': 'eventStacktrace',
        'event_user': 'eventUser',
        'event_session': 'eventSession',
        'context_app_version': 'contextAppVersion',
        'context_env_name': 'contextEnvName',
        'context_env_version': 'contextEnvVersion',
        'context_env_hostname': 'contextEnvHostname',
        'context_app_browser': 'contextAppBrowser',
        'context_app_browser_version': 'contextAppBrowserVersion',
        'context_app_os': 'contextAppOS',
        'context_app_os_version': 'contextAppOSVersion',
        'context_data_center': 'contextDataCenter',
        'context_data_center_region': 'contextDataCenterRegion',
        'custom_properties': 'customProperties',
        'custom_segments': 'customSegments'
    }

    # Assign the private backing fields directly; the property setters do no
    # validation, so this is equivalent to going through them.
    self._api_key = api_key
    self._classification = classification
    self._event_type = event_type
    self._event_message = event_message
    self._event_time = event_time
    self._event_stacktrace = event_stacktrace
    self._event_user = event_user
    self._event_session = event_session
    self._context_app_version = context_app_version
    self._context_env_name = context_env_name
    self._context_env_version = context_env_version
    self._context_env_hostname = context_env_hostname
    self._context_app_browser = context_app_browser
    self._context_app_browser_version = context_app_browser_version
    self._context_app_os = context_app_os
    self._context_app_os_version = context_app_os_version
    self._context_data_center = context_data_center
    self._context_data_center_region = context_data_center_region
    self._custom_properties = custom_properties
    self._custom_segments = custom_segments
@property
def api_key(self):
    """The API key generated for the application (str)."""
    return self._api_key

@api_key.setter
def api_key(self, api_key):
    """Set the API key generated for the application.

    :param api_key: new API key value (str)
    """
    self._api_key = api_key
@property
def classification(self):
    """Severity classification: one of 'debug', 'info', 'warning', 'error' or a custom string."""
    return self._classification

@classification.setter
def classification(self, classification):
    """Set the severity classification.

    :param classification: one of 'debug', 'info', 'warning', 'error' or a custom string
    """
    self._classification = classification
@property
def event_type(self):
    """Type of event or error, e.g. ``NullPointerException`` (str)."""
    return self._event_type

@event_type.setter
def event_type(self, event_type):
    """Set the type of event or error.

    :param event_type: event/error type name (str)
    """
    self._event_type = event_type
@property
def event_message(self):
    """Message containing details of the event or error (str)."""
    return self._event_message

@event_message.setter
def event_message(self, event_message):
    """Set the message containing details of the event or error.

    :param event_message: detail message (str)
    """
    self._event_message = event_message
@property
def event_time(self):
    """(optional) Event time in milliseconds since the epoch (int)."""
    return self._event_time

@event_time.setter
def event_time(self, event_time):
    """Set the event time.

    :param event_time: milliseconds since the epoch (int)
    """
    self._event_time = event_time
@property
def event_stacktrace(self):
    """The stacktrace attached to this event (Stacktrace)."""
    return self._event_stacktrace

@event_stacktrace.setter
def event_stacktrace(self, event_stacktrace):
    """Set the stacktrace attached to this event.

    :param event_stacktrace: Stacktrace instance
    """
    self._event_stacktrace = event_stacktrace
@property
def event_user(self):
    """(optional) Identifier of the user associated with the event (str)."""
    return self._event_user

@event_user.setter
def event_user(self, event_user):
    """Set the user identifier associated with the event.

    :param event_user: user identifier (str)
    """
    self._event_user = event_user
@property
def event_session(self):
    """(optional) Session identification string (str)."""
    return self._event_session

@event_session.setter
def event_session(self, event_session):
    """Set the session identification string.

    :param event_session: session identifier (str)
    """
    self._event_session = event_session
@property
def context_app_version(self):
    """(optional) Application version information (str)."""
    return self._context_app_version

@context_app_version.setter
def context_app_version(self, context_app_version):
    """Set the application version information.

    :param context_app_version: application version (str)
    """
    self._context_app_version = context_app_version
@property
def context_env_name(self):
    """(optional) Environment name: one of 'development', 'staging', 'production' or a custom string."""
    return self._context_env_name

@context_env_name.setter
def context_env_name(self, context_env_name):
    """Set the environment name.

    :param context_env_name: one of 'development', 'staging', 'production' or a custom string
    """
    self._context_env_name = context_env_name
@property
def context_env_version(self):
    """(optional) Version of the environment (str)."""
    return self._context_env_version

@context_env_version.setter
def context_env_version(self, context_env_version):
    """Set the version of the environment.

    :param context_env_version: environment version (str)
    """
    self._context_env_version = context_env_version
@property
def context_env_hostname(self):
    """(optional) Hostname or ID of the environment (str)."""
    return self._context_env_hostname

@context_env_hostname.setter
def context_env_hostname(self, context_env_hostname):
    """Set the hostname or ID of the environment.

    :param context_env_hostname: hostname or environment ID (str)
    """
    self._context_env_hostname = context_env_hostname
@property
def context_app_browser(self):
    """(optional) Browser name if running in a browser, e.g. Chrome (str)."""
    return self._context_app_browser

@context_app_browser.setter
def context_app_browser(self, context_app_browser):
    """Set the browser name.

    :param context_app_browser: browser name, e.g. 'Chrome' (str)
    """
    self._context_app_browser = context_app_browser
@property
def context_app_browser_version(self):
    """(optional) Browser version if running in a browser (str)."""
    return self._context_app_browser_version

@context_app_browser_version.setter
def context_app_browser_version(self, context_app_browser_version):
    """Set the browser version.

    :param context_app_browser_version: browser version string (str)
    """
    self._context_app_browser_version = context_app_browser_version
@property
def context_app_os(self):
    """(optional) Operating system the application is running on (str)."""
    return self._context_app_os

@context_app_os.setter
def context_app_os(self, context_app_os):
    """Set the operating system the application is running on.

    :param context_app_os: OS name (str)
    """
    self._context_app_os = context_app_os
@property
def context_app_os_version(self):
    """Gets the context_app_os_version of this AppEvent.

    (optional) OS version the application is running on

    :return: The context_app_os_version of this AppEvent.
    :rtype: str
    """
    return self._context_app_os_version

@context_app_os_version.setter
def context_app_os_version(self, context_app_os_version):
    """Sets the context_app_os_version of this AppEvent.

    (optional) OS version the application is running on

    :param context_app_os_version: The context_app_os_version of this AppEvent.
    :type: str
    """
    self._context_app_os_version = context_app_os_version
@property
def context_data_center(self):
    """Gets the context_data_center of this AppEvent.

    (optional) Data center the application is running on or connected to

    :return: The context_data_center of this AppEvent.
    :rtype: str
    """
    return self._context_data_center

@context_data_center.setter
def context_data_center(self, context_data_center):
    """Sets the context_data_center of this AppEvent.

    (optional) Data center the application is running on or connected to

    :param context_data_center: The context_data_center of this AppEvent.
    :type: str
    """
    self._context_data_center = context_data_center
@property
def context_data_center_region(self):
    """Gets the context_data_center_region of this AppEvent.

    (optional) Data center region

    :return: The context_data_center_region of this AppEvent.
    :rtype: str
    """
    return self._context_data_center_region

@context_data_center_region.setter
def context_data_center_region(self, context_data_center_region):
    """Sets the context_data_center_region of this AppEvent.

    (optional) Data center region

    :param context_data_center_region: The context_data_center_region of this AppEvent.
    :type: str
    """
    self._context_data_center_region = context_data_center_region
@property
def custom_properties(self):
    """Gets the custom_properties of this AppEvent.

    :return: The custom_properties of this AppEvent.
    :rtype: CustomData
    """
    return self._custom_properties

@custom_properties.setter
def custom_properties(self, custom_properties):
    """Sets the custom_properties of this AppEvent.

    :param custom_properties: The custom_properties of this AppEvent.
    :type: CustomData
    """
    self._custom_properties = custom_properties
@property
def custom_segments(self):
    """Gets the custom_segments of this AppEvent.

    :return: The custom_segments of this AppEvent.
    :rtype: CustomData
    """
    return self._custom_segments

@custom_segments.setter
def custom_segments(self, custom_segments):
    """Sets the custom_segments of this AppEvent.

    :param custom_segments: The custom_segments of this AppEvent.
    :type: CustomData
    """
    self._custom_segments = custom_segments
def to_dict(self):
    """Returns the model properties as a dict.

    Nested model objects (anything exposing ``to_dict``) are converted
    as well, including when they appear inside lists or as dict values.
    """
    def _convert(value):
        # Lists: convert each element that is itself a model.
        if isinstance(value, list):
            return [item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value]
        # A directly nested model.
        if hasattr(value, "to_dict"):
            return value.to_dict()
        # Dicts: convert model values, keep everything else untouched.
        if isinstance(value, dict):
            return dict(
                (key, val.to_dict()) if hasattr(val, "to_dict") else (key, val)
                for key, val in value.items())
        return value

    return dict((attr, _convert(getattr(self, attr)))
                for attr in self.swagger_types)
def to_str(self):
    """Returns the string representation of the model (pretty-printed dict)."""
    return pformat(self.to_dict())
def __repr__(self):
    """For `print` and `pprint` -- delegates to ``to_str``."""
    return self.to_str()
def __eq__(self, other):
    """Returns true if both objects are equal.

    Only another instance of the same class can compare equal.  The
    previous implementation accessed ``other.__dict__`` unconditionally,
    which raised AttributeError for objects without a ``__dict__`` (e.g.
    ints) instead of simply returning False.
    """
    if not isinstance(other, self.__class__):
        return False
    return self.__dict__ == other.__dict__
def __ne__(self, other):
    """Returns true if both objects are not equal (negation of ``__eq__``)."""
    return not self == other
|
severr/severr-python
|
trakerr_client/models/app_event.py
|
Python
|
apache-2.0
| 18,881 | 0.000371 |
### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy, time, sys, hashlib
from bpy.types import UILayout
from math import *
def ifloor(x):
    """Return floor(x) as an int.

    The previous truncation-based formula returned int(x) - 1 for exact
    negative integers (e.g. ifloor(-2.0) == -3 instead of -2); flooring
    directly fixes that while leaving all other inputs unchanged.
    """
    return int(floor(x))
def iceil(x):
    """Return ceil(x) as an int.

    The previous formula returned int(x) + 1 for exact non-negative
    integers (e.g. iceil(2.0) == 3 instead of 2), while being exact for
    negative integers -- an inconsistency that points at a truncation
    bug rather than an intentional exclusive bound.
    NOTE(review): if any caller relied on the old strict upper bound at
    non-negative integer inputs, it needs iceil(x) + 1 now -- confirm.
    """
    return int(ceil(x))
def round_sigfigs(num, sig_figs):
    """Round *num* to *sig_figs* significant figures.

    Based on
    http://code.activestate.com/recipes/578114-round-number-to-specified-number-of-significant-di/
    """
    if num == 0:
        return 0  # log10 is undefined at zero
    magnitude = int(floor(log10(abs(num))))
    return round(num, -(magnitude - (sig_figs - 1)))
def data_uuid(id_data, path=""):
identifier = id_data.name.encode(encoding="utf-8")
if id_data.library:
identifier += b'\0' + id_data.library.filepath.encode(encoding="utf-8")
if path:
identifier += b'\0' + path.encode(encoding="utf-8")
m = hashlib.md5()
m.update(identifier)
return m.hexdigest(), int.from_bytes(m.digest(), byteorder='big') % 0xFFFFFFFF
# Names of all bpy.data collections whose elements are ID datablocks
# (objects, meshes, materials, ...), discovered via RNA introspection.
_id_collections = [ c.identifier for c in bpy.types.BlendData.bl_rna.properties if isinstance(c, bpy.types.CollectionProperty) and isinstance(c.fixed_type, bpy.types.ID) ]
def _id_data_blocks(blend_data):
    """Yield every ID datablock across all ID collections of *blend_data*."""
    for collection_name in _id_collections:
        for id_data in getattr(blend_data, collection_name):
            yield id_data
def find_id_data(blend_data, name, library):
    """Find an ID datablock by name, restricted to a library filepath.

    With a falsy *library*, only local (non-linked) datablocks match.
    Returns None when nothing matches.
    """
    if library:
        def matches(id_data):
            return id_data.library and id_data.library.filepath == library and id_data.name == name
    else:
        def matches(id_data):
            return not id_data.library and id_data.name == name
    for id_data in _id_data_blocks(blend_data):
        if matches(id_data):
            return id_data
def id_data_from_enum(identifier):
    """Map an enum identifier (stringified pointer) back to its datablock."""
    for id_data in _id_data_blocks(bpy.data):
        if identifier == str(id_data.as_pointer()):
            return id_data
def id_data_enum_item(id_data):
    """Build an (identifier, name, description, icon, number) enum item.

    The datablock pointer doubles as the unique identifier; the numeric
    slot is reduced to fit 32 bits.
    """
    ptr = id_data.as_pointer()
    return (str(ptr), id_data.name, "", UILayout.icon(id_data), ptr % 0xFFFFFFFF)
class OperatorCallContext():
    """Context manager for running operators internally.

    Saves the scene's active/selected objects and the global undo flag on
    enter, and restores all of them on exit (even if the operator raised).
    """

    def __enter__(self):
        scene = bpy.context.scene
        prefs = bpy.context.user_preferences
        # store active/selected state to restore it after operator execution
        self.curact = scene.objects.active
        self.cursel = { ob : ob.select for ob in scene.objects }
        # undo can store files a lot when running operators internally,
        # disable since we only need one undo step after main operators anyway
        self.use_global_undo = prefs.edit.use_global_undo
        prefs.edit.use_global_undo = False
        return (self.curact, self.cursel)

    def __exit__(self, exc_type, exc_value, traceback):
        scene = bpy.context.scene
        prefs = bpy.context.user_preferences
        # restore active/selected state
        scene.objects.active = self.curact
        for ob in scene.objects:
            ob.select = self.cursel.get(ob, False)
        prefs.edit.use_global_undo = self.use_global_undo
def select_single_object(ob):
    """Make *ob* the active object and the only selected object in the scene."""
    scene = bpy.context.scene
    scene.objects.active = ob
    for candidate in scene.objects:
        candidate.select = (candidate == ob)
|
Microvellum/Fluid-Designer
|
win64-vc/2.78/Python/bin/2.78/scripts/addons_contrib/data_overrides/util.py
|
Python
|
gpl-3.0
| 3,986 | 0.006021 |
"""
relation between the length of material coiled around cylinder and its width (toilet paper)
http://math.stackexchange.com/questions/1633704/the-length-of-toilet-roll
"""
import numpy as np
x = 1 # width of one sheet
w = 80 #partial radius (total radius - minus radius of paper tube)
r = 30 # radius of paper tube
L = (np.pi/x)*w*(w+x+2*r)
print L
|
jboissard/mathExperiments
|
toiletRoll.py
|
Python
|
apache-2.0
| 357 | 0.016807 |
"""
Support for Velbus Binary Sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.velbus/
"""
import logging
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.components.velbus import (
DOMAIN as VELBUS_DOMAIN, VelbusEntity)
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['velbus']
async def async_setup_platform(hass, config, async_add_entities,
                               discovery_info=None):
    """Set up Velbus binary sensors from discovery info."""
    if discovery_info is None:
        return
    controller = hass.data[VELBUS_DOMAIN]
    # Each discovery entry carries the module address at [0] and the
    # channel number at [1].
    async_add_entities([
        VelbusBinarySensor(controller.get_module(entry[0]), entry[1])
        for entry in discovery_info])
class VelbusBinarySensor(VelbusEntity, BinarySensorDevice):
    """Representation of a Velbus Binary Sensor."""

    @property
    def is_on(self):
        """Return true if the sensor is on."""
        # The Velbus module reports an active input channel as "closed".
        return self._module.is_closed(self._channel)
|
PetePriority/home-assistant
|
homeassistant/components/velbus/binary_sensor.py
|
Python
|
apache-2.0
| 1,110 | 0 |
#!/usr/bin/python
"""Read text from stdin and report it, its words and its word count."""
import sys


def count_words(text):
    """Return (words, word count) for the given text, split on whitespace."""
    words = text.split()
    return words, len(words)


def main():
    # Read everything so the count covers multi-line input.
    text = sys.stdin.read()
    # %-formatted single-argument print() reproduces the old Python-2
    # "print 'Text:',text" output and also runs on Python 3.
    print('Text: %s' % text)
    words, wordcount = count_words(text)
    print('Words: %s' % (words,))
    print('Wordcount: %d' % wordcount)


if __name__ == '__main__':
    main()
|
MarsBighead/mustang
|
Python/somescript.py
|
Python
|
mit
| 167 | 0.023952 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-05 15:01
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename ``Registration.comments`` to ``comment`` (auto-generated).

    Applied migrations are historical records -- do not edit operations.
    """

    dependencies = [
        ('loginapp', '0001_initial'),
    ]

    operations = [
        migrations.RenameField(
            model_name='registration',
            old_name='comments',
            new_name='comment',
        ),
    ]
|
sailuh/perceive
|
Websites/Experiments/loginapp/migrations/0002_auto_20170205_1501.py
|
Python
|
gpl-2.0
| 427 | 0 |
from Plugins.Plugin import PluginDescriptor
from Screens.Console import Console
from Screens.ChoiceBox import ChoiceBox
from Screens.MessageBox import MessageBox
from Screens.Screen import Screen
from Screens.Standby import TryQuitMainloop
from Screens.Ipkg import Ipkg
from Screens.SoftwareUpdate import UpdatePlugin
from Components.ActionMap import ActionMap, NumberActionMap
from Components.Input import Input
from Components.Ipkg import IpkgComponent
from Components.Sources.StaticText import StaticText
from Components.ScrollLabel import ScrollLabel
from Components.Pixmap import Pixmap
from Components.MenuList import MenuList
from Components.Sources.List import List
from Components.Slider import Slider
from Components.Harddisk import harddiskmanager
from Components.config import config,getConfigListEntry, ConfigSubsection, ConfigText, ConfigLocations, ConfigYesNo, ConfigSelection
from Components.ConfigList import ConfigListScreen
from Components.Console import Console
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
from Components.SelectionList import SelectionList
from Components.PluginComponent import plugins
from Components.About import about
from Components.PackageInfo import PackageInfoHandler
from Components.Language import language
from Components.AVSwitch import AVSwitch
from Components.Task import job_manager
from Tools.Directories import pathExists, fileExists, resolveFilename, SCOPE_PLUGINS, SCOPE_CURRENT_PLUGIN, SCOPE_ACTIVE_SKIN, SCOPE_METADIR
from Tools.LoadPixmap import LoadPixmap
from Tools.NumericalTextInput import NumericalTextInput
from enigma import eTimer, RT_HALIGN_LEFT, RT_VALIGN_CENTER, eListboxPythonMultiContent, eListbox, gFont, getDesktop, ePicLoad, eRCInput, getPrevAsciiCode, eEnv, iRecordableService
from cPickle import dump, load
from os import path as os_path, system as os_system, unlink, stat, mkdir, popen, makedirs, listdir, access, rename, remove, W_OK, R_OK, F_OK
from time import time, gmtime, strftime, localtime
from stat import ST_MTIME
from datetime import date
from twisted.web import client
from twisted.internet import reactor
from ImageWizard import ImageWizard
from BackupRestore import BackupSelection, RestoreMenu, BackupScreen, RestoreScreen, getBackupPath, getBackupFilename
from SoftwareTools import iSoftwareTools
# Persistent settings for configuration backups: the target device and the
# set of files/directories that go into a backup archive.
config.plugins.configurationbackup = ConfigSubsection()
config.plugins.configurationbackup.backuplocation = ConfigText(default = '/media/hdd/', visible_width = 50, fixed_size = False)
config.plugins.configurationbackup.backupdirs = ConfigLocations(default=[eEnv.resolve('${sysconfdir}/enigma2/'), '/etc/network/interfaces', '/etc/wpa_supplicant.conf', '/etc/wpa_supplicant.ath0.conf', '/etc/wpa_supplicant.wlan0.conf', '/etc/resolv.conf', '/etc/default_gw', '/etc/hostname'])

# Software-manager behaviour: config-file handling on upgrade, plus where
# the plugin is reachable from (setup menu / blue button).
config.plugins.softwaremanager = ConfigSubsection()
config.plugins.softwaremanager.overwriteConfigFiles = ConfigSelection(
    [
        ("Y", _("Yes, always")),
        ("N", _("No, never")),
        ("ask", _("Always ask"))
    ], "Y")
config.plugins.softwaremanager.onSetupMenu = ConfigYesNo(default=False)
config.plugins.softwaremanager.onBlueButton = ConfigYesNo(default=False)
def write_cache(cache_file, cache_data):
    """Pickle cache_data into cache_file (cPickle, highest protocol).

    Creates the parent directory when missing.
    """
    cache_dir = os_path.dirname(cache_file)
    if not os_path.isdir(cache_dir):
        try:
            mkdir(cache_dir)
        except OSError:
            # Same message as before, printable on Python 2 and 3.
            print('%s is a file' % cache_dir)
    # "with" guarantees the file is closed even if dump() raises;
    # the old open/dump/close leaked the descriptor on error.
    with open(cache_file, 'w') as fd:
        dump(cache_data, fd, -1)
def valid_cache(cache_file, cache_ttl):
    """Return 1 if cache_file exists and is younger than cache_ttl seconds, else 0."""
    try:
        mtime = stat(cache_file)[ST_MTIME]
    except OSError:
        # Missing/unreadable file -> cache invalid.  The previous bare
        # "except:" swallowed everything, including KeyboardInterrupt.
        return 0
    return 0 if (time() - mtime) > cache_ttl else 1
def load_cache(cache_file):
    """Unpickle and return the object stored in cache_file.

    Pair of write_cache(); assumes the file was written by it.
    """
    # "with" guarantees the descriptor is closed even if load() raises;
    # the old open/load/close leaked it on a corrupt cache file.
    with open(cache_file) as fd:
        return load(fd)
class UpdatePluginMenu(Screen):
    """Top-level software-management menu.

    ``args == 0`` builds the main menu (update/restore/backup/plugins),
    ``args == 1`` the "Advanced options" sub-menu; each entry is a tuple
    of (key, title, description, optional plugin callback).
    """

    skin = """
        <screen name="UpdatePluginMenu" position="center,center" size="610,410" >
            <ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
            <widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
            <ePixmap pixmap="border_menu_350.png" position="5,50" zPosition="1" size="350,300" transparent="1" alphatest="on" />
            <widget source="menu" render="Listbox" position="15,60" size="330,290" scrollbarMode="showOnDemand">
                <convert type="TemplatedMultiContent">
                    {"template": [
                        MultiContentEntryText(pos = (2, 2), size = (330, 24), flags = RT_HALIGN_LEFT, text = 1), # index 0 is the MenuText,
                        ],
                    "fonts": [gFont("Regular", 22)],
                    "itemHeight": 25
                    }
                </convert>
            </widget>
            <widget source="menu" render="Listbox" position="360,50" size="240,300" scrollbarMode="showNever" selectionDisabled="1">
                <convert type="TemplatedMultiContent">
                    {"template": [
                        MultiContentEntryText(pos = (2, 2), size = (240, 300), flags = RT_HALIGN_CENTER|RT_VALIGN_CENTER|RT_WRAP, text = 2), # index 2 is the Description,
                        ],
                    "fonts": [gFont("Regular", 22)],
                    "itemHeight": 300
                    }
                </convert>
            </widget>
            <widget source="status" render="Label" position="5,360" zPosition="10" size="600,50" halign="center" valign="center" font="Regular;22" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
        </screen>"""

    def __init__(self, session, args = 0):
        """Build the menu list for the requested level (args: 0=main, 1=advanced)."""
        Screen.__init__(self, session)
        Screen.setTitle(self, _("Software management"))
        self.skin_path = plugin_path
        self.menu = args
        self.list = []
        self.oktext = _("\nPress OK on your remote control to continue.")
        self.menutext = _("Press MENU on your remote control for additional options.")
        self.infotext = _("Press INFO on your remote control for additional information.")
        self.text = ""
        self.backupdirs = ' '.join( config.plugins.configurationbackup.backupdirs.getValue() )
        if self.menu == 0:
            print "building menu entries"
            self.list.append(("install-extensions", _("Manage extensions"), _("\nManage extensions or plugins for your STB_BOX" ) + self.oktext, None))
            self.list.append(("software-update", _("Software update"), _("\nOnline update of your STB_BOX software." ) + self.oktext, None))
            self.list.append(("software-restore", _("Software restore"), _("\nRestore your STB_BOX with a new firmware." ) + self.oktext, None))
            self.list.append(("system-backup", _("Backup system settings"), _("\nBackup your STB_BOX settings." ) + self.oktext + "\n\n" + self.infotext, None))
            self.list.append(("system-restore",_("Restore system settings"), _("\nRestore your STB_BOX settings." ) + self.oktext, None))
            self.list.append(("ipkg-install", _("Install local extension"), _("\nScan for local extensions and install them." ) + self.oktext, None))
            # Let third-party plugins register extra entries.
            for p in plugins.getPlugins(PluginDescriptor.WHERE_SOFTWAREMANAGER):
                if p.__call__.has_key("SoftwareSupported"):
                    callFnc = p.__call__["SoftwareSupported"](None)
                    if callFnc is not None:
                        if p.__call__.has_key("menuEntryName"):
                            menuEntryName = p.__call__["menuEntryName"](None)
                        else:
                            menuEntryName = _('Extended Software')
                        if p.__call__.has_key("menuEntryDescription"):
                            menuEntryDescription = p.__call__["menuEntryDescription"](None)
                        else:
                            menuEntryDescription = _('Extended Software Plugin')
                        self.list.append(('default-plugin', menuEntryName, menuEntryDescription + self.oktext, callFnc))
            if config.usage.setup_level.index >= 2: # expert+
                self.list.append(("advanced", _("Advanced options"), _("\nAdvanced options and settings." ) + self.oktext, None))
        elif self.menu == 1:
            self.list.append(("advancedrestore", _("Advanced restore"), _("\nRestore your backups by date." ) + self.oktext, None))
            self.list.append(("backuplocation", _("Select backup location"), _("\nSelect your backup device.\nCurrent device: " ) + config.plugins.configurationbackup.backuplocation.getValue() + self.oktext, None))
            self.list.append(("backupfiles", _("Select backup files"), _("Select files for backup.") + self.oktext + "\n\n" + self.infotext, None))
            if config.usage.setup_level.index >= 2: # expert+
                self.list.append(("ipkg-manager", _("Packet management"), _("\nView, install and remove available or installed packages." ) + self.oktext, None))
            self.list.append(("ipkg-source",_("Select upgrade source"), _("\nEdit the upgrade source address." ) + self.oktext, None))
            for p in plugins.getPlugins(PluginDescriptor.WHERE_SOFTWAREMANAGER):
                if p.__call__.has_key("AdvancedSoftwareSupported"):
                    callFnc = p.__call__["AdvancedSoftwareSupported"](None)
                    if callFnc is not None:
                        if p.__call__.has_key("menuEntryName"):
                            menuEntryName = p.__call__["menuEntryName"](None)
                        else:
                            menuEntryName = _('Advanced software')
                        if p.__call__.has_key("menuEntryDescription"):
                            menuEntryDescription = p.__call__["menuEntryDescription"](None)
                        else:
                            menuEntryDescription = _('Advanced software plugin')
                        self.list.append(('advanced-plugin', menuEntryName, menuEntryDescription + self.oktext, callFnc))
        self["menu"] = List(self.list)
        self["key_red"] = StaticText(_("Close"))
        self["status"] = StaticText(self.menutext)
        # Number keys 1-9 jump straight to the corresponding menu entry.
        self["shortcuts"] = NumberActionMap(["ShortcutActions", "WizardActions", "InfobarEPGActions", "MenuActions", "NumberActions"],
            {
                "ok": self.go,
                "back": self.close,
                "red": self.close,
                "menu": self.handleMenu,
                "showEventInfo": self.handleInfo,
                "1": self.go,
                "2": self.go,
                "3": self.go,
                "4": self.go,
                "5": self.go,
                "6": self.go,
                "7": self.go,
                "8": self.go,
                "9": self.go,
            }, -1)
        self.onLayoutFinish.append(self.layoutFinished)
        self.backuppath = getBackupPath()
        self.backupfile = getBackupFilename()
        self.fullbackupfilename = self.backuppath + "/" + self.backupfile
        self.onShown.append(self.setWindowTitle)
        self.onChangedEntry = []
        self["menu"].onSelectionChanged.append(self.selectionChanged)

    def createSummary(self):
        """Return the LCD summary screen class for this menu."""
        from Screens.PluginBrowser import PluginBrowserSummary
        return PluginBrowserSummary

    def selectionChanged(self):
        """Push the selected entry's name/description to summary listeners."""
        item = self["menu"].getCurrent()
        if item:
            name = item[1]
            desc = item[2]
        else:
            name = "-"
            desc = ""
        for cb in self.onChangedEntry:
            cb(name, desc)

    def layoutFinished(self):
        """Reset the menu cursor to the first entry."""
        idx = 0
        self["menu"].index = idx

    def setWindowTitle(self):
        self.setTitle(_("Software management"))

    def cleanup(self):
        """Release cached package data held by iSoftwareTools."""
        iSoftwareTools.cleanupSoftwareTools()

    def getUpdateInfos(self):
        """Refresh the status label with update-availability information."""
        if iSoftwareTools.NetworkConnectionAvailable is True:
            # NOTE(review): "is not 0" tests identity, not value; it happens
            # to work for small CPython ints but should be "!= 0".
            if iSoftwareTools.available_updates is not 0:
                self.text = _("There are at least %s updates available.") % (str(iSoftwareTools.available_updates))
            else:
                self.text = "" #_("There are no updates available.")
            if iSoftwareTools.list_updating is True:
                self.text += "\n" + _("A search for available updates is currently in progress.")
        else:
            self.text = _("No network connection available.")
        self["status"].setText(self.text)

    def handleMenu(self):
        """MENU key: open the software-manager settings dialog."""
        self.session.open(SoftwareManagerSetup)

    def handleInfo(self):
        """INFO key: show the backup file list for backup-related entries."""
        current = self["menu"].getCurrent()
        if current:
            currentEntry = current[0]
            if currentEntry in ("system-backup","backupfiles"):
                self.session.open(SoftwareManagerInfo, mode = "backupinfo")

    def go(self, num = None):
        """Activate the selected entry; *num* (1-9) selects by position first."""
        if num is not None:
            num -= 1
            if not num < self["menu"].count():
                return
            self["menu"].setIndex(num)
        current = self["menu"].getCurrent()
        if current:
            currentEntry = current[0]
            if self.menu == 0:
                if (currentEntry == "software-update"):
                    self.session.open(UpdatePlugin)
                elif (currentEntry == "software-restore"):
                    self.session.open(ImageWizard)
                elif (currentEntry == "install-extensions"):
                    self.session.open(PluginManager, self.skin_path)
                elif (currentEntry == "system-backup"):
                    self.session.openWithCallback(self.backupDone,BackupScreen, runBackup = True)
                elif (currentEntry == "system-restore"):
                    if os_path.exists(self.fullbackupfilename):
                        self.session.openWithCallback(self.startRestore, MessageBox, _("Are you sure you want to restore the backup?\nYour receiver will restart after the backup has been restored!"))
                    else:
                        self.session.open(MessageBox, _("Sorry, no backups found!"), MessageBox.TYPE_INFO, timeout = 10)
                elif (currentEntry == "ipkg-install"):
                    try:
                        from Plugins.Extensions.MediaScanner.plugin import main
                        main(self.session)
                    except:
                        self.session.open(MessageBox, _("Sorry, %s has not been installed!") % ("MediaScanner"), MessageBox.TYPE_INFO, timeout = 10)
                elif (currentEntry == "default-plugin"):
                    self.extended = current[3]
                    self.extended(self.session, None)
                elif (currentEntry == "advanced"):
                    self.session.open(UpdatePluginMenu, 1)
            elif self.menu == 1:
                if (currentEntry == "ipkg-manager"):
                    self.session.open(PacketManager, self.skin_path)
                elif (currentEntry == "backuplocation"):
                    parts = [ (r.description, r.mountpoint, self.session) for r in harddiskmanager.getMountedPartitions(onlyhotplug = False)]
                    # NOTE(review): removing from "parts" while iterating it
                    # can skip adjacent unwritable mounts -- consider
                    # iterating over a copy (parts[:]).
                    for x in parts:
                        if not access(x[1], F_OK|R_OK|W_OK) or x[1] == '/':
                            parts.remove(x)
                    if len(parts):
                        self.session.openWithCallback(self.backuplocation_choosen, ChoiceBox, title = _("Please select medium to use as backup location"), list = parts)
                elif (currentEntry == "backupfiles"):
                    self.session.openWithCallback(self.backupfiles_choosen,BackupSelection)
                elif (currentEntry == "advancedrestore"):
                    self.session.open(RestoreMenu, self.skin_path)
                elif (currentEntry == "ipkg-source"):
                    self.session.open(IPKGMenu, self.skin_path)
                elif (currentEntry == "advanced-plugin"):
                    self.extended = current[3]
                    self.extended(self.session, None)

    def backupfiles_choosen(self, ret):
        """Persist the (possibly changed) backup file selection."""
        self.backupdirs = ' '.join( config.plugins.configurationbackup.backupdirs.getValue() )
        config.plugins.configurationbackup.backupdirs.save()
        config.plugins.configurationbackup.save()
        config.save()

    def backuplocation_choosen(self, option):
        """Store the chosen backup device and create its folder if it moved."""
        oldpath = config.plugins.configurationbackup.backuplocation.getValue()
        if option is not None:
            config.plugins.configurationbackup.backuplocation.value = str(option[1])
        config.plugins.configurationbackup.backuplocation.save()
        config.plugins.configurationbackup.save()
        config.save()
        newpath = config.plugins.configurationbackup.backuplocation.getValue()
        if newpath != oldpath:
            self.createBackupfolders()

    def createBackupfolders(self):
        """Ensure the backup directory exists on the selected device."""
        print "Creating backup folder if not already there..."
        self.backuppath = getBackupPath()
        try:
            if (os_path.exists(self.backuppath) == False):
                makedirs(self.backuppath)
        except OSError:
            self.session.open(MessageBox, _("Sorry, your backup destination is not writeable.\nPlease select a different one."), MessageBox.TYPE_INFO, timeout = 10)

    def backupDone(self,retval = None):
        """Report the outcome of a settings backup run."""
        if retval is True:
            self.session.open(MessageBox, _("Backup completed."), MessageBox.TYPE_INFO, timeout = 10)
        else:
            self.session.open(MessageBox, _("Backup failed."), MessageBox.TYPE_INFO, timeout = 10)

    def startRestore(self, ret = False):
        """Launch the restore screen once the user confirmed."""
        if (ret == True):
            self.exe = True
            self.session.open(RestoreScreen, runRestore = True)
class SoftwareManagerSetup(Screen, ConfigListScreen):
    """Settings dialog for the software manager.

    Lets the user choose how configuration files are handled during a
    software upgrade and where the plugin appears (setup menu / blue
    button).  Saving reloads the plugin list so changes take effect.
    """

    skin = """
        <screen name="SoftwareManagerSetup" position="center,center" size="560,440" title="SoftwareManager setup">
            <ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="buttons/blue.png" position="420,0" size="140,40" alphatest="on" />
            <widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
            <widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
            <widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
            <widget source="key_blue" render="Label" position="420,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#18188b" transparent="1" />
            <widget name="config" position="5,50" size="550,350" scrollbarMode="showOnDemand" />
            <ePixmap pixmap="div-h.png" position="0,400" zPosition="1" size="560,2" />
            <widget source="introduction" render="Label" position="5,410" size="550,30" zPosition="10" font="Regular;21" halign="center" valign="center" backgroundColor="#25062748" transparent="1" />
        </screen>"""

    def __init__(self, session, skin_path = None):
        Screen.__init__(self, session)
        self.session = session
        self.skin_path = skin_path
        if self.skin_path == None:
            self.skin_path = resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager")
        self.onChangedEntry = [ ]
        self.setup_title = _("Software manager setup")
        self.overwriteConfigfilesEntry = None
        self.list = [ ]
        ConfigListScreen.__init__(self, self.list, session = session, on_change = self.changedEntry)
        self["actions"] = ActionMap(["SetupActions", "MenuActions"],
            {
                "cancel": self.keyCancel,
                "save": self.apply,
                "menu": self.closeRecursive,
            }, -2)
        self["key_red"] = StaticText(_("Cancel"))
        self["key_green"] = StaticText(_("OK"))
        self["key_yellow"] = StaticText()
        self["key_blue"] = StaticText()
        self["introduction"] = StaticText()
        self.createSetup()
        self.onLayoutFinish.append(self.layoutFinished)

    def layoutFinished(self):
        self.setTitle(self.setup_title)

    def createSetup(self):
        """(Re)build the config list entries shown in the dialog."""
        self.list = [ ]
        self.overwriteConfigfilesEntry = getConfigListEntry(_("Overwrite configuration files?"), config.plugins.softwaremanager.overwriteConfigFiles)
        self.list.append(self.overwriteConfigfilesEntry)
        self.list.append(getConfigListEntry(_("show softwaremanager in plugin menu"), config.plugins.softwaremanager.onSetupMenu))
        self.list.append(getConfigListEntry(_("show softwaremanager on blue button"), config.plugins.softwaremanager.onBlueButton))
        self["config"].list = self.list
        self["config"].l.setSeperation(400)
        self["config"].l.setList(self.list)
        if not self.selectionChanged in self["config"].onSelectionChanged:
            self["config"].onSelectionChanged.append(self.selectionChanged)
        self.selectionChanged()

    def selectionChanged(self):
        """Show a context hint for the currently selected option."""
        if self["config"].getCurrent() == self.overwriteConfigfilesEntry:
            self["introduction"].setText(_("Overwrite configuration files during software upgrade?"))
        else:
            self["introduction"].setText("")

    def newConfig(self):
        pass

    def keyLeft(self):
        ConfigListScreen.keyLeft(self)

    def keyRight(self):
        ConfigListScreen.keyRight(self)

    def confirm(self, confirmed):
        """Save and reload plugins after the user confirmed the settings."""
        if not confirmed:
            print "not confirmed"
            return
        else:
            self.keySave()
            plugins.clearPluginList()
            plugins.readPluginList(resolveFilename(SCOPE_PLUGINS))

    def apply(self):
        """Green key: ask for confirmation before saving."""
        self.session.openWithCallback(self.confirm, MessageBox, _("Use these settings?"), MessageBox.TYPE_YESNO, timeout = 20, default = True)

    def cancelConfirm(self, result):
        """Revert all pending changes and close when the user confirmed."""
        if not result:
            return
        for x in self["config"].list:
            x[1].cancel()
        self.close()

    def keyCancel(self):
        """Red/back key: warn about unsaved changes before closing."""
        if self["config"].isChanged():
            self.session.openWithCallback(self.cancelConfirm, MessageBox, _("Really close without saving settings?"), MessageBox.TYPE_YESNO, timeout = 20, default = False)
        else:
            self.close()

    # for summary:
    def changedEntry(self):
        for x in self.onChangedEntry:
            x()
        self.selectionChanged()

    def getCurrentEntry(self):
        return self["config"].getCurrent()[0]

    def getCurrentValue(self):
        return str(self["config"].getCurrent()[1].getValue())

    def createSummary(self):
        from Screens.Setup import SetupSummary
        return SetupSummary
class SoftwareManagerInfo(Screen):
    """Read-only information screen for the software manager.

    Currently supports mode "backupinfo", which lists every file and
    directory included in a configuration backup.
    """

    skin = """
        <screen name="SoftwareManagerInfo" position="center,center" size="560,440" title="SoftwareManager information">
            <ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="buttons/blue.png" position="420,0" size="140,40" alphatest="on" />
            <widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
            <widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
            <widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
            <widget source="key_blue" render="Label" position="420,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#18188b" transparent="1" />
            <widget source="list" render="Listbox" position="5,50" size="550,340" scrollbarMode="showOnDemand" selectionDisabled="0">
                <convert type="TemplatedMultiContent">
                    {"template": [
                        MultiContentEntryText(pos = (5, 0), size = (540, 26), font=0, flags = RT_HALIGN_LEFT | RT_HALIGN_CENTER, text = 0), # index 0 is the name
                        ],
                    "fonts": [gFont("Regular", 24),gFont("Regular", 22)],
                    "itemHeight": 26
                    }
                </convert>
            </widget>
            <ePixmap pixmap="div-h.png" position="0,400" zPosition="1" size="560,2" />
            <widget source="introduction" render="Label" position="5,410" size="550,30" zPosition="10" font="Regular;21" halign="center" valign="center" backgroundColor="#25062748" transparent="1" />
        </screen>"""

    def __init__(self, session, skin_path = None, mode = None):
        Screen.__init__(self, session)
        self.session = session
        self.mode = mode
        self.skin_path = skin_path
        if self.skin_path == None:
            self.skin_path = resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager")
        self["actions"] = ActionMap(["ShortcutActions", "WizardActions"],
            {
                "back": self.close,
                "red": self.close,
            }, -2)
        self.list = []
        self["list"] = List(self.list)
        self["key_red"] = StaticText(_("Close"))
        self["key_green"] = StaticText()
        self["key_yellow"] = StaticText()
        self["key_blue"] = StaticText()
        self["introduction"] = StaticText()
        self.onLayoutFinish.append(self.layoutFinished)

    def layoutFinished(self):
        self.setTitle(_("Softwaremanager information"))
        if self.mode is not None:
            self.showInfos()

    def showInfos(self):
        """Fill the list according to the requested mode."""
        if self.mode == "backupinfo":
            self.list = []
            backupfiles = config.plugins.configurationbackup.backupdirs.getValue()
            for entry in backupfiles:
                self.list.append((entry,))
            self['list'].setList(self.list)
class PluginManager(Screen, PackageInfoHandler):
	"""Browse the software catalog by category, mark packages for install or
	removal, and run the resulting ipkg command queue.

	self.currList tracks which view is active: "category", "packages" or
	"status". self.selectedFiles holds (detailsfile, state, packagename)
	tuples queued for processing.
	"""
	skin = """
		<screen name="PluginManager" position="center,center" size="560,440" >
			<ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="buttons/blue.png" position="420,0" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
			<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
			<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
			<widget source="key_blue" render="Label" position="420,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#18188b" transparent="1" />
			<widget source="list" render="Listbox" position="5,50" size="550,360" scrollbarMode="showOnDemand">
				<convert type="TemplatedMultiContent">
				{"templates":
					{"default": (51,[
							MultiContentEntryText(pos = (0, 1), size = (470, 24), font=0, flags = RT_HALIGN_LEFT, text = 0), # index 0 is the name
							MultiContentEntryText(pos = (0, 25), size = (470, 24), font=1, flags = RT_HALIGN_LEFT, text = 2), # index 2 is the description
							MultiContentEntryPixmapAlphaTest(pos = (475, 0), size = (48, 48), png = 5), # index 5 is the status pixmap
							MultiContentEntryPixmapAlphaTest(pos = (0, 49), size = (550, 2), png = 6), # index 6 is the div pixmap
						]),
					"category": (40,[
							MultiContentEntryText(pos = (30, 0), size = (500, 22), font=0, flags = RT_HALIGN_LEFT, text = 0), # index 0 is the name
							MultiContentEntryText(pos = (30, 22), size = (500, 16), font=2, flags = RT_HALIGN_LEFT, text = 1), # index 1 is the description
							MultiContentEntryPixmapAlphaTest(pos = (0, 38), size = (550, 2), png = 3), # index 3 is the div pixmap
						])
					},
					"fonts": [gFont("Regular", 22),gFont("Regular", 20),gFont("Regular", 16)],
					"itemHeight": 52
				}
				</convert>
			</widget>
			<widget source="status" render="Label" position="5,410" zPosition="10" size="540,30" halign="center" valign="center" font="Regular;22" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
		</screen>"""

	def __init__(self, session, plugin_path = None, args = None):
		Screen.__init__(self, session)
		Screen.setTitle(self, _("Extensions management"))
		self.session = session
		self.skin_path = plugin_path
		# fixed: identity comparison with None
		if self.skin_path is None:
			self.skin_path = resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager")
		self["shortcuts"] = ActionMap(["ShortcutActions", "WizardActions", "InfobarEPGActions", "HelpActions" ],
		{
			"ok": self.handleCurrent,
			"back": self.exit,
			"red": self.exit,
			"green": self.handleCurrent,
			"yellow": self.handleSelected,
			"showEventInfo": self.handleSelected,
			"displayHelp": self.handleHelp,
		}, -1)
		self.list = []
		self.statuslist = []
		self.selectedFiles = []		# queued (detailsfile, state, packagename) tuples
		self.categoryList = []
		self.packetlist = []
		self["list"] = List(self.list)
		self["key_red"] = StaticText(_("Close"))
		self["key_green"] = StaticText("")
		self["key_yellow"] = StaticText("")
		self["key_blue"] = StaticText("")
		self["status"] = StaticText("")
		self.cmdList = []
		self.oktext = _("\nAfter pressing OK, please wait!")
		# fixed: 'not in' instead of 'not x in y'
		if self.selectionChanged not in self["list"].onSelectionChanged:
			self["list"].onSelectionChanged.append(self.selectionChanged)
		self.currList = ""
		self.currentSelectedTag = None
		self.currentSelectedIndex = None
		self.currentSelectedPackage = None
		self.saved_currentSelectedPackage = None
		self.restartRequired = False
		self.onShown.append(self.setWindowTitle)
		self.onLayoutFinish.append(self.getUpdateInfos)

	def setWindowTitle(self):
		"""(Re)apply the window title each time the screen is shown."""
		self.setTitle(_("Extensions management"))

	def exit(self):
		"""RED/EXIT: back out of the package view, or leave the screen.

		Leaving from the category view first offers to execute any queued
		install/remove actions.
		"""
		if self.currList == "packages":
			# step back from package list to the category list
			self.currList = "category"
			self.currentSelectedTag = None
			self["list"].style = "category"
			self['list'].setList(self.categoryList)
			self["list"].setIndex(self.currentSelectedIndex)
			self["list"].updateList(self.categoryList)
			self.selectionChanged()
		else:
			iSoftwareTools.cleanupSoftwareTools()
			self.prepareInstall()
			if len(self.cmdList):
				self.session.openWithCallback(self.runExecute, PluginManagerInfo, self.skin_path, self.cmdList)
			else:
				self.close()

	def handleHelp(self):
		"""Open the icon/action legend (not available while busy)."""
		if self.currList != "status":
			self.session.open(PluginManagerHelp, self.skin_path)

	def setState(self, status = None):
		"""Switch the list to a single status row: 'update', 'sync' or 'error'."""
		if status:
			self.currList = "status"
			self.statuslist = []
			self["key_green"].setText("")
			self["key_blue"].setText("")
			self["key_yellow"].setText("")
			divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "div-h.png"))
			if status == 'update':
				statuspng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/upgrade.png"))
				self.statuslist.append(( _("Updating software catalog"), '', _("Searching for available updates. Please wait..." ),'', '', statuspng, divpng, None, '' ))
			elif status == 'sync':
				statuspng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/upgrade.png"))
				self.statuslist.append(( _("Package list update"), '', _("Searching for new installed or removed packages. Please wait..." ),'', '', statuspng, divpng, None, '' ))
			elif status == 'error':
				self["key_green"].setText(_("Continue"))
				statuspng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/remove.png"))
				self.statuslist.append(( _("Error"), '', _("An error occurred while downloading the packetlist. Please try again." ),'', '', statuspng, divpng, None, '' ))
			self["list"].style = "default"
			self['list'].setList(self.statuslist)

	def getUpdateInfos(self):
		"""Build the list from cached data when offline, otherwise refresh it."""
		if (iSoftwareTools.lastDownloadDate is not None and iSoftwareTools.NetworkConnectionAvailable is False):
			self.rebuildList()
		else:
			self.setState('update')
			iSoftwareTools.startSoftwareTools(self.getUpdateInfosCB)

	def getUpdateInfosCB(self, retval = None):
		"""Callback from iSoftwareTools: show update count or retry/fail."""
		if retval is not None:
			if retval is True:
				# fixed: '!=' instead of 'is not' for an int comparison
				if iSoftwareTools.available_updates != 0:
					self["status"].setText(_("There are at least ") + str(iSoftwareTools.available_updates) + ' ' + _("updates available."))
				else:
					self["status"].setText(_("There are no updates available."))
				self.rebuildList()
			elif retval is False:
				if iSoftwareTools.lastDownloadDate is None:
					self.setState('error')
					if iSoftwareTools.NetworkConnectionAvailable:
						self["status"].setText(_("Updatefeed not available."))
					else:
						self["status"].setText(_("No network connection available."))
				else:
					# we have stale data: force a new download
					iSoftwareTools.lastDownloadDate = time()
					iSoftwareTools.list_updating = True
					self.setState('update')
					iSoftwareTools.getUpdates(self.getUpdateInfosCB)

	def rebuildList(self, retval = None):
		"""Rebuild whichever list view is currently relevant."""
		if self.currentSelectedTag is None:
			self.buildCategoryList()
		else:
			self.buildPacketList(self.currentSelectedTag)

	def selectionChanged(self):
		"""Update button labels and the status line for the current entry."""
		current = self["list"].getCurrent()
		self["status"].setText("")
		if current:
			if self.currList == "packages":
				self["key_red"].setText(_("Back"))
				if current[4] == 'installed':
					self["key_green"].setText(_("Uninstall"))
				elif current[4] == 'installable':
					self["key_green"].setText(_("Install"))
					if iSoftwareTools.NetworkConnectionAvailable is False:
						self["key_green"].setText("")
				elif current[4] == 'remove':
					self["key_green"].setText(_("Undo uninstall"))
				elif current[4] == 'install':
					self["key_green"].setText(_("Undo install"))
					if iSoftwareTools.NetworkConnectionAvailable is False:
						self["key_green"].setText("")
				self["key_yellow"].setText(_("View details"))
				self["key_blue"].setText("")
				# fixed: '!=' instead of 'is not' for int/len comparisons
				if not self.selectedFiles and iSoftwareTools.available_updates != 0:
					self["status"].setText(_("There are at least ") + str(iSoftwareTools.available_updates) + ' ' + _("updates available."))
				elif self.selectedFiles:
					self["status"].setText(str(len(self.selectedFiles)) + ' ' + _("packages selected."))
				else:
					self["status"].setText(_("There are currently no outstanding actions."))
			elif self.currList == "category":
				self["key_red"].setText(_("Close"))
				self["key_green"].setText("")
				self["key_yellow"].setText("")
				self["key_blue"].setText("")
				if not self.selectedFiles and iSoftwareTools.available_updates != 0:
					self["status"].setText(_("There are at least ") + str(iSoftwareTools.available_updates) + ' ' + _("updates available."))
					self["key_yellow"].setText(_("Update"))
				elif self.selectedFiles:
					self["status"].setText(str(len(self.selectedFiles)) + ' ' + _("packages selected."))
					self["key_yellow"].setText(_("Process"))
				else:
					self["status"].setText(_("There are currently no outstanding actions."))

	def getSelectionState(self, detailsFile):
		"""Return True when detailsFile is queued in self.selectedFiles."""
		for entry in self.selectedFiles:
			if entry[0] == detailsFile:
				return True
		return False

	def handleCurrent(self):
		"""OK/GREEN: descend into a category, toggle a package, or retry update."""
		current = self["list"].getCurrent()
		if current:
			if self.currList == "category":
				self.currentSelectedIndex = self["list"].index
				selectedTag = current[2]
				self.buildPacketList(selectedTag)
			elif self.currList == "packages":
				# fixed: '!=' instead of 'is not' for a string comparison
				if current[7] != '':
					idx = self["list"].getIndex()
					detailsFile = self.list[idx][1]
					if self.list[idx][7]:
						# entry is selected -> deselect it
						# fixed: iterate a copy, we mutate selectedFiles inside the loop
						for entry in self.selectedFiles[:]:
							if entry[0] == detailsFile:
								self.selectedFiles.remove(entry)
					else:
						alreadyinList = False
						for entry in self.selectedFiles:
							if entry[0] == detailsFile:
								alreadyinList = True
						if not alreadyinList:
							if (iSoftwareTools.NetworkConnectionAvailable is False and current[4] in ('installable','install')):
								pass	# cannot queue installs without a network connection
							else:
								self.selectedFiles.append((detailsFile,current[4],current[3]))
								self.currentSelectedPackage = ((detailsFile,current[4],current[3]))
					# flip the entry's state/selection marker
					if current[4] == 'installed':
						self.list[idx] = self.buildEntryComponent(current[0], current[1], current[2], current[3], 'remove', True)
					elif current[4] == 'installable':
						if iSoftwareTools.NetworkConnectionAvailable:
							self.list[idx] = self.buildEntryComponent(current[0], current[1], current[2], current[3], 'install', True)
					elif current[4] == 'remove':
						self.list[idx] = self.buildEntryComponent(current[0], current[1], current[2], current[3], 'installed', False)
					elif current[4] == 'install':
						if iSoftwareTools.NetworkConnectionAvailable:
							self.list[idx] = self.buildEntryComponent(current[0], current[1], current[2], current[3], 'installable',False)
					self["list"].setList(self.list)
					self["list"].setIndex(idx)
					self["list"].updateList(self.list)
					self.selectionChanged()
			elif self.currList == "status":
				iSoftwareTools.lastDownloadDate = time()
				iSoftwareTools.list_updating = True
				self.setState('update')
				iSoftwareTools.getUpdates(self.getUpdateInfosCB)

	def handleSelected(self):
		"""YELLOW/INFO: show package details, or process the queued actions."""
		current = self["list"].getCurrent()
		if current:
			if self.currList == "packages":
				if current[7] != '':
					detailsfile = iSoftwareTools.directory[0] + "/" + current[1]
					# fixed: no '== True' comparison
					if os_path.exists(detailsfile):
						self.saved_currentSelectedPackage = self.currentSelectedPackage
						self.session.openWithCallback(self.detailsClosed, PluginDetails, self.skin_path, current)
					else:
						self.session.open(MessageBox, _("Sorry, no details available!"), MessageBox.TYPE_INFO, timeout = 10)
			elif self.currList == "category":
				self.prepareInstall()
				if len(self.cmdList):
					self.session.openWithCallback(self.runExecute, PluginManagerInfo, self.skin_path, self.cmdList)

	def detailsClosed(self, result = None):
		"""Callback from PluginDetails: resync the installed package list."""
		if result is not None:
			if result is not False:
				self.setState('sync')
				iSoftwareTools.lastDownloadDate = time()
				# fixed: iterate a copy, we mutate selectedFiles inside the loop
				for entry in self.selectedFiles[:]:
					if entry == self.saved_currentSelectedPackage:
						self.selectedFiles.remove(entry)
				iSoftwareTools.startIpkgListInstalled(self.rebuildList)

	def buildEntryComponent(self, name, details, description, packagename, state, selected = False):
		"""Return a list row tuple for a package in the given state."""
		divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "div-h.png"))
		installedpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/installed.png"))
		installablepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/installable.png"))
		removepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/remove.png"))
		installpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/install.png"))
		if state == 'installed':
			return((name, details, description, packagename, state, installedpng, divpng, selected))
		elif state == 'installable':
			return((name, details, description, packagename, state, installablepng, divpng, selected))
		elif state == 'remove':
			return((name, details, description, packagename, state, removepng, divpng, selected))
		elif state == 'install':
			return((name, details, description, packagename, state, installpng, divpng, selected))

	def buildPacketList(self, categorytag = None):
		"""Show all packages tagged with categorytag, skipping internal ones."""
		if categorytag is not None:
			self.currList = "packages"
			self.currentSelectedTag = categorytag
			self.packetlist = []
			for package in iSoftwareTools.packagesIndexlist[:]:
				prerequisites = package[0]["prerequisites"]
				if "tag" in prerequisites:
					for foundtag in prerequisites["tag"]:
						if categorytag == foundtag:
							attributes = package[0]["attributes"]
							if "packagetype" in attributes:
								if attributes["packagetype"] == "internal":
									continue
							self.packetlist.append([attributes["name"], attributes["details"], attributes["shortdescription"], attributes["packagename"]])
			self.list = []
			for x in self.packetlist:
				name = x[0].strip()
				details = x[1].strip()
				description = x[2].strip()
				if not description:
					description = "No description available."
				packagename = x[3].strip()
				selectState = self.getSelectionState(details)
				# queued entries show the pending action, others their install state
				if packagename in iSoftwareTools.installed_packetlist:
					status = "remove" if selectState else "installed"
				else:
					status = "install" if selectState else "installable"
				self.list.append(self.buildEntryComponent(name, _(details), _(description), packagename, status, selected = selectState))
			if len(self.list):
				self.list.sort(key=lambda x: x[0])
			self["list"].style = "default"
			self['list'].setList(self.list)
			self["list"].updateList(self.list)
			self.selectionChanged()

	def buildCategoryList(self):
		"""Collect all distinct tags from the package index into the category view."""
		self.currList = "category"
		self.categories = []
		self.categoryList = []
		for package in iSoftwareTools.packagesIndexlist[:]:
			prerequisites = package[0]["prerequisites"]
			if "tag" in prerequisites:
				for foundtag in prerequisites["tag"]:
					attributes = package[0]["attributes"]
					if foundtag not in self.categories:
						self.categories.append(foundtag)
						self.categoryList.append(self.buildCategoryComponent(foundtag))
		self.categoryList.sort(key=lambda x: x[0])
		self["list"].style = "category"
		self['list'].setList(self.categoryList)
		self["list"].updateList(self.categoryList)
		self.selectionChanged()

	def buildCategoryComponent(self, tag = None):
		"""Return a category list row (title, description, tag, divider)."""
		divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "div-h.png"))
		if tag is not None:
			if tag == 'System':
				return(( _("System"), _("View list of available system extensions" ), tag, divpng ))
			elif tag == 'Skin':
				return(( _("Skins"), _("View list of available skins" ), tag, divpng ))
			elif tag == 'Recording':
				return(( _("Recordings"), _("View list of available recording extensions" ), tag, divpng ))
			elif tag == 'Network':
				return(( _("Network"), _("View list of available networking extensions" ), tag, divpng ))
			elif tag == 'CI':
				return(( _("Common Interface"), _("View list of available CommonInterface extensions" ), tag, divpng ))
			elif tag == 'Default':
				return(( _("Default settings"), _("View list of available default settings" ), tag, divpng ))
			elif tag == 'SAT':
				return(( _("Satellite equipment"), _("View list of available Satellite equipment extensions." ), tag, divpng ))
			elif tag == 'Software':
				return(( _("Software"), _("View list of available software extensions" ), tag, divpng ))
			elif tag == 'Multimedia':
				return(( _("Multimedia"), _("View list of available multimedia extensions." ), tag, divpng ))
			elif tag == 'Display':
				return(( _("Display and userinterface"), _("View list of available display and userinterface extensions." ), tag, divpng ))
			elif tag == 'EPG':
				return(( _("Electronic Program Guide"), _("View list of available EPG extensions." ), tag, divpng ))
			elif tag == 'Communication':
				return(( _("Communication"), _("View list of available communication extensions." ), tag, divpng ))
			else: # dynamically generate non existent tags
				return(( str(tag), _("View list of available ") + str(tag) + ' ' + _("extensions." ), tag, divpng ))

	def prepareInstall(self):
		"""Translate queued selections (and pending upgrades) into self.cmdList."""
		self.cmdList = []
		if iSoftwareTools.available_updates > 0:
			self.cmdList.append((IpkgComponent.CMD_UPGRADE, { "test_only": False }))
		# fixed: truthiness instead of redundant 'x and len(x)'
		if self.selectedFiles:
			for plugin in self.selectedFiles:
				detailsfile = iSoftwareTools.directory[0] + "/" + plugin[0]
				if os_path.exists(detailsfile):
					iSoftwareTools.fillPackageDetails(plugin[0])
					self.package = iSoftwareTools.packageDetails[0]
					if "attributes" in self.package[0]:
						self.attributes = self.package[0]["attributes"]
						if "needsRestart" in self.attributes:
							self.restartRequired = True
						if "package" in self.attributes:
							self.packagefiles = self.attributes["package"]
							if plugin[1] == 'installed':
								if self.packagefiles:
									for package in self.packagefiles[:]:
										self.cmdList.append((IpkgComponent.CMD_REMOVE, { "package": package["name"] }))
								else:
									self.cmdList.append((IpkgComponent.CMD_REMOVE, { "package": plugin[2] }))
							else:
								if self.packagefiles:
									for package in self.packagefiles[:]:
										self.cmdList.append((IpkgComponent.CMD_INSTALL, { "package": package["name"] }))
								else:
									self.cmdList.append((IpkgComponent.CMD_INSTALL, { "package": plugin[2] }))
				else:
					# no details meta file: fall back to the plain package name
					if plugin[1] == 'installed':
						self.cmdList.append((IpkgComponent.CMD_REMOVE, { "package": plugin[2] }))
					else:
						self.cmdList.append((IpkgComponent.CMD_INSTALL, { "package": plugin[2] }))

	def runExecute(self, result = None):
		"""Callback from PluginManagerInfo: run all or only the filtered commands."""
		if result is not None:
			if result[0] is True:
				self.session.openWithCallback(self.runExecuteFinished, Ipkg, cmdList = self.cmdList)
			elif result[0] is False:
				self.cmdList = result[1]
				self.session.openWithCallback(self.runExecuteFinished, Ipkg, cmdList = self.cmdList)
		else:
			self.close()

	def runExecuteFinished(self):
		"""After ipkg finished: reload plugins and optionally offer a reboot."""
		self.reloadPluginlist()
		if plugins.restartRequired or self.restartRequired:
			self.session.openWithCallback(self.ExecuteReboot, MessageBox, _("Install or remove finished.") +" "+_("Do you want to reboot your receiver?"), MessageBox.TYPE_YESNO)
		else:
			self.selectedFiles = []
			self.restartRequired = False
			self.detailsClosed(True)

	def ExecuteReboot(self, result):
		"""Reboot the box when confirmed, otherwise just resync the lists."""
		if result:
			self.session.open(TryQuitMainloop,retvalue=3)
		else:
			self.selectedFiles = []
			self.restartRequired = False
			self.detailsClosed(True)

	def reloadPluginlist(self):
		"""Re-read the installed plugin list from disk."""
		plugins.readPluginList(resolveFilename(SCOPE_PLUGINS))
class PluginManagerInfo(Screen):
	"""Confirmation screen listing the ipkg actions about to be executed.

	Closes with (True, None) to run everything, or (False, filtered_list)
	to run only the install/remove entries (GREEN, "Only extensions").
	"""
	skin = """
		<screen name="PluginManagerInfo" position="center,center" size="560,450" >
			<ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
			<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
			<widget source="list" render="Listbox" position="5,50" size="550,350" scrollbarMode="showOnDemand" selectionDisabled="1">
				<convert type="TemplatedMultiContent">
					{"template": [
							MultiContentEntryText(pos = (50, 0), size = (150, 26), font=0, flags = RT_HALIGN_LEFT, text = 0), # index 0 is the name
							MultiContentEntryText(pos = (50, 27), size = (540, 23), font=1, flags = RT_HALIGN_LEFT, text = 1), # index 1 is the state
							MultiContentEntryPixmapAlphaTest(pos = (0, 1), size = (48, 48), png = 2), # index 2 is the status pixmap
							MultiContentEntryPixmapAlphaTest(pos = (0, 48), size = (550, 2), png = 3), # index 3 is the div pixmap
						],
					"fonts": [gFont("Regular", 24),gFont("Regular", 22)],
					"itemHeight": 50
					}
				</convert>
			</widget>
			<ePixmap pixmap="div-h.png" position="0,404" zPosition="10" size="560,2" transparent="1" alphatest="on" />
			<widget source="status" render="Label" position="5,408" zPosition="10" size="550,44" halign="center" valign="center" font="Regular;22" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
		</screen>"""

	def __init__(self, session, plugin_path, cmdlist = None):
		Screen.__init__(self, session)
		Screen.setTitle(self, _("Plugin manager activity information"))
		self.session = session
		self.skin_path = plugin_path
		self.cmdlist = cmdlist
		self["shortcuts"] = ActionMap(["ShortcutActions", "WizardActions"],
		{
			"ok": self.process_all,
			"back": self.exit,
			"red": self.exit,
			"green": self.process_extensions,
		}, -1)
		self.list = []
		self["list"] = List(self.list)
		self["key_red"] = StaticText(_("Cancel"))
		self["key_green"] = StaticText(_("Only extensions."))
		self["status"] = StaticText(_("Following tasks will be done after you press OK!"))
		self.onShown.append(self.setWindowTitle)
		self.onLayoutFinish.append(self.rebuildList)

	def setWindowTitle(self):
		"""(Re)apply the window title each time the screen is shown."""
		self.setTitle(_("Plugin manager activity information"))

	def rebuildList(self):
		"""Render one list row per queued ipkg command."""
		self.list = []
		if self.cmdlist is not None:
			for entry in self.cmdlist:
				cmd = entry[0]
				args = entry[1]
				# fixed: use the IpkgComponent constants the cmdlist was built
				# with (see PluginManager.prepareInstall) instead of magic 0/2
				if cmd == IpkgComponent.CMD_INSTALL:
					action = 'install'
					info = args['package']
				elif cmd == IpkgComponent.CMD_REMOVE:
					action = 'remove'
					info = args['package']
				else:
					action = 'upgrade'
					info = _("STB_BOX software because updates are available.")
				self.list.append(self.buildEntryComponent(action,info))
			self['list'].setList(self.list)
			self['list'].updateList(self.list)

	def buildEntryComponent(self, action, info):
		"""Return a list row (label, info, action pixmap, divider) for an action."""
		divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "div-h.png"))
		upgradepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/upgrade.png"))
		installpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/install.png"))
		removepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/remove.png"))
		if action == 'install':
			return(( _('Installing'), info, installpng, divpng))
		elif action == 'remove':
			return(( _('Removing'), info, removepng, divpng))
		else:
			return(( _('Upgrading'), info, upgradepng, divpng))

	def exit(self):
		"""RED/EXIT: abort without running anything."""
		self.close()

	def process_all(self):
		"""OK: run the complete command list."""
		self.close((True,None))

	def process_extensions(self):
		"""GREEN: run only the install/remove commands, skipping upgrades."""
		self.list = []
		if self.cmdlist is not None:
			for entry in self.cmdlist:
				if entry[0] in (IpkgComponent.CMD_INSTALL, IpkgComponent.CMD_REMOVE):
					self.list.append(entry)
		self.close((False,self.list))
class PluginManagerHelp(Screen):
	"""Static help screen explaining the plugin manager's icon states."""
	skin = """
		<screen name="PluginManagerHelp" position="center,center" size="560,450" >
			<ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
			<widget source="list" render="Listbox" position="5,50" size="550,350" scrollbarMode="showOnDemand" selectionDisabled="1">
				<convert type="TemplatedMultiContent">
					{"template": [
							MultiContentEntryText(pos = (50, 0), size = (540, 26), font=0, flags = RT_HALIGN_LEFT, text = 0), # index 0 is the name
							MultiContentEntryText(pos = (50, 27), size = (540, 23), font=1, flags = RT_HALIGN_LEFT, text = 1), # index 1 is the state
							MultiContentEntryPixmapAlphaTest(pos = (0, 1), size = (48, 48), png = 2), # index 2 is the status pixmap
							MultiContentEntryPixmapAlphaTest(pos = (0, 48), size = (550, 2), png = 3), # index 3 is the div pixmap
						],
					"fonts": [gFont("Regular", 24),gFont("Regular", 22)],
					"itemHeight": 50
					}
				</convert>
			</widget>
			<ePixmap pixmap="div-h.png" position="0,404" zPosition="10" size="560,2" transparent="1" alphatest="on" />
			<widget source="status" render="Label" position="5,408" zPosition="10" size="550,44" halign="center" valign="center" font="Regular;22" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
		</screen>"""

	def __init__(self, session, plugin_path):
		Screen.__init__(self, session)
		Screen.setTitle(self, _("Plugin manager help"))
		self.session = session
		self.skin_path = plugin_path
		self["shortcuts"] = ActionMap(["ShortcutActions", "WizardActions"],
		{
			"back": self.exit,
			"red": self.exit,
		}, -1)
		self.list = []
		self["list"] = List(self.list)
		self["key_red"] = StaticText(_("Close"))
		self["status"] = StaticText(_("A small overview of the available icon states and actions."))
		self.onShown.append(self.setWindowTitle)
		self.onLayoutFinish.append(self.rebuildList)

	def setWindowTitle(self):
		"""(Re)apply the window title each time the screen is shown."""
		self.setTitle(_("Plugin manager help"))

	def rebuildList(self):
		"""Fill the list with one explanatory row per icon state."""
		self.list = [self.buildEntryComponent(state) for state in ('install', 'installable', 'installed', 'remove')]
		self['list'].setList(self.list)
		self['list'].updateList(self.list)

	def buildEntryComponent(self, state):
		"""Return a help row (title, hint, state pixmap, divider) for a state."""
		divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "div-h.png"))
		installedpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/installed.png"))
		installablepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/installable.png"))
		removepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/remove.png"))
		installpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/install.png"))
		rows = {
			'installed': (_('This plugin is installed.'), _('You can remove this plugin.'), installedpng),
			'installable': (_('This plugin is not installed.'), _('You can install this plugin.'), installablepng),
			'install': (_('This plugin will be installed.'), _('You can cancel the installation.'), installpng),
			'remove': (_('This plugin will be removed.'), _('You can cancel the removal.'), removepng),
		}
		if state in rows:
			title, hint, statuspng = rows[state]
			return (title, hint, statuspng, divpng)

	def exit(self):
		"""Leave the help screen."""
		self.close()
class PluginDetails(Screen, PackageInfoHandler):
skin = """
<screen name="PluginDetails" position="center,center" size="600,440" title="Plugin details" >
<ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="author" render="Label" position="10,50" size="500,25" zPosition="10" font="Regular;21" transparent="1" />
<widget name="statuspic" position="550,40" size="48,48" alphatest="on"/>
<widget name="divpic" position="0,80" size="600,2" alphatest="on"/>
<widget name="detailtext" position="10,90" size="270,330" zPosition="10" font="Regular;21" transparent="1" halign="left" valign="top"/>
<widget name="screenshot" position="290,90" size="300,330" alphatest="on"/>
</screen>"""
def __init__(self, session, plugin_path, packagedata = None):
Screen.__init__(self, session)
Screen.setTitle(self, _("Plugin details"))
self.skin_path = plugin_path
self.language = language.getLanguage()[:2] # getLanguage returns e.g. "fi_FI" for "language_country"
self.attributes = None
PackageInfoHandler.__init__(self, self.statusCallback, blocking = False)
self.directory = resolveFilename(SCOPE_METADIR)
if packagedata:
self.pluginname = packagedata[0]
self.details = packagedata[1]
self.pluginstate = packagedata[4]
self.statuspicinstance = packagedata[5]
self.divpicinstance = packagedata[6]
self.fillPackageDetails(self.details)
self.thumbnail = ""
self["shortcuts"] = ActionMap(["ShortcutActions", "WizardActions"],
{
"back": self.exit,
"red": self.exit,
"green": self.go,
"up": self.pageUp,
"down": self.pageDown,
"left": self.pageUp,
"right": self.pageDown,
}, -1)
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText("")
self["author"] = StaticText()
self["statuspic"] = Pixmap()
self["divpic"] = Pixmap()
self["screenshot"] = Pixmap()
self["detailtext"] = ScrollLabel()
self["statuspic"].hide()
self["screenshot"].hide()
self["divpic"].hide()
self.package = self.packageDetails[0]
if self.package[0].has_key("attributes"):
self.attributes = self.package[0]["attributes"]
self.restartRequired = False
self.cmdList = []
self.oktext = _("\nAfter pressing OK, please wait!")
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.paintScreenshotPixmapCB)
self.onShown.append(self.setWindowTitle)
self.onLayoutFinish.append(self.setInfos)
def setWindowTitle(self):
self.setTitle(_("Details for plugin: ") + self.pluginname )
def exit(self):
self.close(False)
def pageUp(self):
self["detailtext"].pageUp()
def pageDown(self):
self["detailtext"].pageDown()
def statusCallback(self, status, progress):
pass
def setInfos(self):
if self.attributes.has_key("screenshot"):
self.loadThumbnail(self.attributes)
if self.attributes.has_key("name"):
self.pluginname = self.attributes["name"]
else:
self.pluginname = _("unknown")
if self.attributes.has_key("author"):
self.author = self.attributes["author"]
else:
self.author = _("unknown")
if self.attributes.has_key("description"):
self.description = _(self.attributes["description"].replace("\\n", "\n"))
else:
self.description = _("No description available.")
self["author"].setText(_("Author: ") + self.author)
self["detailtext"].setText(_(self.description))
if self.pluginstate in ('installable', 'install'):
if iSoftwareTools.NetworkConnectionAvailable:
self["key_green"].setText(_("Install"))
else:
self["key_green"].setText("")
else:
self["key_green"].setText(_("Remove"))
def loadThumbnail(self, entry):
thumbnailUrl = None
if entry.has_key("screenshot"):
thumbnailUrl = entry["screenshot"]
if self.language == "de":
if thumbnailUrl[-7:] == "_en.jpg":
thumbnailUrl = thumbnailUrl[:-7] + "_de.jpg"
if thumbnailUrl is not None:
self.thumbnail = "/tmp/" + thumbnailUrl.split('/')[-1]
print "[PluginDetails] downloading screenshot " + thumbnailUrl + " to " + self.thumbnail
if iSoftwareTools.NetworkConnectionAvailable:
client.downloadPage(thumbnailUrl,self.thumbnail).addCallback(self.setThumbnail).addErrback(self.fetchFailed)
else:
self.setThumbnail(noScreenshot = True)
else:
self.setThumbnail(noScreenshot = True)
def setThumbnail(self, noScreenshot = False):
if not noScreenshot:
filename = self.thumbnail
else:
filename = resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/noprev.png")
sc = AVSwitch().getFramebufferScale()
self.picload.setPara((self["screenshot"].instance.size().width(), self["screenshot"].instance.size().height(), sc[0], sc[1], False, 1, "#00000000"))
self.picload.startDecode(filename)
if self.statuspicinstance != None:
self["statuspic"].instance.setPixmap(self.statuspicinstance.__deref__())
self["statuspic"].show()
if self.divpicinstance != None:
self["divpic"].instance.setPixmap(self.divpicinstance.__deref__())
self["divpic"].show()
def paintScreenshotPixmapCB(self, picInfo=None):
ptr = self.picload.getData()
if ptr != None:
self["screenshot"].instance.setPixmap(ptr.__deref__())
self["screenshot"].show()
else:
self.setThumbnail(noScreenshot = True)
	def go(self):
		"""OK/green handler: build the ipkg command list for the shown plugin
		and ask the user to confirm install or removal, depending on state."""
		if self.attributes.has_key("package"):
			self.packagefiles = self.attributes["package"]
		if self.attributes.has_key("needsRestart"):
			self.restartRequired = True
		self.cmdList = []
		if self.pluginstate in ('installed', 'remove'):
			# One CMD_REMOVE per sub-package belonging to this plugin.
			if self.packagefiles:
				for package in self.packagefiles[:]:
					self.cmdList.append((IpkgComponent.CMD_REMOVE, { "package": package["name"] }))
				if len(self.cmdList):
					self.session.openWithCallback(self.runRemove, MessageBox, _("Do you want to remove the package:\n") + self.pluginname + "\n" + self.oktext)
		else:
			# Installing needs network access; silently does nothing offline.
			if iSoftwareTools.NetworkConnectionAvailable:
				if self.packagefiles:
					for package in self.packagefiles[:]:
						self.cmdList.append((IpkgComponent.CMD_INSTALL, { "package": package["name"] }))
					if len(self.cmdList):
						self.session.openWithCallback(self.runUpgrade, MessageBox, _("Do you want to install the package:\n") + self.pluginname + "\n" + self.oktext)
def runUpgrade(self, result):
if result:
self.session.openWithCallback(self.runUpgradeFinished, Ipkg, cmdList = self.cmdList)
	def runUpgradeFinished(self):
		"""Called when the Ipkg screen finishes installing: reload the plugin
		list, then offer a reboot if any plugin flagged a required restart."""
		self.reloadPluginlist()
		if plugins.restartRequired or self.restartRequired:
			self.session.openWithCallback(self.UpgradeReboot, MessageBox, _("Installation finished.") +" "+_("Do you want to reboot your receiver?"), MessageBox.TYPE_YESNO)
		else:
			self.close(True)
def UpgradeReboot(self, result):
if result:
self.session.open(TryQuitMainloop,retvalue=3)
self.close(True)
def runRemove(self, result):
if result:
self.session.openWithCallback(self.runRemoveFinished, Ipkg, cmdList = self.cmdList)
	def runRemoveFinished(self):
		"""Called when the Ipkg screen finishes removing: leave this screen."""
		self.close(True)
	def reloadPluginlist(self):
		"""Re-read the installed plugin descriptors from the plugin directory."""
		plugins.readPluginList(resolveFilename(SCOPE_PLUGINS))
	def fetchFailed(self,string):
		"""Errback for the screenshot download: show the placeholder instead.
		`string` carries the failure; getErrorMessage() describes the cause."""
		self.setThumbnail(noScreenshot = True)
		print "[PluginDetails] fetch failed " + string.getErrorMessage()
class IPKGMenu(Screen):
	"""File browser listing the editable opkg feed configuration files
	(all *.conf in /etc/opkg/ except arch.conf and opkg.conf)."""
	skin = """
		<screen name="IPKGMenu" position="center,center" size="560,400" title="Select upgrade source to edit." >
			<ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
			<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
			<widget name="filelist" position="5,50" size="550,340" scrollbarMode="showOnDemand" />
		</screen>"""

	def __init__(self, session, plugin_path):
		Screen.__init__(self, session)
		Screen.setTitle(self, _("Select upgrade source to edit."))
		self.skin_path = plugin_path
		self["key_red"] = StaticText(_("Close"))
		self["key_green"] = StaticText(_("Edit"))
		self.sel = []
		self.val = []
		self.entry = False
		self.exe = False
		self.path = ""
		self["actions"] = NumberActionMap(["SetupActions"],
		{
			"ok": self.KeyOk,
			"cancel": self.keyCancel
		}, -1)
		self["shortcuts"] = ActionMap(["ShortcutActions"],
		{
			"red": self.keyCancel,
			"green": self.KeyOk,
		})
		self["filelist"] = MenuList([])
		self.fill_list()
		self.onLayoutFinish.append(self.layoutFinished)

	def layoutFinished(self):
		self.setWindowTitle()

	def setWindowTitle(self):
		self.setTitle(_("Select upgrade source to edit."))

	def fill_list(self):
		"""Populate the file list with the editable feed config files."""
		conf_files = []
		self.path = '/etc/opkg/'
		if not os_path.exists(self.path):
			self.entry = False
			return
		for fname in listdir(self.path):
			# Skip the two files that must never be hand-edited.
			if fname.endswith(".conf") and fname not in ('arch.conf', 'opkg.conf'):
				conf_files.append(fname)
				self.entry = True
		self["filelist"].l.setList(conf_files)

	def KeyOk(self):
		"""Open the editor for the currently highlighted config file."""
		if not self.exe and self.entry:
			self.sel = self["filelist"].getCurrent()
			self.val = self.path + self.sel
			self.session.open(IPKGSource, self.val)

	def keyCancel(self):
		self.close()

	def Exit(self):
		self.close()
class IPKGSource(Screen):
	"""Single-line editor for one opkg feed configuration file.

	Loads the first line of the given file into an Input widget and, on
	save, writes the edited text back followed by a newline.
	"""
	skin = """
		<screen name="IPKGSource" position="center,center" size="560,80" title="Edit upgrade source url." >
			<ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
			<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
			<widget name="text" position="5,50" size="550,25" font="Regular;20" backgroundColor="background" foregroundColor="#cccccc" />
		</screen>"""

	def __init__(self, session, configfile = None):
		Screen.__init__(self, session)
		self.session = session
		self.configfile = configfile
		text = ""
		if self.configfile:
			try:
				fp = open(configfile, 'r')
				# try/finally guarantees the handle is closed even when
				# readlines() raises; the original leaked it in that case.
				try:
					sources = fp.readlines()
					if sources:
						text = sources[0]
				finally:
					fp.close()
			except IOError:
				# Missing/unreadable file: start with an empty input line.
				pass
		desk = getDesktop(0)
		x= int(desk.size().width())
		y= int(desk.size().height())
		self["key_red"] = StaticText(_("Cancel"))
		self["key_green"] = StaticText(_("Save"))
		if (y>=720):
			self["text"] = Input(text, maxSize=False, type=Input.TEXT)
		else:
			# SD skins have less room: limit the visible width.
			self["text"] = Input(text, maxSize=False, visible_width = 55, type=Input.TEXT)
		self["actions"] = NumberActionMap(["WizardActions", "InputActions", "TextEntryActions", "KeyboardInputActions","ShortcutActions"],
		{
			"ok": self.go,
			"back": self.close,
			"red": self.close,
			"green": self.go,
			"left": self.keyLeft,
			"right": self.keyRight,
			"home": self.keyHome,
			"end": self.keyEnd,
			"deleteForward": self.keyDeleteForward,
			"deleteBackward": self.keyDeleteBackward,
			"1": self.keyNumberGlobal,
			"2": self.keyNumberGlobal,
			"3": self.keyNumberGlobal,
			"4": self.keyNumberGlobal,
			"5": self.keyNumberGlobal,
			"6": self.keyNumberGlobal,
			"7": self.keyNumberGlobal,
			"8": self.keyNumberGlobal,
			"9": self.keyNumberGlobal,
			"0": self.keyNumberGlobal
		}, -1)
		self.onLayoutFinish.append(self.layoutFinished)

	def layoutFinished(self):
		self.setWindowTitle()
		self["text"].right()

	def setWindowTitle(self):
		self.setTitle(_("Edit upgrade source url."))

	def go(self):
		"""Save the edited URL back to the config file, then close."""
		text = self["text"].getText()
		if text:
			fp = open(self.configfile, 'w')
			try:
				fp.write(text)
				fp.write("\n")
			finally:
				fp.close()
		self.close()

	def keyLeft(self):
		self["text"].left()

	def keyRight(self):
		self["text"].right()

	def keyHome(self):
		self["text"].home()

	def keyEnd(self):
		self["text"].end()

	def keyDeleteForward(self):
		self["text"].delete()

	def keyDeleteBackward(self):
		self["text"].deleteBackward()

	def keyNumberGlobal(self, number):
		self["text"].number(number)
class PacketManager(Screen, NumericalTextInput):
	"""Browse all opkg packages with install/upgrade/remove actions.

	The package list is assembled from three chained asynchronous shell
	commands (list, list_installed, list-upgradable) and cached on disk so
	later openings are fast.  Numeric/ASCII keys jump to the first entry
	starting with the typed character.
	"""
	skin = """
		<screen name="PacketManager" position="center,center" size="530,420" title="Packet manager" >
			<ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
			<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
			<widget source="list" render="Listbox" position="5,50" size="520,365" scrollbarMode="showOnDemand">
				<convert type="TemplatedMultiContent">
					{"template": [
							MultiContentEntryText(pos = (5, 1), size = (440, 28), font=0, flags = RT_HALIGN_LEFT, text = 0), # index 0 is the name
							MultiContentEntryText(pos = (5, 26), size = (440, 20), font=1, flags = RT_HALIGN_LEFT, text = 2), # index 2 is the description
							MultiContentEntryPixmapAlphaTest(pos = (445, 2), size = (48, 48), png = 4), # index 4 is the status pixmap
							MultiContentEntryPixmapAlphaTest(pos = (5, 50), size = (510, 2), png = 5), # index 4 is the div pixmap
						],
					"fonts": [gFont("Regular", 22),gFont("Regular", 14)],
					"itemHeight": 52
					}
				</convert>
			</widget>
		</screen>"""

	def __init__(self, session, plugin_path, args = None):
		Screen.__init__(self, session)
		NumericalTextInput.__init__(self)
		self.session = session
		self.skin_path = plugin_path
		# Character order for SMS-style jumping follows the user's
		# channel-jump preference.
		if config.usage.show_channel_jump_in_servicelist.getValue() == "alpha":
			self.setUseableChars(u'abcdefghijklmnopqrstuvwxyz1234567890')
		else:
			self.setUseableChars(u'1234567890abcdefghijklmnopqrstuvwxyz')
		self["shortcuts"] = NumberActionMap(["ShortcutActions", "WizardActions", "NumberActions", "InputActions", "InputAsciiActions", "KeyboardInputActions" ],
		{
			"ok": self.go,
			"back": self.exit,
			"red": self.exit,
			"green": self.reload,
			"gotAsciiCode": self.keyGotAscii,
			"1": self.keyNumberGlobal,
			"2": self.keyNumberGlobal,
			"3": self.keyNumberGlobal,
			"4": self.keyNumberGlobal,
			"5": self.keyNumberGlobal,
			"6": self.keyNumberGlobal,
			"7": self.keyNumberGlobal,
			"8": self.keyNumberGlobal,
			"9": self.keyNumberGlobal,
			"0": self.keyNumberGlobal
		}, -1)
		self.list = []
		self.statuslist = []
		self["list"] = List(self.list)
		self["key_red"] = StaticText(_("Close"))
		self["key_green"] = StaticText(_("Reload"))
		self.list_updating = True
		self.packetlist = []
		self.installed_packetlist = {}
		self.upgradeable_packages = {}
		self.Console = Console()
		self.cmdList = []
		self.cachelist = []
		self.cache_ttl = 86400 #600 is default, 0 disables, Seconds cache is considered valid (24h should be ok for caching ipkgs)
		self.cache_file = eEnv.resolve('${libdir}/enigma2/python/Plugins/SystemPlugins/SoftwareManager/packetmanager.cache') #Path to cache directory
		self.oktext = _("\nAfter pressing OK, please wait!")
		# Package-name suffixes (plus 'busybox') hidden from the list.
		self.unwanted_extensions = ('-dbg', '-dev', '-doc', '-staticdev', 'busybox')
		self.ipkg = IpkgComponent()
		self.ipkg.addCallback(self.ipkgCallback)
		self.onShown.append(self.setWindowTitle)
		self.onLayoutFinish.append(self.rebuildList)
		rcinput = eRCInput.getInstance()
		if config.misc.remotecontrol_text_support.getValue():
			rcinput.setKeyboardMode(rcinput.kmNone)
		else:
			rcinput.setKeyboardMode(rcinput.kmAscii)

	# Jump to the first list entry starting with the typed character.
	def keyNumberGlobal(self, val):
		key = self.getKey(val)
		if key is not None:
			keyvalue = key.encode("utf-8")
			if len(keyvalue) == 1:
				self.setNextIdx(keyvalue[0])

	def keyGotAscii(self):
		keyvalue = unichr(getPrevAsciiCode()).encode("utf-8")
		if len(keyvalue) == 1:
			self.setNextIdx(keyvalue[0])

	def setNextIdx(self,char):
		if char in ("0", "1", "a"):
			self["list"].setIndex(0)
		else:
			idx = self.getNextIdx(char)
			if idx and idx <= self["list"].count:
				self["list"].setIndex(idx)

	def getNextIdx(self,char):
		# Returns None when no entry starts with `char`.
		for idx, i in enumerate(self["list"].list):
			if i[0] and (i[0][0] == char):
				return idx

	def exit(self):
		"""Stop any running ipkg action, kill console children, restore the
		remote-control keyboard mode and close the screen."""
		self.ipkg.stop()
		if self.Console is not None:
			if len(self.Console.appContainers):
				for name in self.Console.appContainers.keys():
					self.Console.kill(name)
		rcinput = eRCInput.getInstance()
		rcinput.setKeyboardMode(rcinput.kmNone)
		self.close()

	def reload(self):
		# Green button: drop the on-disk cache and rebuild from scratch.
		if (os_path.exists(self.cache_file) == True):
			remove(self.cache_file)
		self.list_updating = True
		self.rebuildList()

	def setWindowTitle(self):
		self.setTitle(_("Packet manager"))

	def setStatus(self,status = None):
		"""Show a transient one-row status list ('update' or 'error')."""
		if status:
			self.statuslist = []
			divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "div-h.png"))
			if status == 'update':
				statuspng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/upgrade.png"))
				self.statuslist.append(( _("Package list update"), '', _("Trying to download a new packetlist. Please wait..." ),'',statuspng, divpng ))
				self['list'].setList(self.statuslist)
			elif status == 'error':
				statuspng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/remove.png"))
				self.statuslist.append(( _("Error"), '', _("An error occurred while downloading the packetlist. Please try again." ),'',statuspng, divpng ))
				self['list'].setList(self.statuslist)

	def rebuildList(self):
		"""Build the list from the cache when it is still valid, otherwise
		kick off `ipkg update` (continued in ipkgCallback)."""
		self.setStatus('update')
		self.inv_cache = 0
		self.vc = valid_cache(self.cache_file, self.cache_ttl)
		if self.cache_ttl > 0 and self.vc != 0:
			try:
				self.buildPacketList()
			except:
				# Corrupt cache: fall through to a fresh rebuild below.
				self.inv_cache = 1
		if self.cache_ttl == 0 or self.inv_cache == 1 or self.vc == 0:
			self.run = 0
			self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)

	def go(self, returnValue = None):
		"""OK handler: ask to remove/upgrade/install the selected package
		depending on its cached status (entry index 3)."""
		cur = self["list"].getCurrent()
		if cur:
			status = cur[3]
			package = cur[0]
			self.cmdList = []
			if status == 'installed':
				self.cmdList.append((IpkgComponent.CMD_REMOVE, { "package": package }))
				if len(self.cmdList):
					self.session.openWithCallback(self.runRemove, MessageBox, _("Do you want to remove the package:\n") + package + "\n" + self.oktext)
			elif status == 'upgradeable':
				self.cmdList.append((IpkgComponent.CMD_INSTALL, { "package": package }))
				if len(self.cmdList):
					self.session.openWithCallback(self.runUpgrade, MessageBox, _("Do you want to upgrade the package:\n") + package + "\n" + self.oktext)
			elif status == "installable":
				self.cmdList.append((IpkgComponent.CMD_INSTALL, { "package": package }))
				if len(self.cmdList):
					self.session.openWithCallback(self.runUpgrade, MessageBox, _("Do you want to install the package:\n") + package + "\n" + self.oktext)

	def runRemove(self, result):
		if result:
			self.session.openWithCallback(self.runRemoveFinished, Ipkg, cmdList = self.cmdList)

	def runRemoveFinished(self):
		self.session.openWithCallback(self.RemoveReboot, MessageBox, _("Remove finished.") +" "+_("Do you want to reboot your receiver?"), MessageBox.TYPE_YESNO)

	def RemoveReboot(self, result):
		"""Reboot-question callback after removal: on "no", flip the entry's
		status to 'installable' and persist the cache; on "yes", restart."""
		if result is None:
			return
		if result is False:
			cur = self["list"].getCurrent()
			if cur:
				item = self['list'].getIndex()
				self.list[item] = self.buildEntryComponent(cur[0], cur[1], cur[2], 'installable')
				self.cachelist[item] = [cur[0], cur[1], cur[2], 'installable']
				self['list'].setList(self.list)
				write_cache(self.cache_file, self.cachelist)
				self.reloadPluginlist()
		if result:
			self.session.open(TryQuitMainloop,retvalue=3)

	def runUpgrade(self, result):
		if result:
			self.session.openWithCallback(self.runUpgradeFinished, Ipkg, cmdList = self.cmdList)

	def runUpgradeFinished(self):
		self.session.openWithCallback(self.UpgradeReboot, MessageBox, _("Upgrade finished.") +" "+_("Do you want to reboot your receiver?"), MessageBox.TYPE_YESNO)

	def UpgradeReboot(self, result):
		"""Reboot-question callback after install/upgrade: mirror image of
		RemoveReboot, marking the entry 'installed' on "no"."""
		if result is None:
			return
		if result is False:
			cur = self["list"].getCurrent()
			if cur:
				item = self['list'].getIndex()
				self.list[item] = self.buildEntryComponent(cur[0], cur[1], cur[2], 'installed')
				self.cachelist[item] = [cur[0], cur[1], cur[2], 'installed']
				self['list'].setList(self.list)
				write_cache(self.cache_file, self.cachelist)
				self.reloadPluginlist()
		if result:
			self.session.open(TryQuitMainloop,retvalue=3)

	def ipkgCallback(self, event, param):
		# After a successful `ipkg update`, chain into `ipkg list`.
		if event == IpkgComponent.EVENT_ERROR:
			self.list_updating = False
			self.setStatus('error')
		elif event == IpkgComponent.EVENT_DONE:
			if self.list_updating:
				self.list_updating = False
				if not self.Console:
					self.Console = Console()
				cmd = self.ipkg.ipkg + " list"
				self.Console.ePopen(cmd, self.IpkgList_Finished)
		pass

	def IpkgList_Finished(self, result, retval, extra_args = None):
		"""Parse `ipkg list` output into self.packetlist, then chain into
		`ipkg list_installed`."""
		# Re-join wrapped description lines before splitting on ' - '.
		result = result.replace('\n ',' - ')
		if result:
			self.packetlist = []
			last_name = ""
			for x in result.splitlines():
				tokens = x.split(' - ')
				name = tokens[0].strip()
				if not any((name.endswith(x) or name.find('locale') != -1) for x in self.unwanted_extensions):
					l = len(tokens)
					version = l > 1 and tokens[1].strip() or ""
					descr = l > 3 and tokens[3].strip() or l > 2 and tokens[2].strip() or ""
					if name == last_name:
						continue
					last_name = name
					self.packetlist.append([name, version, descr])
		if not self.Console:
			self.Console = Console()
		cmd = self.ipkg.ipkg + " list_installed"
		self.Console.ePopen(cmd, self.IpkgListInstalled_Finished)

	def IpkgListInstalled_Finished(self, result, retval, extra_args = None):
		"""Parse installed packages, then chain into `opkg list-upgradable`."""
		if result:
			self.installed_packetlist = {}
			for x in result.splitlines():
				tokens = x.split(' - ')
				name = tokens[0].strip()
				if not any(name.endswith(x) for x in self.unwanted_extensions):
					l = len(tokens)
					version = l > 1 and tokens[1].strip() or ""
					self.installed_packetlist[name] = version
		if not self.Console:
			self.Console = Console()
		cmd = "opkg list-upgradable"
		self.Console.ePopen(cmd, self.OpkgListUpgradeable_Finished)

	def OpkgListUpgradeable_Finished(self, result, retval, extra_args = None):
		"""Parse upgradeable packages; final step of the chain — build the UI list."""
		if result:
			self.upgradeable_packages = {}
			for x in result.splitlines():
				tokens = x.split(' - ')
				name = tokens[0].strip()
				if not any(name.endswith(x) for x in self.unwanted_extensions):
					l = len(tokens)
					version = l > 2 and tokens[2].strip() or ""
					self.upgradeable_packages[name] = version
		self.buildPacketList()

	def buildEntryComponent(self, name, version, description, state):
		"""Return one list row tuple: (name, version, description, state,
		state pixmap, divider pixmap)."""
		divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "div-h.png"))
		if not description:
			description = "No description available."
		if state == 'installed':
			installedpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/installed.png"))
			return((name, version, _(description), state, installedpng, divpng))
		elif state == 'upgradeable':
			upgradeablepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/upgradeable.png"))
			return((name, version, _(description), state, upgradeablepng, divpng))
		else:
			installablepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/installable.png"))
			return((name, version, _(description), state, installablepng, divpng))

	def buildPacketList(self):
		"""Fill the list widget, either from the on-disk cache or from the
		freshly parsed package data, classifying each package's state."""
		self.list = []
		self.cachelist = []
		if self.cache_ttl > 0 and self.vc != 0:
			print 'Loading packagelist cache from ',self.cache_file
			try:
				self.cachelist = load_cache(self.cache_file)
				if len(self.cachelist) > 0:
					for x in self.cachelist:
						self.list.append(self.buildEntryComponent(x[0], x[1], x[2], x[3]))
					self['list'].setList(self.list)
			except:
				self.inv_cache = 1
		if self.cache_ttl == 0 or self.inv_cache == 1 or self.vc == 0:
			print 'rebuilding fresh package list'
			for x in self.packetlist:
				status = ""
				if self.installed_packetlist.has_key(x[0]):
					if self.upgradeable_packages.has_key(x[0]):
						status = "upgradeable"
					else:
						status = "installed"
				else:
					status = "installable"
				self.list.append(self.buildEntryComponent(x[0], x[1], x[2], status))
				self.cachelist.append([x[0], x[1], x[2], status])
			write_cache(self.cache_file, self.cachelist)
			self['list'].setList(self.list)

	def reloadPluginlist(self):
		plugins.readPluginList(resolveFilename(SCOPE_PLUGINS))
class IpkgInstaller(Screen):
	"""Selection screen offering a set of local ipk files for installation."""
	skin = """
		<screen name="IpkgInstaller" position="center,center" size="550,450" title="Install extensions" >
			<ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="buttons/blue.png" position="420,0" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
			<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
			<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
			<widget source="key_blue" render="Label" position="420,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#18188b" transparent="1" />
			<widget name="list" position="5,50" size="540,360" />
			<ePixmap pixmap="div-h.png" position="0,410" zPosition="10" size="560,2" transparent="1" alphatest="on" />
			<widget source="introduction" render="Label" position="5,420" zPosition="10" size="550,30" halign="center" valign="center" font="Regular;22" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
		</screen>"""

	def __init__(self, session, list):
		Screen.__init__(self, session)
		self.list = SelectionList()
		self["list"] = self.list
		# Every scanned file starts out unselected.
		for index, path in enumerate(list):
			self.list.addSelection(path, path, index, False)
		self["key_red"] = StaticText(_("Close"))
		self["key_green"] = StaticText(_("Install"))
		self["key_yellow"] = StaticText()
		self["key_blue"] = StaticText(_("Invert"))
		self["introduction"] = StaticText(_("Press OK to toggle the selection."))
		self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
		{
			"ok": self.list.toggleSelection,
			"cancel": self.close,
			"red": self.close,
			"green": self.install,
			"blue": self.list.toggleAllSelection
		}, -1)

	def install(self):
		"""Install every selected ipk file via the Ipkg screen."""
		selected = self.list.getSelectionsList()
		cmdList = [(IpkgComponent.CMD_INSTALL, { "package": entry[1] }) for entry in selected]
		self.session.open(Ipkg, cmdList = cmdList)
def filescan_open(list, session, **kwargs):
	"""Scanner callback: open the installer with the scanned ipk file paths."""
	paths = []
	for item in list:
		paths.append(item.path)
	session.open(IpkgInstaller, paths) # list of path strings
def filescan(**kwargs):
	"""Return the Scanner descriptor that lets the media scanner offer
	installation for *.ipk files (mimetype application/x-debian-package)."""
	from Components.Scanner import Scanner, ScanPath
	return \
		Scanner(mimetypes = ["application/x-debian-package"],
			paths_to_scan =
				[
					ScanPath(path = "ipk", with_subdirs = True),
					ScanPath(path = "", with_subdirs = False),
				],
			name = "Ipkg",
			description = _("Install extensions."),
			openfnc = filescan_open, )
def UpgradeMain(session, **kwargs):
	"""Plugin entry point: open the software management menu."""
	session.open(UpdatePluginMenu)
def startSetup(menuid):
	"""Menu hook: contribute the Software management entry to the setup menu
	when the corresponding config option is enabled."""
	if menuid != "setup" or not config.plugins.softwaremanager.onSetupMenu.getValue():
		return []
	return [(_("Software management"), UpgradeMain, "software_manager", 50)]
def Plugins(path, **kwargs):
	"""Enigma2 plugin registration: always register the menu hook and the
	file scanner; add plugin-menu / extensions-menu entries depending on the
	user's placement preferences."""
	global plugin_path
	plugin_path = path
	list = [
		PluginDescriptor(name=_("Software management"), description=_("Manage your STB_BOX's software"), where = PluginDescriptor.WHERE_MENU, needsRestart = False, fnc=startSetup),
		PluginDescriptor(name=_("Ipkg"), where = PluginDescriptor.WHERE_FILESCAN, needsRestart = False, fnc = filescan)
	]
	# Fall back to the plugin menu when neither setup-menu nor blue-button
	# placement is selected.
	if not config.plugins.softwaremanager.onSetupMenu.getValue() and not config.plugins.softwaremanager.onBlueButton.getValue():
		list.append(PluginDescriptor(name=_("Software management"), description=_("Manage your STB_BOX's software"), where = PluginDescriptor.WHERE_PLUGINMENU, needsRestart = False, fnc=UpgradeMain))
	if config.plugins.softwaremanager.onBlueButton.getValue():
		list.append(PluginDescriptor(name=_("Software management"), description=_("Manage your STB_BOX's software"), where = PluginDescriptor.WHERE_EXTENSIONSMENU, needsRestart = False, fnc=UpgradeMain))
	return list
|
popazerty/dvbapp2-gui
|
lib/python/Plugins/SystemPlugins/SoftwareManager/plugin.py
|
Python
|
gpl-2.0
| 81,566 | 0.028676 |
"""Test the split module"""
from __future__ import division
import warnings
import pytest
import numpy as np
from scipy.sparse import coo_matrix, csc_matrix, csr_matrix
from scipy import stats
from itertools import combinations
from itertools import combinations_with_replacement
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raises_regexp
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_greater_equal
from sklearn.utils.testing import assert_not_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import assert_no_warnings
from sklearn.utils.validation import _num_samples
from sklearn.utils.mocking import MockDataFrame
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.model_selection import StratifiedKFold
from sklearn.model_selection import GroupKFold
from sklearn.model_selection import TimeSeriesSplit
from sklearn.model_selection import LeaveOneOut
from sklearn.model_selection import LeaveOneGroupOut
from sklearn.model_selection import LeavePOut
from sklearn.model_selection import LeavePGroupsOut
from sklearn.model_selection import ShuffleSplit
from sklearn.model_selection import GroupShuffleSplit
from sklearn.model_selection import StratifiedShuffleSplit
from sklearn.model_selection import PredefinedSplit
from sklearn.model_selection import check_cv
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import RepeatedKFold
from sklearn.model_selection import RepeatedStratifiedKFold
from sklearn.linear_model import Ridge
from sklearn.model_selection._split import _validate_shuffle_split
from sklearn.model_selection._split import _CVIterableWrapper
from sklearn.model_selection._split import _build_repr
from sklearn.model_selection._split import CV_WARNING
from sklearn.model_selection._split import NSPLIT_WARNING
from sklearn.datasets import load_digits
from sklearn.datasets import make_classification
from sklearn.externals import six
from sklearn.externals.six.moves import zip
from sklearn.utils.fixes import comb
from sklearn.svm import SVC
# Shared fixtures for the split tests.
X = np.ones(10)
y = np.arange(10) // 2
P_sparse = coo_matrix(np.eye(5))
# Group labelings of various dtypes/containers used to exercise the
# group-aware splitters.
test_groups = (
    np.array([1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3]),
    np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]),
    np.array([0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2]),
    np.array([1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4]),
    [1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3],
    ['1', '1', '1', '1', '2', '2', '2', '3', '3', '3', '3', '3'])
digits = load_digits()
class MockClassifier(object):
    """Dummy classifier to test the cross-validation"""

    def __init__(self, a=0, allow_nd=False):
        self.a = a
        self.allow_nd = allow_nd

    def fit(self, X, Y=None, sample_weight=None, class_prior=None,
            sparse_sample_weight=None, sparse_param=None, dummy_int=None,
            dummy_str=None, dummy_obj=None, callback=None):
        """The dummy arguments are to test that this fit function can
        accept non-array arguments through cross-validation, such as:
            - int
            - str (this is actually array-like)
            - object
            - function
        """
        self.dummy_int = dummy_int
        self.dummy_str = dummy_str
        self.dummy_obj = dummy_obj
        if callback is not None:
            callback(self)
        if self.allow_nd:
            X = X.reshape(len(X), -1)
        if X.ndim >= 3 and not self.allow_nd:
            raise ValueError('X cannot be d')
        if sample_weight is not None:
            assert_true(sample_weight.shape[0] == X.shape[0],
                        'MockClassifier extra fit_param sample_weight.shape[0]'
                        ' is {0}, should be {1}'.format(sample_weight.shape[0],
                                                        X.shape[0]))
        if class_prior is not None:
            # NOTE(review): this compares against the module-level `y`
            # fixture, not the `Y` passed to fit — looks intentional for
            # these tests, but confirm before reusing this mock elsewhere.
            assert_true(class_prior.shape[0] == len(np.unique(y)),
                        'MockClassifier extra fit_param class_prior.shape[0]'
                        ' is {0}, should be {1}'.format(class_prior.shape[0],
                                                        len(np.unique(y))))
        if sparse_sample_weight is not None:
            fmt = ('MockClassifier extra fit_param sparse_sample_weight'
                   '.shape[0] is {0}, should be {1}')
            assert_true(sparse_sample_weight.shape[0] == X.shape[0],
                        fmt.format(sparse_sample_weight.shape[0], X.shape[0]))
        if sparse_param is not None:
            fmt = ('MockClassifier extra fit_param sparse_param.shape '
                   'is ({0}, {1}), should be ({2}, {3})')
            assert_true(sparse_param.shape == P_sparse.shape,
                        fmt.format(sparse_param.shape[0],
                                   sparse_param.shape[1],
                                   P_sparse.shape[0], P_sparse.shape[1]))
        return self

    def predict(self, T):
        if self.allow_nd:
            T = T.reshape(len(T), -1)
        return T[:, 0]

    def score(self, X=None, Y=None):
        # Deterministic score derived from the constructor parameter `a`.
        return 1. / (1 + np.abs(self.a))

    def get_params(self, deep=False):
        return {'a': self.a, 'allow_nd': self.allow_nd}
@ignore_warnings
def test_cross_validator_with_default_params():
    """Smoke-test every basic cross-validator: n_splits, 1d/2d input
    equivalence, integer indices, and repr."""
    n_samples = 4
    n_unique_groups = 4
    n_splits = 2
    p = 2
    n_shuffle_splits = 10  # (the default value)
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
    X_1d = np.array([1, 2, 3, 4])
    y = np.array([1, 1, 2, 2])
    groups = np.array([1, 2, 3, 4])
    loo = LeaveOneOut()
    lpo = LeavePOut(p)
    kf = KFold(n_splits)
    skf = StratifiedKFold(n_splits)
    lolo = LeaveOneGroupOut()
    lopo = LeavePGroupsOut(p)
    ss = ShuffleSplit(random_state=0)
    ps = PredefinedSplit([1, 1, 2, 2])  # n_splits = np of unique folds = 2
    loo_repr = "LeaveOneOut()"
    lpo_repr = "LeavePOut(p=2)"
    kf_repr = "KFold(n_splits=2, random_state=None, shuffle=False)"
    skf_repr = "StratifiedKFold(n_splits=2, random_state=None, shuffle=False)"
    lolo_repr = "LeaveOneGroupOut()"
    lopo_repr = "LeavePGroupsOut(n_groups=2)"
    ss_repr = ("ShuffleSplit(n_splits=10, random_state=0, "
               "test_size='default',\n       train_size=None)")
    ps_repr = "PredefinedSplit(test_fold=array([1, 1, 2, 2]))"
    n_splits_expected = [n_samples, comb(n_samples, p), n_splits, n_splits,
                         n_unique_groups, comb(n_unique_groups, p),
                         n_shuffle_splits, 2]
    for i, (cv, cv_repr) in enumerate(zip(
            [loo, lpo, kf, skf, lolo, lopo, ss, ps],
            [loo_repr, lpo_repr, kf_repr, skf_repr, lolo_repr, lopo_repr,
             ss_repr, ps_repr])):
        # Test if get_n_splits works correctly
        assert_equal(n_splits_expected[i], cv.get_n_splits(X, y, groups))
        # Test if the cross-validator works as expected even if
        # the data is 1d
        np.testing.assert_equal(list(cv.split(X, y, groups)),
                                list(cv.split(X_1d, y, groups)))
        # Test that train, test indices returned are integers
        for train, test in cv.split(X, y, groups):
            assert_equal(np.asarray(train).dtype.kind, 'i')
            # The original checked `train` twice; `test` must be integral too.
            assert_equal(np.asarray(test).dtype.kind, 'i')
        # Test if the repr works without any errors
        assert_equal(cv_repr, repr(cv))
    # ValueError for get_n_splits methods
    msg = "The 'X' parameter should not be None."
    assert_raise_message(ValueError, msg,
                         loo.get_n_splits, None, y, groups)
    assert_raise_message(ValueError, msg,
                         lpo.get_n_splits, None, y, groups)
@pytest.mark.filterwarnings('ignore: You should specify a value')  # 0.22
def test_2d_y():
    # smoke test for 2d y and multi-label
    n_samples = 30
    rng = np.random.RandomState(1)
    X = rng.randint(0, 3, size=(n_samples, 2))
    y = rng.randint(0, 3, size=(n_samples,))
    y_2d = y.reshape(-1, 1)
    y_multilabel = rng.randint(0, 2, size=(n_samples, 3))
    groups = rng.randint(0, 3, size=(n_samples,))
    splitters = [LeaveOneOut(), LeavePOut(p=2), KFold(), StratifiedKFold(),
                 RepeatedKFold(), RepeatedStratifiedKFold(),
                 ShuffleSplit(), StratifiedShuffleSplit(test_size=.5),
                 GroupShuffleSplit(), LeaveOneGroupOut(),
                 LeavePGroupsOut(n_groups=2), GroupKFold(), TimeSeriesSplit(),
                 PredefinedSplit(test_fold=groups)]
    for splitter in splitters:
        # 1d and column-vector targets must both be accepted.
        list(splitter.split(X, y, groups))
        list(splitter.split(X, y_2d, groups))
        try:
            # Multilabel targets are accepted by some splitters and rejected
            # by others; when rejected, the error message must be specific.
            list(splitter.split(X, y_multilabel, groups))
        except ValueError as e:
            allowed_target_types = ('binary', 'multiclass')
            msg = "Supported target types are: {}. Got 'multilabel".format(
                allowed_target_types)
            assert msg in str(e)
def check_valid_split(train, test, n_samples=None):
    """Assert that *train* and *test* are disjoint and, when *n_samples* is
    given, that together they cover range(n_samples)."""
    # Sets yield more informative assertion failure messages than arrays.
    train_set = set(train)
    test_set = set(test)
    assert_equal(train_set.intersection(test_set), set())
    if n_samples is not None:
        assert_equal(train_set.union(test_set), set(range(n_samples)))
def check_cv_coverage(cv, X, y, groups, expected_n_splits=None):
    """Run *cv* over the data and assert each split is valid, the number of
    iterations matches get_n_splits, and the test folds cover every sample."""
    n_samples = _num_samples(X)
    if expected_n_splits is not None:
        assert_equal(cv.get_n_splits(X, y, groups), expected_n_splits)
    else:
        expected_n_splits = cv.get_n_splits(X, y, groups)

    seen_test_indices = set()
    n_iterations = 0
    for train, test in cv.split(X, y, groups):
        check_valid_split(train, test, n_samples=n_samples)
        n_iterations += 1
        seen_test_indices.update(test)

    assert_equal(n_iterations, expected_n_splits)
    if n_samples is not None:
        # Accumulated test folds must cover the whole dataset.
        assert_equal(seen_test_indices, set(range(n_samples)))
def test_kfold_valueerrors():
    """KFold/StratifiedKFold must warn or raise on degenerate datasets and
    reject invalid n_splits / shuffle values."""
    X1 = np.array([[1, 2], [3, 4], [5, 6]])
    X2 = np.array([[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]])
    # Check that errors are raised if there is not enough samples.
    # FIX: the original line built a bare tuple
    # `(ValueError, next, KFold(4).split(X1))` instead of calling
    # assert_raises, so this check silently never ran.
    assert_raises(ValueError, next, KFold(4).split(X1))
    # Check that a warning is raised if the least populated class has too few
    # members.
    y = np.array([3, 3, -1, -1, 3])
    skf_3 = StratifiedKFold(3)
    assert_warns_message(Warning, "The least populated class",
                         next, skf_3.split(X2, y))
    # Check that despite the warning the folds are still computed even
    # though all the classes are not necessarily represented at on each
    # side of the split at each split
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        check_cv_coverage(skf_3, X2, y, groups=None, expected_n_splits=3)
    # Check that errors are raised if all n_groups for individual
    # classes are less than n_splits.
    y = np.array([3, 3, -1, -1, 2])
    assert_raises(ValueError, next, skf_3.split(X2, y))
    # Error when number of folds is <= 1
    assert_raises(ValueError, KFold, 0)
    assert_raises(ValueError, KFold, 1)
    error_string = ("k-fold cross-validation requires at least one"
                    " train/test split")
    assert_raise_message(ValueError, error_string,
                         StratifiedKFold, 0)
    assert_raise_message(ValueError, error_string,
                         StratifiedKFold, 1)
    # When n_splits is not integer:
    assert_raises(ValueError, KFold, 1.5)
    assert_raises(ValueError, KFold, 2.0)
    assert_raises(ValueError, StratifiedKFold, 1.5)
    assert_raises(ValueError, StratifiedKFold, 2.0)
    # When shuffle is not a bool:
    assert_raises(TypeError, KFold, n_splits=4, shuffle=None)
def test_kfold_indices():
    """Every index must land in exactly one test fold, whether or not the
    sample count divides evenly into the folds."""
    # Evenly divisible: 18 samples, 3 folds.
    check_cv_coverage(KFold(3), np.ones(18), y=None, groups=None,
                      expected_n_splits=3)
    # Not evenly divisible: 17 samples, 3 folds.
    X2 = np.ones(17)
    check_cv_coverage(KFold(3), X2, y=None, groups=None, expected_n_splits=3)
    # get_n_splits reports the configured number of folds.
    assert_equal(5, KFold(5).get_n_splits(X2))
def test_kfold_no_shuffle():
    """Without shuffling, KFold must keep the original sample ordering."""
    X2 = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
    # Four samples: two folds of two, in order.
    gen = KFold(2).split(X2[:-1])
    for expected_test, expected_train in ([[0, 1], [2, 3]],
                                          [[2, 3], [0, 1]]):
        train, test = next(gen)
        assert_array_equal(test, expected_test)
        assert_array_equal(train, expected_train)
    # Five samples: the first fold absorbs the extra element.
    gen = KFold(2).split(X2)
    for expected_test, expected_train in ([[0, 1, 2], [3, 4]],
                                          [[3, 4], [0, 1, 2]]):
        train, test = next(gen)
        assert_array_equal(test, expected_test)
        assert_array_equal(train, expected_train)
def test_stratified_kfold_no_shuffle():
    """StratifiedKFold without shuffling should disturb the data ordering as
    little as possible, so sample dependencies are not hidden."""
    X, y = np.ones(4), [1, 1, 0, 0]
    gen = StratifiedKFold(2).split(X, y)
    for expected_test, expected_train in ([[0, 2], [1, 3]],
                                          [[1, 3], [0, 2]]):
        train, test = next(gen)
        assert_array_equal(test, expected_test)
        assert_array_equal(train, expected_train)
    X, y = np.ones(7), [1, 1, 1, 0, 0, 0, 0]
    gen = StratifiedKFold(2).split(X, y)
    for expected_test, expected_train in ([[0, 1, 3, 4], [2, 5, 6]],
                                          [[2, 5, 6], [0, 1, 3, 4]]):
        train, test = next(gen)
        assert_array_equal(test, expected_test)
        assert_array_equal(train, expected_train)
    # get_n_splits reports the configured fold count.
    assert_equal(5, StratifiedKFold(5).get_n_splits(X, y))
    # String class labels must behave exactly like integer labels.
    X = np.ones(7)
    labels_str = ['1', '1', '1', '0', '0', '0', '0']
    labels_int = [1, 1, 1, 0, 0, 0, 0]
    np.testing.assert_equal(
        list(StratifiedKFold(2).split(X, labels_str)),
        list(StratifiedKFold(2).split(X, labels_int)))
def test_stratified_kfold_ratios():
    """Each stratified fold must preserve the global class ratios, with and
    without shuffling."""
    n_samples = 1000
    X = np.ones(n_samples)
    y = np.array([4] * int(0.10 * n_samples) +
                 [0] * int(0.89 * n_samples) +
                 [1] * int(0.01 * n_samples))
    class_ratios = [(4, 0.10), (0, 0.89), (1, 0.01)]
    for shuffle in (False, True):
        for train, test in StratifiedKFold(5, shuffle=shuffle).split(X, y):
            # Both sides of each split must reproduce the overall ratios.
            for label, ratio in class_ratios:
                assert_almost_equal(np.sum(y[train] == label) / len(train),
                                    ratio, 2)
                assert_almost_equal(np.sum(y[test] == label) / len(test),
                                    ratio, 2)
def test_kfold_balance():
    """KFold test-fold sizes may differ by at most one and must add up to
    the total sample count."""
    for n in range(11, 17):
        fold_sizes = [len(test)
                      for _, test in KFold(5).split(X=np.ones(n))]
        assert_true((np.max(fold_sizes) - np.min(fold_sizes)) <= 1)
        assert_equal(np.sum(fold_sizes), n)
def test_stratifiedkfold_balance():
    """StratifiedKFold folds are balanced in size (when stratification is
    possible), with and without shuffling."""
    X = np.ones(17)
    y = [0] * 3 + [1] * 14
    for shuffle in (True, False):
        cv = StratifiedKFold(3, shuffle=shuffle)
        for n in range(11, 17):
            fold_sizes = [len(test) for _, test in cv.split(X[:n], y[:n])]
            assert_true((np.max(fold_sizes) - np.min(fold_sizes)) <= 1)
            assert_equal(np.sum(fold_sizes), n)
def test_shuffle_kfold():
    """Shuffled KFold must permute the indices without dropping or
    duplicating any of them."""
    plain = KFold(3)
    shuffled_a = KFold(3, shuffle=True, random_state=0)
    shuffled_b = KFold(3, shuffle=True, random_state=1)
    X = np.ones(300)
    seen_in_test = np.zeros(300)
    for (tr1, te1), (tr2, te2), (tr3, te3) in zip(
            plain.split(X), shuffled_a.split(X), shuffled_b.split(X)):
        # No pair of train sets may overlap completely.
        for tr_a, tr_b in combinations((tr1, tr2, tr3), 2):
            assert_not_equal(len(np.intersect1d(tr_a, tr_b)), len(tr1))
        # Mark every test index produced by the shuffled splitter.
        seen_in_test[te2] = 1
    # All 300 indices must have appeared in some test fold.
    assert_equal(sum(seen_in_test), 300)
def test_shuffle_kfold_stratifiedkfold_reproducibility():
    """With shuffle=True, split() must be reproducible when random_state is
    set, and must differ between successive calls when it is not."""
    X = np.ones(15)   # Divisible by 3
    y = [0] * 7 + [1] * 8
    X2 = np.ones(16)  # Not divisible by 3
    y2 = [0] * 8 + [1] * 8
    kf = KFold(3, shuffle=True, random_state=0)
    skf = StratifiedKFold(3, shuffle=True, random_state=0)
    for cv in (kf, skf):
        # Fixed seed: two passes over split() must agree exactly.
        np.testing.assert_equal(list(cv.split(X, y)), list(cv.split(X, y)))
        np.testing.assert_equal(list(cv.split(X2, y2)), list(cv.split(X2, y2)))
    kf = KFold(3, shuffle=True)
    skf = StratifiedKFold(3, shuffle=True)
    for cv in (kf, skf):
        for data in zip((X, X2), (y, y2)):
            # Test if the two splits are different cv
            for (_, test_a), (_, test_b) in zip(cv.split(*data),
                                                cv.split(*data)):
                # cv.split(...) returns an array of tuples, each tuple
                # consisting of an array with train indices and test indices
                # FIX: pytest.raises(..., message=...) was deprecated and
                # removed in pytest 5, and never checked the message anyway;
                # use an explicit expect-AssertionError idiom instead.
                try:
                    np.testing.assert_array_equal(test_a, test_b)
                except AssertionError:
                    pass  # expected: the two passes shuffled differently
                else:
                    raise AssertionError(
                        "The splits for data, are same even"
                        " when random state is not set")
def test_shuffle_stratifiedkfold():
    """Different random states must give different shuffles while the test
    folds still cover every sample."""
    X_40 = np.ones(40)
    y = [0] * 20 + [1] * 20
    kf0 = StratifiedKFold(5, shuffle=True, random_state=0)
    kf1 = StratifiedKFold(5, shuffle=True, random_state=1)
    for (_, test0), (_, test1) in zip(kf0.split(X_40, y),
                                      kf1.split(X_40, y)):
        # Corresponding folds from the two seeds must not coincide.
        assert_not_equal(set(test0), set(test1))
    check_cv_coverage(kf0, X_40, y, groups=None, expected_n_splits=5)
def test_kfold_can_detect_dependent_samples_on_digits():  # see #2372
    """The digits samples are dependent (apparently grouped by writer), so
    an unshuffled KFold scores modestly (~0.81) while a shuffled KFold
    wrongly assumes i.i.d. samples and overestimates accuracy (~0.93).
    StratifiedKFold shuffles as little as possible and behaves like the
    unshuffled variant."""
    X, y = digits.data[:600], digits.target[:600]
    model = SVC(C=10, gamma=0.005)
    n_splits = 3

    def mean_cv_score(cv):
        # Mean accuracy of the model under the given CV scheme.
        return cross_val_score(model, X, y, cv=cv).mean()

    # Unshuffled KFold: the writer dependency keeps the score modest.
    score = mean_cv_score(KFold(n_splits=n_splits, shuffle=False))
    assert_greater(0.92, score)
    assert_greater(score, 0.80)
    # Shuffling artificially breaks the dependency and hides the model's
    # overfitting to writing style, yielding a seriously inflated score
    # for any random_state.
    for seed in (0, 1):
        score = mean_cv_score(KFold(n_splits, shuffle=True,
                                    random_state=seed))
        assert_greater(score, 0.92)
    # StratifiedKFold shuffles only as needed for class balance, so it also
    # detects the dependency (digits is approximately balanced, hence the
    # score is close to the unshuffled KFold one).
    score = mean_cv_score(StratifiedKFold(n_splits))
    assert_greater(0.93, score)
    assert_greater(score, 0.80)
def test_shuffle_split():
    """test_size given as a float, a Python int, a numpy int, or any six
    integer type must produce identical splits for the same seed."""
    ss1 = ShuffleSplit(test_size=0.2, random_state=0).split(X)
    ss2 = ShuffleSplit(test_size=2, random_state=0).split(X)
    ss3 = ShuffleSplit(test_size=np.int32(2), random_state=0).split(X)
    for typ in six.integer_types:
        ss4 = ShuffleSplit(test_size=typ(2), random_state=0).split(X)
    for splits in zip(ss1, ss2, ss3, ss4):
        reference = splits[0]
        # Every variant must match the float-test_size reference split.
        for other in splits[1:]:
            assert_array_equal(reference[0], other[0])
            assert_array_equal(reference[1], other[1])
@ignore_warnings
def test_stratified_shuffle_split_init():
    """Degenerate class/size combinations must raise ValueError, either at
    construction time or when split() is first consumed."""
    data = np.arange(7)
    labels = np.asarray([0, 1, 1, 1, 2, 2, 2])
    # A class with a single member cannot be stratified.
    assert_raises(ValueError, next,
                  StratifiedShuffleSplit(3, 0.2).split(data, labels))
    # Test set smaller than the number of classes.
    assert_raises(ValueError, next,
                  StratifiedShuffleSplit(3, 2).split(data, labels))
    # Train set smaller than the number of classes.
    assert_raises(ValueError, next,
                  StratifiedShuffleSplit(3, 3, 2).split(data, labels))
    data = np.arange(9)
    labels = np.asarray([0, 0, 0, 1, 1, 1, 2, 2, 2])
    # train + test fractions exceeding 1 fail at construction time.
    assert_raises(ValueError, StratifiedShuffleSplit, 3, 0.5, 0.6)
    # Sizes exceeding the number of samples fail on split().
    assert_raises(ValueError, next,
                  StratifiedShuffleSplit(3, 8, 0.6).split(data, labels))
    assert_raises(ValueError, next,
                  StratifiedShuffleSplit(3, 0.6, 8).split(data, labels))
    # Train size or test size too small.
    assert_raises(ValueError, next,
                  StratifiedShuffleSplit(train_size=2).split(data, labels))
    assert_raises(ValueError, next,
                  StratifiedShuffleSplit(test_size=2).split(data, labels))
def test_stratified_shuffle_split_respects_test_size():
    """Integer train/test sizes must be honored exactly in every split."""
    y = np.array([0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2])
    test_size, train_size = 5, 10
    splitter = StratifiedShuffleSplit(6, test_size=test_size,
                                      train_size=train_size,
                                      random_state=0)
    for train, test in splitter.split(np.ones(len(y)), y):
        assert_equal(len(train), train_size)
        assert_equal(len(test), test_size)
def test_stratified_shuffle_split_iter():
    """For a variety of label vectors, each split must preserve the class
    set and proportions, honor the computed sizes, and partition samples."""
    ys = [np.array([1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3]),
          np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]),
          np.array([0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2] * 2),
          np.array([1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4]),
          np.array([-1] * 800 + [1] * 50),
          np.concatenate([[i] * (100 + i) for i in range(11)]),
          [1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3],
          ['1', '1', '1', '1', '2', '2', '2', '3', '3', '3', '3', '3'],
          ]

    def class_proportions(labels):
        # Relative frequency of each class present in `labels`.
        encoded = np.unique(labels, return_inverse=True)[1]
        return np.bincount(encoded) / float(len(labels))

    for y in ys:
        sss = StratifiedShuffleSplit(6, test_size=0.33,
                                     random_state=0).split(np.ones(len(y)), y)
        y = np.asanyarray(y)  # To make it indexable for y[train]
        # Mirrors how the test size is computed internally in
        # _validate_shuffle_split.
        test_size = np.ceil(0.33 * len(y))
        train_size = len(y) - test_size
        for train, test in sss:
            # Both sides must contain the same set of classes ...
            assert_array_equal(np.unique(y[train]), np.unique(y[test]))
            # ... in (approximately) the same proportions.
            assert_array_almost_equal(class_proportions(y[train]),
                                      class_proportions(y[test]), 1)
            assert_equal(len(train) + len(test), y.size)
            assert_equal(len(train), train_size)
            assert_equal(len(test), test_size)
            # Train and test indices never overlap.
            assert_array_equal(np.lib.arraysetops.intersect1d(train, test),
                               [])
def test_stratified_shuffle_split_even():
    """Over many splits, every index should be drawn into train/test with
    roughly even (binomial) frequency."""
    n_folds = 5
    n_splits = 1000

    def assert_counts_are_ok(idx_counts, p):
        # Here we test that the distribution of the counts
        # per index is close enough to a binomial
        threshold = 0.05 / n_splits
        bf = stats.binom(n_splits, p)
        for count in idx_counts:
            prob = bf.pmf(count)
            assert_true(prob > threshold,
                        "An index is not drawn with chance corresponding "
                        "to even draws")

    for n_samples in (6, 22):
        groups = np.array((n_samples // 2) * [0, 1])
        splits = StratifiedShuffleSplit(n_splits=n_splits,
                                        test_size=1. / n_folds,
                                        random_state=0)
        train_counts = [0] * n_samples
        test_counts = [0] * n_samples
        n_splits_actual = 0
        for train, test in splits.split(X=np.ones(n_samples), y=groups):
            n_splits_actual += 1
            for counter, ids in [(train_counts, train), (test_counts, test)]:
                # FIX: renamed the loop variable `id` -> `idx`; `id`
                # shadows the builtin of the same name.
                for idx in ids:
                    counter[idx] += 1
        assert_equal(n_splits_actual, n_splits)
        n_train, n_test = _validate_shuffle_split(
            n_samples, test_size=1. / n_folds,
            train_size=1. - (1. / n_folds))
        # Sanity checks on the last produced split.
        assert_equal(len(train), n_train)
        assert_equal(len(test), n_test)
        assert_equal(len(set(train).intersection(test)), 0)
        group_counts = np.unique(groups)
        assert_equal(splits.test_size, 1.0 / n_folds)
        assert_equal(n_train + n_test, len(groups))
        assert_equal(len(group_counts), 2)
        ex_test_p = float(n_test) / n_samples
        ex_train_p = float(n_train) / n_samples
        assert_counts_are_ok(train_counts, ex_train_p)
        assert_counts_are_ok(test_counts, ex_test_p)
def test_stratified_shuffle_split_overlap_train_test_bug():
    """Non-regression test for scikit-learn issue #6121, where train and
    test indices could overlap."""
    y = [0, 1, 2, 3] * 3 + [4, 5] * 5
    splitter = StratifiedShuffleSplit(n_splits=1,
                                      test_size=0.5, random_state=0)
    train, test = next(splitter.split(X=np.ones_like(y), y=y))
    # The two sides are disjoint ...
    assert_array_equal(np.intersect1d(train, test), [])
    # ... and together form a complete partition of the samples.
    assert_array_equal(np.union1d(train, test), np.arange(len(y)))
def test_stratified_shuffle_split_multilabel():
    """Non-regression test for issue #9037: multilabel y must be stratified
    on entire rows."""
    candidates = [np.array([[0, 1], [1, 0], [1, 0], [0, 1]]),
                  np.array([[0, 1], [1, 1], [1, 1], [0, 1]])]
    for y in candidates:
        sss = StratifiedShuffleSplit(n_splits=1, test_size=0.5,
                                     random_state=0)
        train, test = next(sss.split(X=np.ones_like(y), y=y))
        y_train, y_test = y[train], y[test]
        # Disjoint and complete partition of the samples.
        assert_array_equal(np.intersect1d(train, test), [])
        assert_array_equal(np.union1d(train, test), np.arange(len(y)))
        # By design, y[:, 0] uniquely determines the whole row, so its mean
        # must be preserved on both sides of the split.
        expected_ratio = np.mean(y[:, 0])
        assert_equal(expected_ratio, np.mean(y_train[:, 0]))
        assert_equal(expected_ratio, np.mean(y_test[:, 0]))
def test_stratified_shuffle_split_multilabel_many_labels():
    """Non-regression test for PR #9922: with > 1000 labels, str(row) used
    to truncate with an ellipsis, so distinct rows collapsed to the same
    key in the powerset multilabel-to-multiclass transform."""
    row_with_many_zeros = [1, 0, 1] + [0] * 1000 + [1, 0, 1]
    row_with_many_ones = [1, 0, 1] + [1] * 1000 + [1, 0, 1]
    y = np.array([row_with_many_zeros] * 10 + [row_with_many_ones] * 100)
    sss = StratifiedShuffleSplit(n_splits=1, test_size=0.5, random_state=0)
    train, test = next(sss.split(X=np.ones_like(y), y=y))
    # By design, y[:, 4] uniquely determines the whole row, so its mean
    # must be preserved on both sides of the split.
    expected_ratio = np.mean(y[:, 4])
    assert_equal(expected_ratio, np.mean(y[train][:, 4]))
    assert_equal(expected_ratio, np.mean(y[test][:, 4]))
def test_predefinedsplit_with_kfold_split():
    """PredefinedSplit must be able to reproduce an arbitrary KFold split."""
    folds = np.full(10, -1.)
    kf_train, kf_test = [], []
    for i, (train_ind, test_ind) in enumerate(
            KFold(5, shuffle=True).split(X)):
        kf_train.append(train_ind)
        kf_test.append(test_ind)
        # Encode the fold assignment the way PredefinedSplit expects it.
        folds[test_ind] = i
    ps = PredefinedSplit(folds)
    # n_splits is simply the number of distinct fold labels.
    assert_equal(len(np.unique(folds)), ps.get_n_splits())
    ps_train, ps_test = [], []
    for train_ind, test_ind in ps.split():
        ps_train.append(train_ind)
        ps_test.append(test_ind)
    assert_array_equal(ps_train, kf_train)
    assert_array_equal(ps_test, kf_test)
def test_group_shuffle_split():
    """GroupShuffleSplit keeps whole groups on one side of each split and
    honors the requested test proportion at the group level."""
    for groups_i in test_groups:
        X = y = np.ones(len(groups_i))
        n_splits = 6
        test_size = 1. / 3
        slo = GroupShuffleSplit(n_splits, test_size=test_size,
                                random_state=0)
        # The repr must not crash.
        repr(slo)
        # The advertised number of splits is honored.
        assert_equal(slo.get_n_splits(X, y, groups=groups_i), n_splits)
        unique_groups = np.unique(groups_i)
        groups_arr = np.asarray(groups_i)
        for train, test in slo.split(X, y, groups=groups_i):
            train_side = np.unique(groups_arr[train])
            test_side = np.unique(groups_arr[test])
            # No train group leaks into the test set and vice versa.
            assert_false(np.any(np.in1d(groups_arr[train], test_side)))
            assert_false(np.any(np.in1d(groups_arr[test], train_side)))
            # Train and test add up to all the data ...
            assert_equal(groups_arr[train].size + groups_arr[test].size,
                         groups_arr.size)
            # ... and are disjoint.
            assert_array_equal(np.intersect1d(train, test), [])
            # Group counts on each side match the requested proportion,
            # +/- 1 for rounding error.
            assert_true(abs(len(test_side) -
                            round(test_size * len(unique_groups))) <= 1)
            assert_true(abs(len(train_side) -
                            round((1.0 - test_size) *
                                  len(unique_groups))) <= 1)
def test_leave_one_p_group_out():
    """LeaveOneGroupOut / LeavePGroupsOut: repr, split invariants,
    get_n_splits bookkeeping, and validation of the groups parameter."""
    logo = LeaveOneGroupOut()
    lpgo_1 = LeavePGroupsOut(n_groups=1)
    lpgo_2 = LeavePGroupsOut(n_groups=2)
    # Make sure the repr works
    assert_equal(repr(logo), 'LeaveOneGroupOut()')
    assert_equal(repr(lpgo_1), 'LeavePGroupsOut(n_groups=1)')
    assert_equal(repr(lpgo_2), 'LeavePGroupsOut(n_groups=2)')
    assert_equal(repr(LeavePGroupsOut(n_groups=3)),
                 'LeavePGroupsOut(n_groups=3)')
    for j, (cv, p_groups_out) in enumerate(((logo, 1), (lpgo_1, 1),
                                            (lpgo_2, 2))):
        for i, groups_i in enumerate(test_groups):
            n_groups = len(np.unique(groups_i))
            # n choose 1 == n; n choose 2 == n * (n - 1) / 2.
            n_splits = (n_groups if p_groups_out == 1
                        else n_groups * (n_groups - 1) / 2)
            X = y = np.ones(len(groups_i))
            # Test that the length is correct
            assert_equal(cv.get_n_splits(X, y, groups=groups_i), n_splits)
            groups_arr = np.asarray(groups_i)
            # Split using the original list / array / list of string groups_i
            for train, test in cv.split(X, y, groups=groups_i):
                # First test: no train group is in the test set and vice versa
                assert_array_equal(np.intersect1d(groups_arr[train],
                                                  groups_arr[test]).tolist(),
                                   [])
                # Second test: train and test add up to all the data
                assert_equal(len(train) + len(test), len(groups_i))
                # Third test:
                # The number of groups in test must be equal to p_groups_out
                # FIX: this used assert_true(a, b), which treats `b` as the
                # failure *message* and so only ever checked truthiness.
                assert_equal(np.unique(groups_arr[test]).shape[0],
                             p_groups_out)
    # check get_n_splits() with dummy parameters
    assert_equal(logo.get_n_splits(None, None, ['a', 'b', 'c', 'b', 'c']), 3)
    assert_equal(logo.get_n_splits(groups=[1.0, 1.1, 1.0, 1.2]), 3)
    assert_equal(lpgo_2.get_n_splits(None, None, np.arange(4)), 6)
    assert_equal(lpgo_1.get_n_splits(groups=np.arange(4)), 4)
    # raise ValueError if a `groups` parameter is illegal
    with assert_raises(ValueError):
        logo.get_n_splits(None, None, [0.0, np.nan, 0.0])
    with assert_raises(ValueError):
        lpgo_2.get_n_splits(None, None, [0.0, np.inf, 0.0])
    msg = "The 'groups' parameter should not be None."
    assert_raise_message(ValueError, msg,
                         logo.get_n_splits, None, None, None)
    assert_raise_message(ValueError, msg,
                         lpgo_1.get_n_splits, None, None, None)
def test_leave_group_out_changing_groups():
    # Check that LeaveOneGroupOut and LeavePGroupsOut work normally if
    # the groups variable is changed before calling split
    groups = np.array([0, 1, 2, 1, 1, 2, 0, 0])
    X = np.ones(len(groups))
    # Mutable copy that is zeroed out below, before the generators run.
    groups_changing = np.array(groups, copy=True)
    # NOTE(review): all four generators below are built from `groups`, not
    # `groups_changing` -- presumably the *_changing ones were meant to use
    # `groups_changing` so that the in-place mutation is actually
    # exercised; confirm against upstream history before changing.
    lolo = LeaveOneGroupOut().split(X, groups=groups)
    lolo_changing = LeaveOneGroupOut().split(X, groups=groups)
    lplo = LeavePGroupsOut(n_groups=2).split(X, groups=groups)
    lplo_changing = LeavePGroupsOut(n_groups=2).split(X, groups=groups)
    groups_changing[:] = 0
    # Both members of each pair must yield identical splits.
    for llo, llo_changing in [(lolo, lolo_changing), (lplo, lplo_changing)]:
        for (train, test), (train_chan, test_chan) in zip(llo, llo_changing):
            assert_array_equal(train, train_chan)
            assert_array_equal(test, test_chan)
    # n_splits = no of 2 (p) group combinations of the unique groups = 3C2 = 3
    assert_equal(
        3, LeavePGroupsOut(n_groups=2).get_n_splits(X, y=X,
                                                    groups=groups))
    # n_splits = no of unique groups (C(uniq_lbls, 1) = n_unique_groups)
    assert_equal(3, LeaveOneGroupOut().get_n_splits(X, y=X,
                                                    groups=groups))
def test_leave_one_p_group_out_error_on_fewer_number_of_groups():
    """Leave{One,P}GroupOut must raise informative errors when the data has
    too few samples or too few distinct groups."""
    # Empty data fails the generic sample-count validation.
    X = y = groups = np.ones(0)
    assert_raise_message(ValueError, "Found array with 0 sample(s)", next,
                         LeaveOneGroupOut().split(X, y, groups))
    # A single unique group cannot be left out.
    X = y = groups = np.ones(1)
    msg = ("The groups parameter contains fewer than 2 unique groups ({}). "
           "LeaveOneGroupOut expects at least 2.").format(groups)
    assert_raise_message(ValueError, msg, next,
                         LeaveOneGroupOut().split(X, y, groups))
    # LeavePGroupsOut needs strictly more unique groups than n_groups.
    for data in (np.ones(1), np.arange(3)):
        X = y = groups = data
        msg = ("The groups parameter contains fewer than (or equal to) "
               "n_groups (3) numbers of unique groups ({}). LeavePGroupsOut "
               "expects that at least n_groups + 1 (4) unique groups "
               "be present").format(groups)
        assert_raise_message(ValueError, msg, next,
                             LeavePGroupsOut(n_groups=3).split(X, y, groups))
@ignore_warnings
def test_repeated_cv_value_errors():
    """n_repeats must be a positive integer for both repeated splitters."""
    for cv_cls in (RepeatedKFold, RepeatedStratifiedKFold):
        for bad_n_repeats in (0, 1.5):
            assert_raises(ValueError, cv_cls, n_repeats=bad_n_repeats)
def test_repeated_kfold_determinstic_split():
    """With a fixed random_state, RepeatedKFold must yield the same splits
    on every call to split()."""
    X = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
    rkf = RepeatedKFold(n_splits=2, n_repeats=2, random_state=258173307)
    # (train, test) pairs expected for this seed, in order.
    expected = [([2, 4], [0, 1, 3]),
                ([0, 1, 3], [2, 4]),
                ([0, 1], [2, 3, 4]),
                ([2, 3, 4], [0, 1])]
    for _ in range(3):
        splits = rkf.split(X)
        for exp_train, exp_test in expected:
            train, test = next(splits)
            assert_array_equal(train, exp_train)
            assert_array_equal(test, exp_test)
        # Exactly n_splits * n_repeats iterations, no more.
        assert_raises(StopIteration, next, splits)
def test_get_n_splits_for_repeated_kfold():
    """RepeatedKFold.get_n_splits() is n_splits * n_repeats."""
    rkf = RepeatedKFold(3, 4)
    assert_equal(3 * 4, rkf.get_n_splits())
def test_get_n_splits_for_repeated_stratified_kfold():
    """RepeatedStratifiedKFold.get_n_splits() is n_splits * n_repeats."""
    rskf = RepeatedStratifiedKFold(3, 4)
    assert_equal(3 * 4, rskf.get_n_splits())
def test_repeated_stratified_kfold_determinstic_split():
    """With a fixed random_state, RepeatedStratifiedKFold must yield the
    same splits on every call to split()."""
    X = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
    y = [1, 1, 1, 0, 0]
    rskf = RepeatedStratifiedKFold(n_splits=2, n_repeats=2,
                                   random_state=1944695409)
    # (train, test) pairs expected for this seed, in order.
    expected = [([1, 4], [0, 2, 3]),
                ([0, 2, 3], [1, 4]),
                ([2, 3], [0, 1, 4]),
                ([0, 1, 4], [2, 3])]
    for _ in range(3):
        splits = rskf.split(X, y)
        for exp_train, exp_test in expected:
            train, test = next(splits)
            assert_array_equal(train, exp_train)
            assert_array_equal(test, exp_test)
        # Exactly n_splits * n_repeats iterations, no more.
        assert_raises(StopIteration, next, splits)
def test_train_test_split_errors():
    """Invalid argument combinations to train_test_split must raise."""
    assert_raises(ValueError, train_test_split)
    with warnings.catch_warnings():
        # JvR: Currently, a future warning is raised if test_size is not
        # given. As that is the point of this test, ignore the future warning
        warnings.filterwarnings("ignore", category=FutureWarning)
        # Size arguments that are out of range or of the wrong type.
        bad_size_kwargs = [
            dict(train_size=1.1),
            dict(test_size=0.6, train_size=0.6),
            dict(test_size=np.float32(0.6), train_size=np.float32(0.6)),
            dict(test_size="wrong_type"),
            dict(test_size=2, train_size=4),
        ]
        for kwargs in bad_size_kwargs:
            assert_raises(ValueError, train_test_split, range(3), **kwargs)
        # Unknown keyword argument.
        assert_raises(TypeError, train_test_split, range(3),
                      some_argument=1.1)
        # Arrays of inconsistent lengths.
        assert_raises(ValueError, train_test_split, range(3), range(42))
        # stratify requires shuffling.
        assert_raises(ValueError, train_test_split, range(10),
                      shuffle=False, stratify=True)
def test_train_test_split():
    """End-to-end behavior of train_test_split: X/y correspondence, list
    passthrough, nd-array support, stratification, and shuffle=False."""
    X = np.arange(100).reshape((10, 10))
    X_s = coo_matrix(X)
    y = np.arange(10)
    # Simple 50/50 split.
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=None, train_size=.5)
    assert_equal(len(y_test), len(y_train))
    # Rows of X stay aligned with their y labels.
    assert_array_equal(X_train[:, 0], y_train * 10)
    assert_array_equal(X_test[:, 0], y_test * 10)
    # Lists are passed through without conversion.
    split = train_test_split(X, X_s, y.tolist())
    X_train, X_test, X_s_train, X_s_test, y_train, y_test = split
    assert_true(isinstance(y_train, list))
    assert_true(isinstance(y_test, list))
    # nd-arrays of any rank are supported.
    X_4d = np.arange(10 * 5 * 3 * 2).reshape(10, 5, 3, 2)
    y_3d = np.arange(10 * 7 * 11).reshape(10, 7, 11)
    split = train_test_split(X_4d, y_3d)
    for part, expected_shape in zip(split, [(7, 5, 3, 2), (3, 5, 3, 2),
                                            (7, 7, 11), (3, 7, 11)]):
        assert_equal(part.shape, expected_shape)
    # Stratified splits respect the requested test size and class balance.
    y = np.array([1, 1, 1, 1, 2, 2, 2, 2])
    for test_size, exp_test_size in zip([2, 4, 0.25, 0.5, 0.75],
                                        [2, 4, 2, 4, 6]):
        train, test = train_test_split(y, test_size=test_size,
                                       stratify=y,
                                       random_state=0)
        assert_equal(len(test), exp_test_size)
        assert_equal(len(test) + len(train), len(y))
        # The 1:1 ratio of ones and twos in the data is preserved.
        assert_equal(np.sum(train == 1), np.sum(train == 2))
    # shuffle=False takes the tail as the test set, in order.
    y = np.arange(10)
    for test_size in [2, 0.2]:
        train, test = train_test_split(y, shuffle=False, test_size=test_size)
        assert_array_equal(test, [8, 9])
        assert_array_equal(train, [0, 1, 2, 3, 4, 5, 6, 7])
@ignore_warnings
def train_test_split_pandas():
    """train_test_split must hand dataframe inputs back with the same type."""
    # NOTE(review): the name lacks the "test_" prefix, so the test runner
    # will not collect this function -- presumably unintentional; confirm
    # before renaming.
    frame_types = [MockDataFrame]
    try:
        from pandas import DataFrame
    except ImportError:
        pass
    else:
        frame_types.append(DataFrame)
    for InputFeatureType in frame_types:
        # X dataframe
        X_df = InputFeatureType(X)
        X_train, X_test = train_test_split(X_df)
        assert_true(isinstance(X_train, InputFeatureType))
        assert_true(isinstance(X_test, InputFeatureType))
def train_test_split_sparse():
    """train_test_split must convert any scipy sparse input to CSR, as
    stated in its documentation."""
    # NOTE(review): the name lacks the "test_" prefix, so the test runner
    # will not collect this function -- presumably unintentional; confirm
    # before renaming.
    X = np.arange(100).reshape((10, 10))
    for make_sparse in (csr_matrix, csc_matrix, coo_matrix):
        X_train, X_test = train_test_split(make_sparse(X))
        assert_true(isinstance(X_train, csr_matrix))
        assert_true(isinstance(X_test, csr_matrix))
def train_test_split_mock_pandas():
    """train_test_split must preserve the MockDataFrame input type."""
    # NOTE(review): the name lacks the "test_" prefix, so the test runner
    # will not collect this function -- presumably unintentional; confirm
    # before renaming.
    X_df = MockDataFrame(X)
    X_train, X_test = train_test_split(X_df)
    assert_true(isinstance(X_train, MockDataFrame))
    assert_true(isinstance(X_test, MockDataFrame))
    X_train_arr, X_test_arr = train_test_split(X_df)
def train_test_split_list_input():
    """y given as a list of strings, an ndarray, or a list of floats must
    all split identically, with and without stratification."""
    # NOTE(review): the name lacks the "test_" prefix, so the test runner
    # will not collect this function -- presumably unintentional; confirm
    # before renaming.
    X = np.ones(7)
    y1 = ['1'] * 4 + ['0'] * 3
    y2 = np.hstack((np.ones(4), np.zeros(3)))
    y3 = y2.tolist()
    for stratify in (True, False):
        results = [train_test_split(X, yi,
                                    stratify=yi if stratify else None,
                                    random_state=0)
                   for yi in (y1, y2, y3)]
        X_train1, X_test1, y_train1, y_test1 = results[0]
        X_train2, X_test2, y_train2, y_test2 = results[1]
        X_train3, X_test3, y_train3, y_test3 = results[2]
        np.testing.assert_equal(X_train1, X_train2)
        np.testing.assert_equal(y_train2, y_train3)
        np.testing.assert_equal(X_test1, X_test3)
        np.testing.assert_equal(y_test3, y_test2)
@ignore_warnings
def test_shufflesplit_errors():
    """Float/invalid sizes fail at construction; integer sizes are checked
    against the input X and fail at split() time."""
    for kwargs in (dict(test_size=None, train_size=None),
                   dict(test_size=2.0),
                   dict(test_size=1.0),
                   dict(test_size=0.1, train_size=0.95),
                   dict(train_size=1j)):
        assert_raises(ValueError, ShuffleSplit, **kwargs)
    for kwargs in (dict(test_size=11),
                   dict(test_size=10),
                   dict(test_size=8, train_size=3)):
        assert_raises(ValueError, next, ShuffleSplit(**kwargs).split(X))
def test_shufflesplit_reproducible():
    """A fixed random_state makes repeated iteration deterministic."""
    ss = ShuffleSplit(random_state=21)
    first_pass = [train for train, _ in ss.split(X)]
    second_pass = [train for train, _ in ss.split(X)]
    assert_array_equal(first_pass, second_pass)
def test_stratifiedshufflesplit_list_input():
    """y given as a list of strings, an ndarray, or a list of floats must
    all produce identical stratified splits."""
    sss = StratifiedShuffleSplit(test_size=2, random_state=42)
    X = np.ones(7)
    y_str = ['1'] * 4 + ['0'] * 3
    y_arr = np.hstack((np.ones(4), np.zeros(3)))
    y_list = y_arr.tolist()
    reference = list(sss.split(X, y_arr))
    np.testing.assert_equal(list(sss.split(X, y_str)), reference)
    np.testing.assert_equal(list(sss.split(X, y_list)), reference)
def test_train_test_split_allow_nans():
    """NaNs in X must not prevent splitting."""
    X = np.arange(200, dtype=np.float64).reshape(10, -1)
    X[2, :] = np.nan
    y = np.repeat([0, 1], X.shape[0] / 2)
    # Must complete without raising.
    train_test_split(X, y, test_size=0.2, random_state=42)
def test_check_cv():
    """check_cv selects StratifiedKFold for classifiers with (multi)class
    targets, plain KFold otherwise, and rejects unknown cv specifiers."""
    def assert_same_splits(expected_cv, actual_cv, *args):
        # np.testing.assert_equal recursively compares nested lists.
        np.testing.assert_equal(list(expected_cv.split(*args)),
                                list(actual_cv.split(*args)))

    X = np.ones(9)
    assert_same_splits(KFold(3), check_cv(3, classifier=False), X)
    y_binary = np.array([0, 1, 0, 1, 0, 0, 1, 1, 1])
    assert_same_splits(StratifiedKFold(3),
                       check_cv(3, y_binary, classifier=True),
                       X, y_binary)
    y_multiclass = np.array([0, 1, 0, 1, 2, 1, 2, 0, 2])
    assert_same_splits(StratifiedKFold(3),
                       check_cv(3, y_multiclass, classifier=True),
                       X, y_multiclass)
    # A 2d column vector of classes is stratified as well ...
    y_multiclass_2d = y_multiclass.reshape(-1, 1)
    assert_same_splits(StratifiedKFold(3),
                       check_cv(3, y_multiclass_2d, classifier=True),
                       X, y_multiclass_2d)
    # ... and stratification really differs from plain KFold on this data.
    assert_false(np.all(
        next(StratifiedKFold(3).split(X, y_multiclass_2d))[0] ==
        next(KFold(3).split(X, y_multiclass_2d))[0]))
    # Multilabel and multioutput targets fall back to plain KFold.
    X = np.ones(5)
    y_multilabel = np.array([[0, 0, 0, 0], [0, 1, 1, 0], [0, 0, 0, 1],
                             [1, 1, 0, 1], [0, 0, 1, 0]])
    assert_same_splits(KFold(3),
                       check_cv(3, y_multilabel, classifier=True), X)
    y_multioutput = np.array([[1, 2], [0, 3], [0, 0], [3, 1], [2, 0]])
    assert_same_splits(KFold(3),
                       check_cv(3, y_multioutput, classifier=True), X)
    assert_raises(ValueError, check_cv, cv="lolo")
def test_cv_iterable_wrapper():
    """Wrapping a CV iterator in check_cv must make split() re-usable."""
    # NOTE: X and y are module-level fixtures defined earlier in this file.
    wrapped = check_cv(KFold(n_splits=5).split(X, y))
    # The wrapper stores the enumerated splits, so repeated calls agree.
    np.testing.assert_equal(list(wrapped.split(X, y)),
                            list(wrapped.split(X, y)))

    shuffled_wrapped = check_cv(KFold(n_splits=5, shuffle=True).split(X, y))
    # Even randomized splits are frozen once wrapped.
    np.testing.assert_equal(list(shuffled_wrapped.split(X, y)),
                            list(shuffled_wrapped.split(X, y)))

    # The shuffled wrapper's splits should differ from the deterministic ones.
    try:
        np.testing.assert_equal(list(wrapped.split(X, y)),
                                list(shuffled_wrapped.split(X, y)))
        splits_are_equal = True
    except AssertionError:
        splits_are_equal = False
    assert_false(splits_are_equal, "If the splits are randomized, "
                 "successive calls to split should yield different results")
def test_group_kfold():
    """GroupKFold must produce balanced folds with disjoint groups."""
    rng = np.random.RandomState(0)

    # Parameters of the test
    n_groups = 15
    n_samples = 1000
    n_splits = 5

    X = y = np.ones(n_samples)

    # Construct the test data
    tolerance = 0.05 * n_samples  # 5 percent error allowed
    groups = rng.randint(0, n_groups, n_samples)

    ideal_n_groups_per_fold = n_samples // n_splits
    # (removed a dead `len(np.unique(groups))` statement whose result was
    # discarded — it had no effect)

    # Get the test fold indices from the test set indices of each fold
    folds = np.zeros(n_samples)
    lkf = GroupKFold(n_splits=n_splits)
    for i, (_, test) in enumerate(lkf.split(X, y, groups)):
        folds[test] = i

    # Check that folds have approximately the same size
    assert_equal(len(folds), len(groups))
    for i in np.unique(folds):
        assert_greater_equal(tolerance,
                             abs(sum(folds == i) - ideal_n_groups_per_fold))

    # Check that each group appears only in 1 fold
    for group in np.unique(groups):
        assert_equal(len(np.unique(folds[groups == group])), 1)

    # Check that no group is on both sides of the split
    groups = np.asarray(groups, dtype=object)
    for train, test in lkf.split(X, y, groups):
        assert_equal(len(np.intersect1d(groups[train], groups[test])), 0)

    # Construct the test data (string-labelled groups this time)
    groups = np.array(['Albert', 'Jean', 'Bertrand', 'Michel', 'Jean',
                       'Francis', 'Robert', 'Michel', 'Rachel', 'Lois',
                       'Michelle', 'Bernard', 'Marion', 'Laura', 'Jean',
                       'Rachel', 'Franck', 'John', 'Gael', 'Anna', 'Alix',
                       'Robert', 'Marion', 'David', 'Tony', 'Abel', 'Becky',
                       'Madmood', 'Cary', 'Mary', 'Alexandre', 'David',
                       'Francis', 'Barack', 'Abdoul', 'Rasha', 'Xi', 'Silvia'])

    n_groups = len(np.unique(groups))
    n_samples = len(groups)
    n_splits = 5
    tolerance = 0.05 * n_samples  # 5 percent error allowed
    ideal_n_groups_per_fold = n_samples // n_splits

    X = y = np.ones(n_samples)

    # Get the test fold indices from the test set indices of each fold
    folds = np.zeros(n_samples)
    for i, (_, test) in enumerate(lkf.split(X, y, groups)):
        folds[test] = i

    # Check that folds have approximately the same size
    assert_equal(len(folds), len(groups))
    for i in np.unique(folds):
        assert_greater_equal(tolerance,
                             abs(sum(folds == i) - ideal_n_groups_per_fold))

    # Check that each group appears only in 1 fold
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        for group in np.unique(groups):
            assert_equal(len(np.unique(folds[groups == group])), 1)

    # Check that no group is on both sides of the split
    groups = np.asarray(groups, dtype=object)
    for train, test in lkf.split(X, y, groups):
        assert_equal(len(np.intersect1d(groups[train], groups[test])), 0)

    # groups can also be a list
    cv_iter = list(lkf.split(X, y, groups.tolist()))
    for (train1, test1), (train2, test2) in zip(lkf.split(X, y, groups),
                                                cv_iter):
        assert_array_equal(train1, train2)
        assert_array_equal(test1, test2)

    # Should fail if there are more folds than groups
    groups = np.array([1, 1, 1, 2, 2])
    X = y = np.ones(len(groups))
    assert_raises_regexp(ValueError, "Cannot have number of splits.*greater",
                         next, GroupKFold(n_splits=3).split(X, y, groups))
def test_time_series_cv():
    """TimeSeriesSplit must keep temporal ordering: train always precedes test."""
    X = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12], [13, 14]]

    # Should fail if there are more folds than samples
    assert_raises_regexp(ValueError, "Cannot have number of folds.*greater",
                         next,
                         TimeSeriesSplit(n_splits=7).split(X))

    tscv = TimeSeriesSplit(2)

    # Six samples: each training window is a growing prefix of the data.
    expected_six = [([0, 1], [2, 3]), ([0, 1, 2, 3], [4, 5])]
    for (train, test), (want_train, want_test) in zip(tscv.split(X[:-1]),
                                                      expected_six):
        assert_array_equal(train, want_train)
        assert_array_equal(test, want_test)

    # Seven samples: the first fold absorbs the remainder.
    expected_seven = [([0, 1, 2], [3, 4]), ([0, 1, 2, 3, 4], [5, 6])]
    for (train, test), (want_train, want_test) in zip(
            TimeSeriesSplit(2).split(X), expected_seven):
        assert_array_equal(train, want_train)
        assert_array_equal(test, want_test)

    # Check get_n_splits returns the correct number of splits
    n_splits_actual = len(list(TimeSeriesSplit(2).split(X)))
    assert_equal(n_splits_actual, tscv.get_n_splits())
    assert_equal(n_splits_actual, 2)
def _check_time_series_max_train_size(splits, check_splits, max_train_size):
    """Verify capped splits share test sets and keep only a training suffix.

    `splits` are the uncapped reference splits; `check_splits` come from a
    TimeSeriesSplit configured with `max_train_size`.
    """
    for (train, test), (capped_train, capped_test) in zip(splits, check_splits):
        assert_array_equal(test, capped_test)
        assert_true(len(capped_train) <= max_train_size)
        # The capped training window must be the *tail* of the full window.
        keep_from = max(len(train) - max_train_size, 0)
        assert_array_equal(capped_train, train[keep_from:])
def test_time_series_max_train_size():
    """TimeSeriesSplit(max_train_size=...) must cap every training window.

    BUG FIX: `splits` is a generator; the original code built it once and
    reused it for all three comparisons, so after the first call to
    `_check_time_series_max_train_size` it was exhausted and the later
    checks zipped against an empty iterator — i.e. they verified nothing.
    It is now re-created for each comparison.  The final case also passed
    max_train_size=2 to the checker while building the splits with
    max_train_size=5; the checker now receives the matching value.
    """
    X = np.zeros((6, 1))
    splits = TimeSeriesSplit(n_splits=3).split(X)
    check_splits = TimeSeriesSplit(n_splits=3, max_train_size=3).split(X)
    _check_time_series_max_train_size(splits, check_splits, max_train_size=3)

    # Test for the case where the size of a fold is greater than max_train_size
    splits = TimeSeriesSplit(n_splits=3).split(X)
    check_splits = TimeSeriesSplit(n_splits=3, max_train_size=2).split(X)
    _check_time_series_max_train_size(splits, check_splits, max_train_size=2)

    # Test for the case where the size of each fold is less than max_train_size
    splits = TimeSeriesSplit(n_splits=3).split(X)
    check_splits = TimeSeriesSplit(n_splits=3, max_train_size=5).split(X)
    _check_time_series_max_train_size(splits, check_splits, max_train_size=5)
@pytest.mark.filterwarnings('ignore: You should specify a value')  # 0.22
def test_nested_cv():
    """Nested cross-validation must work for every inner/outer CV pairing."""
    rng = np.random.RandomState(0)
    X, y = make_classification(n_samples=15, n_classes=2, random_state=0)
    groups = rng.randint(0, 5, 15)

    candidate_cvs = [LeaveOneGroupOut(), LeaveOneOut(), GroupKFold(),
                     StratifiedKFold(),
                     StratifiedShuffleSplit(n_splits=3, random_state=0)]
    for inner_cv, outer_cv in combinations_with_replacement(candidate_cvs, 2):
        searcher = GridSearchCV(Ridge(), param_grid={'alpha': [1, .1]},
                                cv=inner_cv, error_score='raise', iid=False)
        # Any failure inside the nested fit surfaces via error_score='raise'.
        cross_val_score(searcher, X=X, y=y, groups=groups, cv=outer_cv,
                        fit_params={'groups': groups})
def test_train_test_default_warning():
    """Leaving test_size unset must raise a FutureWarning everywhere."""
    for splitter_cls in (ShuffleSplit, GroupShuffleSplit,
                         StratifiedShuffleSplit):
        assert_warns(FutureWarning, splitter_cls, train_size=0.75)
    assert_warns(FutureWarning, train_test_split, range(3), train_size=0.75)
def test_nsplit_default_warn():
    # Test that warnings are raised. Will be removed in 0.22
    for splitter_cls in (KFold, GroupKFold, StratifiedKFold, TimeSeriesSplit):
        # Default n_splits warns; passing it explicitly must not.
        assert_warns_message(FutureWarning, NSPLIT_WARNING, splitter_cls)
        assert_no_warnings(splitter_cls, n_splits=5)
def test_check_cv_default_warn():
    # Test that warnings are raised. Will be removed in 0.22
    # check_cv must warn both when cv is omitted and when passed as None.
    for extra_args in ((), (None,)):
        assert_warns_message(FutureWarning, CV_WARNING, check_cv, *extra_args)
    assert_no_warnings(check_cv, cv=5)
def test_build_repr():
    """_build_repr must render the constructor signature with current values."""

    class MockSplitter:
        # Class name is part of the expected repr string below; keep it.
        def __init__(self, a, b=0, c=None):
            self.a = a
            self.b = b
            self.c = c

        def __repr__(self):
            return _build_repr(self)

    splitter = MockSplitter(5, 6)
    assert_equal(repr(splitter), "MockSplitter(a=5, b=6, c=None)")
|
vortex-ape/scikit-learn
|
sklearn/model_selection/tests/test_split.py
|
Python
|
bsd-3-clause
| 57,882 | 0.000017 |
# generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated pkg-config context for the commanding_velocity package.
# The odd-looking `"" != ""` conditionals are placeholders filled in by the
# catkin template engine at configure time — do not edit by hand.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []  # no exported include dirs
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')  # no catkin dependencies
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []  # no exported libraries
PROJECT_NAME = "commanding_velocity"
PROJECT_SPACE_DIR = "/home/computing/catkin_ws/install"
PROJECT_VERSION = "0.0.0"
|
lukeexton96/Robotics
|
catkin_ws/build/commanding_velocity/catkin_generated/pkg.installspace.context.pc.py
|
Python
|
gpl-3.0
| 386 | 0 |
import collections
import glob
import os
import re
import sys
import traceback
# Plugin-tracking state must survive an in-place re-exec of this module
# (reload.py reloads itself via eval/compile below), so only initialise
# these globals the first time the module body runs.
if 'mtimes' not in globals():
    mtimes = {}
if 'lastfiles' not in globals():
    lastfiles = set()
def make_signature(f):
    """Return a (filename, function name, first line) triple identifying *f*.

    Python 2 only: relies on the func_code/func_name attributes.
    """
    code = f.func_code
    return code.co_filename, f.func_name, code.co_firstlineno
def format_plug(plug, kind='', lpad=0):
    """Render one plugin hook as an aligned 'file:func:line  extra' line.

    *plug* is a (function, argument-dict) pair; *kind* selects which extra
    detail (command name, event list or regex) is appended in column 50.
    """
    out = ' ' * lpad + '{}:{}:{}'.format(*make_signature(plug[0]))
    extra = None
    if kind == 'command':
        extra = plug[1]['name']
    elif kind == 'event':
        extra = ', '.join(plug[1]['events'])
    elif kind == 'regex':
        extra = plug[1]['regex']
    if extra is not None:
        out += ' ' * (50 - len(out)) + extra
    return out
def reload(init=False):
    """(Re)load core modules and plugins whose files changed on disk.

    With init=True this performs the initial startup load: plugin registries
    on the global `bot` are created fresh and a syntax error in a core script
    aborts the process.  On later calls only files whose mtime changed are
    recompiled, and the command/event registries are rebuilt.
    Python 2 code (print statements, iteritems).
    """
    changed = False
    if init:
        bot.plugs = collections.defaultdict(list)
        bot.threads = {}
    # Core scripts are executed directly into this module's globals so that
    # editing a core file hot-patches the running bot.
    core_fileset = set(glob.glob(os.path.join("core", "*.py")))
    for filename in core_fileset:
        mtime = os.stat(filename).st_mtime
        if mtime != mtimes.get(filename):
            mtimes[filename] = mtime
            changed = True
            try:
                eval(compile(open(filename, 'U').read(), filename, 'exec'),
                     globals())
            except Exception:
                traceback.print_exc()
                if init:        # stop if there's an error (syntax?) in a core
                    sys.exit()  # script on startup
                continue
            # If reload.py itself changed, re-enter the freshly-loaded copy.
            if filename == os.path.join('core', 'reload.py'):
                reload(init=init)
                return
    fileset = set(glob.glob(os.path.join('plugins', '*.py')))
    # remove deleted/moved plugins
    for name, data in bot.plugs.iteritems():
        bot.plugs[name] = [x for x in data if x[0]._filename in fileset]
    for filename in list(mtimes):
        if filename not in fileset and filename not in core_fileset:
            mtimes.pop(filename)
    for func, handler in list(bot.threads.iteritems()):
        if func._filename not in fileset:
            handler.stop()
            del bot.threads[func]
    # compile new plugins
    for filename in fileset:
        output = ''
        mtime = os.stat(filename).st_mtime
        if mtime != mtimes.get(filename):
            mtimes[filename] = mtime
            changed = True
            # Plugins run in an isolated namespace; hooks are harvested below.
            try:
                code = compile(open(filename, 'U').read(), filename, 'exec')
                namespace = {}
                eval(code, namespace)
            except Exception:
                traceback.print_exc()
                continue
            # output = '<module class="module" name="{}">\n\t<info>{}</info>\n\t'.format(filename.replace(".py",""), filename.replace(".py","<span>.py</span>"))
            # remove plugins already loaded from this filename
            for name, data in bot.plugs.iteritems():
                bot.plugs[name] = [x for x in data
                                   if x[0]._filename != filename]
            for func, handler in list(bot.threads.iteritems()):
                if func._filename == filename:
                    handler.stop()
                    del bot.threads[func]
            for obj in namespace.itervalues():
                if hasattr(obj, '_hook'):  # check for magic
                    if obj._thread:
                        bot.threads[obj] = Handler(obj)
                    for type, data in obj._hook:
                        bot.plugs[type] += [data]
                        if not init:
                            # output+='<div class="command">{}</div>'.format(format_plug(data).replace('[','<opt>').replace(']','</opt>').replace('<','<req>').replace('>','</req>'))
                            print '### new plugin (type: %s) loaded:' % type, format_plug(data)
            # output += '</module>'
            # with open('index.txt', 'a') as file:
            #     file.write(u'{}\n'.format(output).encode('utf-8'))
    if changed:
        # Rebuild the flat command/event dispatch tables from bot.plugs.
        bot.commands = {}
        for plug in bot.plugs['command']:
            name = plug[1]['name'].lower()
            if not re.match(r'^\w+$', name):
                print '### ERROR: invalid command name "{}" ({})'.format(name, format_plug(plug))
                continue
            if name in bot.commands:
                print "### ERROR: command '{}' already registered ({}, {})".format(name,
                                                                                   format_plug(bot.commands[name]),
                                                                                   format_plug(plug))
                continue
            bot.commands[name] = plug
        bot.events = collections.defaultdict(list)
        for func, args in bot.plugs['event']:
            for event in args['events']:
                bot.events[event].append((func, args))
    if init:
        # Startup-only summary listing of everything that was loaded.
        print ' plugin listing:'
        if bot.commands:
            # hack to make commands with multiple aliases
            # print nicely
            print ' command:'
            commands = collections.defaultdict(list)
            for name, (func, args) in bot.commands.iteritems():
                commands[make_signature(func)].append(name)
            for sig, names in sorted(commands.iteritems()):
                names.sort(key=lambda x: (-len(x), x))  # long names first
                out = ' ' * 6 + '%s:%s:%s' % sig
                out += ' ' * (50 - len(out)) + ', '.join(names)
                print out
        for kind, plugs in sorted(bot.plugs.iteritems()):
            if kind == 'command':
                continue
            print ' {}:'.format(kind)
            for plug in plugs:
                try:
                    print format_plug(plug, kind=kind, lpad=6)
                except UnicodeEncodeError:
                    pass
        print
|
FrozenPigs/Taigabot
|
core/reload.py
|
Python
|
gpl-3.0
| 5,824 | 0.001545 |
import datetime
import logging
import hashlib
import random
import sys
import pymongo
import scrapelib
from .mongolog import MongoHandler
from .storage import engines
from celery.execute import send_task
class Kernel(object):
    """ oyster's workhorse, handles tracking

    Owns the MongoDB connection, the scrapelib scraper and the configured
    storage engines; documents live in the `tracked` collection.
    Python 2 code (iteritems, sys.maxint, pymongo.Connection).
    """

    def __init__(self, mongo_host='localhost', mongo_port=27017,
                 mongo_db='oyster', mongo_log_maxsize=100000000,
                 user_agent='oyster', rpm=60, timeout=300,
                 retry_attempts=3, retry_wait_minutes=60,
                 doc_classes=None, default_storage_engine='dummy',
                 ):
        """
        configurable for ease of testing, only one should be instantiated
        """
        # set up the log
        self.db = pymongo.Connection(mongo_host, mongo_port)[mongo_db]
        self.log = logging.getLogger('oyster')
        self.log.setLevel(logging.DEBUG)
        # Log records are persisted into a capped Mongo collection.
        self.log.addHandler(MongoHandler(mongo_db, host=mongo_host,
                                         port=mongo_port,
                                         capped_size=mongo_log_maxsize))
        # create status document if it doesn't exist
        if self.db.status.count() == 0:
            self.db.status.insert({'update_queue': 0})
        # ensure an index on _random
        self.db.tracked.ensure_index('_random')
        self.db.tracked.ensure_index('url')
        self.scraper = scrapelib.Scraper(user_agent=user_agent,
                                         requests_per_minute=rpm,
                                         follow_robots=False,
                                         raise_errors=True,
                                         timeout=timeout)
        self.retry_attempts = retry_attempts
        self.retry_wait_minutes = retry_wait_minutes
        # load engines
        self.storage = {}
        for name, StorageCls in engines.iteritems():
            self.storage[name] = StorageCls(self)
        # set document classes: every doc_class must declare update_mins and
        # onchanged; storage_engine is defaulted if absent.
        _doc_class_fields = ('update_mins', 'onchanged')
        self.doc_classes = doc_classes or {}
        for dc_name, dc_props in self.doc_classes.iteritems():
            for key in _doc_class_fields:
                if key not in dc_props:
                    raise ValueError('doc_class %s missing key %s' %
                                     (dc_name, key))
            # set a default storage engine
            if 'storage_engine' not in dc_props:
                dc_props['storage_engine'] = default_storage_engine

    def _wipe(self):
        """ exists primarily for debug use, wipes entire db """
        self.db.drop_collection('tracked')
        self.db.drop_collection('logs')
        self.db.drop_collection('status')

    def _add_doc_class(self, doc_class, **properties):
        # Test helper: register a doc_class after construction (no validation).
        self.doc_classes[doc_class] = properties

    def track_url(self, url, doc_class, id=None, **kwargs):
        """
        Add a URL to the set of tracked URLs, accessible via a given filename.

        url
            URL to start tracking
        doc_class
            document type, can be any arbitrary string
        **kwargs
            any keyword args will be added to the document's metadata

        Returns the tracked document's _id.  Raises ValueError for an
        unregistered doc_class or if the id/url already exists with
        conflicting url/doc_class.
        """
        if doc_class not in self.doc_classes:
            error = 'error tracking %s, unregistered doc_class %s'
            self.log.error(error, url, doc_class)
            raise ValueError(error % (url, doc_class))
        # try and find an existing version of this document
        tracked = None
        if id:
            tracked = self.db.tracked.find_one({'_id': id})
        else:
            tracked = self.db.tracked.find_one({'url': url})
        # if id exists, ensure that URL and doc_class haven't changed
        # then return existing data (possibly with refreshed metadata)
        if tracked:
            if (tracked['url'] == url and
                    tracked['doc_class'] == doc_class):
                if kwargs != tracked['metadata']:
                    tracked['metadata'] = kwargs
                    self.db.tracked.save(tracked, safe=True)
                return tracked['_id']
            else:
                # id existed but with different URL
                message = ('%s already exists with different data (tracked: '
                           '%s, %s) (new: %s, %s)')
                args = (tracked['_id'], tracked['url'], tracked['doc_class'],
                        url, doc_class)
                self.log.error(message, *args)
                raise ValueError(message % args)
        self.log.info('tracked %s [%s]', url, id)
        # _random supports semirandom queue ordering (see get_update_queue).
        newdoc = dict(url=url, doc_class=doc_class,
                      _random=random.randint(0, sys.maxint),
                      versions=[], metadata=kwargs)
        if id:
            newdoc['_id'] = id
        return self.db.tracked.insert(newdoc, safe=True)

    def md5_versioning(self, olddata, newdata):
        """ return True if md5 changed or if file is new """
        old_md5 = hashlib.md5(olddata).hexdigest()
        new_md5 = hashlib.md5(newdata).hexdigest()
        return old_md5 != new_md5

    def update(self, doc):
        """
        perform update upon a given document

        :param:`doc` must be a document from the `tracked` collection

        * download latest document
        * check if document has changed using versioning func
        * if a change has occurred save the file
        * if error occured, log & keep track of how many errors in a row
        * update last_update/next_update timestamp
        """
        new_version = True
        error = False
        now = datetime.datetime.utcnow()
        try:
            doc_class = self.doc_classes[doc['doc_class']]
        except KeyError:
            raise ValueError('unregistered doc_class %s' % doc['doc_class'])
        update_mins = doc_class['update_mins']
        storage = self.storage[doc_class['storage_engine']]
        # fetch strategies could be implemented here as well
        try:
            url = doc['url'].replace(' ', '%20')
            newdata = self.scraper.urlopen(url)
            content_type = newdata.response.headers['content-type']
        except Exception as e:
            new_version = False
            error = str(e)
        # only do versioning check if at least one version exists
        if new_version and doc['versions']:
            # room here for different versioning schemes
            olddata = storage.get(doc['versions'][-1]['storage_key'])
            new_version = self.md5_versioning(olddata, newdata)
        if new_version:
            storage_id = storage.put(doc, newdata, content_type)
            doc['versions'].append({'timestamp': now,
                                    'storage_key': storage_id,
                                    'storage_type': storage.storage_type,
                                    })
            # fire off onchanged functions
            for onchanged in doc_class.get('onchanged', []):
                send_task(onchanged, (doc['_id'],))
        if error:
            # if there's been an error, increment the consecutive_errors count
            # and back off a bit until we've reached our retry limit
            c_errors = doc.get('consecutive_errors', 0)
            doc['consecutive_errors'] = c_errors + 1
            if c_errors <= self.retry_attempts:
                # exponential backoff on repeated failures
                update_mins = self.retry_wait_minutes * (2 ** c_errors)
        else:
            # reset error count if all was ok
            doc['consecutive_errors'] = 0
        # last_update/next_update are separate from question of versioning
        doc['last_update'] = now
        if update_mins:
            doc['next_update'] = now + datetime.timedelta(minutes=update_mins)
        else:
            doc['next_update'] = None
        if error:
            self.log.warning('error updating %s [%s]', url, doc['_id'])
        else:
            # NOTE(review): unconditionally appends ' (new)' to the log line;
            # presumably this was meant to depend on new_version — confirm.
            new_version = ' (new)'
            self.log.info('updated %s [%s]%s', url, doc['_id'], new_version)
        self.db.tracked.save(doc, safe=True)

    def get_update_queue(self):
        """
        Get a list of what needs to be updated.

        Documents that have never been updated take priority, followed by
        documents that are simply stale.  Within these two categories results
        are sorted in semirandom order to decrease odds of piling on one
        server.
        """
        # results are always sorted by random to avoid piling on single server
        # first we try to update anything that we've never retrieved
        new = self.db.tracked.find({'next_update':
                                    {'$exists': False}}).sort('_random')
        queue = list(new)
        # pull the rest from those for which next_update is in the past
        next = self.db.tracked.find({'$and': [
            {'next_update': {'$ne': None}},
            {'next_update': {'$lt': datetime.datetime.utcnow()}},
        ]}).sort('_random')
        queue.extend(next)
        return queue

    def get_update_queue_size(self):
        """
        Get the size of the update queue, this should match
        ``len(self.get_update_queue())``, but is computed more efficiently.
        """
        new = self.db.tracked.find({'next_update': {'$exists': False}}).count()
        next = self.db.tracked.find({'$and': [
            {'next_update': {'$ne': None}},
            {'next_update': {'$lt': datetime.datetime.utcnow()}},
        ]}).count()
        return new + next

    def get_last_version(self, doc):
        # Fetch the raw bytes of the most recent stored version of *doc*.
        try:
            doc_class = self.doc_classes[doc['doc_class']]
        except KeyError:
            raise ValueError('unregistered doc_class %s' % doc['doc_class'])
        storage = self.storage[doc_class['storage_engine']]
        return storage.get(doc['versions'][-1]['storage_key'])

    def extract_text(self, doc):
        # Delegate text extraction to the doc_class-supplied callable.
        version = self.get_last_version(doc)
        doc_class = self.doc_classes[doc['doc_class']]
        try:
            extract_text = doc_class['extract_text']
        except KeyError:
            raise ValueError('doc_class %s missing extract_text' %
                             doc['doc_class'])
        return extract_text(doc, version)
def _get_configured_kernel():
    """ factory, gets a connection configured with oyster.conf.settings """
    # Local import so settings are resolved lazily, at kernel creation time.
    from oyster.conf import settings
    return Kernel(mongo_host=settings.MONGO_HOST,
                  mongo_port=settings.MONGO_PORT,
                  mongo_db=settings.MONGO_DATABASE,
                  mongo_log_maxsize=settings.MONGO_LOG_MAXSIZE,
                  user_agent=settings.USER_AGENT,
                  rpm=settings.REQUESTS_PER_MINUTE,
                  timeout=settings.REQUEST_TIMEOUT,
                  retry_attempts=settings.RETRY_ATTEMPTS,
                  retry_wait_minutes=settings.RETRY_WAIT_MINUTES,
                  doc_classes=settings.DOCUMENT_CLASSES,
                  default_storage_engine=settings.DEFAULT_STORAGE_ENGINE,
                  )


# Module-level singleton: importing this module connects to MongoDB.
kernel = _get_configured_kernel()
|
jamesturk/oyster
|
oyster/core.py
|
Python
|
bsd-3-clause
| 10,992 | 0.000455 |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
import unittest
from iptest.type_util import *
from iptest import run_test
class ComplexTest(unittest.TestCase):
    """Parity tests for the complex() builtin (IronPython vs CPython)."""

    def test_from_string(self):
        # complex from string: negative
        # - space related
        l = ['1.2', '.3', '4e3', '.3e-4', "0.031"]
        for x in l:
            for y in l:
                # Whitespace between sign/operator and a component is invalid...
                self.assertRaises(ValueError, complex, "%s +%sj" % (x, y))
                self.assertRaises(ValueError, complex, "%s+ %sj" % (x, y))
                self.assertRaises(ValueError, complex, "%s - %sj" % (x, y))
                self.assertRaises(ValueError, complex, "%s- %sj" % (x, y))
                self.assertRaises(ValueError, complex, "%s-\t%sj" % (x, y))
                self.assertRaises(ValueError, complex, "%sj+%sj" % (x, y))
                # ...but whitespace around the whole literal is accepted.
                self.assertEqual(complex(" %s+%sj" % (x, y)), complex(" %s+%sj " % (x, y)))

    def test_misc(self):
        # mycomplex comes from iptest.type_util — presumably a complex
        # subclass used to exercise subclass arithmetic; confirm there.
        self.assertEqual(mycomplex(), complex())
        a = mycomplex(1)
        b = mycomplex(1,0)
        c = complex(1)
        d = complex(1,0)
        for x in [a,b,c,d]:
            for y in [a,b,c,d]:
                self.assertEqual(x,y)
        # Identity-style arithmetic against the zero complex.
        self.assertEqual(a ** 2, a)
        self.assertEqual(a-complex(), a)
        self.assertEqual(a+complex(), a)
        self.assertEqual(complex()/a, complex())
        self.assertEqual(complex()*a, complex())
        self.assertEqual(complex()%a, complex())
        self.assertEqual(complex() // a, complex())
        self.assertEqual(complex(2), complex(2, 0))

    def test_inherit(self):
        # Subclassing complex must preserve real/imag parts.
        class mycomplex(complex): pass
        a = mycomplex(2+1j)
        self.assertEqual(a.real, 2)
        self.assertEqual(a.imag, 1)

    def test_repr(self):
        self.assertEqual(repr(1-6j), '(1-6j)')

    def test_infinite(self):
        # Overflow to infinity must render as (-)infj.
        self.assertEqual(repr(1.0e340j), 'infj')
        self.assertEqual(repr(-1.0e340j),'-infj')


run_test(__name__)
|
slozier/ironpython2
|
Tests/test_complex.py
|
Python
|
apache-2.0
| 2,104 | 0.008555 |
from __future__ import division
from __future__ import print_function
import logging
import os
import sys
import time
from redis import StrictRedis
from redis_lock import Lock
from conf import TIMEOUT
from conf import UDS_PATH
if __name__ == '__main__':
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(process)d %(asctime)s,%(msecs)05d %(name)s %(levelname)s %(message)s',
        datefmt="%x~%X"
    )
    # The parent test process spawns this helper with the scenario name as
    # the first CLI argument; each branch exercises one locking behaviour
    # against the redis instance listening on UDS_PATH.
    test_name = sys.argv[1]
    if test_name == 'test_simple':
        conn = StrictRedis(unix_socket_path=UDS_PATH)
        with Lock(conn, "foobar"):
            time.sleep(0.1)
    elif test_name == 'test_no_block':
        conn = StrictRedis(unix_socket_path=UDS_PATH)
        lock = Lock(conn, "foobar")
        # Non-blocking acquire; the parent reads the result from the log.
        res = lock.acquire(blocking=False)
        logging.info("acquire=>%s", res)
    elif test_name == 'test_timeout':
        conn = StrictRedis(unix_socket_path=UDS_PATH)
        with Lock(conn, "foobar"):
            time.sleep(1)
    elif test_name == 'test_expire':
        conn = StrictRedis(unix_socket_path=UDS_PATH)
        with Lock(conn, "foobar", expire=TIMEOUT/4):
            time.sleep(0.1)
        with Lock(conn, "foobar", expire=TIMEOUT/4):
            time.sleep(0.1)
    elif test_name == 'test_no_overlap':
        from sched import scheduler
        sched = scheduler(time.time, time.sleep)
        start = time.time() + TIMEOUT/2
        # the idea is to start all the lock at the same time - we use the scheduler to start everything in TIMEOUT/2 seconds, by
        # that time all the forks should be ready

        def cb_no_overlap():
            # Reads the `conn` each forked child assigns just before sched.run().
            with Lock(conn, "foobar"):
                time.sleep(0.001)

        sched.enterabs(start, 0, cb_no_overlap, ())
        pids = []
        # Fork 125 children that all contend for the same lock at `start`.
        for _ in range(125):
            pid = os.fork()
            if pid:
                pids.append(pid)
            else:
                try:
                    conn = StrictRedis(unix_socket_path=UDS_PATH)
                    sched.run()
                finally:
                    # _exit avoids running parent cleanup in the child.
                    os._exit(0)
        for pid in pids:
            os.waitpid(pid, 0)
    else:
        raise RuntimeError('Invalid test spec %r.' % test_name)
    logging.info('DIED.')
|
ionelmc/python-redis-lock
|
tests/helper.py
|
Python
|
bsd-2-clause
| 2,211 | 0.000905 |
from planning.models import TeachingCapability, PlanningCourse
from courselib.auth import requires_role
from coredata.models import Person
from django.shortcuts import render_to_response
from django.template import RequestContext
@requires_role('PLAN')
def view_capabilities(request):
    """List teaching capabilities by instructor and by course for the
    planner's unit(s).

    Renders planning/view_capabilities.html with:
      * capabilities_list: (Person, TeachingCapability queryset) pairs
      * course_capabilities_list: (PlanningCourse, queryset) pairs

    BUG FIX: the original returned `render(...)`, but only
    `render_to_response` is imported in this module, so every request
    raised NameError.  Restored the call to match the import.
    """
    instructors = Person.objects.filter(role__role__in=["FAC", "SESS", "COOP"],
                                        role__unit__in=request.units)
    instructor_capabilities = [TeachingCapability.objects.filter(instructor=i)
                               for i in instructors]
    capabilities_list = list(zip(instructors, instructor_capabilities))

    courses = PlanningCourse.objects.filter(owner__in=request.units)
    course_capabilities = [TeachingCapability.objects.filter(course=c)
                           for c in courses]
    course_capabilities_list = list(zip(courses, course_capabilities))

    return render_to_response("planning/view_capabilities.html",
                              {'capabilities_list': capabilities_list,
                               'course_capabilities_list': course_capabilities_list},
                              context_instance=RequestContext(request))
|
sfu-fas/coursys
|
oldcode/planning/views/view_capabilities.py
|
Python
|
gpl-3.0
| 1,166 | 0.002573 |
# Post-release packaging steps (Python 2): strip test/VCS files, bootstrap
# the autotools configure scripts and pre-compile the Java block-processing
# classes.  `conf` and `rmrf` are provided by the surrounding mat2doc driver
# script — presumably conf.t.dir is the staged release tree; confirm there.
print "Creating downloadable package"

# Remove unwanted files
s=os.path.join(conf.t.dir,'timing')
rmrf(s)
os.rmdir(s)

# Recursively remove the .git files
# (topdown=False so files are removed before their directories are visited)
for root, dirs, files in os.walk(conf.t.dir, topdown=False):
    for name in files:
        if name in ['.gitattributes','.gitignore','desktop.ini']:
            os.remove(os.path.join(root, name))

# "bootstrap" the configure files
os.system("cd "+conf.t.dir+"/src; ./bootstrap")
s=os.path.join(conf.t.dir,'src','autom4te.cache')
rmrf(s)
os.rmdir(s)

# Compile the Java classes
os.system("cd "+conf.t.dir+"/blockproc/java; make")
os.system("cd "+conf.t.dir+"/blockproc/java; make classclean")
|
hagenw/ltfat
|
mat2doc/mat/release_keep_tests.py
|
Python
|
gpl-3.0
| 654 | 0.010703 |
# Copyright (C) 2009 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tests for checking of trees."""
from bzrlib.tests.per_workingtree import TestCaseWithWorkingTree
from bzrlib.workingtree import InventoryWorkingTree
from bzrlib.tests import TestNotApplicable
class TestCheck(TestCaseWithWorkingTree):
    """Tests for WorkingTree._get_check_refs/_check on inventory trees."""

    def test__get_check_refs_new(self):
        # A freshly created tree has the null revision as its only check ref.
        tree = self.make_branch_and_tree('tree')
        if not isinstance(tree, InventoryWorkingTree):
            raise TestNotApplicable(
                "_get_check_refs only relevant for inventory working trees")
        self.assertEqual(set([('trees', 'null:')]),
                         set(tree._get_check_refs()))

    def test__get_check_refs_basis(self):
        # with a basis, all current bzr trees cache it and so need the
        # inventory to cross-check.
        tree = self.make_branch_and_tree('tree')
        if not isinstance(tree, InventoryWorkingTree):
            raise TestNotApplicable(
                "_get_check_refs only relevant for inventory working trees")
        revid = tree.commit('first post')
        self.assertEqual(set([('trees', revid)]),
                         set(tree._get_check_refs()))

    def test__check_with_refs(self):
        # _check can be called with a dict of the things required.
        tree = self.make_branch_and_tree('tree')
        if not isinstance(tree, InventoryWorkingTree):
            raise TestNotApplicable(
                "_get_check_refs only relevant for inventory working trees")
        tree.lock_write()
        self.addCleanup(tree.unlock)
        revid = tree.commit('first post')
        needed_refs = tree._get_check_refs()
        repo = tree.branch.repository
        # Resolve each required ref to its revision tree before checking.
        for ref in needed_refs:
            kind, revid = ref
            refs = {}
            if kind == 'trees':
                refs[ref] = repo.revision_tree(revid)
            else:
                self.fail('unknown ref kind')
        tree._check(refs)
|
Distrotech/bzr
|
bzrlib/tests/per_workingtree/test_check.py
|
Python
|
gpl-2.0
| 2,606 | 0.000767 |
import logging

# The Azure storage client is very chatty at INFO level; only surface
# warnings and above from it.
_AZURE_STORAGE_LOGGER = 'azure.storage.common.storageclient'
logging.getLogger(_AZURE_STORAGE_LOGGER).setLevel(logging.WARNING)
|
ewbankkit/cloud-custodian
|
tools/c7n_mailer/c7n_mailer/azure_mailer/__init__.py
|
Python
|
apache-2.0
| 101 | 0.009901 |
from __future__ import absolute_import
from builtins import str
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.Processing import Processing
from processing.core.ProcessingLog import ProcessingLog
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.parameters.ParameterBoolean import ParameterBoolean
from processing.parameters.ParameterCrs import ParameterCrs
from processing.parameters.ParameterExtent import ParameterExtent
from processing.parameters.ParameterMultipleInput import ParameterMultipleInput
from processing.parameters.ParameterNumber import ParameterNumber
from processing.parameters.ParameterRaster import ParameterRaster
from processing.parameters.ParameterSelection import ParameterSelection
from processing.parameters.ParameterString import ParameterString
from processing.parameters.ParameterTable import ParameterTable
from processing.parameters.ParameterVector import ParameterVector
from processing.parameters.ParameterFile import ParameterFile
from processing.outputs.OutputRaster import OutputRaster
from processing.outputs.OutputVector import OutputVector
from processing.outputs.OutputString import OutputString
from processing.outputs.OutputFactory import OutputFactory
from processing.tools import dataobjects
from wps.wpslib.wpsserver import WpsServer
from wps.wpslib.processdescription import ProcessDescription
from wps.wpslib.processdescription import getFileExtension,isMimeTypeVector,isMimeTypeRaster,isMimeTypeText,isMimeTypeFile
from wps.wpslib.processdescription import StringInput, TextInput, SelectionInput, VectorInput, MultipleVectorInput, RasterInput, MultipleRasterInput, FileInput, MultipleFileInput, ExtentInput, CrsInput, VectorOutput, RasterOutput, StringOutput
from wps.wpslib.executionrequest import ExecutionRequest
from wps.wpslib.executionrequest import createTmpGML
from wps.wpslib.executionresult import ExecutionResult
from qgis.PyQt import QtGui
from PyQt4.QtCore import *
from qgis.PyQt.QtWidgets import qApp, QApplication, QMessageBox
import os
class WpsAlgorithm(GeoAlgorithm):
    def __init__(self, process, bookmark = False):
        # process: WPS ProcessDescription stub for one server-side process;
        # bookmark: True when the algorithm comes from the bookmarks group
        # rather than a server scan.
        self.process = process
        self.bookmark = bookmark
        GeoAlgorithm.__init__(self) #calls defineCharacteristics
    def defineCharacteristics(self):
        """Set name/group and build the parameter dialog from the (cached)
        WPS DescribeProcess response."""
        self.name = str(self.process.identifier)
        if self.bookmark:
            self.group = "Bookmarks"
        else:
            self.group = WpsAlgorithm.groupName(self.process.server)
        self.loadProcessDescription()
        self.buildParametersDialog()
    def getIcon(self):
        # Icon shipped next to this module, shown in the Processing toolbox.
        return QtGui.QIcon(os.path.dirname(__file__) + "/../images/wps.png")
@staticmethod
def groupName(server):
return "WPS %s" % server.connectionName
    def loadProcessDescription(self):
        """Load the DescribeProcess document, fetching and caching it on
        first use; a failed fetch is cached as empty to avoid retrying at
        every startup."""
        #retrieve and save if not saved before
        if not os.path.exists(self.process.processDescriptionFile(self.wpsDescriptionFolder())):
            self.getProcessDescription()
            if self.process.identifier == None or self.process.identifier == "":
                #Error reading description
                self.process.processXML = '' #Save empty description to prevent retry at next startup
                self.process.saveDescription(self.wpsDescriptionFolder())
        #load from file
        self.process.loadDescription(self.wpsDescriptionFolder())
def wpsDescriptionFolder(self):
from .WpsAlgorithmProvider import WpsAlgorithmProvider
return WpsAlgorithmProvider.WpsDescriptionFolder()
def getProcessDescription(self):
self.process.requestDescribeProcess()
#Wait for answer
while not self.process.loaded():
qApp.processEvents()
def buildParametersDialog(self):
for input in self.process.inputs:
inputType = type(input)
if inputType == VectorInput:
self.addParameter(ParameterVector(str(input.identifier), str(input.title), ParameterVector.VECTOR_TYPE_ANY, input.minOccurs == 0))
elif inputType == MultipleVectorInput:
self.addParameter(ParameterMultipleInput(str(input.identifier), str(input.title), ParameterVector.VECTOR_TYPE_ANY, input.minOccurs == 0))
elif inputType == StringInput:
self.addParameter(ParameterString(str(input.identifier), str(input.title)))
elif inputType == TextInput:
self.addParameter(ParameterString(str(input.identifier), str(input.title)))
elif inputType == RasterInput:
self.addParameter(ParameterRaster(str(input.identifier), str(input.title), input.minOccurs == 0))
elif inputType == MultipleRasterInput:
self.addParameter(ParameterMultipleInput(str(input.identifier), str(input.title), ParameterMultipleInput.TYPE_RASTER, input.minOccurs == 0))
elif inputType == FileInput:
#self.addParameter(ParameterFile(str(input.identifier), str(input.title), False, input.minOccurs == 0))
self.addParameter(ParameterFile(str(input.identifier), str(input.title)))
elif inputType == MultipleFileInput:
pass #Not supported
elif inputType == SelectionInput:
self.addParameter(ParameterSelection(str(input.identifier), str(input.title), input.valList))
elif inputType == ExtentInput:
self.addParameter(ParameterExtent(str(input.identifier), str(input.title)))
elif inputType == CrsInput:
self.addParameter(ParameterCrs(str(input.identifier), "Projection", None))
for output in self.process.outputs:
outputType = type(output)
if outputType == VectorOutput:
self.addOutput(OutputVector(str(output.identifier), str(output.title)))
elif outputType == RasterOutput:
self.addOutput(OutputRaster(str(output.identifier), str(output.title)))
elif outputType == StringOutput:
self.addOutput(OutputString(str(output.identifier), str(output.title)))
def defineProcess(self):
"""Create the execute request"""
request = ExecutionRequest(self.process)
request.addExecuteRequestHeader()
# inputs
useSelected = False
request.addDataInputsStart()
for input in self.process.inputs:
inputType = type(input)
value = self.getParameterValue(input.identifier)
if inputType == VectorInput:
layer = dataobjects.getObjectFromUri(value, False)
if layer is None:
raise Exception("Couldn't extract layer for parameter '%s' from '%s'" % (input.identifier, value))
mimeType = input.dataFormat["MimeType"]
data = createTmpGML(layer, useSelected, mimeType)
request.addGeometryInput(input.identifier, mimeType, input.dataFormat["Schema"], input.dataFormat["Encoding"], data, useSelected)
elif inputType == MultipleVectorInput:
#ParameterMultipleInput(input.identifier, input.title, ParameterVector.VECTOR_TYPE_ANY, input.minOccurs == 0))
pass
elif inputType == StringInput:
request.addLiteralDataInput(input.identifier, str(value))
elif inputType == TextInput:
request.addLiteralDataInput(input.identifier, str(value))
elif inputType == RasterInput:
layer = dataobjects.getObjectFromUri(value, False)
mimeType = input.dataFormat["MimeType"]
request.addGeometryBase64Input(input.identifier, mimeType, layer)
elif inputType == MultipleRasterInput:
#ParameterMultipleInput(input.identifier, input.title, ParameterVector.TYPE_RASTER, input.minOccurs == 0))
pass
elif inputType == FileInput:
mimeType = input.dataFormat["MimeType"]
request.addFileBase64Input(input.identifier, mimeType, value)
elif inputType == SelectionInput:
#Value is dropdown index
param = self.getParameterFromName(input.identifier)
strval = str(param.options[int(value)])
request.addLiteralDataInput(input.identifier, strval)
elif inputType == ExtentInput:
#ParameterExtent("EXTENT","EXTENT"))
pass
elif inputType == CrsInput:
#ParameterCrs("CRS", "CRS"))
pass
#TODO: "selcetion only" checkbox
request.addDataInputsEnd()
# outputs
request.addResponseFormStart()
for output in self.process.outputs:
outputType = type(output)
if outputType == StringOutput:
request.addLiteralDataOutput(output.identifier)
elif outputType == VectorOutput or outputType == RasterOutput:
mimeType = output.dataFormat["MimeType"]
schema = output.dataFormat["Schema"]
encoding = output.dataFormat["Encoding"]
request.addReferenceOutput(output.identifier, mimeType, schema, encoding)
request.addResponseFormEnd()
request.addExecuteRequestEnd()
return request.request
def processAlgorithm(self, progress):
postString = self.defineProcess()
qDebug(postString)
self.wps = ExecutionResult(self.getLiteralResult, self.getResultFile, self.errorResult, None)
self.wps.executeProcess(self.process.processUrl, postString)
#Wait for answer
while not self.wps.finished():
qApp.processEvents()
def getLiteralResult(self, identifier, literalText):
self.setOutputValue(identifier, literalText)
ProcessingLog.addToLog(ProcessingLog.LOG_INFO, identifier + ": " + literalText)
def getResultFile(self, identifier, mimeType, encoding, schema, reply):
# Get a unique temporary file name
myQTempFile = QTemporaryFile()
myQTempFile.open()
ext = getFileExtension(mimeType)
tmpFile = str(myQTempFile.fileName() + ext,'latin1')
myQTempFile.close()
# Write the data to the temporary file
outFile = QFile(tmpFile)
outFile.open(QIODevice.WriteOnly)
outFile.write(reply.readAll())
outFile.close()
resultFile = self.wps.handleEncoded(tmpFile, mimeType, encoding, schema)
# Finally, load the data
self.loadData(resultFile, mimeType, identifier)
def loadData(self, resultFile, mimeType, identifier):
# Vector data
# TODO: Check for schema GML and KML
if isMimeTypeVector(mimeType) != None:
self.setOutputValue(identifier, resultFile)
# Raster data
elif isMimeTypeRaster(mimeType) != None:
self.setOutputValue(identifier, resultFile)
# Text data
elif isMimeTypeText(mimeType) != None:
text = open(resultFile, 'r').read()
self.setOutputValue(identifier, text)
# Everything else
elif isMimeTypeFile(mimeType) != None:
text = open(resultFile, 'r').read()
self.setOutputValue(identifier, text)
# Everything else
else:
# For unsupported mime types we assume text
content = open(resultFile, 'r').read()
# TODO: This should have a safe option
QMessageBox.information(None, QCoreApplication.translate("QgsWps", 'Process result (unsupported mime type)'), content)
def errorResult(self, exceptionHtml):
QMessageBox.critical(None, "Exception report", exceptionHtml)
#ProcessingLog.addToLog(ProcessingLog.LOG_ERROR, exceptionHtml)
#raise GeoAlgorithmExecutionException("Exception report\n" + exceptionHtml)
|
sourcepole/qgis-wps-client
|
processingwps/WpsAlgorithm.py
|
Python
|
gpl-2.0
| 11,916 | 0.005874 |
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for implementing the Coordinate search."""
import os
from string import Template
import sys
from search.common import exceptions
from search.common import geconstants
from search.common import utils
from search.plugin import coordinate_transform
class CoordinateSearch(object):
  """Class for performing the Coordinate search.

  Coordinate search supports the following formats:
  1. Decimal Degrees (DD)
  2. Degrees Minutes Seconds (DMS)
  3. Degrees Decimal Minutes (DDM)
  4. Military Grid Reference System (MGRS)
  5. Universal Transverse Mercator (UTM)

  Coordinate search transforms coordinates from DMS, DDM, UTM, MGRS formats to
  DD, validates the coordinates and sends the response back to the client.
  Depending on the client type, KML or JSONP formats are supported.
  """
  # Lat/lng style queries carry two coordinates; MGRS packs everything into one token.
  NUM_OF_COORDS_IN_LAT_LNG_FORMAT = 2
  NUM_OF_COORDS_IN_MGRS_FORMAT = 1

  def __init__(self):
    """Inits CoordinateSearch.

    Initializes the logger "ge_search".
    Initializes templates for kml, placemark templates for KML/JSONP outputs.
    """
    self.utils = utils.SearchUtils()
    self._transform = coordinate_transform.CoordinateTransform()
    configs = self.utils.GetConfigs(
        os.path.join(geconstants.SEARCH_CONFIGS_DIR, "CoordinateSearch.conf"))
    self._jsonp_call = self.utils.jsonp_functioncall
    # %-template for a KML Placemark geometry: name, styleUrl, lon, lat.
    self._geom = """
            <name>%s</name>
            <styleUrl>%s</styleUrl>
            <Point>
              <coordinates>%s,%s</coordinates>
            </Point>\
    """
    # %-template for the JSON geometry: lat, lng.
    self._json_geom = """
     {
       "Point": {
          "coordinates": "%s,%s"
       }
     }
    """
    # string.Template for the whole KML document (${style}, ${placemark}).
    self._kml = """
    <kml xmlns="http://www.opengis.net/kml/2.2"
         xmlns:gx="http://www.google.com/kml/ext/2.2"
         xmlns:kml="http://www.opengis.net/kml/2.2"
         xmlns:atom="http://www.w3.org/2005/Atom">
    <Folder>
         <name>Coordinate Search Results</name>
         <open>1</open>
         <Style id="placemark_label">\
        ${style}
         </Style>\
        ${placemark}
    </Folder>
    </kml>
    """
    self._kml_template = Template(self._kml)
    self._placemark_template = self.utils.placemark_template
    self._json_template = self.utils.json_template
    self._json_placemark_template = self.utils.json_placemark_template
    style_template = self.utils.style_template
    # Input types that use the two-coordinate (lat, lng) form.
    self.coordinates_in_lat_lng_format_ = ["DD", "DMS", "DDM"]
    self.logger = self.utils.logger
    self._style = style_template.substitute(
        balloonBgColor=configs.get("balloonstyle.bgcolor"),
        balloonTextColor=configs.get("balloonstyle.textcolor"),
        balloonText=configs.get("balloonstyle.text"),
        iconStyleScale=configs.get("iconstyle.scale"),
        iconStyleHref=configs.get("iconstyle.href"),
        lineStyleColor=configs.get("linestyle.color"),
        lineStyleWidth=configs.get("linestyle.width"),
        polyStyleColor=configs.get("polystyle.color"),
        polyStyleColorMode=configs.get("polystyle.colormode"),
        polyStyleFill=configs.get("polystyle.fill"),
        polyStyleOutline=configs.get("polystyle.outline"),
        listStyleHref=configs.get("iconstyle.href"))

  def HandleSearchRequest(self, environ):
    """Fetches the search tokens from form and performs the coordinate search.

    Args:
      environ: A list of environment variables as supplied by the
          WSGI interface to the coordinate search application interface.
    Returns:
      search_results: A KML/JSONP formatted string which contains search results.
    Raises:
      BadQueryException: if the search query is invalid.
    """
    search_results = ""
    # Fetch all the attributes provided by the user.
    parameters = self.utils.GetParameters(environ)
    response_type = self.utils.GetResponseType(environ)
    # Retrieve the function call back name for JSONP response.
    self.f_callback = self.utils.GetCallback(parameters)
    original_query = self.utils.GetValue(parameters, "q")
    if not original_query:
      msg = "Empty search query received."
      self.logger.error(msg)
      raise exceptions.BadQueryException(msg)
    search_status, search_results = self.DoSearch(original_query, response_type)
    if not search_status:
      folder_name = "Search returned no results."
      search_results = self.utils.NoSearchResults(
          folder_name, self._style, response_type, self.f_callback)
    return (search_results, response_type)

  def DoSearch(self, search_query, response_type):
    """Performs the coordinate search.

    Args:
      search_query: A string containing the search coordinates as
          entered by the user.
      response_type: Response type can be KML or JSONP, depending on the client.
    Returns:
      search_results: A KML/JSONP formatted string which contains search results.
    Raises:
      BadQueryException: if the search query is invalid.
    """
    coordinate_type = ""
    search_results = ""
    input_coordinates = []
    decimal_degrees_coordinates = []
    search_tokens = self.utils.SearchTokensFromString(search_query)
    self.logger.debug("coordinates: %s", ",".join(search_tokens))
    input_coordinates = self._transform.GetInputCoordinates(
        ",".join(search_tokens))
    number_of_coordinates = len(input_coordinates)
    if number_of_coordinates == 0:
      msg = "Incomplete search query %s submitted" % search_query
      self.logger.error(msg)
      raise exceptions.BadQueryException(msg)
    coordinate_type = self._transform.GetInputType(input_coordinates)
    self.logger.debug("Coordinate type is %s.", coordinate_type)
    if coordinate_type in self.coordinates_in_lat_lng_format_:
      reqd_num_of_coordinates = CoordinateSearch.NUM_OF_COORDS_IN_LAT_LNG_FORMAT
    else:
      reqd_num_of_coordinates = CoordinateSearch.NUM_OF_COORDS_IN_MGRS_FORMAT
    if number_of_coordinates > reqd_num_of_coordinates:
      # Too many tokens: keep the required prefix, warn about the rest.
      self.logger.warning(
          "extra search parameters ignored: %s", ",".join(
              input_coordinates[reqd_num_of_coordinates:]))
      input_coordinates = input_coordinates[:reqd_num_of_coordinates]
    elif number_of_coordinates < reqd_num_of_coordinates:
      msg = "Incomplete search query %s submitted" % search_query
      self.logger.error(msg)
      raise exceptions.BadQueryException(msg)
    decimal_degrees_coordinates = self._transform.TransformToDecimalDegrees(
        coordinate_type, input_coordinates)
    search_results = self.ConstructResponse(
        response_type, decimal_degrees_coordinates)
    search_status = True if search_results else False
    return search_status, search_results

  def ConstructKMLResponse(self, latitude, longitude):
    """Prepares KML response.

    KML response has the below format:
      <kml>
       <Folder>
        <name/>
        <StyleURL>
         ---
        </StyleURL>
        <Point>
         <coordinates/>
        </Point>
       </Folder>
      </kml>

    Args:
      latitude: latitude in Decimal Degrees format.
      longitude: longitude in Decimal Degrees format.
    Returns:
      kml_response: KML formatted response.
    """
    placemark = ""
    kml_response = ""
    name = "%s, %s" % (latitude, longitude)
    style_url = "#placemark_label"
    # KML wants lon,lat order inside <coordinates>.
    geom = self._geom % (name, style_url, str(longitude), str(latitude))
    placemark = self._placemark_template.substitute(geom=geom)
    kml_response = self._kml_template.substitute(
        style=self._style, placemark=placemark)
    self.logger.info("KML response successfully formatted")
    return kml_response

  def ConstructJSONPResponse(self, latitude, longitude):
    """Prepares JSONP response.

      {
        "Folder": {
          "name": "X,Y",
          "Style": {
            "IconStyle": {"scale": "1" },
            "LineStyle": {
              "color": "7fffff00",
              "width": "5" },
            "PolyStyle": {
              "color": "7f66ffff",
              "fill": "1",
              "outline": "1" } },
          "Placemark": {
            "Point": {
              "coordinates": "X,Y" } }
        }
      }

    Args:
      latitude: latitude in Decimal Degrees format.
      longitude: longitude in Decimal Degrees format.
    Returns:
      jsonp_response: JSONP formatted response.
    """
    placemark = ""
    json_response = ""
    jsonp_response = ""
    folder_name = "%s, %s" % (latitude, longitude)
    json_geom = self._json_geom % (latitude, longitude)
    placemark = self._json_placemark_template.substitute(
        geom=json_geom)
    json_response = self._json_template.substitute(
        foldername=folder_name, json_placemark=placemark)
    # Escape single quotes from json_response.
    json_response = json_response.replace("'", "\\'")
    jsonp_response = self._jsonp_call % (self.f_callback, json_response)
    self.logger.info("JSONP response successfully formatted")
    return jsonp_response

  def ConstructResponse(self, response_type, decimal_degrees_coordinates):
    """Construct the response based on response_type.

    Args:
      response_type: Response type can be KML or JSONP, depending on the client.
      decimal_degrees_coordinates: List of coordinates in DD(Decimal Degrees)
          format.
    Returns:
      search_results: A KML/JSONP formatted string which contains search results.
    """
    search_results = ""
    # NOTE(review): the assert's message expression (logger.error) is only
    # evaluated when the assert fails, and asserts are stripped entirely
    # under `python -O` — consider an explicit check-and-raise instead.
    assert response_type in self.utils.output_formats, (
        self.logger.error("Invalid response type %s", response_type))
    if response_type == "KML":
      search_results = self.ConstructKMLResponse(
          decimal_degrees_coordinates[0], decimal_degrees_coordinates[1])
    elif response_type == "JSONP":
      search_results = self.ConstructJSONPResponse(
          decimal_degrees_coordinates[0], decimal_degrees_coordinates[1])
    return search_results
def main(coords, response_type):
  """Command-line helper: run one coordinate search against *coords*.

  Args:
    coords: coordinate query string, as a user would type it.
    response_type: "KML" or "JSONP".
  """
  searcher = CoordinateSearch()
  searcher.DoSearch(coords, response_type)
# Script entry point: usage `coordinate_search_handler.py <coords> <KML|JSONP>`.
if __name__ == "__main__":
  main(sys.argv[1], sys.argv[2])
|
tst-mswartz/earthenterprise
|
earth_enterprise/src/server/wsgi/search/plugin/coordinate_search_handler.py
|
Python
|
apache-2.0
| 10,743 | 0.003165 |
# Copyright 2005-2010 Wesabe, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ofxtools.CsvConverter - translate CSV files into OFX files.
#
import datetime
import dateutil.parser
import ofx
import ofxtools
import re
import sys
import xml.sax.saxutils as sax
from decimal import *
from ofx.builder import *
class CsvConverter:
    """Clean QIF transaction data and emit it as OFX.

    NOTE(review): despite the class name (and the module banner, which says
    "translate CSV files into OFX files"), the visible implementation parses
    QIF input via ofxtools.QifParser — confirm which is intended.

    NOTE(review): this is Python 2 code (dict.has_key, list-returning
    dict.keys, func.func_name in the sibling decorator module); it will not
    run unmodified on Python 3.
    """
    def __init__(self, qif, colspec=None, fid="UNKNOWN", org="UNKNOWN",
                 bankid="UNKNOWN", accttype="UNKNOWN", acctid="UNKNOWN",
                 balance="UNKNOWN", curdef=None, lang="ENG", dayfirst=False,
                 debug=False):
        # qif: raw QIF document text to convert.
        # dayfirst: treat ambiguous dates as DD/MM (UK style); may be flipped
        # to True by the format-guessing pass below.
        self.qif = qif
        self.colspec = colspec
        self.fid = fid
        self.org = org
        self.bankid = bankid
        self.accttype = accttype
        self.acctid = acctid
        self.balance = balance
        self.curdef = curdef
        self.lang = lang
        self.debug = debug
        self.dayfirst = dayfirst
        self.parsed_csv = None
        # FIXME: Move this to one of the OFX generation classes (Document or Response).
        self.txns_by_date = {}
        if self.debug: sys.stderr.write("Parsing document.\n")
        parser = ofxtools.QifParser() # debug=debug)
        self.parsed_qif = parser.parse(self.qif)
        if self.debug: sys.stderr.write("Cleaning transactions.\n")
        # We do a two-pass conversion in order to check the dates of all
        # transactions in the statement, and convert all the dates using
        # the same date format. The first pass does nothing but look
        # at dates; the second actually applies the date conversion and
        # all other conversions, and extracts information needed for
        # the final output (like date range).
        txn_list = self._extract_txn_list(self.parsed_qif)
        self._guess_formats(txn_list)
        self._clean_txn_list(txn_list)
    def _extract_txn_list(self, qif):
        """Flatten the parsed QIF statement into a list of transactions.

        Also infers the account type (CHECKING/CREDITCARD) from which
        statement section is present, when none was given.
        """
        stmt_obj = qif.asDict()["QifStatement"]
        if self.accttype == "UNKNOWN":
            if "BankTransactions" in stmt_obj:
                self.accttype = "CHECKING"
            elif "CreditCardTransactions" in stmt_obj:
                self.accttype = "CREDITCARD"
        txn_list = []
        for stmt in stmt_obj:
            for txn in stmt:
                txn_list.append(txn)
        if len(txn_list) == 0:
            raise ValueError("Found no transactions to convert " +
                             "in the QIF source.")
        else:
            return txn_list
    #
    # Date methods
    #
    def _guess_formats(self, txn_list):
        """First pass: inspect every transaction's date to pick a date format."""
        # Go through the transactions one at a time, and try to parse the date
        # field and currency format. If we check the date format and find a
        # transaction where the first number must be the day (that is, the first
        # number is in the range 13..31), then set the state of the converter to
        # use dayfirst for all transaction cleanups. This is a guess because the
        # method will only work for UK dates if the statement contains a day in
        # the 13..31 range. (We could also test whether a date appears out of
        # order, or whether the jumps between transactions are especially long,
        # if this guessing method doesn't work reliably.)
        for txn_obj in txn_list:
            txn = txn_obj.asDict()
            txn_date = txn.get("Date", "UNKNOWN")
            txn_currency = txn.get("Currency", "UNKNOWN")
            # Look for date format.
            parsed_date = self._parse_date(txn_date)
            self._check_date_format(parsed_date)
    # BUG: the method body is missing — as written this is a SyntaxError and
    # the module cannot even be imported. _parse_date is called from
    # _guess_formats and _clean_txn_date and presumably wrapped
    # dateutil.parser.parse(txn_date, dayfirst=dayfirst); restore an
    # implementation before using this module.
    def _parse_date(self, txn_date, dayfirst=False):
    def _check_date_format(self, parsed_date):
        # NOTE(review): this relies on _parse_date signalling "date parsed as
        # dayfirst" through parsed_date.microsecond == 3; since the
        # _parse_date body is missing here, that contract cannot be confirmed.
        # If we *ever* find a date that parses as dayfirst, treat
        # *all* transactions in this statement as dayfirst.
        if parsed_date is not None and parsed_date != "UNKNOWN" and parsed_date.microsecond == 3:
            self.dayfirst = True
    #
    # Cleanup methods
    #
    def _clean_txn_list(self, txn_list):
        """Second pass: clean every transaction and group them by date."""
        for txn_obj in txn_list:
            try:
                txn = self._clean_txn(txn_obj)
                txn_date = txn["Date"]
                txn_date_list = self.txns_by_date.get(txn_date, [])
                txn_date_list.append(txn)
                self.txns_by_date[txn_date] = txn_date_list
            except ValueError:
                # The _clean_txn method will sometimes find transactions
                # that are inherently unclean and are unable to be purified.
                # In these cases it will reject the transaction by throwing
                # a ValueError, which signals us not to store the transaction.
                if self.debug: sys.stderr.write("Skipping transaction '%s'." %
                                                str(txn_obj.asDict()))
        # Sort the dates (in YYYYMMDD format) and choose the lowest
        # date as our start date, and the highest date as our end
        # date.
        # NOTE(review): Python 2 only — on Python 3, dict.keys() returns a
        # view with no .sort(); use sorted(self.txns_by_date) instead.
        date_list = self.txns_by_date.keys()
        date_list.sort()
        self.start_date = date_list[0]
        self.end_date = date_list[-1]
    def _clean_txn(self, txn_obj):
        """Clean one transaction dict; raises ValueError to reject it."""
        # This is sort of the brute-force method of the converter. It
        # looks at the data we get from the bank and tries as hard as
        # possible to make best-effort guesses about what the OFX 2.0
        # standard values for the transaction should be. There's a
        # reasonable amount of guesswork in here -- some of it wise,
        # maybe some of it not. If the cleanup method determines that
        # the txn_obj shouldn't be in the data, it will return None.
        # Otherwise, it will return a transaction cleaned to the best
        # of our abilities.
        txn = txn_obj.asDict()
        self._clean_txn_date(txn)
        self._clean_txn_amount(txn)
        self._clean_txn_number(txn)
        self._clean_txn_type(txn)
        self._clean_txn_payee(txn)
        return txn
    def _clean_txn_date(self, txn):
        """Normalize the Date field to YYYYMMDD (or the literal "UNKNOWN")."""
        txn_date = txn.get("Date", "UNKNOWN").strip()
        if txn_date != "UNKNOWN":
            parsed_date = self._parse_date(txn_date, dayfirst=self.dayfirst)
            txn["Date"] = parsed_date.strftime("%Y%m%d")
        else:
            txn["Date"] = "UNKNOWN"
    def _clean_txn_amount(self, txn):
        """Normalize the Amount field; raises ValueError for undefined amounts."""
        txn_amount = txn.get("Amount", "00.00")
        txn_amount2 = txn.get("Amount2", "00.00")
        # Home Depot Credit Card seems to send two transaction records for each
        # transaction. They're out of order (that is, the second record is not
        # directly after the first, nor even necessarily after it at all), and
        # the second one *sometimes* appears to be a memo field on the first one
        # (e.g., a credit card payment will show up with an amount and date, and
        # then the next transaction will have the same date and a payee that
        # reads, "Thank you for your payment!"), and *sometimes* is the real
        # payee (e.g., the first will say "Home Depot" and the second will say
        # "Seasonal/Garden"). One of the two transaction records will have a
        # transaction amount of "-", and the other will have the real
        # transaction amount. Ideally, we would pull out the memo and attach it
        # to the right transaction, but unless the two transactions are the only
        # transactions on that date, there doesn't seem to be a good clue (order
        # in statement, amount, etc.) as to how to associate them. So, instead,
        # we're returning None, which means this transaction should be removed
        # from the statement and not displayed to the user. The result is that
        # for Home Depot cards, sometimes we lose the memo (which isn't that big
        # a deal), and sometimes we make the memo into the payee (which sucks).
        if txn_amount == "-" or txn_amount == " ":
            raise ValueError("Transaction amount is undefined.")
        # Some QIF sources put the amount in Amount2 instead, for unknown
        # reasons. Here we ignore Amount2 unless Amount is unknown.
        if txn_amount == "00.00":
            txn_amount = txn_amount2
        # Okay, now strip out whitespace padding.
        txn_amount = txn_amount.strip()
        # Some QIF files have dollar signs in the amount. Hey, why not?
        txn_amount = txn_amount.replace('$', '', 1)
        # Some QIF sources put three digits after the decimal, and the Ruby
        # code thinks that means we're in Europe. So.....let's deal with
        # that now.
        try:
            txn_amount = str(Decimal(txn_amount).quantize(Decimal('.01')))
        except:
            # Just keep truckin'.
            pass
        txn["Amount"] = txn_amount
    def _clean_txn_number(self, txn):
        """Remove bogus check numbers; flag real ones as CHECK transactions."""
        txn_number = txn.get("Number", "UNKNOWN").strip()
        # Clean up bad check number behavior
        # NOTE(review): prefer a raw string r"\d+" for the regex.
        all_digits = re.compile("\d+")
        if txn_number == "N/A":
            # Get rid of brain-dead Chase check number "N/A"s
            del txn["Number"]
        elif txn_number.startswith("XXXX-XXXX-XXXX"):
            # Home Depot credit cards throw THE CREDIT CARD NUMBER
            # into the check number field. Oy! At least they mask
            # the first twelve digits, so we know they're insane.
            del txn["Number"]
        elif txn_number != "UNKNOWN" and self.accttype == "CREDITCARD":
            # Several other credit card companies (MBNA, CapitalOne)
            # seem to use the number field as a transaction ID. Get
            # rid of this.
            del txn["Number"]
        elif txn_number == "0000000000" and self.accttype != "CREDITCARD":
            # There's some bank that puts "N0000000000" in every non-check
            # transaction. (They do use normal check numbers for checks.)
            del txn["Number"]
        elif txn_number != "UNKNOWN" and all_digits.search(txn_number):
            # Washington Mutual doesn't indicate a CHECK transaction
            # when a check number is present.
            txn["Type"] = "CHECK"
    def _clean_txn_type(self, txn):
        """Infer the OFX transaction Type from the payee/memo/number fields."""
        txn_type = "UNKNOWN"
        txn_amount = txn.get("Amount", "UNKNOWN")
        txn_payee = txn.get("Payee", "UNKNOWN")
        txn_memo = txn.get("Memo", "UNKNOWN")
        txn_number = txn.get("Number", "UNKNOWN")
        txn_sign = self._txn_sign(txn_amount)
        # Try to figure out the transaction type from the Payee or
        # Memo field.
        # NOTE(review): self.txn_types is not defined anywhere in the visible
        # class — presumably a class-level mapping of type strings to OFX
        # types lost in extraction; confirm against the original source.
        for typestr in self.txn_types.keys():
            if txn_number == typestr:
                # US Bank sends "DEBIT" or "CREDIT" as a check number
                # on credit card transactions.
                txn["Type"] = self.txn_types[typestr]
                del txn["Number"]
                break
            elif txn_payee.startswith(typestr + "/") or \
            txn_memo.startswith(typestr + "/") or \
            txn_memo == typestr or txn_payee == typestr:
                if typestr == "ACH" and txn_sign == "credit":
                    txn["Type"] = "DIRECTDEP"
                elif typestr == "ACH" and txn_sign == "debit":
                    txn["Type"] = "DIRECTDEBIT"
                else:
                    txn["Type"] = self.txn_types[typestr]
                break
    def _clean_txn_payee(self, txn):
        """Fill in a meaningful Payee (and, if still unset, a Type)."""
        txn_payee = txn.get("Payee", "UNKNOWN")
        txn_memo = txn.get("Memo", "UNKNOWN")
        txn_number = txn.get("Number", "UNKNOWN")
        txn_type = txn.get("Type", "UNKNOWN")
        txn_amount = txn.get("Amount", "UNKNOWN")
        txn_sign = self._txn_sign(txn_amount)
        # Try to fill in the payee field with some meaningful value.
        if txn_payee == "UNKNOWN":
            if txn_number != "UNKNOWN" and (self.accttype == "CHECKING" or
                                            self.accttype == "SAVINGS"):
                txn["Payee"] = "Check #%s" % txn_number
                txn["Type"] = "CHECK"
            elif txn_type == "INT" and txn_sign == "debit":
                txn["Payee"] = "Interest paid"
            elif txn_type == "INT" and txn_sign == "credit":
                txn["Payee"] = "Interest earned"
            elif txn_type == "ATM" and txn_sign == "debit":
                txn["Payee"] = "ATM Withdrawal"
            elif txn_type == "ATM" and txn_sign == "credit":
                txn["Payee"] = "ATM Deposit"
            elif txn_type == "POS" and txn_sign == "debit":
                txn["Payee"] = "Point of Sale Payment"
            elif txn_type == "POS" and txn_sign == "credit":
                txn["Payee"] = "Point of Sale Credit"
            elif txn_memo != "UNKNOWN":
                txn["Payee"] = txn_memo
            # Down here, we have no payee, no memo, no check number,
            # and no type. Who knows what this stuff is.
            elif txn_type == "UNKNOWN" and txn_sign == "debit":
                txn["Payee"] = "Other Debit"
                txn["Type"] = "DEBIT"
            elif txn_type == "UNKNOWN" and txn_sign == "credit":
                txn["Payee"] = "Other Credit"
                txn["Type"] = "CREDIT"
        # Make sure the transaction type has some valid value.
        # NOTE(review): dict.has_key was removed in Python 3 — use `"Type" not in txn`.
        if not txn.has_key("Type") and txn_sign == "debit":
            txn["Type"] = "DEBIT"
        elif not txn.has_key("Type") and txn_sign == "credit":
            txn["Type"] = "CREDIT"
    def _txn_sign(self, txn_amount):
        """Return "debit" for negative amount strings, else "credit"."""
        # Is this a credit or a debit?
        if txn_amount.startswith("-"):
            return "debit"
        else:
            return "credit"
    #
    # Conversion methods
    #
    def to_ofx102(self):
        """Render the cleaned statement as an OFX 1.02 document string.

        NOTE(review): _ofx_header/_ofx_signon/_ofx_stmt are not defined in
        the visible class — confirm they exist in the full source.
        """
        if self.debug: sys.stderr.write("Making OFX/1.02.\n")
        return DOCUMENT(self._ofx_header(),
                        OFX(self._ofx_signon(),
                            self._ofx_stmt()))
    def to_xml(self):
        """Round-trip through OFX 1.02 and re-emit as OFX 2.0 XML."""
        ofx102 = self.to_ofx102()
        if self.debug:
            sys.stderr.write(ofx102 + "\n")
            sys.stderr.write("Parsing OFX/1.02.\n")
        response = ofx.Response(ofx102) #, debug=self.debug)
        if self.debug: sys.stderr.write("Making OFX/2.0.\n")
        if self.dayfirst:
            date_format = "DD/MM/YY"
        else:
            date_format = "MM/DD/YY"
        xml = response.as_xml(original_format="QIF", date_format=date_format)
        return xml
|
wesabe/fixofx
|
lib/ofxtools/csv_converter.py
|
Python
|
apache-2.0
| 14,873 | 0.004034 |
#
# iso2022_jp_1.py: Python Unicode Codec for ISO2022_JP_1
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
# Fetch the C-implemented ISO-2022-JP-1 codec from CPython's internal module;
# every class below delegates to it.
codec = _codecs_iso2022.getcodec('iso2022_jp_1')
class Codec(codecs.Codec):
    # Stateless encoder/decoder: reuse the C codec object's bound methods.
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    # The multibyte machinery locates the C codec via this class attribute.
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    # The multibyte machinery locates the C codec via this class attribute.
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    # Stream wrapper over the same C codec.
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    # Stream wrapper over the same C codec.
    codec = codec
def getregentry():
    """Build the CodecInfo record the codecs registry uses for this encoding."""
    entry = Codec()
    return codecs.CodecInfo(
        name='iso2022_jp_1',
        encode=entry.encode,
        decode=entry.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
|
zwChan/VATEC
|
~/eb-virt/Lib/encodings/iso2022_jp_1.py
|
Python
|
apache-2.0
| 1,100 | 0.006364 |
import os
from copy import deepcopy
from shapely.geometry import LineString
import mappyfile
import sys, os
sys.path.append(os.path.abspath("./docs/examples"))
from helper import create_image
def dilation(mapfile):
    """Draw a zig-zag line and its square-capped buffer into *mapfile*.

    Writes the line and buffered-polygon WKT into the "line" and "polygon"
    layers, widens the map extent to fit, and returns the buffered geometry.
    """
    zigzag = LineString([(0, 0), (1, 1), (0, 2), (2, 2), (3, 1), (1, 0)])
    line_layer = mappyfile.find(mapfile["layers"], "name", "line")
    line_layer["features"][0]["wkt"] = zigzag.wkt
    # cap_style=3 gives square (flat, mitred) buffer ends.
    buffered = zigzag.buffer(0.5, cap_style=3)
    polygon_layer = mappyfile.find(mapfile["layers"], "name", "polygon")
    polygon_layer["features"][0]["wkt"] = buffered.wkt
    # Pad the extent a little beyond the buffered shape.
    mapfile["extent"] = " ".join(str(bound) for bound in buffered.buffer(0.8).bounds)
    return buffered
def erosion(mapfile, dilated):
    """Shrink *dilated* and show the result on a copied polygon layer.

    Continues to work with the Mapfile as already modified by dilation();
    reread the file if a pristine map is needed.
    """
    line_layer = mappyfile.find(mapfile["layers"], "name", "line")
    line_layer["status"] = "OFF"
    polygon_layer = mappyfile.find(mapfile["layers"], "name", "polygon")
    # Deep-copy the polygon layer so the edits below only touch the copy.
    eroded_layer = deepcopy(polygon_layer)
    eroded_layer["name"] = "newpolygon"
    mapfile["layers"].append(eroded_layer)
    # A negative buffer distance erodes (shrinks) the geometry.
    eroded = dilated.buffer(-0.3)
    eroded_layer["features"][0]["wkt"] = eroded.wkt
    # Grey out the ORIGINAL polygon layer so the eroded copy stands out.
    original_style = polygon_layer["classes"][0]["styles"][0]
    original_style["color"] = "#999999"
    original_style["outlinecolor"] = "#b2b2b2"
def main():
    """Render the dilation and erosion example images into docs/images."""
    map_path = "./docs/examples/geometry/geometry.map"
    mapfile = mappyfile.open(map_path)
    mapfile["size"] = [600, 600]
    images_dir = os.path.join(os.getcwd(), "docs/images")
    buffered = dilation(mapfile)
    create_image("dilated", mapfile, output_folder=images_dir)
    erosion(mapfile, buffered)
    create_image("erosion", mapfile, output_folder=images_dir)
# Script entry point: generate the example images.
if __name__ == "__main__":
    main()
    print("Done!")
|
geographika/mappyfile
|
docs/examples/geometry/geometry.py
|
Python
|
mit
| 1,786 | 0.004479 |
#-*-coding:utf-8-*-
'''
@author: Hung-Hsin Chen
@mail: chenhh@par.cse.nsysu.edu.tw
@license: GPLv2
'''
import functools
def symbol(sym):
    '''
    Decorator factory that tags a function with a symbol.
    The symbol is stored in the function.symbol attribute and the
    function itself is returned unchanged.
    @param sym: symbol to attach to the decorated function
    '''
    def tag(func):
        '''
        Store sym on func as its 'symbol' attribute.
        @param func: function to decorate
        '''
        func.symbol = sym
        return func
    return tag
def cache(func):
    '''
    Cache the result of a class member method which has no argument.
    The return value is cached on self._{method}_cache where
    {method} is the name of the method, so each instance computes
    the value at most once.
    usage:
        @cache
        def _get_something(self):
            ...
            return 'something'
    '''
    # BUG FIX: the original used func.func_name, which exists only in
    # Python 2 and raises AttributeError on Python 3; func.__name__ is
    # available on both.
    cache_name = '_%s_cache' % func.__name__
    @functools.wraps(func)
    def decorator(self):
        '''Return the cached value, computing and storing it on first access.'''
        try:
            return getattr(self, cache_name)
        except AttributeError:
            # Haven't cached anything yet
            setattr(self, cache_name, func(self))
            return getattr(self, cache_name)
    return decorator
#
# def memory(func):
# '''
# cache result of the class member method which has exact one argument.
# self._{method}_memory where {method} is the name of the method.
#
# Note that the arg must be hashable, thus lists can't be memoized.
# The name of the memoized attribute is stored on the method
# itself as func.memory.
# usage:
# @memoize
# def _compute_something(self, arg):
# ...
# return 'something'
# '''
# func.memory = memory_name = '_%s_memory' %( func.func_name)
#
# @functools.wraps(func)
# def decorator(self, key):
# '''Assigns a memo hash to self on demand'''
# try:
# memo = getattr(self, memory_name)
# except AttributeError:
# # Haven't memoized anything yet
# memo = {}
# setattr(self, memory_name, memo)
#
# try:
# return memo[key]
# except KeyError:
# # Haven't seen this key yet
# memo[key] = results = func(self, key)
# return results
# return decorator
|
chenhh/PyMOGEP
|
src/PyMOGEP/decorator.py
|
Python
|
gpl-2.0
| 2,384 | 0.005872 |
# coding=utf-8
from __future__ import print_function
import cmd
import inspect
import sys
from panshell.base import FS
class Shell(cmd.Cmd):
    """Interactive shell that dispatches commands to a pluggable FS.

    FS backends are registered with :meth:`plugin`, activated with the
    ``use`` command, and left again with ``exit``. While an FS is
    active, its ``do_*`` commands are attached to the shell so they
    appear as first-class shell commands.
    """

    def __init__(self):
        cmd.Cmd.__init__(self)
        self.stack = []      # previously active FS instances (for nested `use`)
        self.fsmap = {}      # name -> (FS class, settings dict, instance)
        self.fs = None       # currently active FS, if any
        self._funcs = []     # command names currently borrowed from self.fs
        self._keywords = ['use', 'exit']  # commands the shell itself owns

    @property
    def prompt(self):
        """Prompt of the active FS, or the shell's own default prompt."""
        if self.fs:
            return self.fs.prompt
        return 'pansh$>'

    def plugin(self, fscls, **setting):
        """Register an FS subclass under its class-level ``name``.

        Raises if *fscls* is not an FS subclass or the name is taken.
        """
        if not issubclass(fscls, FS):
            raise Exception('must inherit `panshell.base.FS`')
        name = fscls.name
        if name in self.fsmap:
            raise Exception('FS <{}> has already plugin in '.format(name))
        fs = fscls(**setting)
        self.fsmap[name] = (fscls, setting, fs)

    def get_names(self):
        """
        rewrite cmd.Cmd `dir(self.__class__)`
        """
        # dir(self) (not the class) so dynamically attached do_* methods
        # from the active FS show up in help/completion.
        return dir(self)

    def __getattr__(self, name):
        # Forward unknown do_* commands to the active FS so its commands
        # behave as if they were defined on the shell itself.
        if name.startswith('do_'):
            action = name[3:]
            if action not in self._keywords:
                return getattr(self.fs, name)
        if name in self.__dict__:
            return self.__dict__[name]
        # BUGFIX: the original called cmd.Cmd.__getattr__(name), but
        # cmd.Cmd defines no __getattr__, so that line itself raised a
        # misleading AttributeError.  Raise a proper one instead.
        raise AttributeError(name)

    def _plugin_in(self, fs):
        """Attach every do_* method of *fs* onto this shell instance."""
        for name in dir(fs):
            action = name[3:]
            if name.startswith('do_') and action not in self._keywords:
                attr = getattr(fs, name)
                if inspect.ismethod(attr):
                    self._funcs.append(action)
                    setattr(self, name, attr)

    def _plugin_out(self):
        """Detach the do_* methods previously borrowed from an FS."""
        for action in self._funcs:
            name = 'do_' + action
            delattr(self, name)
        self._funcs = []

    def set_fs(self, fs):
        """Make *fs* the active backend, swapping its commands in."""
        if self.fs is not None:
            self._plugin_out()
        self.fs = fs
        self._plugin_in(fs)

    # help text below is user-facing (shown by `help use`); kept verbatim
    def do_use(self, name):
        """use <fs> 选择使用某个fs
        use baidu
        use local
        """
        if name not in self.fsmap:
            # BUGFIX: the original passed `name` as a second Exception
            # argument instead of formatting it into the message.
            raise Exception('not plugin in this FS with name %s' % name)
        fscls, setting, _ = self.fsmap[name]
        fs = fscls(**setting)
        self.stack.append(self.fs)
        self.set_fs(fs)

    def do_exit(self, line):
        """
        退出 shell 或 当前 fs
        """
        # With no active FS, exit the whole shell; otherwise pop back
        # to the previously active FS.
        if self.fs is None:
            print('exit-shell', file=sys.stdout)
            sys.exit(0)
        self.fs.do_exit(line)
        self.set_fs(self.stack.pop())

    def run(self):
        """Enter the interactive command loop."""
        self.cmdloop()
|
alingse/panshell
|
panshell/core.py
|
Python
|
apache-2.0
| 2,543 | 0 |
# secretkey.py: secret-key cryptographic functions
"""
Secret-key functions from chapter 1 of "A Working Introduction to
Cryptography with Python".
"""
import Crypto.Cipher.AES as AES
import Crypto.Hash.HMAC as HMAC
import Crypto.Hash.SHA384 as SHA384
import Crypto.Random.OSRNG.posix as RNG
import pbkdf2
import streql
__AES_KEYLEN = 32
__TAG_KEYLEN = 48
__TAG_LEN = __TAG_KEYLEN
KEYSIZE = __AES_KEYLEN + __TAG_KEYLEN
def pad_data(data):
    """pad_data pads out the data to an AES block length.

    Block-aligned input is returned unchanged; otherwise a single 0x80
    marker byte is appended, followed by zero bytes up to the next
    16-byte boundary.
    """
    remainder = len(data) % 16
    if remainder == 0:
        # Already a whole number of blocks: no padding marker at all.
        return data
    # 15 - remainder zero bytes after the 0x80 marker completes the block.
    return '%s\x80%s' % (data, '\x00' * (15 - remainder))
def unpad_data(data):
    """unpad_data removes padding from the data.

    Strips trailing zero bytes and, if present, the 0x80 marker byte
    that precedes them.  Data that was block-aligned (and therefore
    never padded by pad_data) is returned unchanged.
    """
    if not data:
        return data
    stripped = data.rstrip('\x00')
    if not stripped:
        # BUGFIX: input consisting solely of \x00 bytes left an empty
        # string here, and the original then raised IndexError on
        # data[-1].  Return the empty result instead.
        return stripped
    if stripped[-1] == '\x80':
        return stripped[:-1]
    return stripped
def generate_nonce():
    """Generate a random number used once.

    Returns one AES block (16 bytes) of OS randomness; used as the
    CBC initialization vector by encrypt().
    """
    return RNG.new().read(AES.block_size)
def new_tag(ciphertext, key):
    """Compute a new message tag using HMAC-SHA-384.

    The digest is 48 bytes long, matching __TAG_LEN.
    """
    return HMAC.new(key, msg=ciphertext, digestmod=SHA384).digest()
def verify_tag(ciphertext, key):
    """Verify the tag on a ciphertext.

    The last __TAG_LEN bytes are the tag; everything before them is the
    tagged data.  Returns True when the recomputed tag matches.
    """
    tag_start = len(ciphertext) - __TAG_LEN
    data = ciphertext[:tag_start]
    tag = ciphertext[tag_start:]
    actual_tag = new_tag(data, key)
    # streql.equals performs the comparison (presumably constant-time,
    # which is streql's purpose — confirm against the library docs).
    return streql.equals(actual_tag, tag)
def decrypt(ciphertext, key):
    """
    Decrypt a ciphertext encrypted with AES in CBC mode; assumes the IV
    has been prepended to the ciphertext.

    Expects the layout produced by encrypt(): IV | ciphertext | tag.
    Returns a (plaintext, ok) tuple; ok is False (with plaintext None)
    when the input is too short or the tag fails to verify.
    """
    if len(ciphertext) <= AES.block_size:
        return None, False
    tag_start = len(ciphertext) - __TAG_LEN
    ivec = ciphertext[:AES.block_size]
    data = ciphertext[AES.block_size:tag_start]
    # Authenticate before decrypting; the HMAC key is the second half
    # of the combined key (bytes __AES_KEYLEN onward).
    if not verify_tag(ciphertext, key[__AES_KEYLEN:]):
        return None, False
    aes = AES.new(key[:__AES_KEYLEN], AES.MODE_CBC, ivec)
    data = aes.decrypt(data)
    return unpad_data(data), True
def encrypt(data, key):
    """
    Encrypt data using AES in CBC mode. The IV is prepended to the
    ciphertext.

    The combined *key* splits in two: the first __AES_KEYLEN bytes
    encrypt, the remainder authenticates.  Output layout:
    IV | ciphertext | tag.
    """
    data = pad_data(data)
    ivec = generate_nonce()
    aes = AES.new(key[:__AES_KEYLEN], AES.MODE_CBC, ivec)
    ctxt = aes.encrypt(data)
    # The tag covers IV + ciphertext (encrypt-then-MAC).
    tag = new_tag(ivec+ctxt, key[__AES_KEYLEN:])
    return ivec + ctxt + tag
def generate_salt(salt_len):
    """Generate a salt for use with PBKDF2.

    Returns salt_len bytes straight from the OS RNG.
    """
    return RNG.new().read(salt_len)
def password_key(passphrase, salt=None):
    """Generate a key from a passphrase. Returns the tuple (salt, key).

    When *salt* is None a fresh 16-byte salt is generated; pass a
    stored salt back in to re-derive the same key.  The derived key is
    KEYSIZE bytes, covering both the AES and the HMAC portions.
    """
    if salt is None:
        salt = generate_salt(16)
    # 16384 PBKDF2 iterations, stretched to the combined key size.
    passkey = pbkdf2.PBKDF2(passphrase, salt, iterations=16384).read(KEYSIZE)
    return salt, passkey
|
kisom/crypto_intro
|
src/secretkey.py
|
Python
|
isc
| 2,939 | 0 |
# ----------------------------------------------------------
# Introdução a Programação de Computadores - IPC
# Universidade do Estado do Amazonas - UEA
# Prof. Jucimar Jr
# Edson de Lima Barros 1715310043
# Gabriel Nascimento de Oliveira 1715310052
# Luiz Daniel Raposo Nunes de Mello 1715310049
# Renan de Almeida Campos 0825060036
# Tiago Ferreira Aranha 1715310047
# Wilbert Luís Evangelista Marins 1715310055
# Mackson Garcez Moreno de Oliveira júnior 1215090300
#
# 1.5. Faça um Programa que converta metros para centímetros.
# ----------------------------------------------------------
# Read a length in whole meters and report its centimeter equivalent.
meters = int(input('Digite a medida (em metros): '))
centimeters = meters * 100
print('%d metros são %d centímetros' % (meters, centimeters))
|
jucimarjr/IPC_2017-1
|
lista02/lista02_exercicio01_questao05.py
|
Python
|
apache-2.0
| 883 | 0.002291 |
from rpython.annotator import model as annmodel
from rpython.rlib import jit
from rpython.rtyper import rint
from rpython.rtyper.error import TyperError
from rpython.rtyper.lltypesystem.lltype import Signed, Bool, Void, UniChar
from rpython.rtyper.lltypesystem import lltype
from rpython.rtyper.rmodel import IteratorRepr, inputconst, Repr
from rpython.rtyper.rint import IntegerRepr
from rpython.rtyper.rfloat import FloatRepr
from rpython.tool.pairtype import pairtype, pair
from rpython.tool.sourcetools import func_with_new_name
class AbstractStringRepr(Repr):
@jit.elidable
def ll_decode_utf8(self, llvalue):
from rpython.rtyper.annlowlevel import hlstr
from rpython.rlib import runicode
value = hlstr(llvalue)
assert value is not None
errorhandler = runicode.default_unicode_error_decode
# NB. keep the arguments in sync with annotator/unaryop.py
u, pos = runicode.str_decode_utf_8_elidable(
value, len(value), 'strict', True, errorhandler, True)
# XXX maybe the whole ''.decode('utf-8') should be not RPython.
return self.ll.llunicode(u)
def _str_reprs(self, hop):
return hop.args_r[0].repr, hop.args_r[1].repr
def get_ll_eq_function(self):
return self.ll.ll_streq
def get_ll_hash_function(self):
return self.ll.ll_strhash
def get_ll_fasthash_function(self):
return self.ll.ll_strfasthash
def rtype_len(self, hop):
string_repr = self.repr
v_str, = hop.inputargs(string_repr)
return hop.gendirectcall(self.ll.ll_strlen, v_str)
def rtype_bool(self, hop):
s_str = hop.args_s[0]
if s_str.can_be_None:
string_repr = hop.args_r[0].repr
v_str, = hop.inputargs(string_repr)
return hop.gendirectcall(self.ll.ll_str_is_true, v_str)
else:
# defaults to checking the length
return super(AbstractStringRepr, self).rtype_bool(hop)
def rtype_method_startswith(self, hop):
str1_repr = hop.args_r[0].repr
str2_repr = hop.args_r[1]
v_str = hop.inputarg(str1_repr, arg=0)
if str2_repr == str2_repr.char_repr:
v_value = hop.inputarg(str2_repr.char_repr, arg=1)
fn = self.ll.ll_startswith_char
else:
v_value = hop.inputarg(str2_repr, arg=1)
fn = self.ll.ll_startswith
hop.exception_cannot_occur()
return hop.gendirectcall(fn, v_str, v_value)
def rtype_method_endswith(self, hop):
str1_repr = hop.args_r[0].repr
str2_repr = hop.args_r[1]
v_str = hop.inputarg(str1_repr, arg=0)
if str2_repr == str2_repr.char_repr:
v_value = hop.inputarg(str2_repr.char_repr, arg=1)
fn = self.ll.ll_endswith_char
else:
v_value = hop.inputarg(str2_repr, arg=1)
fn = self.ll.ll_endswith
hop.exception_cannot_occur()
return hop.gendirectcall(fn, v_str, v_value)
def rtype_method_find(self, hop, reverse=False):
# XXX binaryop
string_repr = hop.args_r[0].repr
char_repr = hop.args_r[0].char_repr
v_str = hop.inputarg(string_repr, arg=0)
if hop.args_r[1] == char_repr:
v_value = hop.inputarg(char_repr, arg=1)
llfn = reverse and self.ll.ll_rfind_char or self.ll.ll_find_char
else:
v_value = hop.inputarg(string_repr, arg=1)
llfn = reverse and self.ll.ll_rfind or self.ll.ll_find
if hop.nb_args > 2:
v_start = hop.inputarg(Signed, arg=2)
if not hop.args_s[2].nonneg:
raise TyperError("str.%s() start must be proven non-negative"
% (reverse and 'rfind' or 'find',))
else:
v_start = hop.inputconst(Signed, 0)
if hop.nb_args > 3:
v_end = hop.inputarg(Signed, arg=3)
if not hop.args_s[3].nonneg:
raise TyperError("str.%s() end must be proven non-negative"
% (reverse and 'rfind' or 'find',))
else:
v_end = hop.gendirectcall(self.ll.ll_strlen, v_str)
hop.exception_cannot_occur()
return hop.gendirectcall(llfn, v_str, v_value, v_start, v_end)
def rtype_method_rfind(self, hop):
return self.rtype_method_find(hop, reverse=True)
def rtype_method_count(self, hop):
rstr = hop.args_r[0].repr
v_str = hop.inputarg(rstr.repr, arg=0)
if hop.args_r[1] == rstr.char_repr:
v_value = hop.inputarg(rstr.char_repr, arg=1)
llfn = self.ll.ll_count_char
else:
v_value = hop.inputarg(rstr.repr, arg=1)
llfn = self.ll.ll_count
if hop.nb_args > 2:
v_start = hop.inputarg(Signed, arg=2)
if not hop.args_s[2].nonneg:
raise TyperError("str.count() start must be proven non-negative")
else:
v_start = hop.inputconst(Signed, 0)
if hop.nb_args > 3:
v_end = hop.inputarg(Signed, arg=3)
if not hop.args_s[3].nonneg:
raise TyperError("str.count() end must be proven non-negative")
else:
v_end = hop.gendirectcall(self.ll.ll_strlen, v_str)
hop.exception_cannot_occur()
return hop.gendirectcall(llfn, v_str, v_value, v_start, v_end)
def rtype_method_strip(self, hop, left=True, right=True):
rstr = hop.args_r[0].repr
v_str = hop.inputarg(rstr.repr, arg=0)
args_v = [v_str]
if len(hop.args_s) == 2:
if isinstance(hop.args_s[1], annmodel.SomeString):
v_stripstr = hop.inputarg(rstr.repr, arg=1)
args_v.append(v_stripstr)
func = self.ll.ll_strip_multiple
else:
v_char = hop.inputarg(rstr.char_repr, arg=1)
args_v.append(v_char)
func = self.ll.ll_strip
else:
func = self.ll.ll_strip_default
args_v.append(hop.inputconst(Bool, left))
args_v.append(hop.inputconst(Bool, right))
hop.exception_is_here()
return hop.gendirectcall(func, *args_v)
def rtype_method_lstrip(self, hop):
return self.rtype_method_strip(hop, left=True, right=False)
def rtype_method_rstrip(self, hop):
return self.rtype_method_strip(hop, left=False, right=True)
def rtype_method_upper(self, hop):
string_repr = hop.args_r[0].repr
v_str, = hop.inputargs(string_repr)
hop.exception_cannot_occur()
return hop.gendirectcall(self.ll.ll_upper, v_str)
def rtype_method_lower(self, hop):
string_repr = hop.args_r[0].repr
v_str, = hop.inputargs(string_repr)
hop.exception_cannot_occur()
return hop.gendirectcall(self.ll.ll_lower, v_str)
def rtype_method_isdigit(self, hop):
string_repr = hop.args_r[0].repr
[v_str] = hop.inputargs(string_repr)
hop.exception_cannot_occur()
return hop.gendirectcall(self.ll.ll_isdigit, v_str)
def rtype_method_isalpha(self, hop):
string_repr = hop.args_r[0].repr
[v_str] = hop.inputargs(string_repr)
hop.exception_cannot_occur()
return hop.gendirectcall(self.ll.ll_isalpha, v_str)
def rtype_method_isalnum(self, hop):
string_repr = hop.args_r[0].repr
[v_str] = hop.inputargs(string_repr)
hop.exception_cannot_occur()
return hop.gendirectcall(self.ll.ll_isalnum, v_str)
def _list_length_items(self, hop, v_lst, LIST):
"""Return two Variables containing the length and items of a
list. Need to be overriden because it is typesystem-specific."""
raise NotImplementedError
def rtype_method_join(self, hop):
from rpython.rtyper.lltypesystem.rlist import BaseListRepr
from rpython.rtyper.lltypesystem.rstr import char_repr, unichar_repr
hop.exception_cannot_occur()
rstr = hop.args_r[0]
if hop.s_result.is_constant():
return inputconst(rstr.repr, hop.s_result.const)
r_lst = hop.args_r[1]
if not isinstance(r_lst, BaseListRepr):
raise TyperError("string.join of non-list: %r" % r_lst)
v_str, v_lst = hop.inputargs(rstr.repr, r_lst)
v_length, v_items = self._list_length_items(hop, v_lst, r_lst.lowleveltype)
if hop.args_s[0].is_constant() and hop.args_s[0].const == '':
if r_lst.item_repr == rstr.repr:
llfn = self.ll.ll_join_strs
elif (r_lst.item_repr == char_repr or
r_lst.item_repr == unichar_repr):
v_tp = hop.inputconst(Void, self.lowleveltype)
return hop.gendirectcall(self.ll.ll_join_chars, v_length,
v_items, v_tp)
else:
raise TyperError("''.join() of non-string list: %r" % r_lst)
return hop.gendirectcall(llfn, v_length, v_items)
else:
if r_lst.item_repr == rstr.repr:
llfn = self.ll.ll_join
else:
raise TyperError("sep.join() of non-string list: %r" % r_lst)
return hop.gendirectcall(llfn, v_str, v_length, v_items)
def rtype_method_splitlines(self, hop):
rstr = hop.args_r[0].repr
if hop.nb_args == 2:
args = hop.inputargs(rstr.repr, Bool)
else:
args = [hop.inputarg(rstr.repr, 0), hop.inputconst(Bool, False)]
try:
list_type = hop.r_result.lowleveltype.TO
except AttributeError:
list_type = hop.r_result.lowleveltype
cLIST = hop.inputconst(Void, list_type)
hop.exception_cannot_occur()
return hop.gendirectcall(self.ll.ll_splitlines, cLIST, *args)
def rtype_method_split(self, hop):
rstr = hop.args_r[0].repr
v_str = hop.inputarg(rstr.repr, 0)
if isinstance(hop.args_s[1], annmodel.SomeString):
v_chr = hop.inputarg(rstr.repr, 1)
fn = self.ll.ll_split
else:
v_chr = hop.inputarg(rstr.char_repr, 1)
fn = self.ll.ll_split_chr
if hop.nb_args == 3:
v_max = hop.inputarg(Signed, 2)
else:
v_max = hop.inputconst(Signed, -1)
try:
list_type = hop.r_result.lowleveltype.TO
except AttributeError:
list_type = hop.r_result.lowleveltype
cLIST = hop.inputconst(Void, list_type)
hop.exception_cannot_occur()
return hop.gendirectcall(fn, cLIST, v_str, v_chr, v_max)
def rtype_method_rsplit(self, hop):
rstr = hop.args_r[0].repr
v_str = hop.inputarg(rstr.repr, 0)
if isinstance(hop.args_s[1], annmodel.SomeString):
v_chr = hop.inputarg(rstr.repr, 1)
fn = self.ll.ll_rsplit
else:
v_chr = hop.inputarg(rstr.char_repr, 1)
fn = self.ll.ll_rsplit_chr
if hop.nb_args == 3:
v_max = hop.inputarg(Signed, 2)
else:
v_max = hop.inputconst(Signed, -1)
try:
list_type = hop.r_result.lowleveltype.TO
except AttributeError:
list_type = hop.r_result.lowleveltype
cLIST = hop.inputconst(Void, list_type)
hop.exception_cannot_occur()
return hop.gendirectcall(fn, cLIST, v_str, v_chr, v_max)
def rtype_method_replace(self, hop):
rstr = hop.args_r[0].repr
if not (hop.args_r[1] == rstr.char_repr and hop.args_r[2] == rstr.char_repr):
raise TyperError('replace only works for char args')
v_str, v_c1, v_c2 = hop.inputargs(rstr.repr, rstr.char_repr, rstr.char_repr)
hop.exception_cannot_occur()
return hop.gendirectcall(self.ll.ll_replace_chr_chr, v_str, v_c1, v_c2)
def rtype_int(self, hop):
hop.has_implicit_exception(ValueError) # record that we know about it
string_repr = hop.args_r[0].repr
if hop.nb_args == 1:
v_str, = hop.inputargs(string_repr)
c_base = inputconst(Signed, 10)
hop.exception_is_here()
return hop.gendirectcall(self.ll.ll_int, v_str, c_base)
if not hop.args_r[1] == rint.signed_repr:
raise TyperError('base needs to be an int')
v_str, v_base = hop.inputargs(string_repr, rint.signed_repr)
hop.exception_is_here()
return hop.gendirectcall(self.ll.ll_int, v_str, v_base)
def rtype_unicode(self, hop):
if hop.args_s[0].is_constant():
# convertion errors occur during annotation, so cannot any more:
hop.exception_cannot_occur()
return hop.inputconst(hop.r_result, hop.s_result.const)
repr = hop.args_r[0].repr
v_str = hop.inputarg(repr, 0)
if repr == hop.r_result: # the argument is a unicode string already
hop.exception_cannot_occur()
return v_str
hop.exception_is_here()
return hop.gendirectcall(self.ll.ll_str2unicode, v_str)
def rtype_bytearray(self, hop):
hop.exception_is_here()
return hop.gendirectcall(self.ll.ll_str2bytearray,
hop.inputarg(hop.args_r[0].repr, 0))
def rtype_method_decode(self, hop):
if not hop.args_s[1].is_constant():
raise TyperError("encoding must be a constant")
encoding = hop.args_s[1].const
v_self = hop.inputarg(self.repr, 0)
hop.exception_is_here()
if encoding == 'ascii':
return hop.gendirectcall(self.ll.ll_str2unicode, v_self)
elif encoding == 'latin-1':
return hop.gendirectcall(self.ll_decode_latin1, v_self)
elif encoding == 'utf-8':
return hop.gendirectcall(self.ll_decode_utf8, v_self)
else:
raise TyperError("encoding %s not implemented" % (encoding, ))
def rtype_float(self, hop):
hop.has_implicit_exception(ValueError) # record that we know about it
string_repr = hop.args_r[0].repr
v_str, = hop.inputargs(string_repr)
hop.exception_is_here()
return hop.gendirectcall(self.ll.ll_float, v_str)
def ll_str(self, s):
if s:
return s
else:
return self.ll.ll_constant('None')
def rtype_getslice(r_str, hop):
string_repr = r_str.repr
v_str = hop.inputarg(string_repr, arg=0)
kind, vlist = hop.decompose_slice_args()
ll_fn = getattr(r_str.ll, 'll_stringslice_%s' % (kind,))
return hop.gendirectcall(ll_fn, v_str, *vlist)
def rtype_bltn_list(self, hop):
string_repr = hop.args_r[0].repr
if hop.r_result.LIST.ITEM != string_repr.lowleveltype.TO.chars.OF:
raise TyperError("list(str-or-unicode) returns a list of chars; "
"it cannot return a list of %r" % (
hop.r_result.LIST.ITEM,))
v_str, = hop.inputargs(string_repr)
cRESLIST = hop.inputconst(Void, hop.r_result.LIST)
hop.exception_is_here()
return hop.gendirectcall(self.ll.ll_string2list, cRESLIST, v_str)
class AbstractUnicodeRepr(AbstractStringRepr):
    # Repr for RPython unicode strings; base class shared by the
    # lltypesystem implementation.

    def rtype_method_upper(self, hop):
        # upper()/lower() are not supported on unicode at RPython level.
        raise TyperError("Cannot do toupper on unicode string")

    def rtype_method_lower(self, hop):
        raise TyperError("Cannot do tolower on unicode string")

    @jit.elidable
    def ll_encode_utf8(self, ll_s):
        """Encode a low-level unicode string to a UTF-8 byte string."""
        from rpython.rtyper.annlowlevel import hlunicode
        from rpython.rlib import runicode
        s = hlunicode(ll_s)
        assert s is not None
        errorhandler = runicode.default_unicode_error_encode
        # NB. keep the arguments in sync with annotator/unaryop.py
        bytes = runicode.unicode_encode_utf_8_elidable(
            s, len(s), 'strict', errorhandler, True)
        return self.ll.llstr(bytes)

    def rtype_method_encode(self, hop):
        # Typed u.encode(encoding): only constant encodings supported.
        if not hop.args_s[1].is_constant():
            raise TyperError("encoding must be constant")
        encoding = hop.args_s[1].const
        if encoding == "ascii" and self.lowleveltype == UniChar:
            expect = UniChar   # only for unichar.encode('ascii')
        else:
            expect = self.repr # must be a regular unicode string
        v_self = hop.inputarg(expect, 0)
        hop.exception_is_here()
        if encoding == "ascii":
            return hop.gendirectcall(self.ll_str, v_self)
        elif encoding == "latin-1":
            return hop.gendirectcall(self.ll_encode_latin1, v_self)
        elif encoding == 'utf-8':
            return hop.gendirectcall(self.ll_encode_utf8, v_self)
        else:
            raise TyperError("encoding %s not implemented" % (encoding, ))
class BaseCharReprMixin(object):
    # Rtyping behavior shared by the char and unichar reprs.

    def convert_const(self, value):
        # Constants must be single-character byte strings.
        if not isinstance(value, str) or len(value) != 1:
            raise TyperError("not a character: %r" % (value,))
        return value

    def get_ll_eq_function(self):
        return None

    def get_ll_hash_function(self):
        return self.ll.ll_char_hash

    get_ll_fasthash_function = get_ll_hash_function

    def rtype_len(_, hop):
        # A char always has length 1, known at rtyping time.
        return hop.inputconst(Signed, 1)

    def rtype_bool(_, hop):
        # A char is never empty, hence always truthy (None excluded).
        assert not hop.args_s[0].can_be_None
        return hop.inputconst(Bool, True)

    def rtype_ord(_, hop):
        repr = hop.args_r[0].char_repr
        vlist = hop.inputargs(repr)
        return hop.genop('cast_char_to_int', vlist, resulttype=Signed)

    def _rtype_method_isxxx(_, llfn, hop):
        # Common helper for the ch.isspace()/isdigit()/... family:
        # dispatch to the given low-level predicate.
        repr = hop.args_r[0].char_repr
        vlist = hop.inputargs(repr)
        hop.exception_cannot_occur()
        return hop.gendirectcall(llfn, vlist[0])

    def rtype_method_isspace(self, hop):
        return self._rtype_method_isxxx(self.ll.ll_char_isspace, hop)

    def rtype_method_isdigit(self, hop):
        return self._rtype_method_isxxx(self.ll.ll_char_isdigit, hop)

    def rtype_method_isalpha(self, hop):
        return self._rtype_method_isxxx(self.ll.ll_char_isalpha, hop)

    def rtype_method_isalnum(self, hop):
        return self._rtype_method_isxxx(self.ll.ll_char_isalnum, hop)

    def rtype_method_isupper(self, hop):
        return self._rtype_method_isxxx(self.ll.ll_char_isupper, hop)

    def rtype_method_islower(self, hop):
        return self._rtype_method_isxxx(self.ll.ll_char_islower, hop)
class AbstractCharRepr(BaseCharReprMixin, AbstractStringRepr):
    # Repr of a single byte character.

    def rtype_method_lower(self, hop):
        char_repr = hop.args_r[0].char_repr
        v_chr, = hop.inputargs(char_repr)
        hop.exception_cannot_occur()
        return hop.gendirectcall(self.ll.ll_lower_char, v_chr)

    def rtype_method_upper(self, hop):
        char_repr = hop.args_r[0].char_repr
        v_chr, = hop.inputargs(char_repr)
        hop.exception_cannot_occur()
        return hop.gendirectcall(self.ll.ll_upper_char, v_chr)

    def ll_str(self, ch):
        # str(ch): build a length-1 low-level string.
        return self.ll.ll_chr2str(ch)
class AbstractUniCharRepr(BaseCharReprMixin, AbstractStringRepr):
    # Repr of a single unicode character.

    def ll_str(self, ch):
        # xxx suboptimal, maybe
        return str(unicode(ch))

    def ll_unicode(self, ch):
        return unicode(ch)
class __extend__(annmodel.SomeString):
    # Map the byte-string annotation to its (unique) repr.
    def rtyper_makerepr(self, rtyper):
        from rpython.rtyper.lltypesystem.rstr import string_repr
        return string_repr

    def rtyper_makekey(self):
        return self.__class__,
class __extend__(annmodel.SomeUnicodeString):
    # Map the unicode-string annotation to its (unique) repr.
    def rtyper_makerepr(self, rtyper):
        from rpython.rtyper.lltypesystem.rstr import unicode_repr
        return unicode_repr

    def rtyper_makekey(self):
        return self.__class__,
class __extend__(annmodel.SomeChar):
    # Map the single-char annotation to its (unique) repr.
    def rtyper_makerepr(self, rtyper):
        from rpython.rtyper.lltypesystem.rstr import char_repr
        return char_repr

    def rtyper_makekey(self):
        return self.__class__,
class __extend__(annmodel.SomeUnicodeCodePoint):
    # Map the single-unichar annotation to its (unique) repr.
    def rtyper_makerepr(self, rtyper):
        from rpython.rtyper.lltypesystem.rstr import unichar_repr
        return unichar_repr

    def rtyper_makekey(self):
        return self.__class__,
class __extend__(pairtype(AbstractStringRepr, Repr)):
def rtype_mod((r_str, _), hop):
# for the case where the 2nd argument is a tuple, see the
# overriding rtype_mod() below
return r_str.ll.do_stringformat(hop, [(hop.args_v[1], hop.args_r[1])])
class __extend__(pairtype(AbstractStringRepr, FloatRepr)):
def rtype_mod(_, hop):
from rpython.rtyper.lltypesystem.rstr import do_stringformat
return do_stringformat(hop, [(hop.args_v[1], hop.args_r[1])])
class __extend__(pairtype(AbstractStringRepr, IntegerRepr)):
def rtype_getitem((r_str, r_int), hop, checkidx=False):
string_repr = r_str.repr
v_str, v_index = hop.inputargs(string_repr, Signed)
if checkidx:
if hop.args_s[1].nonneg:
llfn = r_str.ll.ll_stritem_nonneg_checked
else:
llfn = r_str.ll.ll_stritem_checked
else:
if hop.args_s[1].nonneg:
llfn = r_str.ll.ll_stritem_nonneg
else:
llfn = r_str.ll.ll_stritem
if checkidx:
hop.exception_is_here()
else:
hop.exception_cannot_occur()
return hop.gendirectcall(llfn, v_str, v_index)
def rtype_getitem_idx((r_str, r_int), hop):
return pair(r_str, r_int).rtype_getitem(hop, checkidx=True)
def rtype_mul((r_str, r_int), hop):
str_repr = r_str.repr
v_str, v_int = hop.inputargs(str_repr, Signed)
return hop.gendirectcall(r_str.ll.ll_str_mul, v_str, v_int)
rtype_inplace_mul = rtype_mul
class __extend__(pairtype(IntegerRepr, AbstractStringRepr)):
def rtype_mul((r_int, r_str), hop):
str_repr = r_str.repr
v_int, v_str = hop.inputargs(Signed, str_repr)
return hop.gendirectcall(r_str.ll.ll_str_mul, v_str, v_int)
rtype_inplace_mul = rtype_mul
class __extend__(pairtype(AbstractStringRepr, AbstractStringRepr)):
def rtype_add((r_str1, r_str2), hop):
str1_repr = r_str1.repr
str2_repr = r_str2.repr
if hop.s_result.is_constant():
return hop.inputconst(str1_repr, hop.s_result.const)
v_str1, v_str2 = hop.inputargs(str1_repr, str2_repr)
return hop.gendirectcall(r_str1.ll.ll_strconcat, v_str1, v_str2)
rtype_inplace_add = rtype_add
def rtype_eq((r_str1, r_str2), hop):
v_str1, v_str2 = hop.inputargs(r_str1.repr, r_str2.repr)
return hop.gendirectcall(r_str1.ll.ll_streq, v_str1, v_str2)
def rtype_ne((r_str1, r_str2), hop):
v_str1, v_str2 = hop.inputargs(r_str1.repr, r_str2.repr)
vres = hop.gendirectcall(r_str1.ll.ll_streq, v_str1, v_str2)
return hop.genop('bool_not', [vres], resulttype=Bool)
def rtype_lt((r_str1, r_str2), hop):
v_str1, v_str2 = hop.inputargs(r_str1.repr, r_str2.repr)
vres = hop.gendirectcall(r_str1.ll.ll_strcmp, v_str1, v_str2)
return hop.genop('int_lt', [vres, hop.inputconst(Signed, 0)],
resulttype=Bool)
def rtype_le((r_str1, r_str2), hop):
v_str1, v_str2 = hop.inputargs(r_str1.repr, r_str2.repr)
vres = hop.gendirectcall(r_str1.ll.ll_strcmp, v_str1, v_str2)
return hop.genop('int_le', [vres, hop.inputconst(Signed, 0)],
resulttype=Bool)
def rtype_ge((r_str1, r_str2), hop):
v_str1, v_str2 = hop.inputargs(r_str1.repr, r_str2.repr)
vres = hop.gendirectcall(r_str1.ll.ll_strcmp, v_str1, v_str2)
return hop.genop('int_ge', [vres, hop.inputconst(Signed, 0)],
resulttype=Bool)
def rtype_gt((r_str1, r_str2), hop):
v_str1, v_str2 = hop.inputargs(r_str1.repr, r_str2.repr)
vres = hop.gendirectcall(r_str1.ll.ll_strcmp, v_str1, v_str2)
return hop.genop('int_gt', [vres, hop.inputconst(Signed, 0)],
resulttype=Bool)
def rtype_contains((r_str1, r_str2), hop):
v_str1, v_str2 = hop.inputargs(r_str1.repr, r_str2.repr)
v_end = hop.gendirectcall(r_str1.ll.ll_strlen, v_str1)
vres = hop.gendirectcall(r_str1.ll.ll_find, v_str1, v_str2,
hop.inputconst(Signed, 0), v_end)
hop.exception_cannot_occur()
return hop.genop('int_ne', [vres, hop.inputconst(Signed, -1)],
resulttype=Bool)
class __extend__(pairtype(AbstractStringRepr, AbstractCharRepr),
pairtype(AbstractUnicodeRepr, AbstractUniCharRepr)):
def rtype_contains((r_str, r_chr), hop):
string_repr = r_str.repr
char_repr = r_chr.char_repr
v_str, v_chr = hop.inputargs(string_repr, char_repr)
hop.exception_cannot_occur()
return hop.gendirectcall(r_str.ll.ll_contains, v_str, v_chr)
class __extend__(pairtype(AbstractCharRepr, IntegerRepr),
pairtype(AbstractUniCharRepr, IntegerRepr)):
def rtype_mul((r_chr, r_int), hop):
char_repr = r_chr.char_repr
v_char, v_int = hop.inputargs(char_repr, Signed)
return hop.gendirectcall(r_chr.ll.ll_char_mul, v_char, v_int)
rtype_inplace_mul = rtype_mul
class __extend__(pairtype(IntegerRepr, AbstractCharRepr),
pairtype(IntegerRepr, AbstractUniCharRepr)):
def rtype_mul((r_int, r_chr), hop):
char_repr = r_chr.char_repr
v_int, v_char = hop.inputargs(Signed, char_repr)
return hop.gendirectcall(r_chr.ll.ll_char_mul, v_char, v_int)
rtype_inplace_mul = rtype_mul
class __extend__(pairtype(AbstractCharRepr, AbstractCharRepr)):
def rtype_eq(_, hop): return _rtype_compare_template(hop, 'eq')
def rtype_ne(_, hop): return _rtype_compare_template(hop, 'ne')
def rtype_lt(_, hop): return _rtype_compare_template(hop, 'lt')
def rtype_le(_, hop): return _rtype_compare_template(hop, 'le')
def rtype_gt(_, hop): return _rtype_compare_template(hop, 'gt')
def rtype_ge(_, hop): return _rtype_compare_template(hop, 'ge')
#Helper functions for comparisons

def _rtype_compare_template(hop, func):
    # Emit a 'char_<func>' comparison operation between two chars
    # (func is one of 'eq', 'ne', 'lt', 'le', 'gt', 'ge').
    from rpython.rtyper.lltypesystem.rstr import char_repr
    vlist = hop.inputargs(char_repr, char_repr)
    return hop.genop('char_' + func, vlist, resulttype=Bool)
class __extend__(AbstractUniCharRepr):

    def convert_const(self, value):
        # Accept str constants too, promoting them to unicode first.
        if isinstance(value, str):
            value = unicode(value)
        if not isinstance(value, unicode) or len(value) != 1:
            raise TyperError("not a unicode character: %r" % (value,))
        return value

    def get_ll_eq_function(self):
        return None

    def get_ll_hash_function(self):
        return self.ll.ll_unichar_hash

    get_ll_fasthash_function = get_ll_hash_function

    def rtype_ord(_, hop):
        from rpython.rtyper.lltypesystem.rstr import unichar_repr
        vlist = hop.inputargs(unichar_repr)
        return hop.genop('cast_unichar_to_int', vlist, resulttype=Signed)
class __extend__(pairtype(AbstractUniCharRepr, AbstractUniCharRepr),
pairtype(AbstractCharRepr, AbstractUniCharRepr),
pairtype(AbstractUniCharRepr, AbstractCharRepr)):
def rtype_eq(_, hop): return _rtype_unchr_compare_template(hop, 'eq')
def rtype_ne(_, hop): return _rtype_unchr_compare_template(hop, 'ne')
def rtype_lt(_, hop): return _rtype_unchr_compare_template_ord(hop, 'lt')
def rtype_le(_, hop): return _rtype_unchr_compare_template_ord(hop, 'le')
def rtype_gt(_, hop): return _rtype_unchr_compare_template_ord(hop, 'gt')
def rtype_ge(_, hop): return _rtype_unchr_compare_template_ord(hop, 'ge')
#Helper functions for comparisons

def _rtype_unchr_compare_template(hop, func):
    # Emit a 'unichar_<func>' comparison ('eq'/'ne') between unichars.
    from rpython.rtyper.lltypesystem.rstr import unichar_repr
    vlist = hop.inputargs(unichar_repr, unichar_repr)
    return hop.genop('unichar_' + func, vlist, resulttype=Bool)
def _rtype_unchr_compare_template_ord(hop, func):
    # Ordering comparison between chars/unichars of possibly mixed
    # kinds: both operands are cast to their integer code points first,
    # then compared with an 'int_<func>' operation.
    vlist = hop.inputargs(*hop.args_r)
    vlist2 = []
    for v in vlist:
        if v.concretetype == lltype.Char:
            v = hop.genop('cast_char_to_int', [v], resulttype=lltype.Signed)
        elif v.concretetype == lltype.UniChar:
            v = hop.genop('cast_unichar_to_int', [v], resulttype=lltype.Signed)
        else:
            assert 0, v.concretetype
        vlist2.append(v)
    return hop.genop('int_' + func, vlist2, resulttype=Bool)
#
# _________________________ Conversions _________________________
class __extend__(pairtype(AbstractCharRepr, AbstractStringRepr),
pairtype(AbstractUniCharRepr, AbstractUnicodeRepr)):
def convert_from_to((r_from, r_to), v, llops):
from rpython.rtyper.lltypesystem.rstr import (
string_repr, unicode_repr, char_repr, unichar_repr)
if (r_from == char_repr and r_to == string_repr) or\
(r_from == unichar_repr and r_to == unicode_repr):
return llops.gendirectcall(r_from.ll.ll_chr2str, v)
return NotImplemented
class __extend__(pairtype(AbstractStringRepr, AbstractCharRepr)):
def convert_from_to((r_from, r_to), v, llops):
from rpython.rtyper.lltypesystem.rstr import string_repr, char_repr
if r_from == string_repr and r_to == char_repr:
c_zero = inputconst(Signed, 0)
return llops.gendirectcall(r_from.ll.ll_stritem_nonneg, v, c_zero)
return NotImplemented
class __extend__(pairtype(AbstractCharRepr, AbstractUniCharRepr)):
def convert_from_to((r_from, r_to), v, llops):
v2 = llops.genop('cast_char_to_int', [v], resulttype=Signed)
return llops.genop('cast_int_to_unichar', [v2], resulttype=UniChar)
# ____________________________________________________________
#
# Iteration.
class AbstractStringIteratorRepr(IteratorRepr):
    # Repr for iterating over the characters of a string.

    def newiter(self, hop):
        # iter(s): build the low-level iterator structure.
        string_repr = hop.args_r[0].repr
        v_str, = hop.inputargs(string_repr)
        return hop.gendirectcall(self.ll_striter, v_str)

    def rtype_next(self, hop):
        v_iter, = hop.inputargs(self)
        hop.has_implicit_exception(StopIteration) # record that we know about it
        hop.exception_is_here()
        return hop.gendirectcall(self.ll_strnext, v_iter)
# ____________________________________________________________
#
# Low-level methods. These can be run for testing, but are meant to
# be direct_call'ed from rtyped flow graphs, which means that they will
# get flowed and annotated, mostly with SomePtr.
#
class AbstractLLHelpers(object):
    """Low-level string helpers.

    These can be run directly for testing, but are meant to be
    direct_call'ed from rtyped flow graphs (they get flowed and
    annotated themselves), so the code must stay RPython-compatible.
    """

    @staticmethod
    def ll_isdigit(s):
        # str.isdigit(): non-empty and every character is a digit.
        from rpython.rtyper.annlowlevel import hlstr
        s = hlstr(s)
        if not s:
            return False
        for ch in s:
            if not ch.isdigit():
                return False
        return True

    @staticmethod
    def ll_isalpha(s):
        # str.isalpha(): non-empty and every character is alphabetic.
        from rpython.rtyper.annlowlevel import hlstr
        s = hlstr(s)
        if not s:
            return False
        for ch in s:
            if not ch.isalpha():
                return False
        return True

    @staticmethod
    def ll_isalnum(s):
        # str.isalnum(): non-empty and every character is alphanumeric.
        from rpython.rtyper.annlowlevel import hlstr
        s = hlstr(s)
        if not s:
            return False
        for ch in s:
            if not ch.isalnum():
                return False
        return True

    @staticmethod
    def ll_char_isspace(ch):
        c = ord(ch)
        return c == 32 or (9 <= c <= 13) # c in (9, 10, 11, 12, 13, 32)

    @staticmethod
    def ll_char_isdigit(ch):
        # ASCII '0'..'9'
        c = ord(ch)
        return c <= 57 and c >= 48

    @staticmethod
    def ll_char_isalpha(ch):
        # ASCII 'a'..'z' or 'A'..'Z'
        c = ord(ch)
        if c >= 97:
            return c <= 122
        else:
            return 65 <= c <= 90

    @staticmethod
    def ll_char_isalnum(ch):
        # ASCII letter or digit
        c = ord(ch)
        if c >= 65:
            if c >= 97:
                return c <= 122
            else:
                return c <= 90
        else:
            return 48 <= c <= 57

    @staticmethod
    def ll_char_isupper(ch):
        # ASCII 'A'..'Z'
        c = ord(ch)
        return 65 <= c <= 90

    @staticmethod
    def ll_char_islower(ch):
        # ASCII 'a'..'z'
        c = ord(ch)
        return 97 <= c <= 122

    @staticmethod
    def ll_upper_char(ch):
        # ASCII-only uppercasing of one character.
        if 'a' <= ch <= 'z':
            ch = chr(ord(ch) - 32)
        return ch

    @staticmethod
    def ll_lower_char(ch):
        # ASCII-only lowercasing of one character.
        if 'A' <= ch <= 'Z':
            ch = chr(ord(ch) + 32)
        return ch

    @staticmethod
    def ll_char_hash(ch):
        # A char hashes to its code point.
        return ord(ch)

    @staticmethod
    def ll_unichar_hash(ch):
        # A unichar hashes to its code point.
        return ord(ch)

    @classmethod
    def ll_str_is_true(cls, s):
        # check if a string is True, allowing for None
        return bool(s) and cls.ll_strlen(s) != 0

    @classmethod
    def ll_stritem_nonneg_checked(cls, s, i):
        # s[i] with an upper-bound check; i is assumed non-negative.
        if i >= cls.ll_strlen(s):
            raise IndexError
        return cls.ll_stritem_nonneg(s, i)

    @classmethod
    def ll_stritem(cls, s, i):
        # s[i] with Python-style negative-index wrapping, no bounds check.
        if i < 0:
            i += cls.ll_strlen(s)
        return cls.ll_stritem_nonneg(s, i)

    @classmethod
    def ll_stritem_checked(cls, s, i):
        # s[i] with negative-index wrapping and full bounds check.
        length = cls.ll_strlen(s)
        if i < 0:
            i += length
        if i >= length or i < 0:
            raise IndexError
        return cls.ll_stritem_nonneg(s, i)

    @staticmethod
    def parse_fmt_string(fmt):
        # we support x, d, s, f, [r]
        # Returns a list mixing literal chunks (str) and 1-tuples holding
        # a conversion character, e.g. "a%d!" -> ['a', ('d',), '!'].
        # "%%" collapses to a literal '%'.
        it = iter(fmt)   # NOTE: it.next() below is Python 2 syntax
        r = []
        curstr = ''
        for c in it:
            if c == '%':
                f = it.next()
                if f == '%':
                    curstr += '%'
                    continue
                if curstr:
                    r.append(curstr)
                curstr = ''
                if f not in 'xdosrf':
                    raise TyperError("Unsupported formatting specifier: %r in %r" % (f, fmt))
                r.append((f,))
            else:
                curstr += c
        if curstr:
            r.append(curstr)
        return r

    @staticmethod
    def ll_float(ll_str):
        # float(s): manually strip leading/trailing ' ' characters (only
        # the plain space, not all whitespace), then parse the rest.
        # Raises ValueError when the string is empty or all spaces.
        from rpython.rtyper.annlowlevel import hlstr
        from rpython.rlib.rfloat import rstring_to_float
        s = hlstr(ll_str)
        assert s is not None
        n = len(s)
        beg = 0
        while beg < n:
            if s[beg] == ' ':
                beg += 1
            else:
                break
        if beg == n:
            raise ValueError
        end = n - 1
        while end >= 0:
            if s[end] == ' ':
                end -= 1
            else:
                break
        # presumably here to let the annotator prove the slice stop is
        # non-negative -- TODO confirm
        assert end >= 0
        return rstring_to_float(s[beg:end + 1])

    @classmethod
    def ll_splitlines(cls, LIST, ll_str, keep_newlines):
        # str.splitlines(): split on '\n', '\r' and the pair '\r\n';
        # when keep_newlines is true, the terminator stays on each line.
        from rpython.rtyper.annlowlevel import hlstr
        s = hlstr(ll_str)
        strlen = len(s)
        i = 0
        j = 0
        # The annotator makes sure this list is resizable.
        res = LIST.ll_newlist(0)
        while j < strlen:
            # advance i to the next line terminator (or end of string)
            while i < strlen and s[i] != '\n' and s[i] != '\r':
                i += 1
            eol = i
            if i < strlen:
                if s[i] == '\r' and i + 1 < strlen and s[i + 1] == '\n':
                    i += 2
                else:
                    i += 1
                if keep_newlines:
                    eol = i
            list_length = res.ll_length()
            res._ll_resize_ge(list_length + 1)
            item = cls.ll_stringslice_startstop(ll_str, j, eol)
            res.ll_setitem_fast(list_length, item)
            j = i
        # NOTE(review): the loop above only exits when j >= strlen, so
        # this trailing block looks unreachable -- confirm.
        if j < strlen:
            list_length = res.ll_length()
            res._ll_resize_ge(list_length + 1)
            item = cls.ll_stringslice_startstop(ll_str, j, strlen)
            res.ll_setitem_fast(list_length, item)
        return res
|
oblique-labs/pyVM
|
rpython/rtyper/rstr.py
|
Python
|
mit
| 35,786 | 0.000643 |
#-
# Copyright (c) 2011 Steven J. Murdoch
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
class test_raw_bltzall_lt_back(BaseBERITestCase):
    """Checks for a backwards-taken BLTZALL (branch on less than zero
    and link, likely -- per the mnemonic): instructions before the
    branch and in its delay slot run, the fall-through instruction is
    skipped, the target runs, and ra holds the expected return address.
    """

    def test_before_bltzall(self):
        self.assertRegisterNotEqual(self.MIPS.a0, 0, "instruction before bltzall missed")

    def test_bltzall_branch_delay(self):
        # fixed typo in the failure message: "brach-delay" -> "branch-delay"
        self.assertRegisterEqual(self.MIPS.a1, 2, "instruction in branch-delay slot missed")

    def test_bltzall_skipped(self):
        self.assertRegisterNotEqual(self.MIPS.a2, 3, "bltzall didn't branch")

    def test_bltzall_target(self):
        self.assertRegisterEqual(self.MIPS.a3, 4, "instruction at branch target didn't run")

    def test_bltzall_ra(self):
        self.assertRegisterEqual(self.MIPS.a4, self.MIPS.ra, "bltzall ra incorrect")
|
8l/beri
|
cheritest/trunk/tests/branch/test_raw_bltzall_lt_back.py
|
Python
|
apache-2.0
| 1,853 | 0.003238 |
# -*- coding: utf-8 -*-
"""
eve.io.mongo.geo
~~~~~~~~~~~~~~~~~~~
Geospatial functions and classes for mongo IO layer
:copyright: (c) 2017 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
class GeoJSON(dict):
    """Base class for GeoJSON objects: a dict that must contain exactly
    a ``type`` member plus one payload member.

    Raises TypeError if ``type`` is missing or extra members are present.
    """

    def __init__(self, json):
        try:
            self['type'] = json['type']
        except KeyError:
            # fixed typo: "compilant" -> "compliant"
            raise TypeError("Not compliant to GeoJSON")
        self.update(json)
        if len(self.keys()) != 2:
            raise TypeError("Not compliant to GeoJSON")

    def _correct_position(self, position):
        # A GeoJSON position is a list of numbers (lon, lat[, alt]).
        return isinstance(position, list) and \
            all(isinstance(pos, (int, float)) for pos in position)
class Geometry(GeoJSON):
    """A single concrete geometry: ``coordinates`` must be a list and
    the declared ``type`` must match the subclass name.
    """

    def __init__(self, json):
        super(Geometry, self).__init__(json)
        try:
            if not isinstance(self['coordinates'], list) or \
               self['type'] != self.__class__.__name__:
                raise TypeError
        except (KeyError, TypeError):
            # fixed typo: "compilant" -> "compliant"
            raise TypeError("Geometry not compliant to GeoJSON")
class GeometryCollection(GeoJSON):
    """GeoJSON GeometryCollection: validates each member geometry by
    dispatching on its declared ``type`` through the ``factories`` map
    (defined at module bottom).
    """

    def __init__(self, json):
        super(GeometryCollection, self).__init__(json)
        try:
            if not isinstance(self['geometries'], list):
                raise TypeError
            for geometry in self['geometries']:
                # unknown types raise KeyError here, converted below
                factory = factories[geometry["type"]]
                factory(geometry)
        except (KeyError, TypeError, AttributeError):
            # fixed typo: "compilant" -> "compliant"
            raise TypeError("Geometry not compliant to GeoJSON")
class Point(Geometry):
    """GeoJSON Point geometry: ``coordinates`` is a single position."""

    def __init__(self, json):
        super(Point, self).__init__(json)
        position = self['coordinates']
        if not self._correct_position(position):
            raise TypeError
class MultiPoint(GeoJSON):
    """GeoJSON MultiPoint: ``coordinates`` is a list of positions."""

    def __init__(self, json):
        super(MultiPoint, self).__init__(json)
        if not all(self._correct_position(p) for p in self["coordinates"]):
            raise TypeError
class LineString(GeoJSON):
    """GeoJSON LineString: ``coordinates`` is a list of positions."""

    def __init__(self, json):
        super(LineString, self).__init__(json)
        if not all(self._correct_position(p) for p in self["coordinates"]):
            raise TypeError
class MultiLineString(GeoJSON):
    """GeoJSON MultiLineString: a list of linestrings, each a list of
    positions."""

    def __init__(self, json):
        super(MultiLineString, self).__init__(json)
        for line in self["coordinates"]:
            if not all(self._correct_position(p) for p in line):
                raise TypeError
class Polygon(GeoJSON):
    """GeoJSON Polygon: a list of rings, each a list of positions."""

    def __init__(self, json):
        super(Polygon, self).__init__(json)
        for ring in self["coordinates"]:
            if not all(self._correct_position(p) for p in ring):
                raise TypeError
class MultiPolygon(GeoJSON):
    """GeoJSON MultiPolygon: a list of polygons, each a list of rings,
    each a list of positions."""

    def __init__(self, json):
        super(MultiPolygon, self).__init__(json)
        for polygon in self["coordinates"]:
            for ring in polygon:
                if not all(self._correct_position(p) for p in ring):
                    raise TypeError
# Map a GeoJSON "type" string to the class that validates it; used by
# GeometryCollection to dispatch member geometries.
factories = {
    geo_cls.__name__: geo_cls
    for geo_cls in (GeometryCollection, Point, MultiPoint, LineString,
                    MultiLineString, Polygon, MultiPolygon)
}
|
bcrochet/eve
|
eve/io/mongo/geo.py
|
Python
|
bsd-3-clause
| 3,352 | 0 |
from ChannelSelection import ChannelSelection, BouquetSelector, SilentBouquetSelector
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.ActionMap import NumberActionMap
from Components.Harddisk import harddiskmanager
from Components.Input import Input
from Components.Label import Label
from Components.MovieList import AUDIO_EXTENSIONS, MOVIE_EXTENSIONS, DVD_EXTENSIONS
from Components.PluginComponent import plugins
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Sources.Boolean import Boolean
from Components.config import config, ConfigBoolean, ConfigClock, ConfigText
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, ConfigSelection
from Components.VolumeControl import VolumeControl
from Components.Sources.StaticText import StaticText
from EpgSelection import EPGSelection
from Plugins.Plugin import PluginDescriptor
from Screen import Screen
from Screens import ScreenSaver
from Screens import Standby
from Screens.ChoiceBox import ChoiceBox
from Screens.Dish import Dish
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.MinuteInput import MinuteInput
from Screens.TimerSelection import TimerSelection
from Screens.PictureInPicture import PictureInPicture
import Screens.Standby
from Screens.SubtitleDisplay import SubtitleDisplay
from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive
from Screens.TimeDateInput import TimeDateInput
from Screens.UnhandledKey import UnhandledKey
from ServiceReference import ServiceReference, isPlayableForCur
from Tools import Notifications, ASCIItranslit
from Tools.Directories import fileExists, getRecordingFilename, moveFiles
from enigma import eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, \
iPlayableService, eServiceReference, eEPGCache, eActionMap
from time import time, localtime, strftime
import os
from bisect import insort
from sys import maxint
####key debug
# from keyids import KEYIDS
# from datetime import datetime
from RecordTimer import RecordTimerEntry, RecordTimer, findSafeRecordPath
# hack alert!
from Menu import MainMenu, mdom
def isStandardInfoBar(self):
    # Only the plain "InfoBar" screen itself qualifies; subclasses with
    # a different class name (e.g. the movie player) do not.
    name = self.__class__.__name__
    return name == "InfoBar"
def setResumePoint(session):
    """Remember the current play position of the playing service.

    Stores [timestamp, position, length] in resumePointCache keyed by
    the service reference string, evicts the least-recently-used entry
    when the cache exceeds 50 entries, and flushes the cache to disk at
    most once per hour.
    """
    global resumePointCache, resumePointCacheLast
    service = session.nav.getCurrentService()
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    if (service is not None) and (ref is not None): # and (ref.type != 1):
        # ref type 1 has its own memory...
        seek = service.seek()
        if seek:
            pos = seek.getPlayPosition()
            if not pos[0]:
                key = ref.toString()
                lru = int(time())
                l = seek.getLength()
                if l:
                    l = l[1]
                else:
                    l = None
                resumePointCache[key] = [lru, pos[1], l]
                if len(resumePointCache) > 50:
                    # Evict the entry with the OLDEST timestamp.  The
                    # previous code compared against the current time
                    # without tracking the minimum, so it evicted an
                    # arbitrary old entry instead of the true LRU one.
                    candidate = key
                    oldest = lru
                    for k, v in resumePointCache.items():
                        if v[0] < oldest:
                            candidate = k
                            oldest = v[0]
                    del resumePointCache[candidate]
                if lru - resumePointCacheLast > 3600:
                    saveResumePoints()
def delResumePoint(ref):
    """Forget any stored resume position for *ref*; flush the cache to
    disk if the last flush was more than an hour ago."""
    global resumePointCache, resumePointCacheLast
    # pop with a default is the dict-idiomatic "delete if present"
    resumePointCache.pop(ref.toString(), None)
    if int(time()) - resumePointCacheLast > 3600:
        saveResumePoints()
def getResumePoint(session):
    """Return the saved resume position for the currently playing
    service, refreshing its LRU timestamp, or None if unknown."""
    global resumePointCache
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    if ref is None or ref.type == 1:
        return None
    entry = resumePointCache.get(ref.toString())
    if entry is None:
        return None
    entry[0] = int(time()) # update LRU timestamp
    return entry[1]
def saveResumePoints():
global resumePointCache, resumePointCacheLast
import cPickle
try:
f = open('/home/root/resumepoints.pkl', 'wb')
cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL)
except Exception, ex:
print "[InfoBar] Failed to write resumepoints:", ex
resumePointCacheLast = int(time())
def loadResumePoints():
import cPickle
try:
return cPickle.load(open('/home/root/resumepoints.pkl', 'rb'))
except Exception, ex:
print "[InfoBar] Failed to load resumepoints:", ex
return {}
# In-memory cache of per-service resume positions, loaded at import time
# and flushed to disk at most once per hour (see saveResumePoints).
resumePointCache = loadResumePoints()
resumePointCacheLast = int(time())
class InfoBarDish:
    # Mixin that instantiates the Dish dialog (presumably shown while a
    # motorized dish is moving -- confirm in Screens.Dish).
    def __init__(self):
        self.dishDialog = self.session.instantiateDialog(Dish)
class InfoBarUnhandledKey:
    """Shows the "unhandled key" symbol when a key press is consumed by
    no action map at all.

    Two hooks bracket the whole action chain: actionA (highest
    priority) records every key event flag in ``self.flags``; actionB
    (lowest priority) records in ``self.uflags`` only the events no
    other action handled.  After the key-up ("break", flag 1) the two
    bitmasks are compared: equal means nothing handled the key, so the
    symbol is flashed for two seconds.  Events with flag 4 are ignored
    entirely (presumably a special event kind -- confirm in eActionMap).
    """
    def __init__(self):
        self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey)
        self.hideUnhandledKeySymbolTimer = eTimer()
        self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide)
        self.checkUnusedTimer = eTimer()
        self.checkUnusedTimer.callback.append(self.checkUnused)
        self.onLayoutFinish.append(self.unhandledKeyDialog.hide)
        eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio
        eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio
        self.flags = (1<<1)   # start "as if a break was seen" so the first make resets
        self.uflags = 0

    #this function is called on every keypress!
    def actionA(self, key, flag):
        ####key debug
        #try:
        #	print 'KEY: %s %s %s' % (key,(key_name for key_name,value in KEYIDS.items() if value==key).next(),getKeyDescription(key)[0])
        #except:
        #	try:
        #		print 'KEY: %s %s' % (key,(key_name for key_name,value in KEYIDS.items() if value==key).next()) # inverse dictionary lookup in KEYIDS
        #	except:
        #		print 'KEY: %s' % (key)
        self.unhandledKeyDialog.hide()
        if flag != 4:
            if self.flags & (1<<1):
                # previous key sequence ended with a break: start fresh
                self.flags = self.uflags = 0
            self.flags |= (1<<flag)
            if flag == 1: # break
                # defer the comparison until all lower-prio maps ran
                self.checkUnusedTimer.start(0, True)
        return 0

    #this function is only called when no other action has handled this key
    def actionB(self, key, flag):
        if flag != 4:
            self.uflags |= (1<<flag)

    def checkUnused(self):
        if self.flags == self.uflags:
            # every event of this key reached the lowest-prio hook: unhandled
            self.unhandledKeyDialog.show()
            self.hideUnhandledKeySymbolTimer.start(2000, True)
class InfoBarScreenSaver:
    """Mixin driving the screensaver: armed while the screen executes,
    triggered after the configured idle time, dismissed (and re-armed)
    by any key press."""
    def __init__(self):
        self.onExecBegin.append(self.__onExecBegin)
        self.onExecEnd.append(self.__onExecEnd)
        self.screenSaverTimer = eTimer()
        self.screenSaverTimer.callback.append(self.screensaverTimeout)
        self.screensaver = self.session.instantiateDialog(ScreenSaver.Screensaver)
        self.onLayoutFinish.append(self.__layoutFinished)

    def __layoutFinished(self):
        self.screensaver.hide()

    def __onExecBegin(self):
        self.ScreenSaverTimerStart()

    def __onExecEnd(self):
        if self.screensaver.shown:
            self.screensaver.hide()
            eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
        self.screenSaverTimer.stop()

    def ScreenSaverTimerStart(self):
        # NOTE: the local `time` shadows the imported time() function here.
        time = int(config.usage.screen_saver.value)
        flag = self.seekstate[0]
        if not flag:
            ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            if ref and not (hasattr(self.session, "pipshown") and self.session.pipshown):
                ref = ref.toString().split(":")
                # enable for radio services (type "2") or pure-audio files
                flag = ref[2] == "2" or os.path.splitext(ref[10])[1].lower() in AUDIO_EXTENSIONS
        if time and flag:
            self.screenSaverTimer.startLongTimer(time)
        else:
            self.screenSaverTimer.stop()

    def screensaverTimeout(self):
        if self.execing and not Standby.inStandby and not Standby.inTryQuitMainloop:
            self.hide()
            if hasattr(self, "pvrStateDialog"):
                self.pvrStateDialog.hide()
            self.screensaver.show()
            # grab all keys so any press dismisses the screensaver
            eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressScreenSaver)

    def keypressScreenSaver(self, key, flag):
        if flag:
            self.screensaver.hide()
            self.show()
            self.ScreenSaverTimerStart()
            eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
class SecondInfoBar(Screen):
    # Extended (second) infobar screen; skin is left to be resolved by
    # the skinning system (self.skin = None).
    def __init__(self, session):
        Screen.__init__(self, session)
        self.skin = None
class InfoBarShowHide(InfoBarScreenSaver):
    """ InfoBar show/hide control, accepts toggleShow and hide actions, might start
    fancy animations. """
    # visibility state machine values (HIDING/SHOWING are declared but
    # not set anywhere in this class)
    STATE_HIDDEN = 0
    STATE_HIDING = 1
    STATE_SHOWING = 2
    STATE_SHOWN = 3

    def __init__(self):
        self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] ,
            {
                "toggleShow": self.okButtonCheck,
                "hide": self.keyHide,
            }, 1) # lower prio to make it possible to override ok and cancel..
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.serviceStarted,
            })
        InfoBarScreenSaver.__init__(self)
        self.__state = self.STATE_SHOWN
        self.__locked = 0
        self.hideTimer = eTimer()
        self.hideTimer.callback.append(self.doTimerHide)
        self.hideTimer.start(5000, True)
        self.onShow.append(self.__onShow)
        self.onHide.append(self.__onHide)
        self.onShowHideNotifiers = []
        # only the standard InfoBar gets a second infobar dialog
        self.secondInfoBarScreen = ""
        if isStandardInfoBar(self):
            self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar)
            self.secondInfoBarScreen.show()
        self.onLayoutFinish.append(self.__layoutFinished)

    def __layoutFinished(self):
        if self.secondInfoBarScreen:
            self.secondInfoBarScreen.hide()

    def __onShow(self):
        self.__state = self.STATE_SHOWN
        for x in self.onShowHideNotifiers:
            x(True)
        self.startHideTimer()

    def doDimming(self):
        # one fade step; when dimming is disabled, jump straight to 0
        if config.usage.show_infobar_do_dimming.value:
            self.dimmed = self.dimmed-1
        else:
            self.dimmed = 0
        self.DimmingTimer.stop()
        self.doHide()

    def unDimming(self):
        self.unDimmingTimer.stop()
        self.doWriteAlpha(config.av.osd_alpha.value)

    def doWriteAlpha(self, value):
        # drive the OSD transparency through the STB proc interface
        if fileExists("/proc/stb/video/alpha"):
            f=open("/proc/stb/video/alpha","w")
            f.write("%i" % (value))
            f.close()

    def __onHide(self):
        self.unDimmingTimer = eTimer()
        self.unDimmingTimer.callback.append(self.unDimming)
        self.unDimmingTimer.start(100, True)
        self.__state = self.STATE_HIDDEN
        if self.secondInfoBarScreen:
            self.secondInfoBarScreen.hide()
        for x in self.onShowHideNotifiers:
            x(False)

    def keyHide(self):
        # EXIT while hidden may instead offer to disable PiP
        if self.__state == self.STATE_HIDDEN and self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value:
            if config.usage.pip_hideOnExit.value == "popup":
                self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
            else:
                self.hidePipOnExitCallback(True)
        elif config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
            self.toggleShow()
        elif self.__state == self.STATE_SHOWN:
            self.hide()

    def hidePipOnExitCallback(self, answer):
        if answer == True:
            self.showPiP()

    def connectShowHideNotifier(self, fnc):
        if not fnc in self.onShowHideNotifiers:
            self.onShowHideNotifiers.append(fnc)

    def disconnectShowHideNotifier(self, fnc):
        if fnc in self.onShowHideNotifiers:
            self.onShowHideNotifiers.remove(fnc)

    def serviceStarted(self):
        if self.execing:
            if config.usage.show_infobar_on_zap.value:
                self.doShow()

    def startHideTimer(self):
        # (re)arm the auto-hide timer unless lockShow() is in effect
        if self.__state == self.STATE_SHOWN and not self.__locked:
            self.hideTimer.stop()
            if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                idx = config.usage.show_second_infobar.index - 1
            else:
                idx = config.usage.infobar_timeout.index
            if idx:
                self.hideTimer.startLongTimer(idx)

    def doShow(self):
        self.show()
        self.startHideTimer()

    def doTimerHide(self):
        self.hideTimer.stop()
        #if self.__state == self.STATE_SHOWN:
        #	self.hide()
        # start the step-wise dimming instead of hiding immediately
        self.DimmingTimer = eTimer()
        self.DimmingTimer.callback.append(self.doDimming)
        self.DimmingTimer.start(70, True)
        self.dimmed = config.usage.show_infobar_dimming_speed.value

    def doHide(self):
        if self.__state != self.STATE_HIDDEN:
            # fade: alpha proportional to the remaining dimming steps
            self.doWriteAlpha((config.av.osd_alpha.value*self.dimmed/config.usage.show_infobar_dimming_speed.value))
            if self.dimmed > 0:
                self.DimmingTimer.start(70, True)
            else:
                self.DimmingTimer.stop()
                if self.__state == self.STATE_SHOWN:
                    self.hide()
                    if hasattr(self, "pvrStateDialog"):
                        try:
                            self.pvrStateDialog.hide()
                        except:
                            pass
        elif self.__state == self.STATE_HIDDEN and self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False

    def okButtonCheck(self):
        if config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
            self.openServiceList()
        else:
            self.toggleShow()

    def toggleShow(self):
        if self.__state == self.STATE_HIDDEN:
            self.showFirstInfoBar()
        else:
            self.showSecondInfoBar()

    def showSecondInfoBar(self):
        if isStandardInfoBar(self) and config.usage.show_second_infobar.value == "EPG":
            # "EPG" mode: the second press opens the EPG instead
            if not(hasattr(self, "hotkeyGlobal") and self.hotkeyGlobal("info") != 0):
                self.showDefaultEPG()
        elif self.secondInfoBarScreen and config.usage.show_second_infobar.value and not self.secondInfoBarScreen.shown:
            self.show()
            self.secondInfoBarScreen.show()
            self.startHideTimer()
        else:
            self.hide()
            self.hideTimer.stop()

    def showFirstInfoBar(self):
        if self.__state == self.STATE_HIDDEN or self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen and self.secondInfoBarScreen.hide()
            self.show()
        else:
            self.hide()
            self.hideTimer.stop()

    def lockShow(self):
        # keep the infobar visible until the matching unlockShow()
        self.__locked = self.__locked + 1
        if self.execing:
            self.show()
            self.hideTimer.stop()

    def unlockShow(self):
        self.__locked = self.__locked - 1
        if self.execing:
            self.startHideTimer()
class BufferIndicator(Screen):
    """Small overlay showing "Buffering N%" for streamed services,
    between service start and the actual start of playback."""
    def __init__(self, session):
        Screen.__init__(self, session)
        self["status"] = Label()
        # only display between evStart and evGstreamerPlayStarted
        self.mayShow = False
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evBuffering: self.bufferChanged,
                iPlayableService.evStart: self.__evStart,
                iPlayableService.evGstreamerPlayStarted: self.__evGstreamerPlayStarted,
            })

    def bufferChanged(self):
        if self.mayShow:
            service = self.session.nav.getCurrentService()
            info = service and service.info()
            if info:
                value = info.getInfo(iServiceInformation.sBuffer)
                # 100% means the buffer is full; nothing to report then
                if value and value != 100:
                    self["status"].setText(_("Buffering %d%%") % value)
                    if not self.shown:
                        self.show()

    def __evStart(self):
        self.mayShow = True
        self.hide()

    def __evGstreamerPlayStarted(self):
        self.mayShow = False
        self.hide()
class InfoBarBuffer():
    # Mixin wiring the BufferIndicator dialog into the infobar.
    def __init__(self):
        self.bufferScreen = self.session.instantiateDialog(BufferIndicator)
        self.bufferScreen.hide()
class NumberZap(Screen):
    """Dialog collecting a typed channel number.

    Each digit restarts a short timeout; on timeout, OK, or the fifth
    digit the dialog closes with (service, bouquet) resolved through
    the supplied search function (or with no arguments on cancel).
    """
    def quit(self):
        self.Timer.stop()
        self.close()

    def keyOK(self):
        self.Timer.stop()
        self.close(self.service, self.bouquet)

    def handleServiceName(self):
        # resolve the digits typed so far to a service and show its name
        if self.searchNumber:
            self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
            self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
            if not self.startBouquet:
                self.startBouquet = self.bouquet

    def keyBlue(self):
        # blue toggles between "first bouquet only" and global search
        self.Timer.start(3000, True)
        if self.searchNumber:
            if self.startBouquet == self.bouquet:
                self.service, self.bouquet = self.searchNumber(int(self["number"].getText()), firstBouquetOnly = True)
            else:
                self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
            self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()

    def keyNumberGlobal(self, number):
        # 1s inter-digit timeout; auto-confirm at 5 digits
        self.Timer.start(1000, True)
        self.numberString = self.numberString + str(number)
        self["number"].text = self["number_summary"].text = self.numberString
        self.field = self.numberString
        self.handleServiceName()
        if len(self.numberString) >= 5:
            self.keyOK()

    def __init__(self, session, number, searchNumberFunction = None):
        Screen.__init__(self, session)
        self.numberString = str(number)
        self.field = str(number)
        self.searchNumber = searchNumberFunction
        self.startBouquet = None

        self["channel"] = Label(_("Channel:"))
        self["number"] = Label(self.numberString)
        self["servicename"] = Label()
        self["channel_summary"] = StaticText(_("Channel:"))
        self["number_summary"] = StaticText(self.numberString)
        self["servicename_summary"] = StaticText()
        self.handleServiceName()

        self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ],
            {
                "cancel": self.quit,
                "ok": self.keyOK,
                "blue": self.keyBlue,
                "1": self.keyNumberGlobal,
                "2": self.keyNumberGlobal,
                "3": self.keyNumberGlobal,
                "4": self.keyNumberGlobal,
                "5": self.keyNumberGlobal,
                "6": self.keyNumberGlobal,
                "7": self.keyNumberGlobal,
                "8": self.keyNumberGlobal,
                "9": self.keyNumberGlobal,
                "0": self.keyNumberGlobal
            })

        # initial 3s timeout before the typed number is auto-confirmed
        self.Timer = eTimer()
        self.Timer.callback.append(self.keyOK)
        self.Timer.start(3000, True)
class InfoBarNumberZap:
    """ Handles an initial number for NumberZapping """
    def __init__(self):
        self["NumberActions"] = NumberActionMap( [ "NumberActions"],
            {
                "1": self.keyNumberGlobal,
                "2": self.keyNumberGlobal,
                "3": self.keyNumberGlobal,
                "4": self.keyNumberGlobal,
                "5": self.keyNumberGlobal,
                "6": self.keyNumberGlobal,
                "7": self.keyNumberGlobal,
                "8": self.keyNumberGlobal,
                "9": self.keyNumberGlobal,
                "0": self.keyNumberGlobal,
            })

    def keyNumberGlobal(self, number):
        # 0 is special: PiP swap if available, otherwise zap back in history
        if number == 0:
            if isinstance(self, InfoBarPiP) and self.pipHandles0Action():
                self.pipDoHandle0Action()
            elif len(self.servicelist.history) > 1:
                self.checkTimeshiftRunning(self.recallPrevService)
        else:
            # while timeshift is active, digits must not start a zap
            if self.has_key("TimeshiftActions") and self.timeshiftEnabled():
                ts = self.getTimeshift()
                if ts and ts.isTimeshiftActive():
                    return
            self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber)

    def recallPrevService(self, reply):
        if reply:
            self.servicelist.history_tv = []
            self.servicelist.history_radio = []
            self.servicelist.recallPrevService()

    def numberEntered(self, service = None, bouquet = None):
        # callback from the NumberZap dialog; no args means cancelled
        if service:
            self.selectAndStartService(service, bouquet)

    def searchNumberHelper(self, serviceHandler, num, bouquet):
        # linear scan of one bouquet for a matching channel number
        servicelist = serviceHandler.list(bouquet)
        if servicelist:
            serviceIterator = servicelist.getNext()
            while serviceIterator.valid():
                if num == serviceIterator.getChannelNum():
                    return serviceIterator
                serviceIterator = servicelist.getNext()
        return None

    def searchNumber(self, number, firstBouquetOnly=False, bouquet=None):
        """Find the service with channel *number*; returns (service, bouquet).
        Searches the current bouquet first, then (in multi-bouquet mode)
        walks all bouquets; non-playable matches are discarded."""
        bouquet = bouquet or self.servicelist.getRoot()
        service = None
        serviceHandler = eServiceCenter.getInstance()
        if not firstBouquetOnly:
            service = self.searchNumberHelper(serviceHandler, number, bouquet)
        if config.usage.multibouquet.value and not service:
            bouquet = self.servicelist.bouquet_root
            bouquetlist = serviceHandler.list(bouquet)
            if bouquetlist:
                bouquet = bouquetlist.getNext()
                while bouquet.valid():
                    if bouquet.flags & eServiceReference.isDirectory:
                        service = self.searchNumberHelper(serviceHandler, number, bouquet)
                        if service:
                            # markers/directories are not zappable unless numbered
                            playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker)
                            if not playable:
                                service = None
                            break
                        if config.usage.alternative_number_mode.value or firstBouquetOnly:
                            break
                    bouquet = bouquetlist.getNext()
        return service, bouquet

    def selectAndStartService(self, service, bouquet):
        if service and not service.flags & eServiceReference.isMarker:
            if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
                self.servicelist.clearPath()
                if self.servicelist.bouquet_root != bouquet:
                    self.servicelist.enterPath(self.servicelist.bouquet_root)
                self.servicelist.enterPath(bouquet)
            self.servicelist.setCurrentSelection(service) #select the service in servicelist
            self.servicelist.zap(enable_pipzap = True)
            self.servicelist.correctChannelNumber()
            self.servicelist.startRoot = None

    def zapToNumber(self, number):
        service, bouquet = self.searchNumber(number)
        self.selectAndStartService(service, bouquet)
# True until the very first channel selection was performed; cleared by
# InfoBarChannelSelection.firstRun() further down in this file.
config.misc.initialchannelselection = ConfigBoolean(default = True)
class InfoBarChannelSelection:
""" ChannelSelection - handles the channelSelection dialog and the initial
channelChange actions which open the channelSelection dialog """
def __init__(self):
#instantiate forever
self.servicelist = self.session.instantiateDialog(ChannelSelection)
if config.misc.initialchannelselection.value:
self.onShown.append(self.firstRun)
self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection",
{
"keyUp": (self.keyUpCheck, self.getKeyUpHelptext),
"keyDown": (self.keyDownCheck, self.getKeyDownHelpText),
"keyLeft": (self.keyLeftCheck, self.getKeyLeftHelptext),
"keyRight": (self.keyRightCheck, self.getKeyRightHelptext),
"historyBack": (self.historyBack, _("Switch to previous channel in history")),
"historyNext": (self.historyNext, _("Switch to next channel in history")),
"openServiceList": (self.openServiceList, _("Open service list")),
"openhistorybrowser": (self.openHistoryBrowser, _("open history browser")),
#"opendevicemanager": (self.openDeviceManager, _("open device manager")),
#"openaroraplugins": (self.openAroraPlugins, _("open Arora Browser")),
"showPluginBrowser": (self.showPluginBrowser, _("Show the plugin browser..")),
"openBouquetList": (self.openBouquetList, _("open bouquetlist")),
"keyChannelUp": (self.keyChannelUpCheck, self.getKeyChannelUpHelptext),
"keyChannelDown": (self.keyChannelDownCheck, self.getKeyChannelDownHelptext),
})
def openHistoryBrowser(self):
if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/ZapHistoryBrowser/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Zap-Historie Browser") or plugin.name == _("Zap-History Browser"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The Zap-History Browser plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openDeviceManager(self):
if fileExists("/usr/lib/enigma2/python/Plugins/SystemPlugins/DeviceManager/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Device Manager - Fast Mounted Remove"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The Device Manager plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openAroraPlugins(self):
if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/WebBrowser/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Web Browser"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The WebBrowser is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def showPluginBrowser(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
from Screens.PluginBrowser import PluginBrowser
self.session.open(PluginBrowser)
def showTvChannelList(self, zap=False):
self.servicelist.setModeTv()
if zap:
self.servicelist.zap()
def showRadioChannelList(self, zap=False):
self.servicelist.setModeRadio()
if zap:
self.servicelist.zap()
def firstRun(self):
self.onShown.remove(self.firstRun)
config.misc.initialchannelselection.value = False
config.misc.initialchannelselection.save()
self.switchChannelDown()
def historyBack(self):
self.checkTimeshiftRunning(self.historyBackCheckTimeshiftCallback)
def historyBackCheckTimeshiftCallback(self, answer):
if answer:
self.servicelist.historyBack()
def historyNext(self):
self.checkTimeshiftRunning(self.historyNextCheckTimeshiftCallback)
def historyNextCheckTimeshiftCallback(self, answer):
if answer:
self.servicelist.historyNext()
def openBouquetList(self):
self.servicelist.showFavourites()
self.session.execDialog(self.servicelist)
def keyUpCheck(self):
if config.usage.oldstyle_zap_controls.value:
self.zapDown()
elif config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volUp()
else:
self.switchChannelUp()
def keyDownCheck(self):
if config.usage.oldstyle_zap_controls.value:
self.zapUp()
elif config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volDown()
else:
self.switchChannelDown()
def keyLeftCheck(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volDown()
else:
self.switchChannelUp()
else:
self.zapUp()
def keyRightCheck(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volUp()
else:
self.switchChannelDown()
else:
self.zapDown()
def keyChannelUpCheck(self):
if config.usage.zap_with_ch_buttons.value:
self.zapDown()
else:
self.openServiceList()
def keyChannelDownCheck(self):
if config.usage.zap_with_ch_buttons.value:
self.zapUp()
else:
self.openServiceList()
def getKeyUpHelptext(self):
    """Return the help text for the UP key, mirroring keyUpCheck's dispatch."""
    if config.usage.oldstyle_zap_controls.value:
        return _("Switch to next channel")
    if config.usage.volume_instead_of_channelselection.value:
        return _("Volume up")
    value = _("Open service list")
    # idiomatic membership test ("x not in y"), consistent with switchChannelUp
    if "keep" not in config.usage.servicelist_cursor_behavior.value:
        value += " " + _("and select previous channel")
    return value
def getKeyDownHelpText(self):
    """Return the help text for the DOWN key, mirroring keyDownCheck's dispatch."""
    if config.usage.oldstyle_zap_controls.value:
        return _("Switch to previous channel")
    if config.usage.volume_instead_of_channelselection.value:
        return _("Volume down")
    value = _("Open service list")
    # idiomatic membership test ("x not in y"), consistent with switchChannelDown
    if "keep" not in config.usage.servicelist_cursor_behavior.value:
        value += " " + _("and select next channel")
    return value
def getKeyLeftHelptext(self):
    """Return the help text for the LEFT key, mirroring keyLeftCheck's dispatch."""
    if not config.usage.oldstyle_zap_controls.value:
        return _("Switch to previous channel")
    if config.usage.volume_instead_of_channelselection.value:
        return _("Volume down")
    value = _("Open service list")
    # idiomatic membership test ("x not in y"), consistent with switchChannelUp
    if "keep" not in config.usage.servicelist_cursor_behavior.value:
        value += " " + _("and select previous channel")
    return value
def getKeyRightHelptext(self):
    """Return the help text for the RIGHT key, mirroring keyRightCheck's dispatch."""
    if not config.usage.oldstyle_zap_controls.value:
        return _("Switch to next channel")
    if config.usage.volume_instead_of_channelselection.value:
        return _("Volume up")
    value = _("Open service list")
    # idiomatic membership test ("x not in y"), consistent with switchChannelDown
    if "keep" not in config.usage.servicelist_cursor_behavior.value:
        value += " " + _("and select next channel")
    return value
def getKeyChannelUpHelptext(self):
    """Return the help text for CHANNEL+.

    Uses a conditional expression instead of the fragile "cond and a or b"
    idiom, which would wrongly yield b if the translation of a were empty.
    """
    return _("Switch to next channel") if config.usage.zap_with_ch_buttons.value else _("Open service list")
def getKeyChannelDownHelptext(self):
    """Return the help text for CHANNEL-.

    Uses a conditional expression instead of the fragile "cond and a or b"
    idiom, which would wrongly yield b if the translation of a were empty.
    """
    return _("Switch to previous channel") if config.usage.zap_with_ch_buttons.value else _("Open service list")
def switchChannelUp(self):
    """Open the service list, moving the cursor up first unless "keep" behavior is configured."""
    if "keep" not in config.usage.servicelist_cursor_behavior.value:
        self.servicelist.moveUp()
    self.session.execDialog(self.servicelist)
def switchChannelDown(self):
    """Open the service list, moving the cursor down first unless "keep" behavior is configured."""
    if "keep" not in config.usage.servicelist_cursor_behavior.value:
        self.servicelist.moveDown()
    self.session.execDialog(self.servicelist)
def zapUp(self):
    """Zap to the previous playable service in the current bouquet.

    Inside a bouquet, scans upwards (optionally wrapping into the previous
    bouquet) skipping non-playable entries, and stops when a playable service
    is found or the scan arrives back at the starting service.
    """
    if self.servicelist.inBouquet():
        prev = self.servicelist.getCurrentSelection()
        if prev:
            prev = prev.toString()
            while True:
                if config.usage.quickzap_bouquet_change.value:
                    if self.servicelist.atBegin():
                        self.servicelist.prevBouquet()
                self.servicelist.moveUp()
                cur = self.servicelist.getCurrentSelection()
                if cur:
                    if self.servicelist.dopipzap:
                        # while PiP-zapping, playability is judged for the PiP decoder
                        isPlayable = self.session.pip.isPlayableForPipService(cur)
                    else:
                        isPlayable = isPlayableForCur(cur)
                # isPlayable is only read when cur is truthy, so the conditional
                # assignment above is safe
                if cur and (cur.toString() == prev or isPlayable):
                    break
    else:
        self.servicelist.moveUp()
    self.servicelist.zap(enable_pipzap = True)
def zapDown(self):
    """Zap to the next playable service in the current bouquet.

    Mirror image of zapUp: scans downwards (optionally wrapping into the next
    bouquet) skipping non-playable entries.
    """
    if self.servicelist.inBouquet():
        prev = self.servicelist.getCurrentSelection()
        if prev:
            prev = prev.toString()
            while True:
                if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
                    self.servicelist.nextBouquet()
                else:
                    self.servicelist.moveDown()
                cur = self.servicelist.getCurrentSelection()
                if cur:
                    if self.servicelist.dopipzap:
                        # while PiP-zapping, playability is judged for the PiP decoder
                        isPlayable = self.session.pip.isPlayableForPipService(cur)
                    else:
                        isPlayable = isPlayableForCur(cur)
                # isPlayable is only read when cur is truthy, so the conditional
                # assignment above is safe
                if cur and (cur.toString() == prev or isPlayable):
                    break
    else:
        self.servicelist.moveDown()
    self.servicelist.zap(enable_pipzap = True)
def openFavouritesList(self):
    """Open the service list positioned on the favourites view."""
    self.servicelist.showFavourites()
    self.openServiceList()
def openServiceList(self):
    """Open the channel selection dialog."""
    self.session.execDialog(self.servicelist)
class InfoBarMenu:
    """Handles the MENU action by opening the main menu."""

    def __init__(self):
        self["MenuActions"] = HelpableActionMap(self, "InfobarMenuActions",
            {
                "mainMenu": (self.mainMenu, _("Enter main menu...")),
            })
        self.session.infobar = None

    def mainMenu(self):
        """Open the main menu built from the menu XML definition."""
        print("loading mainmenu XML...")
        root = mdom.getroot()
        assert root.tag == "menu", "root element in menu must be 'menu'!"
        # Publish this infobar so screens opened from within the main menu
        # (currently the subservice selection) can reach the active infobar.
        self.session.infobar = self
        self.session.openWithCallback(self.mainMenuClosed, MainMenu, root)

    def mainMenuClosed(self, *val):
        """Forget the published infobar reference once the menu closes."""
        self.session.infobar = None
class InfoBarSimpleEventView:
    """ Opens the Eventview for now/next """

    def __init__(self):
        self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
            {
                "showEventInfo": (self.openEventView, _("Show event details")),
                "showEventInfoSingleEPG": (self.openEventView, _("Show event details")),
                "showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
            })

    def showEventInfoWhenNotVisible(self):
        """Open the event view if the infobar is shown, otherwise just show the infobar."""
        if self.shown:
            self.openEventView()
        else:
            self.toggleShow()
            return 1

    def openEventView(self):
        """Collect now/next events of the running service and open the simple event view.

        Bugfix: previously dereferenced service.info() unguarded, raising an
        AttributeError when no service was running; now guarded the same way as
        InfoBarEPG.getNowNext.
        """
        epglist = [ ]
        self.epglist = epglist
        service = self.session.nav.getCurrentService()
        ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        info = service and service.info()
        ptr = info and info.getEvent(0)
        if ptr:
            epglist.append(ptr)
        ptr = info and info.getEvent(1)
        if ptr:
            epglist.append(ptr)
        if epglist:
            self.session.open(EventViewSimple, epglist[0], ServiceReference(ref), self.eventViewCallback)

    def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
        """Toggle between the now and next event inside the open event view."""
        epglist = self.epglist
        if len(epglist) > 1:
            epglist[0], epglist[1] = epglist[1], epglist[0]  # idiomatic tuple swap
            setEvent(epglist[0])
class SimpleServicelist:
    """A minimal cursor over a flat list of services (used for single-service EPG)."""

    def __init__(self, services):
        self.services = services
        self.length = len(services)
        self.current = 0

    def selectService(self, service):
        """Position the cursor on *service*; return True when found.

        On an empty list the cursor becomes -1; on a miss it ends up just
        past the last entry.
        """
        if not self.length:
            self.current = -1
            return False
        for index, candidate in enumerate(self.services):
            if candidate.ref == service:
                self.current = index
                return True
        self.current = self.length
        return False

    def nextService(self):
        """Advance the cursor by one, wrapping to the first entry."""
        if not self.length:
            return
        advanced = self.current + 1
        self.current = advanced if advanced < self.length else 0

    def prevService(self):
        """Move the cursor back by one, wrapping to the last entry."""
        if not self.length:
            return
        receded = self.current - 1
        self.current = receded if receded > -1 else self.length - 1

    def currentService(self):
        """Return the service under the cursor, or None when empty/out of range."""
        if self.length and self.current < self.length:
            return self.services[self.current]
        return None
class InfoBarEPG:
    """ EPG - Opens an EPG list when the showEPGList action fires """

    def __init__(self):
        # is_now_next: last openEventView used the service's own now/next events
        self.is_now_next = False
        # dlg_stack: EPG-related dialogs currently open, innermost last
        self.dlg_stack = [ ]
        self.bouquetSel = None   # open bouquet selector dialog, if any
        self.eventView = None    # open event view dialog, if any
        self.epglist = []
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
            })

        self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
            {
                "showEventInfo": (self.showDefaultEPG, _("Show EPG...")),
                "showEventInfoSingleEPG": (self.showSingleEPG, _("Show single service EPG")),
                "showEventInfoMultiEPG": (self.showMultiEPG, _("Show multi channel EPG")),
                #"showCurrentEvent": (self.openEventView, _("Show Current Info...")),
                #"showSingleCurrentEPG": (self.openSingleServiceEPG, _("Show single channel EPG...")),
                #"showBouquetEPG": (self.openMultiServiceEPG, _("Show Bouquet EPG...")),
                ##"showEventInfoPlugin": (self.showEventInfoPlugins, _("List EPG functions...")),
                ##"showEventGuidePlugin": (self.showEventGuidePlugins, _("List EPG functions...")),
                "showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
            })

    def getEPGPluginList(self, getAll=False):
        """Return (name, callable, path) entries for EVENTINFO plugins plus built-in EPG views."""
        # plugins taking a 'selectedevent' argument are event-specific and excluded here
        pluginlist = [(p.name, boundFunction(self.runPlugin, p), p.path) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO) \
            if 'selectedevent' not in p.__call__.func_code.co_varnames] or []
        from Components.ServiceEventTracker import InfoBarCount
        if getAll or InfoBarCount == 1:
            pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG, "current_channel"))
            pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG, "multi_epg"))
            pluginlist.append((_("Current event EPG"), self.openEventView, "event_epg"))
        return pluginlist

    def showEventInfoWhenNotVisible(self):
        """Open the event view if the infobar is shown, otherwise just show the infobar."""
        if self.shown:
            self.openEventView()
        else:
            self.toggleShow()
            return 1

    def zapToService(self, service, preview = False, zapback = False):
        """Zap (or preview-zap) to *service*, entering self.epg_bouquet in the list first."""
        if self.servicelist.startServiceRef is None:
            # remember where we came from so a zap-back can restore it
            self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        if service is not None:
            if self.servicelist.getRoot() != self.epg_bouquet: #already in correct bouquet?
                self.servicelist.clearPath()
                if self.servicelist.bouquet_root != self.epg_bouquet:
                    self.servicelist.enterPath(self.servicelist.bouquet_root)
                self.servicelist.enterPath(self.epg_bouquet)
            self.servicelist.setCurrentSelection(service) #select the service in servicelist
        if not zapback or preview:
            self.servicelist.zap(enable_pipzap = True)
        if (self.servicelist.dopipzap or zapback) and not preview:
            self.servicelist.zapBack()
        if not preview:
            self.servicelist.startServiceRef = None
            self.servicelist.startRoot = None

    def getBouquetServices(self, bouquet):
        """Return the playable services of *bouquet* as a list of ServiceReference."""
        services = [ ]
        servicelist = eServiceCenter.getInstance().list(bouquet)
        if not servicelist is None:
            while True:
                service = servicelist.getNext()
                if not service.valid(): #check if end of list
                    break
                if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
                    continue
                services.append(ServiceReference(service))
        return services

    def openBouquetEPG(self, bouquet, withCallback=True):
        """Open the EPG selection for *bouquet* (tracked on dlg_stack when withCallback)."""
        services = self.getBouquetServices(bouquet)
        if services:
            self.epg_bouquet = bouquet
            if withCallback:
                self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, services, self.zapToService, None, self.changeBouquetCB))
            else:
                self.session.open(EPGSelection, services, self.zapToService, None, self.changeBouquetCB)

    def changeBouquetCB(self, direction, epg):
        """Bouquet+/- callback from the EPG: step the bouquet selector and reload services."""
        if self.bouquetSel:
            if direction > 0:
                self.bouquetSel.down()
            else:
                self.bouquetSel.up()
            bouquet = self.bouquetSel.getCurrent()
            services = self.getBouquetServices(bouquet)
            if services:
                self.epg_bouquet = bouquet
                epg.setServices(services)

    def closed(self, ret=False):
        """Dialog-closed callback: pop the dialog stack and cascade the close when requested."""
        closedScreen = self.dlg_stack.pop()
        if self.bouquetSel and closedScreen == self.bouquetSel:
            self.bouquetSel = None
        elif self.eventView and closedScreen == self.eventView:
            self.eventView = None
        if ret:
            dlgs=len(self.dlg_stack)
            if dlgs > 0:
                # propagate the close to the next dialog on the stack
                self.dlg_stack[dlgs-1].close(dlgs > 1)

    def openMultiServiceEPG(self, withCallback=True):
        """Open the multi-channel EPG, asking for a bouquet first if so configured."""
        bouquets = self.servicelist.getBouquetList()
        if bouquets is None:
            cnt = 0
        else:
            cnt = len(bouquets)
        if config.usage.multiepg_ask_bouquet.value:
            self.openMultiServiceEPGAskBouquet(bouquets, cnt, withCallback)
        else:
            self.openMultiServiceEPGSilent(bouquets, cnt, withCallback)

    def openMultiServiceEPGAskBouquet(self, bouquets, cnt, withCallback):
        """Show a bouquet selector when several bouquets exist, else open the single one."""
        if cnt > 1: # show bouquet list
            if withCallback:
                self.bouquetSel = self.session.openWithCallback(self.closed, BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
                self.dlg_stack.append(self.bouquetSel)
            else:
                self.bouquetSel = self.session.open(BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
        elif cnt == 1:
            self.openBouquetEPG(bouquets[0][1], withCallback)

    def openMultiServiceEPGSilent(self, bouquets, cnt, withCallback):
        """Open the multi-EPG for the current bouquet without asking the user."""
        root = self.servicelist.getRoot()
        rootstr = root.toCompareString()
        current = 0
        for bouquet in bouquets:
            if bouquet[1].toCompareString() == rootstr:
                break
            current += 1
        if current >= cnt:
            current = 0
        if cnt > 1: # create bouquet list for bouq+/-
            self.bouquetSel = SilentBouquetSelector(bouquets, True, self.servicelist.getBouquetNumOffset(root))
        if cnt >= 1:
            self.openBouquetEPG(root, withCallback)

    def changeServiceCB(self, direction, epg):
        """Service+/- callback from the single-service EPG: step the service cursor."""
        if self.serviceSel:
            if direction > 0:
                self.serviceSel.nextService()
            else:
                self.serviceSel.prevService()
            epg.setService(self.serviceSel.currentService())

    def SingleServiceEPGClosed(self, ret=False):
        # drop the service cursor created by openSingleServiceEPG
        self.serviceSel = None

    def openSingleServiceEPG(self):
        """Open the EPG for the currently selected service, with service+/- when in a bouquet."""
        ref = self.servicelist.getCurrentSelection()
        if ref:
            if self.servicelist.getMutableList(): # bouquet in channellist
                current_path = self.servicelist.getRoot()
                services = self.getBouquetServices(current_path)
                self.serviceSel = SimpleServicelist(services)
                if self.serviceSel.selectService(ref):
                    self.epg_bouquet = current_path
                    self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref, self.zapToService, serviceChangeCB=self.changeServiceCB)
                else:
                    self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref)
            else:
                self.session.open(EPGSelection, ref)

    def runPlugin(self, plugin):
        """Invoke an EVENTINFO plugin with session and service list."""
        plugin(session = self.session, servicelist = self.servicelist)

    def showEventInfoPlugins(self):
        """Offer the EPG plugin list in a ChoiceBox, or open the single-service EPG directly."""
        pluginlist = self.getEPGPluginList()
        if pluginlist:
            self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list=pluginlist, skin_name="EPGExtensionsList", reorderConfig="eventinfo_order")
        else:
            self.openSingleServiceEPG()

    def EventInfoPluginChosen(self, answer):
        # answer is (name, callable) from the ChoiceBox, or None on cancel
        if answer is not None:
            answer[1]()

    def openSimilarList(self, eventid, refstr):
        """Open an EPG list of events similar to *eventid* on service *refstr*."""
        self.session.open(EPGSelection, refstr, None, eventid)

    def getNowNext(self):
        """Refresh self.epglist with the now/next events of the running service."""
        epglist = [ ]
        service = self.session.nav.getCurrentService()
        info = service and service.info()
        ptr = info and info.getEvent(0)
        if ptr:
            epglist.append(ptr)
        ptr = info and info.getEvent(1)
        if ptr:
            epglist.append(ptr)
        self.epglist = epglist

    def __evEventInfoChanged(self):
        # keep an open event view in sync when the service's event info updates
        if self.is_now_next and len(self.dlg_stack) == 1:
            self.getNowNext()
            if self.eventView and self.epglist:
                self.eventView.setEvent(self.epglist[0])

    def showDefaultEPG(self):
        """Default EPG action: open the event view."""
        self.openEventView()

    def showSingleEPG(self):
        """Open the single-service EPG."""
        self.openSingleServiceEPG()

    def showMultiEPG(self):
        """Open the multi-channel EPG."""
        self.openMultiServiceEPG()

    def openEventView(self):
        """Open the event view for now/next, falling back to the EPG cache or the multi-EPG."""
        from Components.ServiceEventTracker import InfoBarCount
        if InfoBarCount > 1:
            # secondary infobar (e.g. PiP): plain event view, not stack-tracked
            epglist = [ ]
            self.epglist = epglist
            service = self.session.nav.getCurrentService()
            ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            info = service.info()
            ptr=info.getEvent(0)
            if ptr:
                epglist.append(ptr)
            ptr=info.getEvent(1)
            if ptr:
                epglist.append(ptr)
            if epglist:
                self.session.open(EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
        else:
            ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            self.getNowNext()
            epglist = self.epglist
            if not epglist:
                # no live now/next: query the EPG cache around "now" instead
                self.is_now_next = False
                epg = eEPGCache.getInstance()
                ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
                if ptr:
                    epglist.append(ptr)
                    ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
                    if ptr:
                        epglist.append(ptr)
            else:
                self.is_now_next = True
            if epglist:
                self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
                self.dlg_stack.append(self.eventView)
            if not epglist:
                print "no epg for the service avail.. so we show multiepg instead of eventinfo"
                self.openMultiServiceEPG(False)

    def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
        """Toggle between the now and next event inside the open event view."""
        epglist = self.epglist
        if len(epglist) > 1:
            tmp = epglist[0]
            epglist[0]=epglist[1]
            epglist[1]=tmp
            setEvent(epglist[0])
class InfoBarRdsDecoder:
    """provides RDS and Rass support/display"""

    def __init__(self):
        self.rds_display = self.session.instantiateDialog(RdsInfoDisplay)
        self.session.instantiateSummaryDialog(self.rds_display)
        self.rass_interactive = None   # open RassInteractive screen, if any
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evEnd: self.__serviceStopped,
                iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged
            })
        self["RdsActions"] = ActionMap(["InfobarRdsActions"],
            {
                "startRassInteractive": self.startRassInteractive
            },-1)
        # key stays disabled until the broadcast signals Rass interactive support
        self["RdsActions"].setEnabled(False)
        self.onLayoutFinish.append(self.rds_display.show)
        self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)

    def RassInteractivePossibilityChanged(self, state):
        """Enable/disable the Rass-interactive key as the broadcast (dis)offers it."""
        self["RdsActions"].setEnabled(state)

    def RassSlidePicChanged(self):
        """Show the updated Rass slide picture unless the interactive screen is open."""
        if not self.rass_interactive:
            service = self.session.nav.getCurrentService()
            decoder = service and service.rdsDecoder()
            if decoder:
                decoder.showRassSlidePicture()

    def __serviceStopped(self):
        """Close an open Rass interactive screen when the service ends."""
        if self.rass_interactive is not None:
            # clear the reference before closing so the close callback sees it gone
            rass_interactive = self.rass_interactive
            self.rass_interactive = None
            rass_interactive.close()

    def startRassInteractive(self):
        """Hide the RDS info display and open the Rass interactive screen."""
        self.rds_display.hide()
        self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)

    def RassInteractiveClosed(self, *val):
        """Restore the slide picture and RDS info display after the interactive screen closes."""
        if self.rass_interactive is not None:
            self.rass_interactive = None
            self.RassSlidePicChanged()
        self.rds_display.show()
class InfoBarSeek:
    """handles actions like seeking, pause"""

    # Seek-state tuples: (paused, fast-forward multiplier, slow-motion divisor, display string)
    SEEK_STATE_PLAY = (0, 0, 0, ">")
    SEEK_STATE_PAUSE = (1, 0, 0, "||")
    SEEK_STATE_EOF = (1, 0, 0, "END")

    def __init__(self, actionmap = "InfobarSeekActions"):
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
                iPlayableService.evStart: self.__serviceStarted,
                iPlayableService.evEOF: self.__evEOF,
                iPlayableService.evSOF: self.__evSOF,
            })
        self.fast_winding_hint_message_showed = False

        class InfoBarSeekActionMap(HelpableActionMap):
            """Action map that also understands dynamic "seek:<seconds>" and
            "seekdef:<key>" actions for relative jumps."""
            def __init__(self, screen, *args, **kwargs):
                HelpableActionMap.__init__(self, screen, *args, **kwargs)
                self.screen = screen

            def action(self, contexts, action):
                # print() form keeps identical output under Python 2 and stays
                # parseable by Python 3 tooling
                print("action: %s" % (action,))
                if action[:5] == "seek:":
                    time = int(action[5:])
                    self.screen.doSeekRelative(time * 90000)  # seconds -> 90 kHz PTS ticks
                    return 1
                elif action[:8] == "seekdef:":
                    key = int(action[8:])
                    time = (-config.seek.selfdefined_13.value, False, config.seek.selfdefined_13.value,
                        -config.seek.selfdefined_46.value, False, config.seek.selfdefined_46.value,
                        -config.seek.selfdefined_79.value, False, config.seek.selfdefined_79.value)[key-1]
                    self.screen.doSeekRelative(time * 90000)
                    return 1
                else:
                    return HelpableActionMap.action(self, contexts, action)

        self["SeekActions"] = InfoBarSeekActionMap(self, actionmap,
            {
                # NOTE(review): "Pauze" looks like a typo, but the string is also the
                # gettext msgid -- changing it would break existing translations.
                "playpauseService": (self.playpauseService, _("Pauze/Continue playback")),
                "pauseService": (self.pauseService, _("Pause playback")),
                "unPauseService": (self.unPauseService, _("Continue playback")),
                "okButton": (self.okButton, _("Continue playback")),
                "seekFwd": (self.seekFwd, _("Seek forward")),
                "seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")),
                "seekBack": (self.seekBack, _("Seek backward")),
                "seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")),
                "jumpPreviousMark": (self.seekPreviousMark, _("Jump to previous marked position")),
                "jumpNextMark": (self.seekNextMark, _("Jump to next marked position")),
            }, prio=-1)
            # give them a little more priority to win over color buttons
        self["SeekActions"].setEnabled(False)

        self.seekstate = self.SEEK_STATE_PLAY
        self.lastseekstate = self.SEEK_STATE_PLAY
        self.onPlayStateChanged = [ ]
        self.lockedBecauseOfSkipping = False
        self.__seekableStatusChanged()

    def makeStateForward(self, n):
        """Build a fast-forward seek state at speed *n*."""
        return (0, n, 0, ">> %dx" % n)

    def makeStateBackward(self, n):
        """Build a rewind seek state at speed *n*."""
        return (0, -n, 0, "<< %dx" % n)

    def makeStateSlowMotion(self, n):
        """Build a slow-motion seek state with divisor *n*."""
        return (0, 0, n, "/%d" % n)

    def isStateForward(self, state):
        return state[1] > 1

    def isStateBackward(self, state):
        return state[1] < 0

    def isStateSlowMotion(self, state):
        return state[1] == 0 and state[2] > 1

    def getHigher(self, n, lst):
        """Return the first element of *lst* greater than *n*, or False."""
        for x in lst:
            if x > n:
                return x
        return False

    def getLower(self, n, lst):
        """Return the last element of *lst* smaller than *n*, or False."""
        lst = lst[:]
        lst.reverse()
        for x in lst:
            if x < n:
                return x
        return False

    def showAfterSeek(self):
        """Show the infobar after a seek, if this screen supports show/hide."""
        if isinstance(self, InfoBarShowHide):
            self.doShow()

    def up(self):
        pass

    def down(self):
        pass

    def getSeek(self):
        """Return the service's seek interface, or None when not currently seekable."""
        service = self.session.nav.getCurrentService()
        if service is None:
            return None
        seek = service.seek()
        if seek is None or not seek.isCurrentlySeekable():
            return None
        return seek

    def isSeekable(self):
        """A standard infobar is only seekable while timeshift is enabled."""
        if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshiftEnabled()):
            return False
        return True

    def __seekableStatusChanged(self):
        # print "seekable status changed!"
        if not self.isSeekable():
            self["SeekActions"].setEnabled(False)
            # print "not seekable, return to play"
            self.setSeekState(self.SEEK_STATE_PLAY)
        else:
            self["SeekActions"].setEnabled(True)
            # print "seekable"

    def __serviceStarted(self):
        self.fast_winding_hint_message_showed = False
        self.setSeekState(self.SEEK_STATE_PLAY)
        self.__seekableStatusChanged()

    def setSeekState(self, state):
        """Apply *state* to the service, degrading gracefully when FF/SM is unsupported.

        Returns False when no service is running, True otherwise.
        """
        service = self.session.nav.getCurrentService()
        if service is None:
            return False
        if not self.isSeekable():
            # non-seekable services can only play or pause
            if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
                state = self.SEEK_STATE_PLAY
        pauseable = service.pause()
        if pauseable is None:
            print("not pauseable.")
            state = self.SEEK_STATE_PLAY
        self.seekstate = state
        if pauseable is not None:
            if self.seekstate[0]:
                print("resolved to PAUSE")
                pauseable.pause()
            elif self.seekstate[1]:
                if not pauseable.setFastForward(self.seekstate[1]):
                    print("resolved to FAST FORWARD")
                else:
                    self.seekstate = self.SEEK_STATE_PLAY
                    print("FAST FORWARD not possible: resolved to PLAY")
            elif self.seekstate[2]:
                if not pauseable.setSlowMotion(self.seekstate[2]):
                    print("resolved to SLOW MOTION")
                else:
                    self.seekstate = self.SEEK_STATE_PAUSE
                    print("SLOW MOTION not possible: resolved to PAUSE")
            else:
                print("resolved to PLAY")
                pauseable.unpause()
        for c in self.onPlayStateChanged:
            c(self.seekstate)
        self.checkSkipShowHideLock()
        if hasattr(self, "ScreenSaverTimerStart"):
            self.ScreenSaverTimerStart()
        return True

    def playpauseService(self):
        """Toggle between play and pause."""
        if self.seekstate != self.SEEK_STATE_PLAY:
            self.unPauseService()
        else:
            self.pauseService()

    def okButton(self):
        """OK resumes when paused/seeking; unhandled (returns 0) while playing."""
        if self.seekstate == self.SEEK_STATE_PLAY:
            return 0
        elif self.seekstate == self.SEEK_STATE_PAUSE:
            self.pauseService()
        else:
            self.unPauseService()

    def pauseService(self):
        """Pause playback; when already paused, act per config.seek.on_pause."""
        if self.seekstate == self.SEEK_STATE_PAUSE:
            if config.seek.on_pause.value == "play":
                self.unPauseService()
            elif config.seek.on_pause.value == "step":
                self.doSeekRelative(1)   # single-frame step
            elif config.seek.on_pause.value == "last":
                self.setSeekState(self.lastseekstate)
                self.lastseekstate = self.SEEK_STATE_PLAY
        else:
            if self.seekstate != self.SEEK_STATE_EOF:
                self.lastseekstate = self.seekstate
            self.setSeekState(self.SEEK_STATE_PAUSE)

    def unPauseService(self):
        """Resume normal playback; unhandled (returns 0) if already playing."""
        print("unpause")
        if self.seekstate == self.SEEK_STATE_PLAY:
            return 0
        self.setSeekState(self.SEEK_STATE_PLAY)

    def doSeek(self, pts):
        """Seek to an absolute position given in 90 kHz PTS ticks."""
        seekable = self.getSeek()
        if seekable is None:
            return
        seekable.seekTo(pts)

    def doSeekRelative(self, pts):
        """Seek relative by *pts* ticks, leaving EOF state first if necessary."""
        seekable = self.getSeek()
        if seekable is None:
            return
        prevstate = self.seekstate
        if self.seekstate == self.SEEK_STATE_EOF:
            if prevstate == self.SEEK_STATE_PAUSE:
                self.setSeekState(self.SEEK_STATE_PAUSE)
            else:
                self.setSeekState(self.SEEK_STATE_PLAY)
        seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
        if abs(pts) > 100 and config.usage.show_infobar_on_skip.value:
            self.showAfterSeek()

    def seekFwd(self):
        """Step the seek state one notch forward (FF faster, rewind slower, SM slower)."""
        seek = self.getSeek()
        if seek and not (seek.isCurrentlySeekable() & 2):
            if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
                self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
                self.fast_winding_hint_message_showed = True
                return
            return 0 # trade as unhandled action
        if self.seekstate == self.SEEK_STATE_PLAY:
            self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
        elif self.seekstate == self.SEEK_STATE_PAUSE:
            if len(config.seek.speeds_slowmotion.value):
                self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.value[-1]))
            else:
                self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
        elif self.seekstate == self.SEEK_STATE_EOF:
            pass
        elif self.isStateForward(self.seekstate):
            speed = self.seekstate[1]
            if self.seekstate[2]:
                speed /= self.seekstate[2]
            speed = self.getHigher(speed, config.seek.speeds_forward.value) or config.seek.speeds_forward.value[-1]
            self.setSeekState(self.makeStateForward(speed))
        elif self.isStateBackward(self.seekstate):
            speed = -self.seekstate[1]
            if self.seekstate[2]:
                speed /= self.seekstate[2]
            speed = self.getLower(speed, config.seek.speeds_backward.value)
            if speed:
                self.setSeekState(self.makeStateBackward(speed))
            else:
                self.setSeekState(self.SEEK_STATE_PLAY)
        elif self.isStateSlowMotion(self.seekstate):
            speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.value) or config.seek.speeds_slowmotion.value[0]
            self.setSeekState(self.makeStateSlowMotion(speed))

    def seekBack(self):
        """Step the seek state one notch backward (FF slower, rewind faster, SM slower)."""
        seek = self.getSeek()
        if seek and not (seek.isCurrentlySeekable() & 2):
            if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
                self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
                self.fast_winding_hint_message_showed = True
                return
            return 0 # trade as unhandled action
        seekstate = self.seekstate
        if seekstate == self.SEEK_STATE_PLAY:
            self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
        elif seekstate == self.SEEK_STATE_EOF:
            self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
            self.doSeekRelative(-6)
        elif seekstate == self.SEEK_STATE_PAUSE:
            self.doSeekRelative(-1)
        elif self.isStateForward(seekstate):
            speed = seekstate[1]
            if seekstate[2]:
                speed /= seekstate[2]
            speed = self.getLower(speed, config.seek.speeds_forward.value)
            if speed:
                self.setSeekState(self.makeStateForward(speed))
            else:
                self.setSeekState(self.SEEK_STATE_PLAY)
        elif self.isStateBackward(seekstate):
            speed = -seekstate[1]
            if seekstate[2]:
                speed /= seekstate[2]
            speed = self.getHigher(speed, config.seek.speeds_backward.value) or config.seek.speeds_backward.value[-1]
            self.setSeekState(self.makeStateBackward(speed))
        elif self.isStateSlowMotion(seekstate):
            speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.value)
            if speed:
                self.setSeekState(self.makeStateSlowMotion(speed))
            else:
                self.setSeekState(self.SEEK_STATE_PAUSE)

    def seekFwdManual(self):
        """Ask for a number of minutes and seek forward by that amount."""
        self.session.openWithCallback(self.fwdSeekTo, MinuteInput)

    def fwdSeekTo(self, minutes):
        print("Seek %s minutes forward" % minutes)
        self.doSeekRelative(minutes * 60 * 90000)

    def seekBackManual(self):
        """Ask for a number of minutes and seek backward by that amount."""
        self.session.openWithCallback(self.rwdSeekTo, MinuteInput)

    def rwdSeekTo(self, minutes):
        print("rwdSeekTo")
        self.doSeekRelative(-minutes * 60 * 90000)

    def checkSkipShowHideLock(self):
        """Lock the infobar visible while skipping, unlock on return to plain play."""
        wantlock = self.seekstate != self.SEEK_STATE_PLAY
        if config.usage.show_infobar_on_skip.value:
            if self.lockedBecauseOfSkipping and not wantlock:
                self.unlockShow()
                self.lockedBecauseOfSkipping = False
            if wantlock and not self.lockedBecauseOfSkipping:
                self.lockShow()
                self.lockedBecauseOfSkipping = True

    def calcRemainingTime(self):
        """Return the remaining playback time in seconds corrected for the current
        speed, 0 when at/after the end, or False when it cannot be determined."""
        seekable = self.getSeek()
        if seekable is not None:
            # renamed from "len" -- don't shadow the builtin
            length = seekable.getLength()
            try:
                tmp = self.cueGetEndCutPosition()
                if tmp:
                    length = (False, tmp)
            except Exception:
                # cueGetEndCutPosition only exists with InfoBarCueSheetSupport
                pass
            pos = seekable.getPlayPosition()
            speednom = self.seekstate[1] or 1
            speedden = self.seekstate[2] or 1
            if not length[0] and not pos[0]:
                if length[1] <= pos[1]:
                    return 0
                time = (length[1] - pos[1])*speedden/(90*speednom)
                return time
        return False

    def __evEOF(self):
        if self.seekstate == self.SEEK_STATE_EOF:
            return

        # if we are seeking forward, we try to end up ~1s before the end, and pause there.
        seekstate = self.seekstate
        if self.seekstate != self.SEEK_STATE_PAUSE:
            self.setSeekState(self.SEEK_STATE_EOF)

        if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking
            seekable = self.getSeek()
            if seekable is not None:
                seekable.seekTo(-1)
        if seekstate == self.SEEK_STATE_PLAY: # regular EOF
            self.doEofInternal(True)
        else:
            self.doEofInternal(False)

    def doEofInternal(self, playing):
        pass # Defined in subclasses

    def __evSOF(self):
        self.setSeekState(self.SEEK_STATE_PLAY)
        self.doSeek(0)

    # This is needed, because some Mediaplayer use InfoBarSeek but not InfoBarCueSheetSupport
    def seekPreviousMark(self):
        if isinstance(self, InfoBarCueSheetSupport):
            self.jumpPreviousMark()

    def seekNextMark(self):
        if isinstance(self, InfoBarCueSheetSupport):
            self.jumpNextMark()
from Screens.PVRState import PVRState, TimeshiftState
class InfoBarPVRState:
    """Shows a small PVR state dialog (play/pause/FF indicator) tied to the seek state."""

    def __init__(self, screen=PVRState, force_show = False):
        self.onPlayStateChanged.append(self.__playStateChanged)
        self.pvrStateDialog = self.session.instantiateDialog(screen)
        self.onShow.append(self._mayShow)
        self.onHide.append(self.pvrStateDialog.hide)
        self.force_show = force_show

    def _mayShow(self):
        """Show the state dialog while the infobar is visible and we are not in plain play."""
        if self.shown and self.seekstate != self.SEEK_STATE_PLAY:
            self.pvrStateDialog.show()

    def __playStateChanged(self, state):
        # state[3] is the display string of the seek-state tuple
        self.pvrStateDialog["state"].setText(state[3])
        # On return to plain PLAY, hide the dialog unless the infobar will be
        # displayed anyway or this instance is forced to keep it visible.
        if not config.usage.show_infobar_on_skip.value and self.seekstate == self.SEEK_STATE_PLAY and not self.force_show:
            self.pvrStateDialog.hide()
        else:
            self._mayShow()
class TimeshiftLive(Screen):
    """Marker screen shown while timeshift is enabled but live TV is being watched."""
    def __init__(self, session):
        Screen.__init__(self, session)
class InfoBarTimeshiftState(InfoBarPVRState):
    """PVR state display specialized for timeshift: shows either the timeshift
    state dialog or a "timeshift live" marker, with an auto-hide timer."""

    def __init__(self):
        InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show = True)
        self.timeshiftLiveScreen = self.session.instantiateDialog(TimeshiftLive)
        self.onHide.append(self.timeshiftLiveScreen.hide)
        # hide the live marker whenever the second infobar appears
        self.secondInfoBarScreen and self.secondInfoBarScreen.onShow.append(self.timeshiftLiveScreen.hide)
        self.timeshiftLiveScreen.hide()
        self.__hideTimer = eTimer()
        self.__hideTimer.callback.append(self.__hideTimeshiftState)
        self.onFirstExecBegin.append(self.pvrStateDialog.show)

    def _mayShow(self):
        """Choose between the state dialog and the live marker, arming the hide timer."""
        if self.timeshiftEnabled():
            if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                self.secondInfoBarScreen.hide()
            if self.timeshiftActivated():
                self.pvrStateDialog.show()
                self.timeshiftLiveScreen.hide()
            elif self.showTimeshiftState:
                # one-shot flag set by the timeshift machinery
                self.pvrStateDialog.hide()
                self.timeshiftLiveScreen.show()
                self.showTimeshiftState = False
            if self.seekstate == self.SEEK_STATE_PLAY and config.usage.infobar_timeout.index and (self.pvrStateDialog.shown or self.timeshiftLiveScreen.shown):
                self.__hideTimer.startLongTimer(config.usage.infobar_timeout.index)
        else:
            self.__hideTimeshiftState()

    def __hideTimeshiftState(self):
        """Hide both timeshift-related dialogs (also the hide-timer callback)."""
        self.pvrStateDialog.hide()
        self.timeshiftLiveScreen.hide()
class InfoBarShowMovies:
    """Action map that opens an externally provided "movie list" on up/down/movieList."""
    # i don't really like this class.
    # it calls a not further specified "movie list" on up/down/movieList,
    # so this is not more than an action map
    def __init__(self):
        self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions",
            {
                "movieList": (self.showMovies, _("Open the movie list")),
                "up": (self.up, _("Open the movie list")),
                "down": (self.down, _("Open the movie list"))
            })
# InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE!
# Hrmf.
#
# Timeshift works the following way:
# demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions"
# - normal playback TUNER unused PLAY enable disable disable
# - user presses "yellow" button. FILE record PAUSE enable disable enable
# - user presses pause again FILE record PLAY enable disable enable
# - user fast forwards FILE record FF enable disable enable
# - end of timeshift buffer reached TUNER record PLAY enable enable disable
# - user backwards FILE record BACK # !! enable disable enable
#
# in other words:
# - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"),
# freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift")
# now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftEnableActions" are disabled.
# - the user can now PVR around
# - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled")
# the service loses its "seekable" state. It can still be paused, but just to activate timeshift right
# after!
# the seek actions will be disabled, but the timeshiftActivateActions will be enabled
# - if the user rewinds, or press pause, timeshift will be activated again
# note that a timeshift can be enabled ("recording") and
# activated (currently time-shifting).
class InfoBarTimeshift:
    """Timeshift support for the infobar.

    Manages enabling/disabling the timeshift recording, activating playback
    from the timeshift buffer, and optionally saving the timeshift file as a
    permanent recording (see the commentary block above the class for the
    state machine).
    """
    # Class-level kill switch: when True, getTimeshift() always returns None,
    # which disables all timeshift functionality.
    ts_disabled = False
    def __init__(self):
        self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions",
            {
                "timeshiftStart": (self.startTimeshift, _("Start timeshift")),  # the "yellow key"
                "timeshiftStop": (self.stopTimeshift, _("Stop timeshift"))      # currently undefined :), probably 'TV'
            }, prio=1)
        self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"],
            {
                "timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key"
                "timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key"
            }, prio=-1) # priority over record
        self["TimeshiftActivateActions"].setEnabled(False)
        # Timer used to defer the rewind after jumping near the buffer end.
        self.ts_rewind_timer = eTimer()
        self.ts_rewind_timer.callback.append(self.rewindService)
        # Timer for the configurable auto-start of timeshift after a zap.
        self.ts_start_delay_timer = eTimer()
        self.ts_start_delay_timer.callback.append(self.startTimeshiftWithoutPause)
        # Timer that fires at the end of the current event when the user chose
        # "save timeshift only for current event".
        self.ts_current_event_timer = eTimer()
        self.ts_current_event_timer.callback.append(self.saveTimeshiftFileForEvent)
        self.save_timeshift_file = False              # save buffer as recording on stop?
        self.timeshift_was_activated = False          # playback from buffer happened at least once
        self.showTimeshiftState = False
        self.save_timeshift_only_current_event = False
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.__serviceStarted,
                iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
                iPlayableService.evEnd: self.__serviceEnd
            })
    def getTimeshift(self):
        """Return the current service's timeshift interface, or None when
        timeshift is globally disabled or the service has none."""
        if self.ts_disabled:
            return None
        service = self.session.nav.getCurrentService()
        return service and service.timeshift()
    def timeshiftEnabled(self):
        # "Enabled" means the timeshift buffer is being recorded.
        ts = self.getTimeshift()
        return ts and ts.isTimeshiftEnabled()
    def timeshiftActivated(self):
        # "Activated" means playback currently comes from the buffer.
        ts = self.getTimeshift()
        return ts and ts.isTimeshiftActive()
    def startTimeshift(self, pauseService = True):
        """Enable timeshift recording; with pauseService the picture is frozen
        (classic yellow-button behaviour). Returns 0 on failure."""
        print "enable timeshift"
        ts = self.getTimeshift()
        if ts is None:
            # Only bother the user with an error box for an explicit request,
            # not for the silent delayed auto-start.
            if not pauseService and not int(config.usage.timeshift_start_delay.value):
                self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, simple = True)
            print "no ts interface"
            return 0
        if ts.isTimeshiftEnabled():
            print "hu, timeshift already enabled?"
        else:
            if not ts.startTimeshift():
                # we remove the "relative time" for now.
                #self.pvrStateDialog["timeshift"].setRelative(time.time())
                if pauseService:
                    # PAUSE.
                    #self.setSeekState(self.SEEK_STATE_PAUSE)
                    self.activateTimeshiftEnd(False)
                    self.showTimeshiftState = True
                else:
                    self.showTimeshiftState = False
                # enable the "TimeshiftEnableActions", which will override
                # the startTimeshift actions
                self.__seekableStatusChanged()
                # get current timeshift filename and calculate new
                self.save_timeshift_file = False
                self.save_timeshift_in_movie_dir = False
                self.setCurrentEventTimer()
                self.current_timeshift_filename = ts.getTimeshiftFilename()
                self.new_timeshift_filename = self.generateNewTimeshiftFileName()
            else:
                print "timeshift failed"
    def startTimeshiftWithoutPause(self):
        # Callback of ts_start_delay_timer: auto-start without freezing.
        self.startTimeshift(False)
    def stopTimeshift(self):
        """Stop timeshift, asking for confirmation unless auto-start is
        configured (then just drop back to live)."""
        ts = self.getTimeshift()
        if ts and ts.isTimeshiftEnabled():
            if int(config.usage.timeshift_start_delay.value):
                ts.switchToLive()
            else:
                self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback)
        else:
            return 0
    def stopTimeshiftcheckTimeshiftRunningCallback(self, answer):
        # Confirmation callback: actually tear down timeshift when confirmed.
        ts = self.getTimeshift()
        if answer and ts:
            ts.stopTimeshift()
            self.pvrStateDialog.hide()
            self.setCurrentEventTimer()
            # disable actions
            self.__seekableStatusChanged()
    # activates timeshift, and seeks to (almost) the end
    def activateTimeshiftEnd(self, back = True):
        self.showTimeshiftState = True
        ts = self.getTimeshift()
        print "activateTimeshiftEnd"
        if ts is None:
            return
        if ts.isTimeshiftActive():
            print "!! activate timeshift called - but shouldn't this be a normal pause?"
            self.pauseService()
        else:
            print "play, ..."
            ts.activateTimeshift() # activate timeshift will automatically pause
            self.setSeekState(self.SEEK_STATE_PAUSE)
            seekable = self.getSeek()
            if seekable is not None:
                seekable.seekTo(-90000) # seek approx. 1 sec before end
            self.timeshift_was_activated = True
        if back:
            # Start the rewind slightly delayed so the seek above can settle.
            self.ts_rewind_timer.start(200, 1)
    def rewindService(self):
        # Enter the configured initial rewind speed.
        self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
    # generates only filename without path
    def generateNewTimeshiftFileName(self):
        """Build the movie filename used if the timeshift buffer is saved,
        following the configured recording filename composition."""
        name = "timeshift record"
        info = { }
        self.getProgramInfoAndEvent(info, name)
        serviceref = info["serviceref"]
        service_name = ""
        if isinstance(serviceref, eServiceReference):
            service_name = ServiceReference(serviceref).getServiceName()
        begin_date = strftime("%Y%m%d %H%M", localtime(time()))
        filename = begin_date + " - " + service_name
        if config.recording.filename_composition.value == "veryshort":
            filename = service_name + " - " + begin_date
        elif config.recording.filename_composition.value == "short":
            filename = strftime("%Y%m%d", localtime(time())) + " - " + info["name"]
        elif config.recording.filename_composition.value == "long":
            filename += " - " + info["name"] + " - " + info["description"]
        else:
            filename += " - " + info["name"] # standard
        if config.recording.ascii_filenames.value:
            filename = ASCIItranslit.legacyEncode(filename)
        print "New timeshift filename: ", filename
        return filename
    # same as activateTimeshiftEnd, but pauses afterwards.
    def activateTimeshiftEndAndPause(self):
        print "activateTimeshiftEndAndPause"
        #state = self.seekstate
        self.activateTimeshiftEnd(False)
    def callServiceStarted(self):
        # Public hook to re-run the (private, name-mangled) service-start handler.
        self.__serviceStarted()
    def __seekableStatusChanged(self):
        # Enable "activate timeshift" keys only while live (not seekable) with
        # an enabled buffer; enable seek keys only while the buffer is usable.
        self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshiftEnabled())
        state = self.getSeek() is not None and self.timeshiftEnabled()
        self["SeekActions"].setEnabled(state)
        if not state:
            self.setSeekState(self.SEEK_STATE_PLAY)
            self.restartSubtitle()
    def __serviceStarted(self):
        # New service: reset UI state and (re)arm the auto-start delay timer.
        self.pvrStateDialog.hide()
        self.__seekableStatusChanged()
        if self.ts_start_delay_timer.isActive():
            self.ts_start_delay_timer.stop()
        if int(config.usage.timeshift_start_delay.value):
            self.ts_start_delay_timer.start(int(config.usage.timeshift_start_delay.value) * 1000, True)
    def checkTimeshiftRunning(self, returnFunction):
        """Ask the user whether to stop a running (and used) timeshift; calls
        returnFunction(True) directly when no confirmation is needed."""
        if self.timeshiftEnabled() and config.usage.check_timeshift.value and self.timeshift_was_activated:
            message = _("Stop timeshift?")
            if not self.save_timeshift_file:
                choice = [(_("Yes"), "stop"), (_("No"), "continue"), (_("Yes and save"), "save"), (_("Yes and save in movie dir"), "save_movie")]
            else:
                choice = [(_("Yes"), "stop"), (_("No"), "continue")]
                message += "\n" + _("Reminder, you have chosen to save timeshift file.")
                if self.save_timeshift_only_current_event:
                    remaining = self.currentEventTime()
                    if remaining > 0:
                        message += "\n" + _("The %d min remaining before the end of the event.") % abs(remaining / 60)
            self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice)
        else:
            returnFunction(True)
    def checkTimeshiftRunningCallback(self, returnFunction, answer):
        # answer is one of: "stop", "continue", "save", "save_movie" (or falsy).
        if answer:
            if "movie" in answer:
                self.save_timeshift_in_movie_dir = True
            if "save" in answer:
                self.save_timeshift_file = True
                ts = self.getTimeshift()
                if ts:
                    ts.saveTimeshiftFile()
                    del ts
            if "continue" not in answer:
                self.saveTimeshiftFiles()
        returnFunction(answer and answer != "continue")
    # renames/moves timeshift files if requested
    def __serviceEnd(self):
        self.saveTimeshiftFiles()
        self.setCurrentEventTimer()
        self.timeshift_was_activated = False
    def saveTimeshiftFiles(self):
        """Move the buffer file (plus .sc/.cuts side files) into the movie or
        timeshift directory under the precomputed recording name."""
        if self.save_timeshift_file and self.current_timeshift_filename and self.new_timeshift_filename:
            if config.usage.timeshift_path.value and not self.save_timeshift_in_movie_dir:
                dirname = config.usage.timeshift_path.value
            else:
                dirname = defaultMoviePath()
            filename = getRecordingFilename(self.new_timeshift_filename, dirname) + ".ts"
            fileList = []
            fileList.append((self.current_timeshift_filename, filename))
            if fileExists(self.current_timeshift_filename + ".sc"):
                fileList.append((self.current_timeshift_filename + ".sc", filename + ".sc"))
            if fileExists(self.current_timeshift_filename + ".cuts"):
                fileList.append((self.current_timeshift_filename + ".cuts", filename + ".cuts"))
            moveFiles(fileList)
            self.save_timeshift_file = False
            self.setCurrentEventTimer()
    def currentEventTime(self):
        """Return seconds remaining until the end of the current EPG event
        (0 when no service or no event info is available)."""
        remaining = 0
        ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        if ref:
            epg = eEPGCache.getInstance()
            event = epg.lookupEventTime(ref, -1, 0)
            if event:
                now = int(time())
                start = event.getBeginTime()
                duration = event.getDuration()
                end = start + duration
                remaining = end - now
        return remaining
    def saveTimeshiftFileForEvent(self):
        # Fired by ts_current_event_timer at the end of the current event.
        if self.timeshiftEnabled() and self.save_timeshift_only_current_event and self.timeshift_was_activated and self.save_timeshift_file:
            message = _("Current event is over.\nSelect an option to save the timeshift file.")
            choice = [(_("Save and stop timeshift"), "save"), (_("Save and restart timeshift"), "restart"), (_("Don't save and stop timeshift"), "stop"), (_("Do nothing"), "continue")]
            self.session.openWithCallback(self.saveTimeshiftFileForEventCallback, MessageBox, message, simple = True, list = choice, timeout=15)
    def saveTimeshiftFileForEventCallback(self, answer):
        # answer is one of: "save", "restart", "stop", "continue" (or falsy).
        self.save_timeshift_only_current_event = False
        if answer:
            ts = self.getTimeshift()
            if ts and answer in ("save", "restart", "stop"):
                self.stopTimeshiftcheckTimeshiftRunningCallback(True)
                if answer in ("save", "restart"):
                    ts.saveTimeshiftFile()
                    del ts
                    self.saveTimeshiftFiles()
                if answer == "restart":
                    # Re-enable timeshift shortly after the teardown above.
                    self.ts_start_delay_timer.start(1000, True)
                self.save_timeshift_file = False
                self.save_timeshift_in_movie_dir = False
    def setCurrentEventTimer(self, duration=0):
        """Arm (duration > 0) or cancel the end-of-event save timer."""
        self.ts_current_event_timer.stop()
        self.save_timeshift_only_current_event = False
        if duration > 0:
            self.save_timeshift_only_current_event = True
            self.ts_current_event_timer.startLongTimer(duration)
from Screens.PiPSetup import PiPSetup
class InfoBarExtensions:
    """Provides the "extensions" selection menu.

    Other infobar mixins register entries via addExtension(); pressing the
    extensions key builds a ChoiceBox with hotkeys assigned from
    availableKeys in registration order.
    """
    EXTENSION_SINGLE = 0  # entry is one (nameFunc, actionFunc, availableFunc) tuple
    EXTENSION_LIST = 1    # entry is a callable returning a list of such tuples

    def __init__(self):
        self.list = []
        self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
            {
                "extensions": (self.showExtensionSelection, _("Show extensions...")),
            }, 1) # lower priority
    def addExtension(self, extension, key = None, type = EXTENSION_SINGLE):
        """Register an extension entry (or a list provider for EXTENSION_LIST)."""
        self.list.append((type, extension, key))
    def updateExtension(self, extension, key = None):
        """Append one extension to the working list and assign it the requested
        hotkey if free, else the first free key from availableKeys (if any)."""
        self.extensionsList.append(extension)
        if key is not None:
            # Requested key already taken: fall through to auto-assignment.
            # NOTE: dict.has_key() replaced by the `in` operator (works on
            # Python 2 as well; has_key was removed in Python 3).
            if key in self.extensionKeys:
                key = None
        if key is None:
            for candidate in self.availableKeys:
                if candidate not in self.extensionKeys:
                    key = candidate
                    break
        if key is not None:
            self.extensionKeys[key] = len(self.extensionsList) - 1
    def updateExtensions(self):
        """Rebuild extensionsList/extensionKeys from all registered entries."""
        self.extensionsList = []
        self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ]
        self.extensionKeys = {}
        for x in self.list:
            if x[0] == self.EXTENSION_SINGLE:
                self.updateExtension(x[1], x[2])
            else:
                for y in x[1]():
                    self.updateExtension(y[0], y[1])
    def showExtensionSelection(self):
        """Open the ChoiceBox with all currently available extensions."""
        self.updateExtensions()
        extensionsList = self.extensionsList[:]
        keys = []
        list = []
        for x in self.availableKeys:
            if x in self.extensionKeys:
                entry = self.extensionKeys[x]
                extension = self.extensionsList[entry]
                if extension[2]():  # availability check
                    list.append((extension[0](), extension))
                    keys.append(x)
                # Processed either way (shown with a hotkey, or unavailable);
                # the remainder of extensionsList gets keyless entries below.
                extensionsList.remove(extension)
        list.extend([(x[0](), x) for x in extensionsList])
        keys += [""] * len(extensionsList)
        self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list=list, keys=keys, skin_name="ExtensionsList", reorderConfig="extension_order")
    def extensionCallback(self, answer):
        # answer is (displayName, (nameFunc, actionFunc, availableFunc)).
        if answer is not None:
            answer[1][1]()
from Tools.BoundFunction import boundFunction
import inspect
# depends on InfoBarExtensions
class InfoBarPlugins:
    """Exposes extension-menu plugins as extension-list entries.
    Depends on InfoBarExtensions being mixed in as well."""
    def __init__(self):
        self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)
    def getPluginName(self, name):
        # Identity: the plugin's own name is the display name.
        return name
    def getPluginList(self):
        """Collect all EXTENSIONSMENU plugins whose call signature fits this
        screen, as (name/action/available) extension entries, sorted by name."""
        entries = []
        for plugin in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
            argnames = inspect.getargspec(plugin.__call__)[0]
            # Plugins taking (session) always fit; (session, servicelist) only
            # fits when we actually have a channel selection.
            fits = len(argnames) == 1 or len(argnames) == 2 and isinstance(self, InfoBarChannelSelection)
            if fits:
                entry = ((boundFunction(self.getPluginName, plugin.name), boundFunction(self.runPlugin, plugin), lambda: True), None, plugin.name)
                entries.append(entry)
        return sorted(entries, key = lambda entry: entry[2]) # sort by name
    def runPlugin(self, plugin):
        """Invoke the plugin, passing the servicelist only when available."""
        kwargs = {"session": self.session}
        if isinstance(self, InfoBarChannelSelection):
            kwargs["servicelist"] = self.servicelist
        plugin(**kwargs)
from Components.Task import job_manager
class InfoBarJobman:
    """Shows pending background jobs (job_manager) in the extensions menu.
    Depends on InfoBarExtensions being mixed in as well."""
    def __init__(self):
        self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)
    def getJobList(self):
        """Return one always-available extension entry per pending job."""
        entries = []
        for job in job_manager.getPendingJobs():
            entry = ((boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True), None)
            entries.append(entry)
        return entries
    def getJobName(self, job):
        """Format a job as "<status>: <name> (<percent>%)"."""
        percentage = int(100 * job.progress / float(job.end))
        return "%s: %s (%d%%)" % (job.getStatustext(), job.name, percentage)
    def showJobView(self, job):
        """Open the task view for the job, bringing it to the foreground."""
        from Screens.TaskView import JobView
        job_manager.in_background = False
        self.session.openWithCallback(self.JobViewCB, JobView, job)
    def JobViewCB(self, in_background):
        # Restore whether jobs should keep running in the background.
        job_manager.in_background = in_background
# depends on InfoBarExtensions
class InfoBarPiP:
    """Picture-in-Picture support: show/hide the PiP window, swap it with the
    main service, move it, and toggle zap focus ("pipzap").
    Depends on InfoBarExtensions for the extension-menu entries."""
    def __init__(self):
        try:
            self.session.pipshown
        except:
            # First infobar in this session: initialise the shared flag.
            self.session.pipshown = False
        self.lastPiPService = None
        if SystemInfo["PIPAvailable"]:
            self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
                {
                    "activatePiP": (self.activePiP, self.activePiPName),
                })
            if (self.allowPiP):
                self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
                self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
                self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
                self.addExtension((self.getTogglePipzapName, self.togglePipzap, lambda: True), "red")
            else:
                # PiP not allowed on this screen: only show/move of an already
                # visible PiP window.
                self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
                self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
        # Timer that forgets the last PiP service after a configurable timeout.
        self.lastPiPServiceTimeoutTimer = eTimer()
        self.lastPiPServiceTimeoutTimer.callback.append(self.clearLastPiPService)
    def pipShown(self):
        """True while the PiP window is visible."""
        return self.session.pipshown
    def pipHandles0Action(self):
        # Whether the "0" key is redirected to a PiP action (see pipDoHandle0Action).
        return self.pipShown() and config.usage.pip_zero_button.value != "standard"
    def getShowHideName(self):
        if self.session.pipshown:
            return _("Disable Picture in Picture")
        else:
            return _("Activate Picture in Picture")
    def getSwapName(self):
        return _("Swap services")
    def getMoveName(self):
        return _("Picture in Picture Setup")
    def getTogglePipzapName(self):
        slist = self.servicelist
        if slist and slist.dopipzap:
            return _("Zap focus to main screen")
        return _("Zap focus to Picture in Picture")
    def togglePipzap(self):
        """Switch zap focus between the main screen and the PiP window,
        opening PiP first if necessary."""
        if not self.session.pipshown:
            self.showPiP()
        slist = self.servicelist
        if slist and self.session.pipshown:
            slist.togglePipzap()
            if slist.dopipzap:
                # Swap the remembered service paths so the channel list follows
                # the PiP service while it has focus.
                currentServicePath = slist.getCurrentServicePath()
                slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
                self.session.pip.servicePath = currentServicePath
    def showPiP(self):
        """Toggle the PiP window: close it when shown (remembering the service
        for a while), otherwise open it on the last/current service."""
        self.lastPiPServiceTimeoutTimer.stop()
        slist = self.servicelist
        if self.session.pipshown:
            if slist and slist.dopipzap:
                # Return zap focus to the main screen before closing.
                self.togglePipzap()
            if self.session.pipshown:
                lastPiPServiceTimeout = int(config.usage.pip_last_service_timeout.value)
                if lastPiPServiceTimeout >= 0:
                    # Remember the PiP service so reopening restores it;
                    # a positive timeout forgets it after that many seconds.
                    self.lastPiPService = self.session.pip.getCurrentServiceReference()
                    if lastPiPServiceTimeout:
                        self.lastPiPServiceTimeoutTimer.startLongTimer(lastPiPServiceTimeout)
                del self.session.pip
                if SystemInfo["LCDMiniTV"]:
                    if config.lcd.modepip.value >= "1":
                        # Restore the mini-TV LCD mode now that PiP is gone.
                        f = open("/proc/stb/lcd/mode", "w")
                        f.write(config.lcd.modeminitv.value)
                        f.close()
                self.session.pipshown = False
            if hasattr(self, "ScreenSaverTimerStart"):
                self.ScreenSaverTimerStart()
        else:
            self.session.pip = self.session.instantiateDialog(PictureInPicture)
            self.session.pip.show()
            newservice = self.lastPiPService or self.session.nav.getCurrentlyPlayingServiceReference() or (slist and slist.servicelist.getCurrent())
            if self.session.pip.playService(newservice):
                self.session.pipshown = True
                self.session.pip.servicePath = slist and slist.getCurrentServicePath()
                if SystemInfo["LCDMiniTV"]:
                    if config.lcd.modepip.value >= "1":
                        # Switch the LCD to its PiP mode and reset the second
                        # decoder's destination window.
                        f = open("/proc/stb/lcd/mode", "w")
                        f.write(config.lcd.modepip.value)
                        f.close()
                        f = open("/proc/stb/vmpeg/1/dst_width", "w")
                        f.write("0")
                        f.close()
                        f = open("/proc/stb/vmpeg/1/dst_height", "w")
                        f.write("0")
                        f.close()
                        f = open("/proc/stb/vmpeg/1/dst_apply", "w")
                        f.write("1")
                        f.close()
            else:
                # Remembered service failed: retry with the live service.
                newservice = self.session.nav.getCurrentlyPlayingServiceReference() or (slist and slist.servicelist.getCurrent())
                if self.session.pip.playService(newservice):
                    self.session.pipshown = True
                    self.session.pip.servicePath = slist and slist.getCurrentServicePath()
                else:
                    self.session.pipshown = False
                    del self.session.pip
            if self.session.pipshown and hasattr(self, "screenSaverTimer"):
                self.screenSaverTimer.stop()
            self.lastPiPService = None
    def clearLastPiPService(self):
        # Timeout expired: forget the remembered PiP service.
        self.lastPiPService = None
    def activePiP(self):
        # PiP key: open PiP / toggle focus depending on current focus state.
        if self.servicelist and self.servicelist.dopipzap or not self.session.pipshown:
            self.showPiP()
        else:
            self.togglePipzap()
    def activePiPName(self):
        # Help text for the PiP key, matching activePiP()'s behaviour.
        if self.servicelist and self.servicelist.dopipzap:
            return _("Disable Picture in Picture")
        if self.session.pipshown:
            return _("Zap focus to Picture in Picture")
        else:
            return _("Activate Picture in Picture")
    def swapPiP(self):
        """Exchange the services of the main screen and the PiP window."""
        if self.pipShown():
            swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            pipref = self.session.pip.getCurrentService()
            if swapservice and pipref and pipref.toString() != swapservice.toString():
                slist = self.servicelist
                if slist:
                    currentServicePath = slist.getCurrentServicePath()
                    currentBouquet = slist.getRoot()
                    slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
                self.session.pip.playService(swapservice)
                self.session.nav.playService(pipref, checkParentalControl=False, adjust=False)
                if slist:
                    self.session.pip.servicePath = currentServicePath
                    self.session.pip.servicePath[1] = currentBouquet
                if slist and slist.dopipzap:
                    # This unfortunately won't work with subservices
                    slist.setCurrentSelection(self.session.pip.getCurrentService())
    def movePiP(self):
        """Open the PiP position/size setup screen."""
        if self.pipShown():
            self.session.open(PiPSetup, pip = self.session.pip)
    def pipDoHandle0Action(self):
        # "0" key behaviour while PiP is shown, per configuration.
        use = config.usage.pip_zero_button.value
        if "swap" == use:
            self.swapPiP()
        elif "swapstop" == use:
            self.swapPiP()
            self.showPiP()
        elif "stop" == use:
            self.showPiP()
from RecordTimer import parseEvent, RecordTimerEntry
class InfoBarInstantRecord:
    """Instant Record - handles the instantRecord action in order to
    start/stop instant records"""
    def __init__(self):
        self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
            {
                "instantRecord": (self.instantRecord, _("Instant recording...")),
            })
        self.SelectedInstantServiceRef = None
        if isStandardInfoBar(self):
            self.recording = []
        else:
            # Secondary infobars share the main infobar's recording list.
            from Screens.InfoBar import InfoBar
            InfoBarInstance = InfoBar.instance
            if InfoBarInstance:
                self.recording = InfoBarInstance.recording
    def moveToTrash(self, entry):
        """Move a stopped instant recording's files to the trash folder."""
        print "instantRecord stop and delete recording: ", entry.name
        import Tools.Trashcan
        trash = Tools.Trashcan.createTrashFolder(entry.Filename)
        from MovieSelection import moveServiceFiles
        moveServiceFiles(entry.Filename, trash, entry.name, allowCopy=False)
    def stopCurrentRecording(self, entry = -1):
        """Ask for confirmation, then stop (and optionally trash) the
        instant recording at the given index."""
        def confirm(answer=False):
            if answer:
                self.session.nav.RecordTimer.removeEntry(self.recording[entry])
                if self.deleteRecording:
                    self.moveToTrash(self.recording[entry])
                self.recording.remove(self.recording[entry])
        if entry is not None and entry != -1:
            msg = _("Stop recording:")
            if self.deleteRecording:
                msg = _("Stop and delete recording:")
            msg += "\n"
            msg += " - " + self.recording[entry].name + "\n"
            self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
    def stopAllCurrentRecordings(self, list):
        """Ask for confirmation, then stop (and optionally trash) every
        instant recording in the given (entry, selected) list."""
        def confirm(answer=False):
            if answer:
                for entry in list:
                    self.session.nav.RecordTimer.removeEntry(entry[0])
                    self.recording.remove(entry[0])
                    if self.deleteRecording:
                        self.moveToTrash(entry[0])
        msg = _("Stop recordings:")
        if self.deleteRecording:
            msg = _("Stop and delete recordings:")
        msg += "\n"
        for entry in list:
            msg += " - " + entry[0].name + "\n"
        self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
    def getProgramInfoAndEvent(self, info, name):
        """Fill the info dict (serviceref/event/name/description/eventid and,
        when an event is found, end) for the selected or playing service."""
        info["serviceref"] = hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef or self.session.nav.getCurrentlyPlayingServiceOrGroup()
        # try to get event info
        event = None
        try:
            epg = eEPGCache.getInstance()
            event = epg.lookupEventTime(info["serviceref"], -1, 0)
            if event is None:
                if hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef:
                    service_info = eServiceCenter.getInstance().info(self.SelectedInstantServiceRef)
                    event = service_info and service_info.getEvent(self.SelectedInstantServiceRef)
                else:
                    service = self.session.nav.getCurrentService()
                    event = service and service.info().getEvent(0)
        except:
            # Best effort: missing EPG data simply leaves event as None.
            pass
        info["event"] = event
        info["name"] = name
        info["description"] = ""
        info["eventid"] = None
        if event is not None:
            curEvent = parseEvent(event)
            info["name"] = curEvent[2]
            info["description"] = curEvent[3]
            info["eventid"] = curEvent[4]
            info["end"] = curEvent[1]
    def startInstantRecording(self, limitEvent = False):
        """Create and schedule a RecordTimerEntry starting now; with
        limitEvent the end time comes from the current EPG event."""
        begin = int(time())
        end = begin + 3600 # dummy
        name = "instant record"
        info = { }
        self.getProgramInfoAndEvent(info, name)
        serviceref = info["serviceref"]
        event = info["event"]
        if event is not None:
            if limitEvent:
                end = info["end"]
        else:
            if limitEvent:
                self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)
        if isinstance(serviceref, eServiceReference):
            serviceref = ServiceReference(serviceref)
        recording = RecordTimerEntry(serviceref, begin, end, info["name"], info["description"], info["eventid"], dirname = preferredInstantRecordPath())
        recording.dontSave = True
        if event is None or limitEvent == False:
            recording.autoincrease = True
            recording.setAutoincreaseEnd()
        simulTimerList = self.session.nav.RecordTimer.record(recording)
        if simulTimerList is None: # no conflict
            recording.autoincrease = False
            self.recording.append(recording)
        else:
            if len(simulTimerList) > 1: # with other recording
                name = simulTimerList[1].name
                name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
                print "[TIMER] conflicts with", name_date
                recording.autoincrease = True # start with max available length, then increment
                if recording.setAutoincreaseEnd():
                    self.session.nav.RecordTimer.record(recording)
                    self.recording.append(recording)
                    self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
                else:
                    self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
            else:
                self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
            recording.autoincrease = False
    def isInstantRecordRunning(self):
        """True when at least one tracked instant recording is running."""
        print "self.recording:", self.recording
        if self.recording:
            for x in self.recording:
                if x.isRunning():
                    return True
        return False
    def recordQuestionCallback(self, answer):
        """Dispatch the user's choice from the instantRecord() ChoiceBox.
        answer is (label, action-id) or None."""
        print "pre:\n", self.recording
        if answer is None or answer[1] == "no":
            return
        list = []
        recording = self.recording[:]
        for x in recording:
            # Drop entries no longer known to the RecordTimer; collect the
            # running instant recordings for the selection screens below.
            if not x in self.session.nav.RecordTimer.timer_list:
                self.recording.remove(x)
            elif x.dontSave and x.isRunning():
                list.append((x, False))
        self.deleteRecording = False
        if answer[1] == "changeduration":
            if len(self.recording) == 1:
                self.changeDuration(0)
            else:
                self.session.openWithCallback(self.changeDuration, TimerSelection, list)
        elif answer[1] == "addrecordingtime":
            if len(self.recording) == 1:
                self.addRecordingTime(0)
            else:
                self.session.openWithCallback(self.addRecordingTime, TimerSelection, list)
        elif answer[1] == "changeendtime":
            if len(self.recording) == 1:
                self.setEndtime(0)
            else:
                self.session.openWithCallback(self.setEndtime, TimerSelection, list)
        elif answer[1] == "timer":
            import TimerEdit
            self.session.open(TimerEdit.TimerEditList)
        elif answer[1] == "stop":
            if len(self.recording) == 1:
                self.stopCurrentRecording(0)
            else:
                self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
        elif answer[1] == "stopdelete":
            self.deleteRecording = True
            if len(self.recording) == 1:
                self.stopCurrentRecording(0)
            else:
                self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
        elif answer[1] == "stopall":
            self.stopAllCurrentRecordings(list)
        elif answer[1] == "stopdeleteall":
            self.deleteRecording = True
            self.stopAllCurrentRecordings(list)
        elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
            self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime") or False)
            if answer[1] == "manualduration":
                self.changeDuration(len(self.recording)-1)
            elif answer[1] == "manualendtime":
                self.setEndtime(len(self.recording)-1)
        elif "timeshift" in answer[1]:
            # "timeshift", "timeshift_movie", "timeshift_event" — save the
            # timeshift buffer (provided by InfoBarTimeshift).
            ts = self.getTimeshift()
            if ts:
                ts.saveTimeshiftFile()
                self.save_timeshift_file = True
                if "movie" in answer[1]:
                    self.save_timeshift_in_movie_dir = True
                if "event" in answer[1]:
                    remaining = self.currentEventTime()
                    if remaining > 0:
                        self.setCurrentEventTimer(remaining-15)
        print "after:\n", self.recording
    def setEndtime(self, entry):
        """Open a time input dialog to change the end time of recording #entry."""
        if entry is not None and entry >= 0:
            self.selectedEntry = entry
            self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
            dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
            dlg.setTitle(_("Please change recording endtime"))
    def TimeDateInputClosed(self, ret):
        # ret is (confirmed, timestamp) from the TimeDateInput dialog.
        if len(ret) > 1:
            if ret[0]:
                print "stopping recording at", strftime("%F %T", localtime(ret[1]))
                if self.recording[self.selectedEntry].end != ret[1]:
                    self.recording[self.selectedEntry].autoincrease = False
                self.recording[self.selectedEntry].end = ret[1]
                self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])
    def changeDuration(self, entry):
        """Ask for a recording duration (minutes) for recording #entry."""
        if entry is not None and entry >= 0:
            self.selectedEntry = entry
            self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5", maxSize=False, type=Input.NUMBER)
    def addRecordingTime(self, entry):
        """Ask how many minutes to append to recording #entry."""
        if entry is not None and entry >= 0:
            self.selectedEntry = entry
            self.session.openWithCallback(self.inputAddRecordingTime, InputBox, title=_("How many minutes do you want add to record?"), text="5", maxSize=False, type=Input.NUMBER)
    def inputAddRecordingTime(self, value):
        # InputBox callback: extend the selected recording by `value` minutes.
        if value:
            print "added", int(value), "minutes for recording."
            entry = self.recording[self.selectedEntry]
            if int(value) != 0:
                entry.autoincrease = False
            entry.end += 60 * int(value)
            self.session.nav.RecordTimer.timeChanged(entry)
    def inputCallback(self, value):
        # InputBox callback: set the selected recording to stop in `value` minutes.
        if value:
            print "stopping recording after", int(value), "minutes."
            entry = self.recording[self.selectedEntry]
            if int(value) != 0:
                entry.autoincrease = False
            entry.end = int(time()) + 60 * int(value)
            self.session.nav.RecordTimer.timeChanged(entry)
    def isTimerRecordRunning(self):
        """True when a scheduled (non-instant) timer recording is running."""
        identical = timers = 0
        for timer in self.session.nav.RecordTimer.timer_list:
            if timer.isRunning() and not timer.justplay:
                timers += 1
                if self.recording:
                    for x in self.recording:
                        if x.isRunning() and x == timer:
                            identical += 1
        return timers > identical
    def instantRecord(self, serviceRef=None):
        """Entry point for the record key: build and open the context-dependent
        choice box (start/stop/change recordings, save timeshift, ...)."""
        self.SelectedInstantServiceRef = serviceRef
        pirr = preferredInstantRecordPath()
        if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
            if not pirr:
                pirr = ""
            self.session.open(MessageBox, _("Missing ") + "\n" + pirr +
                     "\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
            return
        if isStandardInfoBar(self):
            common = ((_("Add recording (stop after current event)"), "event"),
                (_("Add recording (indefinitely)"), "indefinitely"),
                (_("Add recording (enter recording duration)"), "manualduration"),
                (_("Add recording (enter recording endtime)"), "manualendtime"),)
        else:
            common = ()
        if self.isInstantRecordRunning():
            title =_("A recording is currently running.\nWhat do you want to do?")
            list = common + \
                ((_("Change recording (duration)"), "changeduration"),
                (_("Change recording (add time)"), "addrecordingtime"),
                (_("Change recording (endtime)"), "changeendtime"),)
            list += ((_("Stop recording"), "stop"),)
            if config.usage.movielist_trashcan.value:
                list += ((_("Stop and delete recording"), "stopdelete"),)
            if len(self.recording) > 1:
                list += ((_("Stop all current recordings"), "stopall"),)
                if config.usage.movielist_trashcan.value:
                    list += ((_("Stop and delete all current recordings"), "stopdeleteall"),)
            if self.isTimerRecordRunning():
                list += ((_("Stop timer recording"), "timer"),)
            list += ((_("Do nothing"), "no"),)
        else:
            title=_("Start recording?")
            list = common
            if self.isTimerRecordRunning():
                list += ((_("Stop timer recording"), "timer"),)
            if isStandardInfoBar(self):
                list += ((_("Do not record"), "no"),)
        if isStandardInfoBar(self) and self.timeshiftEnabled():
            list = list + ((_("Save timeshift file"), "timeshift"),
                (_("Save timeshift file in movie directory"), "timeshift_movie"))
            if self.currentEventTime() > 0:
                list += ((_("Save timeshift only for current event"), "timeshift_event"),)
        if list:
            self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=title, list=list)
        else:
            return 0
from Tools.ISO639 import LanguageCodes
class InfoBarAudioSelection:
def __init__(self):
self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
{
"audioSelection": (self.audioSelection, _("Audio options...")),
})
def audioSelection(self):
from Screens.AudioSelection import AudioSelection
self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)
def audioSelected(self, ret=None):
print "[infobar::audioSelected]", ret
class InfoBarSubserviceSelection:
def __init__(self):
self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
{
"subserviceSelection": (self.subserviceSelection, _("Subservice list...")),
})
self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
{
"nextSubservice": (self.nextSubservice, _("Switch to next sub service")),
"prevSubservice": (self.prevSubservice, _("Switch to previous sub service"))
}, -1)
self["SubserviceQuickzapAction"].setEnabled(False)
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
})
self.onClose.append(self.__removeNotifications)
self.bsel = None
def __removeNotifications(self):
self.session.nav.event.remove(self.checkSubservicesAvail)
def checkSubservicesAvail(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
if not subservices or subservices.getNumberOfSubservices() == 0:
self["SubserviceQuickzapAction"].setEnabled(False)
def nextSubservice(self):
self.changeSubservice(+1)
def prevSubservice(self):
self.changeSubservice(-1)
def changeSubservice(self, direction):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
n = subservices and subservices.getNumberOfSubservices()
if n and n > 0:
selection = -1
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
idx = 0
while idx < n:
if subservices.getSubservice(idx).toString() == ref.toString():
selection = idx
break
idx += 1
if selection != -1:
selection += direction
if selection >= n:
selection=0
elif selection < 0:
selection=n-1
newservice = subservices.getSubservice(selection)
if newservice.valid():
del subservices
del service
self.session.nav.playService(newservice, False)
def subserviceSelection(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
self.bouquets = self.servicelist.getBouquetList()
n = subservices and subservices.getNumberOfSubservices()
selection = 0
if n and n > 0:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
tlist = []
idx = 0
while idx < n:
i = subservices.getSubservice(idx)
if i.toString() == ref.toString():
selection = idx
tlist.append((i.getName(), i))
idx += 1
if self.bouquets and len(self.bouquets):
keys = ["red", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
if config.usage.multibouquet.value:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
selection += 3
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
keys = ["red", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
selection += 2
self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a sub service..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")
def subserviceSelected(self, service):
del self.bouquets
if not service is None:
if isinstance(service[1], str):
if service[1] == "quickzap":
from Screens.SubservicesQuickzap import SubservicesQuickzap
self.session.open(SubservicesQuickzap, service[2])
else:
self["SubserviceQuickzapAction"].setEnabled(True)
self.session.nav.playService(service[1], False)
	def addSubserviceToBouquetCallback(self, service):
		"""CALLFUNC callback from the subservice ChoiceBox.

		Adds the selected subservice directly when only one bouquet
		exists, or opens a BouquetSelector when there are several.
		With no bouquets (cnt == 0) nothing happens.
		"""
		if len(service) > 1 and isinstance(service[1], eServiceReference):
			# remember the choice for bouquetSelClosed/addSubserviceToBouquet
			self.selectedSubservice = service
			if self.bouquets is None:
				cnt = 0
			else:
				cnt = len(self.bouquets)
			if cnt > 1: # show bouquet list
				self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
			elif cnt == 1: # add to only one existing bouquet
				self.addSubserviceToBouquet(self.bouquets[0][1])
				self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO)
def bouquetSelClosed(self, confirmed):
self.bsel = None
del self.selectedSubservice
if confirmed:
self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO)
def addSubserviceToBouquet(self, dest):
self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
if self.bsel:
self.bsel.close(True)
else:
del self.selectedSubservice
class InfoBarRedButton:
	"""Handles the red button: activates HbbTV on services that carry an
	HbbTV URL. Other components register callbacks via the two lists."""
	def __init__(self):
		self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions",
			{
				"activateRedButton": (self.activateRedButton, _("Red button...")),
			})
		# callback lists other components may append handlers to
		self.onHBBTVActivation = [ ]
		self.onRedButtonActivation = [ ]

	def activateRedButton(self):
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
			for x in self.onHBBTVActivation:
				x()
		elif False: # TODO: other red button services
			# intentionally unreachable placeholder branch
			for x in self.onRedButtonActivation:
				x()
class InfoBarTimerButton:
	"""Binds the timer button to the timer overview screen."""

	def __init__(self):
		actions = {
			"timerSelection": (self.timerSelection, _("Timer selection...")),
		}
		self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions", actions)

	def timerSelection(self):
		"""Open the list of programmed timers."""
		from Screens.TimerEdit import TimerEditList
		self.session.open(TimerEditList)
class InfoBarVmodeButton:
	"""Binds the video-mode button to the letterbox-zoom selector screen."""

	def __init__(self):
		actions = {
			"vmodeSelection": (self.vmodeSelection, _("Letterbox zoom")),
		}
		self["VmodeButtonActions"] = HelpableActionMap(self, "InfobarVmodeButtonActions", actions)

	def vmodeSelection(self):
		"""Open the VideoMode cycling screen."""
		self.session.open(VideoMode)
class VideoMode(Screen):
	"""Small OSD screen that cycles the aspect policy on each keypress.

	Pressing the video-mode key advances config.av.policy_169 (widescreen
	content) or config.av.policy_43 (4:3 content) to its next choice and
	shows the new value; the screen auto-closes one second after the last
	keypress.
	"""
	def __init__(self, session):
		Screen.__init__(self, session)
		self["videomode"] = Label()
		self["actions"] = NumberActionMap( [ "InfobarVmodeButtonActions" ],
			{
				"vmodeSelection": self.selectVMode
			})
		# single-shot timer closes the screen after a short idle period
		self.Timer = eTimer()
		self.Timer.callback.append(self.quit)
		self.selectVMode()

	def selectVMode(self):
		"""Advance to the next aspect policy, show it, and rearm the timer."""
		policy = config.av.policy_43
		if self.isWideScreen():
			policy = config.av.policy_169
		idx = policy.choices.index(policy.value)
		idx = (idx + 1) % len(policy.choices)
		policy.value = policy.choices[idx]
		self["videomode"].setText(policy.value)
		self.Timer.start(1000, True)

	def isWideScreen(self):
		"""Return True if the current service reports a widescreen aspect.

		Fixed: the original dereferenced ``info`` unconditionally and
		raised AttributeError when there was no current service (or the
		service had no info interface); now that case counts as not wide.
		"""
		from Components.Converter.ServiceInfo import WIDESCREEN
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		if not info:
			return False
		return info.getInfo(iServiceInformation.sAspect) in WIDESCREEN

	def quit(self):
		"""Stop the idle timer and close the screen."""
		self.Timer.stop()
		self.close()
class InfoBarAdditionalInfo:
	"""Publishes boolean sources the skin uses to show/hide button hints."""
	def __init__(self):
		# recording (and thus timeshift) requires at least one hard disk
		self["RecordingPossible"] = Boolean(fixed=harddiskmanager.HDDCount() > 0)
		self["TimeshiftPossible"] = self["RecordingPossible"]
		self["ExtensionsAvailable"] = Boolean(fixed=1)
		# TODO: these properties should be queried from the input device keymap
		self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
		self["ShowAudioOnYellow"] = Boolean(fixed=0)
		self["ShowRecordOnRed"] = Boolean(fixed=0)
class InfoBarNotifications:
	"""Shows queued Notifications while this screen is executing.

	A notification entry n is a tuple
	(callback, screen_class, args, kwargs, id) taken from the global
	Notifications queue.
	"""
	def __init__(self):
		self.onExecBegin.append(self.checkNotifications)
		Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
		self.onClose.append(self.__removeNotification)

	def __removeNotification(self):
		# unhook from the global notification list when this screen closes
		Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)

	def checkNotificationsIfExecing(self):
		# only show notifications while this screen is actually executing
		if self.execing:
			self.checkNotifications()

	def checkNotifications(self):
		"""Pop the first queued notification (if any) and display it."""
		notifications = Notifications.notifications
		if notifications:
			n = notifications[0]
			del notifications[0]
			cb = n[0]
			if n[3].has_key("onSessionOpenCallback"):
				n[3]["onSessionOpenCallback"]()
				del n[3]["onSessionOpenCallback"]
			if cb:
				dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
			elif not Notifications.current_notifications and n[4] == "ZapError":
				# ZapError is shown as a lightweight non-modal dialog that
				# is dismissed by any keypress instead of a regular screen
				if n[3].has_key("timeout"):
					del n[3]["timeout"]
				n[3]["enable_input"] = False
				dlg = self.session.instantiateDialog(n[1], *n[2], **n[3])
				self.hide()
				dlg.show()
				self.notificationDialog = dlg
				# low-priority catch-all key binding to close the dialog
				eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressNotification)
			else:
				dlg = self.session.open(n[1], *n[2], **n[3])

			# remember that this notification is currently active
			d = (n[4], dlg)
			Notifications.current_notifications.append(d)
			dlg.onClose.append(boundFunction(self.__notificationClosed, d))

	def closeNotificationInstantiateDialog(self):
		if hasattr(self, "notificationDialog"):
			self.session.deleteDialog(self.notificationDialog)
			del self.notificationDialog
			eActionMap.getInstance().unbindAction('', self.keypressNotification)

	def keypressNotification(self, key, flag):
		# NOTE(review): presumably a non-zero flag marks a completed key
		# event — confirm against eActionMap semantics
		if flag:
			self.closeNotificationInstantiateDialog()

	def __notificationClosed(self, d):
		Notifications.current_notifications.remove(d)
class InfoBarServiceNotifications:
	"""Forces play state when the running service signals its end."""
	def __init__(self):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evEnd: self.serviceHasEnded
			})

	def serviceHasEnded(self):
		print "service end!"
		try:
			self.setSeekState(self.SEEK_STATE_PLAY)
		except:
			# deliberate best effort: the concrete screen may not mix in
			# seek support, in which case setSeekState does not exist
			pass
class InfoBarCueSheetSupport:
	"""Cut-list (cue sheet) support: cut marks, in/out cuts and resuming.

	``cut_list`` holds (pts, type) tuples; pts values are in 90 kHz
	ticks (90000 == 1 second).
	"""
	CUT_TYPE_IN = 0
	CUT_TYPE_OUT = 1
	CUT_TYPE_MARK = 2
	CUT_TYPE_LAST = 3
	ENABLE_RESUME_SUPPORT = False
	def __init__(self, actionmap = "InfobarCueSheetActions"):
		self["CueSheetActions"] = HelpableActionMap(self, actionmap,
			{
				"jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
				"jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
				"toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
			}, prio=1)
		self.cut_list = [ ]
		self.is_closing = False
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evCuesheetChanged: self.downloadCuesheet,
			})
	def __serviceStarted(self):
		"""Download the cue sheet for the new service and maybe resume."""
		if self.is_closing:
			return
		print "new service started! trying to download cuts!"
		self.downloadCuesheet()
		if self.ENABLE_RESUME_SUPPORT:
			# prefer an explicit LAST mark from the cut list, fall back to
			# the globally stored resume point
			for (pts, what) in self.cut_list:
				if what == self.CUT_TYPE_LAST:
					last = pts
					break
			else:
				last = getResumePoint(self.session)
			if last is None:
				return
			# only resume if at least 10 seconds ahead, or <10 seconds before the end.
			seekable = self.__getSeekable()
			if seekable is None:
				return # Should not happen?
			length = seekable.getLength() or (None,0)
			print "seekable.getLength() returns:", length
			# Hmm, this implies we don't resume if the length is unknown...
			if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
				self.resume_point = last
				l = last / 90000
				if "ask" in config.usage.on_movie_start.value or not length[1]:
					Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10, default="yes" in config.usage.on_movie_start.value)
				elif config.usage.on_movie_start.value == "resume":
					# TRANSLATORS: The string "Resuming playback" flashes for a moment
					# TRANSLATORS: at the start of a movie, when the user has selected
					# TRANSLATORS: "Resume from last position" as start behavior.
					# TRANSLATORS: The purpose is to notify the user that the movie starts
					# TRANSLATORS: in the middle somewhere and not from the beginning.
					# TRANSLATORS: (Some translators seem to have interpreted it as a
					# TRANSLATORS: question or a choice, but it is a statement.)
					Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)
	def playLastCB(self, answer):
		# MessageBox callback: seek to the stored resume point on "yes"
		if answer == True:
			self.doSeek(self.resume_point)
		self.hideAfterResume()
	def hideAfterResume(self):
		if isinstance(self, InfoBarShowHide):
			self.hide()
	def __getSeekable(self):
		# seek interface of the current service, or None
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		return service.seek()
	def cueGetCurrentPosition(self):
		"""Return the current play position in pts, or None if not seekable."""
		seek = self.__getSeekable()
		if seek is None:
			return None
		r = seek.getPlayPosition()
		if r[0]:
			return None
		return long(r[1])
	def cueGetEndCutPosition(self):
		"""Return the pts of the last OUT cut still in effect, else False."""
		ret = False
		isin = True
		for cp in self.cut_list:
			if cp[1] == self.CUT_TYPE_OUT:
				if isin:
					isin = False
					ret = cp[0]
			elif cp[1] == self.CUT_TYPE_IN:
				isin = True
		return ret
	def jumpPreviousNextMark(self, cmp, start=False):
		"""Seek to the nearest mark selected by *cmp*; return success."""
		current_pos = self.cueGetCurrentPosition()
		if current_pos is None:
			return False
		mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
		if mark is not None:
			pts = mark[0]
		else:
			return False
		self.doSeek(pts)
		return True
	def jumpPreviousMark(self):
		# we add 5 seconds, so if the play position is <5s after
		# the mark, the mark before will be used
		self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)
	def jumpNextMark(self):
		# no mark ahead: jump to the end of the recording instead
		if not self.jumpPreviousNextMark(lambda x: x-90000):
			self.doSeek(-1)
	def getNearestCutPoint(self, pts, cmp=abs, start=False):
		"""Return the cut-list entry nearest to *pts* under metric *cmp*.

		With start=True the (virtual) position 0 and IN cuts are also
		candidates, and marks before an IN cut are disregarded.
		"""
		# can be optimized
		beforecut = True
		nearest = None
		bestdiff = -1
		instate = True
		if start:
			bestdiff = cmp(0 - pts)
			if bestdiff >= 0:
				nearest = [0, False]
		for cp in self.cut_list:
			if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
				beforecut = False
				if cp[1] == self.CUT_TYPE_IN:  # Start is here, disregard previous marks
					diff = cmp(cp[0] - pts)
					if start and diff >= 0:
						nearest = cp
						bestdiff = diff
					else:
						nearest = None
						bestdiff = -1
			if cp[1] == self.CUT_TYPE_IN:
				instate = True
			elif cp[1] == self.CUT_TYPE_OUT:
				instate = False
			elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
				diff = cmp(cp[0] - pts)
				if instate and diff >= 0 and (nearest is None or bestdiff > diff):
					nearest = cp
					bestdiff = diff
		return nearest
	def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
		"""Toggle a cut mark at the current position.

		A mark within *tolerance* pts (default 5 s) of the position is
		removed (or returned when onlyreturn); otherwise a new MARK is
		added unless onlyremove/onlyreturn forbids it.
		"""
		current_pos = self.cueGetCurrentPosition()
		if current_pos is None:
			print "not seekable"
			return
		nearest_cutpoint = self.getNearestCutPoint(current_pos)
		if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
			if onlyreturn:
				return nearest_cutpoint
			if not onlyadd:
				self.removeMark(nearest_cutpoint)
		elif not onlyremove and not onlyreturn:
			self.addMark((current_pos, self.CUT_TYPE_MARK))
		if onlyreturn:
			return None
	def addMark(self, point):
		# keep cut_list sorted by pts
		insort(self.cut_list, point)
		self.uploadCuesheet()
		self.showAfterCuesheetOperation()
	def removeMark(self, point):
		self.cut_list.remove(point)
		self.uploadCuesheet()
		self.showAfterCuesheetOperation()
	def showAfterCuesheetOperation(self):
		if isinstance(self, InfoBarShowHide):
			self.doShow()
	def __getCuesheet(self):
		# cue sheet interface of the current service, or None
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		return service.cueSheet()
	def uploadCuesheet(self):
		"""Push the local cut_list to the service's cue sheet."""
		cue = self.__getCuesheet()
		if cue is None:
			print "upload failed, no cuesheet interface"
			return
		cue.setCutList(self.cut_list)
	def downloadCuesheet(self):
		"""Replace the local cut_list with the service's cue sheet."""
		cue = self.__getCuesheet()
		if cue is None:
			print "download failed, no cuesheet interface"
			self.cut_list = [ ]
		else:
			self.cut_list = cue.getCutList()
class InfoBarSummary(Screen):
	"""Front-display (LCD) summary for the infobar: clock (blinking while
	recording), service name and current-event progress bar."""
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
			<convert type="ClockToText">WithSeconds</convert>
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
		<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
			<convert type="ServiceName">Name</convert>
		</widget>
		<widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
			<convert type="EventTime">Progress</convert>
		</widget>
	</screen>"""

# for picon: (path="piconlcd" will use LCD picons)
#		<widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
#			<convert type="ServiceName">Reference</convert>
#		</widget>
class InfoBarSummarySupport:
	"""Mixin telling the screen framework which LCD summary screen to use."""
	def __init__(self):
		pass

	def createSummary(self):
		# called by the Screen framework to pick the front-display screen
		return InfoBarSummary
class InfoBarMoviePlayerSummary(Screen):
	"""Front-display (LCD) summary for the movie player: clock, service
	name and playback-position progress bar."""
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
			<convert type="ClockToText">WithSeconds</convert>
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
		<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
			<convert type="ServiceName">Name</convert>
		</widget>
		<widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
			<convert type="ServicePosition">Position</convert>
		</widget>
	</screen>"""
class InfoBarMoviePlayerSummarySupport:
	"""Mixin selecting the movie-player LCD summary screen."""
	def __init__(self):
		pass

	def createSummary(self):
		# called by the Screen framework to pick the front-display screen
		return InfoBarMoviePlayerSummary
class InfoBarTeletextPlugin:
	"""Wires the teletext button to an installed teletext plugin."""
	def __init__(self):
		self.teletext_plugin = None

		# if several teletext plugins are installed, the last one wins
		for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
			self.teletext_plugin = p

		if self.teletext_plugin is not None:
			self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
				{
					"startTeletext": (self.startTeletext, _("View teletext..."))
				})
		else:
			print "no teletext plugin found!"

	def startTeletext(self):
		# hand the current service to the plugin (no-op if none installed)
		self.teletext_plugin and self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService())
class InfoBarSubtitleSupport(object):
	"""Subtitle selection and rendering support for the infobar.

	Keeps track of the selected subtitle track, shows/hides the shared
	subtitle display window, and re-applies cached subtitles when the
	service updates its info.
	"""
	def __init__(self):
		object.__init__(self)
		self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions",
			{
				"subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")),
			})

		self.selected_subtitle = None

		if isStandardInfoBar(self):
			self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay)
		else:
			# non-standard infobars reuse the main InfoBar's subtitle window
			from Screens.InfoBar import InfoBar
			self.subtitle_window = InfoBar.instance.subtitle_window

		self.subtitle_window.hide()

		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceChanged,
				iPlayableService.evEnd: self.__serviceChanged,
				iPlayableService.evUpdatedInfo: self.__updatedInfo
			})

	def getCurrentServiceSubtitle(self):
		"""Return the subtitle interface of the current service, or None."""
		service = self.session.nav.getCurrentService()
		return service and service.subtitle()

	def subtitleSelection(self):
		"""Open the subtitle selection screen when tracks are available."""
		subtitle = self.getCurrentServiceSubtitle()
		subtitlelist = subtitle and subtitle.getSubtitleList()
		if self.selected_subtitle or subtitlelist and len(subtitlelist)>0:
			from Screens.AudioSelection import SubtitleSelection
			self.session.open(SubtitleSelection, self)
		else:
			return 0

	def __serviceChanged(self):
		# service start/end invalidates the current subtitle selection
		if self.selected_subtitle:
			self.selected_subtitle = None
			self.subtitle_window.hide()

	def __updatedInfo(self):
		if not self.selected_subtitle:
			subtitle = self.getCurrentServiceSubtitle()
			# Fixed: the service may have no subtitle interface yet; the
			# original dereferenced it unconditionally and could raise
			# AttributeError on getCachedSubtitle().
			cachedsubtitle = subtitle and subtitle.getCachedSubtitle()
			if cachedsubtitle:
				self.enableSubtitle(cachedsubtitle)

	def enableSubtitle(self, selectedSubtitle):
		"""Enable *selectedSubtitle* (or disable subtitles when falsy)."""
		subtitle = self.getCurrentServiceSubtitle()
		self.selected_subtitle = selectedSubtitle
		if subtitle and self.selected_subtitle:
			subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle)
			self.subtitle_window.show()
		else:
			if subtitle:
				subtitle.disableSubtitles(self.subtitle_window.instance)
			self.subtitle_window.hide()

	def restartSubtitle(self):
		"""Re-apply the currently selected subtitle track."""
		if self.selected_subtitle:
			self.enableSubtitle(self.selected_subtitle)
class InfoBarServiceErrorPopupSupport:
	"""Shows a popup describing why a zap/tune attempt failed."""
	def __init__(self):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evTuneFailed: self.__tuneFailed,
				iPlayableService.evTunedIn: self.__serviceStarted,
				iPlayableService.evStart: self.__serviceStarted
			})
		self.__serviceStarted()

	def __serviceStarted(self):
		# a successful (re)start clears any pending error popup
		self.closeNotificationInstantiateDialog()
		self.last_error = None
		Notifications.RemovePopup(id = "ZapError")

	def __tuneFailed(self):
		if not config.usage.hide_zap_errors.value or not config.usage.remote_fallback_enabled.value:
			service = self.session.nav.getCurrentService()
			info = service and service.info()
			error = info and info.getInfo(iServiceInformation.sDVBState)
			if not config.usage.remote_fallback_enabled.value and (error == eDVBServicePMTHandler.eventMisconfiguration or error == eDVBServicePMTHandler.eventNoResources):
				# give up on this service so retries don't loop forever
				self.session.nav.currentlyPlayingServiceReference = None
				self.session.nav.currentlyPlayingServiceOrGroup = None
			# suppress repeated popups for the same error state
			if error == self.last_error:
				error = None
			else:
				self.last_error = error
			# map DVB state codes to user-visible messages (None = no popup)
			error = {
				eDVBServicePMTHandler.eventNoResources: _("No free tuner!"),
				eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"),
				eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"),
				eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"),
				eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"),
				eDVBServicePMTHandler.eventNewProgramInfo: None,
				eDVBServicePMTHandler.eventTuned: None,
				eDVBServicePMTHandler.eventSOF: None,
				eDVBServicePMTHandler.eventEOF: None,
				eDVBServicePMTHandler.eventMisconfiguration: _("Service unavailable!\nCheck tuner configuration!"),
			}.get(error) #this returns None when the key not exist in the dict

			if error and not config.usage.hide_zap_errors.value:
				self.closeNotificationInstantiateDialog()
				# only pop up while the dish-movement dialog is not showing
				if hasattr(self, "dishDialog") and not self.dishDialog.dishState():
					Notifications.AddPopup(text = error, type = MessageBox.TYPE_ERROR, timeout = 5, id = "ZapError")
class InfoBarPowersaver:
	"""Inactivity timeout and sleep timer: puts the box into standby after
	a configured idle period or a user-set sleep time."""
	def __init__(self):
		self.inactivityTimer = eTimer()
		self.inactivityTimer.callback.append(self.inactivityTimeout)
		self.restartInactiveTimer()
		self.sleepTimer = eTimer()
		self.sleepStartTime = 0
		self.sleepTimer.callback.append(self.sleepTimerTimeout)
		# low-priority catch-all key binding: any key resets the idle timer
		eActionMap.getInstance().bindAction('', -maxint - 1, self.keypress)

	def keypress(self, key, flag):
		if flag:
			self.restartInactiveTimer()

	def restartInactiveTimer(self):
		# inactivity_timer.value is in seconds; 0 disables the feature
		time = abs(int(config.usage.inactivity_timer.value))
		if time:
			self.inactivityTimer.startLongTimer(time)
		else:
			self.inactivityTimer.stop()

	def inactivityTimeout(self):
		"""Idle period elapsed: either defer (block time) or ask/standby."""
		if config.usage.inactivity_timer_blocktime.value:
			curtime = localtime(time())
			if curtime.tm_year > 1970: #check if the current time is valid
				curtime = (curtime.tm_hour, curtime.tm_min, curtime.tm_sec)
				begintime = tuple(config.usage.inactivity_timer_blocktime_begin.value)
				endtime = tuple(config.usage.inactivity_timer_blocktime_end.value)
				begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin.value)
				endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end.value)
				# within a block window (either window may wrap past midnight):
				# postpone the timeout until the window ends
				if begintime <= endtime and (curtime >= begintime and curtime < endtime) or begintime > endtime and (curtime >= begintime or curtime < endtime) or config.usage.inactivity_timer_blocktime_extra.value and\
					(begintime_extra <= endtime_extra and (curtime >= begintime_extra and curtime < endtime_extra) or begintime_extra > endtime_extra and (curtime >= begintime_extra or curtime < endtime_extra)):
					duration = (endtime[0]*3600 + endtime[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
					if duration:
						if duration < 0:
							duration += 24*3600
						self.inactivityTimer.startLongTimer(duration)
						return
		if Screens.Standby.inStandby:
			self.inactivityTimeoutCallback(True)
		else:
			# NOTE(review): "will got to" is a grammar error in this
			# translated string; fixing it requires a translation update
			message = _("Your receiver will got to standby due to inactivity.") + "\n" + _("Do you want this?")
			self.session.openWithCallback(self.inactivityTimeoutCallback, MessageBox, message, timeout=60, simple=True, default=False, timeout_default=True)

	def inactivityTimeoutCallback(self, answer):
		if answer:
			self.goStandby()
		else:
			print "[InfoBarPowersaver] abort"

	def sleepTimerState(self):
		# remaining sleep time in minutes, 0 if no sleep timer runs
		if self.sleepTimer.isActive():
			return (self.sleepStartTime - time()) / 60
		return 0

	def setSleepTimer(self, sleepTime):
		"""Arm (sleepTime seconds) or disarm (0/None) the sleep timer."""
		print "[InfoBarPowersaver] set sleeptimer", sleepTime
		if sleepTime:
			m = abs(sleepTime / 60)
			message = _("The sleep timer has been activated.") + "\n" + _("And will put your receiver in standby over ") + ngettext("%d minute", "%d minutes", m) % m
			self.sleepTimer.startLongTimer(sleepTime)
			self.sleepStartTime = time() + sleepTime
		else:
			message = _("The sleep timer has been disabled.")
			self.sleepTimer.stop()
		Notifications.AddPopup(message, type = MessageBox.TYPE_INFO, timeout = 5)

	def sleepTimerTimeout(self):
		if not Screens.Standby.inStandby:
			list = [ (_("Yes"), True), (_("Extend sleeptimer 15 minutes"), "extend"), (_("No"), False) ]
			# NOTE(review): "will got to stand by" is a grammar error in this
			# translated string; fixing it requires a translation update
			message = _("Your receiver will got to stand by due to the sleeptimer.")
			message += "\n" + _("Do you want this?")
			self.session.openWithCallback(self.sleepTimerTimeoutCallback, MessageBox, message, timeout=60, simple=True, list=list, default=False, timeout_default=True)

	def sleepTimerTimeoutCallback(self, answer):
		if answer == "extend":
			print "[InfoBarPowersaver] extend sleeptimer"
			self.setSleepTimer(900)
		elif answer:
			self.goStandby()
		else:
			print "[InfoBarPowersaver] abort"
			self.setSleepTimer(0)

	def goStandby(self):
		if not Screens.Standby.inStandby:
			print "[InfoBarPowersaver] goto standby"
			self.session.open(Screens.Standby.Standby)
class InfoBarHDMI:
	"""Toggle between HDMI-in and the regular service (main or PiP).

	Service type 8192 is the type used by the HDMI-in reference string
	'8192:0:1:0:0:0:0:0:0:0:' below.
	"""
	def HDMIIn(self):
		slist = self.servicelist
		if slist.dopipzap:
			# toggle HDMI-in inside the picture-in-picture window
			curref = self.session.pip.getCurrentService()
			if curref and curref.type != 8192:
				self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
			else:
				self.session.pip.playService(slist.servicelist.getCurrent())
		else:
			# toggle HDMI-in on the main screen
			curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			if curref and curref.type != 8192:
				# store a resume point when leaving a media file
				if curref and curref.type != -1 and os.path.splitext(curref.toString().split(":")[10])[1].lower() in AUDIO_EXTENSIONS.union(MOVIE_EXTENSIONS, DVD_EXTENSIONS):
					setResumePoint(self.session)
				self.session.nav.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
			elif isStandardInfoBar(self):
				self.session.nav.playService(slist.servicelist.getCurrent())
			else:
				self.session.nav.playService(self.cur_service)
|
ssh1/stbgui
|
lib/python/Screens/InfoBarGenerics.py
|
Python
|
gpl-2.0
| 119,400 | 0.029414 |
# -*- coding: utf-8 -*-
# !/usr/bin/python
################################### PART0 DESCRIPTION #################################
# Filename: def_get_ngram_2_db.py
# Description:
#
# Author: Shuai Yuan
# E-mail: ysh329@sina.com
# Create: 2015-8-17 22:17:26
# Last:
__author__ = 'yuens'
################################### PART1 IMPORT ######################################
import MySQLdb
import logging
################################### PART2 CLASS && FUNCTION ###########################
def get_one_bi_tri_gram(raw_string):
    """ Get onegram, bigram, trigram from raw_string and return them.

    Args:
        raw_string (str): the text to split into character n-grams.

    Returns:
        tuple: (one_gram_list, bi_gram_list, tri_gram_list), each a
        list of the 1-, 2- and 3-character substrings of raw_string
        in order of appearance.
    """
    one_gram_list = []
    bi_gram_list = []
    tri_gram_list = []
    # range() instead of the Python-2-only xrange(): behaviour is
    # identical here and keeps the function working under Python 3.
    for idx in range(len(raw_string)):
        # one-gram
        one_gram_list.append(raw_string[idx])
        # bi-gram (only when two characters remain)
        if len(raw_string) > idx + 1:
            bi_gram_list.append(raw_string[idx:idx+2])
        # tri-gram (only when three characters remain)
        if len(raw_string) > idx + 2:
            tri_gram_list.append(raw_string[idx:idx+3])
    return (one_gram_list, bi_gram_list, tri_gram_list)
def insert_ngram_2_db(word, showtimes, database_name, table_name):
""" Insert ngram(word) and its show times(showtimes) in corpus to
table(table_name) of database(database_name).
Args:
word (str): ngram word
showtimes (int): this ngram word's show times in corpus
database_name (str): name of preparing inserted database
table_name (str): name of preparing inserted table
Returns:
None
"""
try:
con = MySQLdb.connect(host = "localhost", user = "root", passwd = "931209", db = database_name, charset = "utf8")
#logging.info("Success in connecting MySQL.")
except MySQLdb.Error, e:
logging.error("Fail in connecting MySQL.")
logging.error("MySQL Error %d: %s." % (e.args[0], e.args[1]))
cursor = con.cursor()
try:
cursor.execute("""SELECT id FROM %s.%s WHERE word='%s'"""\
% (database_name, table_name, word)
)
id_tuple = cursor.fetchone()
if id_tuple == None: # not existed word
try:
cursor.execute("""INSERT INTO %s.%s
(word, pinyin, showtimes, weight, cixing, type1, type2, source, gram, meaning)
VALUES('%s', '', '%s', 0.0, 'cx', 't1', 't2', 'stock-newspaper-essence', '%s', 'ex')"""\
% (database_name, table_name, word, showtimes, len(word))\
)
con.commit()
except MySQLdb.Error, e:
con.rollback()
logging.error("Failed in inserting %s gram word %s, which is existed."\
% (len(word), word))
logging.error("MySQL Error %d: %s." % (e.args[0], e.args[1]))
else: # exited word
id = id_tuple[0]
try:
cursor.execute("""UPDATE %s.%s
SET showtimes=showtimes+'%s',
gram='%s'
WHERE id='%s'"""\
% (database_name, table_name, showtimes, len(word), id)\
)
con.commit()
except MySQLdb.Error, e:
con.rollback()
logging.error("Failed in updating %s gram word %s, which is existed."\
% (len(word), word))
logging.error("MySQL Error %d: %s." % (e.args[0], e.args[1]))
except MySQLdb.Error, e:
con.rollback()
logging.error("Fail in selecting %s gram word %s in table %s of database %s."\
% (len(word), word, table_name, database_name))
logging.error("MySQL Error %d: %s." % (e.args[0], e.args[1]))
finally:
con.close()
return None
def computation_corpus_scale_and_weight_2_db(database_name, table_name):
""" Compute the scale of corpus. Different ngram word, its corpus
scale is different, such as bigram word's corpus scale need to
compute the quantity of bigram words.
Args:
database_name (str): name of preparing updated database
table_name (str): name of preparing updated table
Returns:
None
"""
try:
con = MySQLdb.connect(host = "localhost",\
user = "root",\
passwd = "931209",\
db = database_name,\
charset = "utf8")
logging.info("Success in connecting MySQL.")
except MySQLdb.Error, e:
logging.error("Fail in connecting MySQL.")
logging.error("MySQL Error %d: %s." % (e.args[0], e.args[1]))
cursor = con.cursor()
try:
sql_list = []
sql_list.append("SET @onegram_num = (SELECT SUM(showtimes) FROM %s.%s WHERE gram = 1)" % (database_name, table_name))
sql_list.append("SET @bigram_num = (SELECT SUM(showtimes) FROM %s.%s WHERE gram = 2)" % (database_name, table_name))
sql_list.append("SET @trigram_num = (SELECT SUM(showtimes) FROM %s.%s WHERE gram = 3)" % (database_name, table_name))
sql_list.append("UPDATE %s.%s SET corpus_scale = @onegram_num WHERE gram = 1" % (database_name, table_name))
sql_list.append("UPDATE %s.%s SET corpus_scale = @bigram_num WHERE gram = 2" % (database_name, table_name))
sql_list.append("UPDATE %s.%s SET corpus_scale = @trigram_num WHERE gram = 3" % (database_name, table_name))
sql_list.append("UPDATE %s.%s SET weight = (showtimes / corpus_scale)" % (database_name, table_name))
map(lambda sql: cursor.execute(sql), sql_list)
con.commit()
logging.info("Success in updating corpus scale and weight of words.")
except MySQLdb.Error, e:
con.rollback()
logging.error("Fail in selecting gram word in table %s of database %s."\
% (table_name, database_name))
logging.error("MySQL Error %d: %s." % (e.args[0], e.args[1]))
finally:
con.close()
return None
################################### PART3 CLASS TEST ##################################
|
ysh329/wordsDB
|
mydef/def_get_ngram_2_db.py
|
Python
|
apache-2.0
| 6,600 | 0.01 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.