text
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
|
---|---|---|---|---|---|---|
from abc import ABCMeta, abstractmethod, abstractproperty
from ruleset import Ruleset
from device import Device
from propagation_model import PropagationModel
from region import Region
from boundary import Boundary
from data_map import DataMap2D, DataMap3D, DataMap2DWithFixedBoundingBox
from population import PopulationData
from custom_logging import getModuleLogger
import os
import textwrap
from configuration import base_data_directory
def _is_class(obj):
"""Returns True if ``obj`` is a class and False if it is an instance."""
return issubclass(obj.__class__, type)
def _is_object(obj):
    """Returns True if ``obj`` is an instance and False if it is a class."""
    # An "object" here is defined as exactly the complement of a class.
    is_class = _is_class(obj)
    return not is_class
def _make_string(obj):
    """Returns a short, human-readable string describing ``obj``.

    ``obj`` may be a class or an instance of a class belonging to one of the
    known framework families (Ruleset, PropagationModel, Boundary, DataMap2D,
    Device, Region). Devices must be actual instances because the string
    embeds per-instance state (portability, HAAT).

    NOTE(review): if ``obj`` belongs to none of the listed families, the
    function falls through and implicitly returns None — confirm callers
    never pass anything else.
    """
    def obj_belongs_to(class_object):
        # True when ``obj`` is the class (or a subclass) itself, or an
        # instance of it.
        return (_is_class(obj) and issubclass(obj, class_object)) or (
            _is_object(obj) and isinstance(obj, class_object))

    def get_class_name():
        if _is_class(obj):
            return obj.__name__
        else:
            return obj.__class__.__name__

    if obj_belongs_to(Ruleset) or obj_belongs_to(PropagationModel) or \
            obj_belongs_to(Boundary) or obj_belongs_to(DataMap2D):
        return get_class_name()
    elif obj_belongs_to(Device):
        if _is_class(obj):
            # Per-instance attributes are needed below, so a bare class is
            # not enough.
            raise TypeError("Expected an actual Device object.")
        else:
            if obj.is_portable():
                return "Device(portable)"
            else:
                return "Device(fixed,HAAT=%d)" % obj.get_haat()
    elif obj_belongs_to(Region):
        return get_class_name()
class Specification(object):
    """
    A Specification is the minimum amount of information needed to describe a
    set of data. A Specification can be used to create data, fetch data,
    and automatically map data.

    Specifications are best-effort data caches which are meant to aid in data
    generation and organization.

    Guiding principles:

     * The user is responsible for cache invalidation.
     * A best-effort attempt at avoiding naming collisions has been made but
       nothing should be considered certain.
     * When possible, load data from disk. When not possible, generate the
       data, save it, and then load it from disk.
     * When possible, allow the user to specify either a class name or an
       instance of the class. If an instance is specified, that instance
       will be used if an instance is needed. Otherwise, an instance will
       be created only when it becomes necessary for data generation.

    Notes for extending this class:

     * Become familiar with the use of the many helper functions. See e.g.
       the init function for :class:`SpecificationWhitespaceMap` for example
       usage.
     * Be sure that :meth:`make_data` returns the data in addition to saving
       it.
     * Implement :meth:`get_map` if possible.
     * Filenames should not exceed 255 characters in order to be compatible
       with common file systems.
    """
    # Python 2 metaclass declaration (this module is Python 2 throughout).
    __metaclass__ = ABCMeta

    @abstractmethod
    def to_string(self):
        """Returns the string representation of the Specification.

        The string doubles as the (extension-less) cache filename, so it
        must uniquely identify the described data."""
        pass

    @abstractproperty
    def subdirectory(self):
        """Returns a string with the name of the data subdirectory to be used
        for storing the data created by this Specification."""
        pass

    @abstractmethod
    def make_data(self):
        """
        Creates the data based on the information in the Specification. Must
        both save and return created data.

        See also: :meth:`save_data`.
        """
        pass

    def get_map(self):
        """Optionally-implemented method which will create the default map
        for the Specification."""
        raise NotImplementedError("")

    def _get_datamap_spec(self):
        """If possible, returns the internal :class:`SpecificationDataMap`.
        To succeed, the Specification must satisfy at least one of the
        following:

         * Be a SpecificationDataMap
         * Have an attribute "datamap_spec" which is a SpecificationDataMap
           object
         * Have an attribute "region_map_spec" which is a
           SpecificationRegionMap object

        Raises an AttributeError if no SpecificationDataMap is found.
        """
        if isinstance(self, SpecificationDataMap):
            return self
        if hasattr(self, "datamap_spec"):
            return self.datamap_spec
        if hasattr(self, "region_map_spec"):
            # Delegate: the region map spec holds the datamap spec.
            return self.region_map_spec._get_datamap_spec()
        # Bug fix: the original message was missing the closing parenthesis.
        raise AttributeError("No datamap specification found (expected to "
                             "find one of the following attributes: "
                             "datamap_spec, region_map_spec)")

    def _convert_to_class_and_object(self, var_name, obj,
                                     may_create_new_objects=True, **kwargs):
        """
        Sets the internal variables [var_name]_class, [var_name]_object based
        on ``obj``. ``obj`` may be either a class or an instance of a class.

        If ``obj`` is a class, the object will be created only if
        ``may_create_new_objects`` is True. In that case, the keyword
        arguments are passed to the constructor.

        If ``obj`` is an instance, that instance will be used.
        """
        if _is_class(obj):
            setattr(self, var_name + "_class", obj)
            if may_create_new_objects:
                setattr(self, var_name + "_object", obj(**kwargs))
        else:
            setattr(self, var_name + "_object", obj)
            setattr(self, var_name + "_class", obj.__class__)

    def _boundary_to_class_and_object(self, boundary):
        """Convenience wrapper: stores boundary_class and boundary_object."""
        self._convert_to_class_and_object("boundary", boundary)

    def _region_to_class_and_object(self, region):
        """Convenience wrapper: stores region_class and region_object."""
        self._convert_to_class_and_object("region", region)

    def _ruleset_to_class_and_object(self, ruleset):
        """Convenience wrapper: stores ruleset_class and ruleset_object."""
        self._convert_to_class_and_object("ruleset", ruleset)

    def _propagation_model_to_class_and_object(self, propagation_model):
        """Convenience wrapper: stores propagation_model_class/_object."""
        self._convert_to_class_and_object("propagation_model",
                                          propagation_model)

    def _store_at_least_class(self, var_name, obj):
        """Stores at minimum the class of ``obj``. If ``obj`` is an instance
        (rather than a class), ``obj`` will be stored as well."""
        self._convert_to_class_and_object(var_name, obj,
                                          may_create_new_objects=False)

    def _create_obj_if_needed(self, var_name, **kwargs):
        """If [var_name]_object does not exist, create it. In that case, the
        keyword arguments are passed to the constructor."""
        if hasattr(self, var_name + "_object"):
            return
        obj_class = getattr(self, var_name + "_class")
        setattr(self, var_name + "_object", obj_class(**kwargs))

    def _expect_of_type(self, obj, expected_types):
        """Raise a TypeError if ``obj`` is neither a subclass nor an instance
        of one of the expected types.

        expected_types may be either a list or a singleton."""
        if not isinstance(expected_types, list):
            expected_types = [expected_types]

        # Guard against caller mistakes: every expected type must itself be
        # a class.
        for e_type in expected_types:
            if not _is_class(e_type):
                raise TypeError("Expected type must be a class (got '%s' "
                                "instead)." % str(expected_types))

        if _is_class(obj):
            cls = obj
        else:
            cls = obj.__class__

        is_wrong_type = True
        for e_type in expected_types:
            if issubclass(cls, e_type):
                is_wrong_type = False
        if is_wrong_type:
            raise TypeError("Expected something of a type in %s (either a "
                            "class or object) but received something of "
                            "type %s." % (str(expected_types), cls.__name__))

    def _expect_is_object(self, obj):
        """Raise a TypeError if ``obj`` is not an instance."""
        if not _is_object(obj):
            raise TypeError("Expected to receive an instance and instead "
                            "received %s." % str(obj))

    def _expect_is_class(self, obj):
        """Raise a TypeError if ``obj`` is not a class."""
        if not _is_class(obj):
            raise TypeError("Expected to receive a class and instead "
                            "received %s." % str(obj))

    @property
    def filename(self):
        """Returns a string which is the full path to the file."""
        return os.path.join(self.full_directory, self.to_string() + ".pkl")

    @property
    def full_directory(self):
        """Returns a string which is the full directory path in which the
        file will be stored."""
        return os.path.join(base_data_directory, self.subdirectory)

    def data_exists(self):
        """Returns True if data with the associated filename already exists
        and False otherwise."""
        return os.path.isfile(self.filename)

    def load_data(self):
        """Loads the :class:`data_map.DataMap2D` or
        :class:`data_map.DataMap3D` from a pickle. The filename is determined
        by :meth:`filename`."""
        if self._get_datamap_spec().is_datamap2d():
            return DataMap2D.from_pickle(self.filename)
        else:
            return DataMap3D.from_pickle(self.filename)

    def save_data(self, datamap):
        """Save the :class:`data_map.DataMap2D` or
        :class:`data_map.DataMap3D` to a pickle. The filename is determined
        by :meth:`filename`."""
        self._expect_of_type(datamap, [DataMap2D, DataMap3D])
        # Create the destination directory lazily on first save.
        if not os.path.isdir(self.full_directory):
            os.makedirs(self.full_directory)
        datamap.to_pickle(self.filename)

    def fetch_data(self):
        """Fetch the data described by this Specification. If none exists,
        the data will be made and then loaded.

        Components: :meth:`load_data`, :meth:`make_data`
        """
        # Lazily create the logger so unpickled/partially-built instances
        # still work.
        if not hasattr(self, "log"):
            self.log = getModuleLogger(self)
        if self.data_exists():
            self.log.debug("Fetching data (LOAD): %s" % self.to_string())
            data = self.load_data()
        else:
            self.log.debug("Fetching data (MAKE): %s" % self.to_string())
            data = self.make_data()

        if data is None:
            raise ValueError("No data loaded")
        return data

    def _set_map_title(self, map):
        """Automatically sets the map title from the filename.

        Note: the parameter intentionally keeps its historical name ``map``
        (it shadows the builtin) to preserve the call interface."""
        map.title_font_size = 10
        # Wrap long specification strings so the title stays readable.
        wrapped_title = "\n".join(textwrap.wrap(self.to_string(), 80))
        map.set_title(wrapped_title)
class SpecificationDataMap(Specification):
    """
    This Specification describes a :class:`data_map.DataMap2D`. The
    Specification must be created with a class derived from
    :class:`data_map.DataMap2DWithFixedBoundingBox`, e.g.
    :class:`data_map.DataMap2DContinentalUnitedStates`.

    Unlike other Specifications, it *always* creates a fresh
    DataMap2D/DataMap3D when "making" or fetching data; nothing is ever
    saved to disk.
    """
    def __init__(self, datamap_derived_class, num_latitude_divisions,
                 num_longitude_divisions):
        # Validate every argument up front so bad calls fail fast.
        self._expect_of_type(datamap_derived_class,
                             DataMap2DWithFixedBoundingBox)
        self._expect_of_type(num_latitude_divisions, int)
        self._expect_of_type(num_longitude_divisions, int)

        self._store_at_least_class("datamap", datamap_derived_class)
        self.num_latitude_divisions = num_latitude_divisions
        self.num_longitude_divisions = num_longitude_divisions

    def to_string(self):
        """E.g. "DataMap2DContinentalUnitedStates_200x300"."""
        return "{0}_{1:d}x{2:d}".format(_make_string(self.datamap_class),
                                        self.num_latitude_divisions,
                                        self.num_longitude_divisions)

    def make_data(self):
        """Always build a brand-new (empty) data map."""
        return self.datamap_class.create(self.num_latitude_divisions,
                                         self.num_longitude_divisions)

    @property
    def subdirectory(self):
        # Data is never saved, so there is no storage subdirectory.
        return None

    def is_datamap2d(self):
        """Returns True if this Specification describes a
        :class:`data_map.DataMap2D`."""
        described_class = self.datamap_class
        return issubclass(described_class, DataMap2D)

    def is_datamap3d(self):
        """Returns True if this Specification describes a
        :class:`data_map.DataMap3D`."""
        described_class = self.datamap_class
        return issubclass(described_class, DataMap3D)

    def data_exists(self):
        # Always report "missing" so that fetch_data() regenerates the map
        # instead of ever trying to load it from disk.
        return False
class SpecificationRegionMap(Specification):
    """
    This Specification describes a :class:`data_map.DataMap2D` which contains
    boolean data. Values will be True (or truthy) if and only if the pixel's
    center is inside the :class:`boundary.Boundary`.
    """
    def __init__(self, boundary, datamap_spec):
        self._expect_of_type(boundary, Boundary)
        self._expect_of_type(datamap_spec, SpecificationDataMap)
        if not datamap_spec.is_datamap2d():
            raise TypeError("The datamap spec must describe a DataMap2D.")

        self._store_at_least_class("boundary", boundary)
        self.datamap_spec = datamap_spec

    def make_data(self):
        self._create_obj_if_needed("boundary")
        boundary_object = self.boundary_object
        region_datamap = self.datamap_spec.fetch_data()

        def pixel_center_in_boundary(latitude, longitude, latitude_index,
                                     longitude_index, current_value):
            # Membership is decided purely by the pixel's center point.
            return boundary_object.location_inside_boundary(
                (latitude, longitude))

        region_datamap.update_all_values_via_function(
            update_function=pixel_center_in_boundary)
        self.save_data(region_datamap)
        return region_datamap

    def to_string(self):
        pieces = ["REGION_MAP", _make_string(self.boundary_class),
                  self.datamap_spec.to_string()]
        return " ".join(pieces)

    @property
    def subdirectory(self):
        return "REGION_MAP"

    def get_map(self):
        """Creates a linear-scale :class:`map.Map` with boundary outlines and
        a white background. The title is automatically set using the
        Specification information but can be reset with
        :meth:`map.Map.set_title`. Returns a handle to the map object; does
        not save or show the map."""
        region_datamap = self.fetch_data()
        self._create_obj_if_needed("boundary")
        region_plot = region_datamap.make_map(is_in_region_map=region_datamap)
        region_plot.add_boundary_outlines(self.boundary_object)
        self._set_map_title(region_plot)
        return region_plot
class SpecificationWhitespaceMap(Specification):
    """
    This Specification describes a :class:`data_map.DataMap3D` which is True
    (or truthy) for pixels which are considered whitespace for the device in
    accordance with the :class:`ruleset.Ruleset`.

    The resulting DataMap3D has layers described by
    :meth:`region.Region.get_tvws_channel_list()`.

    .. note:: The naming conventions for this class assume that the default \
        :class:`protected_entities.ProtectedEntities` for the \
        :class:`region.Region` should be used. To specify alternative \
        protected entities, create a new class derived from the desired
        Region.
    """
    def __init__(self, region_map_spec, region, ruleset, device_object,
                 propagation_model=None):
        # Type checking
        self._expect_of_type(region_map_spec, SpecificationRegionMap)
        self._expect_of_type(region, Region)
        self._expect_of_type(ruleset, Ruleset)
        self._expect_is_object(device_object)

        # Store data (classes always; objects only when provided/required)
        self.region_map_spec = region_map_spec
        self._store_at_least_class("region", region)
        self._store_at_least_class("ruleset", ruleset)
        self._convert_to_class_and_object("device", device_object)

        # Propagation model needs special handling: the default comes from
        # the ruleset, which requires an actual ruleset instance.
        if propagation_model is None:
            self._create_obj_if_needed("ruleset")
            propagation_model = \
                self.ruleset_object.get_default_propagation_model()
        self._expect_of_type(propagation_model, PropagationModel)
        self._store_at_least_class("propagation_model", propagation_model)

    def to_string(self):
        return " ".join(["WHITESPACE_MAP",
                         "(%s)" % self.region_map_spec.to_string(),
                         _make_string(self.region_class),
                         _make_string(self.ruleset_class),
                         _make_string(self.propagation_model_class),
                         _make_string(self.device_object)])

    @property
    def subdirectory(self):
        return "WHITESPACE_MAP"

    def make_data(self):
        self._create_obj_if_needed("region")
        self._create_obj_if_needed("propagation_model")
        # Bug fix: the ruleset object only exists at this point if the
        # propagation model defaulted in __init__ (or an instance was
        # passed). Create it if needed so make_data() does not raise
        # AttributeError when both a ruleset class and an explicit
        # propagation model were given.
        self._create_obj_if_needed("ruleset")
        self.ruleset_object.set_propagation_model(
            self.propagation_model_object)

        region_datamap = self.region_map_spec.fetch_data()
        channel_list = self.region_object.get_tvws_channel_list()
        # Start each channel layer from the boolean region map, then carve
        # out the protections channel by channel.
        whitespace_datamap3d = DataMap3D.from_DataMap2D(region_datamap,
                                                        channel_list)
        for channel in channel_list:
            channel_layer = whitespace_datamap3d.get_layer(channel)
            self.ruleset_object.apply_all_protections_to_map(
                self.region_object, channel_layer, channel,
                self.device_object)

        self.save_data(whitespace_datamap3d)
        return whitespace_datamap3d

    def get_map(self):
        """Creates a linear-scale :class:`map.Map` with boundary outlines, a
        white background, and a colorbar. The title is automatically set
        using the Specification information but can be reset with
        :meth:`map.Map.set_title`. Returns a handle to the map object; does
        not save or show the map."""
        datamap3d = self.fetch_data()
        # Collapse channels: each pixel value becomes the whitespace count.
        datamap2d = datamap3d.sum_all_layers()
        region_map = self.region_map_spec.fetch_data()
        self.region_map_spec._create_obj_if_needed("boundary")
        boundary = self.region_map_spec.boundary_object
        map = datamap2d.make_map(is_in_region_map=region_map)
        map.add_boundary_outlines(boundary)
        map.add_colorbar(decimal_precision=0)
        map.set_colorbar_label("Number of available whitespace channels")
        self._set_map_title(map)
        return map
class SpecificationRegionAreaMap(Specification):
    """
    This Specification describes a :class:`data_map.DataMap2D` where the
    value of each pixel describes the area (in square kilometers) of the
    pixel.

    This data may be useful e.g. to create a CDF by area using
    :meth:`data_manipulation.calculate_cdf_from_datamap2d`.
    """
    def __init__(self, datamap_spec):
        self._expect_of_type(datamap_spec, SpecificationDataMap)
        self._expect_is_object(datamap_spec)
        self.datamap_spec = datamap_spec

    @property
    def subdirectory(self):
        return "REGION_AREA"

    def to_string(self):
        return " ".join(["REGION_AREA",
                         "(%s)" % self.datamap_spec.to_string()])

    def make_data(self):
        # NOTE(review): ``vincenty`` was removed in geopy 2.x; newer geopy
        # installations must use ``geopy.distance.geodesic`` instead.
        from geopy.distance import vincenty

        datamap = self.datamap_spec.fetch_data()
        # Assumes a uniform grid: every pixel spans the same latitude and
        # longitude extent as the first two grid points.
        latitude_width = float(datamap.latitudes[1] - datamap.latitudes[0])
        longitude_width = float(datamap.longitudes[1] - datamap.longitudes[0])

        def create_pixel_area(latitude, longitude, latitude_index,
                              longitude_index, current_value):
            # The pixel is (approximately) a trapezoid whose parallel
            # east-west edges lie on its northern and southern boundaries.
            #
            # Bug fix: the original corner labels were scrambled, which made
            # the formula compute (north-edge width) x (north-south height)
            # instead of the trapezoid area that averages the widths of the
            # north and south edges.
            north_lat = latitude + latitude_width / 2
            south_lat = latitude - latitude_width / 2
            east_lon = longitude + longitude_width / 2
            west_lon = longitude - longitude_width / 2

            # Height of the trapezoid: the north-south side length.
            height = vincenty((north_lat, west_lon),
                              (south_lat, west_lon)).kilometers
            # Parallel sides: east-west widths along the north and south
            # edges (these differ with latitude).
            top = vincenty((north_lat, west_lon),
                           (north_lat, east_lon)).kilometers
            bottom = vincenty((south_lat, west_lon),
                              (south_lat, east_lon)).kilometers
            return 0.5 * height * (top + bottom)

        datamap.update_all_values_via_function(
            update_function=create_pixel_area)
        self.save_data(datamap)
        return datamap

    def get_map(self):
        """Creates a linear-scale :class:`map.Map` with a colorbar. The title
        is automatically set using the Specification information but can be
        reset with :meth:`map.Map.set_title`. Returns a handle to the map
        object; does not save or show the map."""
        datamap = self.fetch_data()
        map = datamap.make_map()
        map.add_colorbar()
        map.set_colorbar_label("Area of pixel (km^2)")
        self._set_map_title(map)
        return map
class SpecificationPopulationMap(Specification):
    """
    This Specification describes a :class:`data_map.DataMap2D` where the
    value of each pixel is the population of the pixel (in people).

    This data may be useful e.g. to create a CDF by population using
    :meth:`data_manipulation.calculate_cdf_from_datamap2d`.
    """
    def __init__(self, region_map_spec, population):
        self._expect_of_type(region_map_spec, SpecificationRegionMap)
        self._expect_of_type(population, PopulationData)
        self.region_map_spec = region_map_spec
        self._store_at_least_class("population", population)

    @property
    def subdirectory(self):
        return "POPULATION"

    def to_string(self):
        # Same output as " ".join(["POPULATION", "(...)"]).
        return "POPULATION (%s)" % self.region_map_spec.to_string()

    def make_data(self):
        self._create_obj_if_needed("population")
        region_datamap = self.region_map_spec.fetch_data()
        # Population is distributed only over in-region pixels.
        population_datamap = self.population_object.create_population_map(
            is_in_region_datamap2d=region_datamap)
        self.save_data(population_datamap)
        return population_datamap

    def get_map(self):
        """Creates a log-scale :class:`map.Map` with boundary outlines, a
        white background, and a colorbar. The title is automatically set
        using the Specification information but can be reset with
        :meth:`map.Map.set_title`. Returns a handle to the map object; does
        not save or show the map."""
        population_datamap = self.fetch_data()
        region_datamap = self.region_map_spec.fetch_data()
        self.region_map_spec._create_obj_if_needed("boundary")
        boundary = self.region_map_spec.boundary_object

        population_plot = population_datamap.make_map(
            transformation='log', is_in_region_map=region_datamap)
        population_plot.add_colorbar()
        population_plot.set_colorbar_label("Population")
        population_plot.add_boundary_outlines(boundary)
        self._set_map_title(population_plot)
        return population_plot
|
kate-harrison/west
|
west/data_management.py
|
Python
|
gpl-2.0
| 23,067 | 0.00065 |
import unittest
import ray
from ray.rllib.agents.pg import PGTrainer, DEFAULT_CONFIG
from ray.rllib.utils.test_utils import framework_iterator
class LocalModeTest(unittest.TestCase):
    """Smoke-test that an RLlib trainer can run while Ray is in local mode."""

    def setUp(self) -> None:
        ray.init(local_mode=True)

    def tearDown(self) -> None:
        ray.shutdown()

    def test_local(self):
        config = DEFAULT_CONFIG.copy()
        config["model"]["fcnet_hiddens"] = [10]
        config["num_workers"] = 2
        # Exercise every available framework (tf/tf2/torch, etc.).
        for _ in framework_iterator(config):
            trainer = PGTrainer(config, "CartPole-v0")
            print(trainer.train())
            trainer.stop()
if __name__ == "__main__":
    # Allow running this file directly: delegate to pytest so the process
    # exit code reflects the test outcome.
    import pytest
    import sys
    sys.exit(pytest.main(["-v", __file__]))
|
richardliaw/ray
|
rllib/tests/test_local.py
|
Python
|
apache-2.0
| 696 | 0 |
"""
The wrapper for Postgres through SQLAchemy
__author__ = "Alex Xiao <http://www.alexxiao.me/>"
__date__ = "2018-11-03"
__version__ = "0.1"
Version:
0.1 (03/11/2018 AX) : init
"""
from urllib.parse import quote_plus
from sqlalchemy import create_engine, text
import pandas
from ax.log import get_logger
class Connection:
    """
    Base Class for all SQL Alchemy Connection
    """
    def __init__(self, user, password, logger_name='Toby.DB',
                 db_type='postgresql+psycopg2', host='localhost',
                 port=5432, db='toby', encoding='utf8'):
        """Create the wrapper and immediately open the connection.

        :param user: database user name (URL-quoted before use)
        :param password: database password (URL-quoted before use)
        :param logger_name: name passed to the project logger factory
        :param db_type: SQLAlchemy dialect+driver string
        :param host: database host
        :param port: database port
        :param db: database name
        :param encoding: client encoding for the engine
        """
        self._connection = None
        self._uri = None
        self._encoding = encoding
        self.logger = get_logger(logger_name)
        self.connect(db_type, user, password, host, port, db, encoding)

    def connect(self, db_type, user, password, host='localhost', port=5432,
                db='toby', encoding='utf8'):
        """Open the connection if it is not already open.

        Bug fix: store the fully-formatted URI in ``self._uri`` so that
        :meth:`reconnect` can reuse it. The original code saved only the
        unformatted template string, which made ``reconnect`` pass the raw
        template ``'{}://{}:{}@{}:{}/{}'`` to ``create_engine`` and fail.
        """
        self._uri = '{}://{}:{}@{}:{}/{}'.format(
            db_type, quote_plus(user), quote_plus(password), host, port, db)
        if not self._connection or self._connection.closed:
            self._connection = create_engine(
                self._uri, client_encoding=encoding).connect()

    def disconnect(self,):
        """Close the underlying connection."""
        self._connection.close()

    def reconnect(self,):
        """Re-open the connection if it was closed."""
        if self._connection.closed:
            self._connection = create_engine(
                self._uri, client_encoding=self._encoding).connect()

    def query(self, sql, **options):
        """Run ``sql`` and return the result as a pandas DataFrame.

        Extra keyword ``options`` are forwarded to ``pandas.read_sql``."""
        return pandas.read_sql(text(sql), self._connection, **options)

    def execute(self, sql):
        """Execute ``sql`` (DDL/DML) with before/after logging."""
        self.logger.info('Executing:' + sql)
        self._connection.execute(text(sql))
        self.logger.info('Done')
|
axxiao/toby
|
ax/wrapper/sqlalchemy.py
|
Python
|
mit
| 1,716 | 0.004662 |
#!/usr/bin/env python
'''
BlueBanana Rat Config Decoder
'''
__description__ = 'BlueBanana Rat Config Extractor'
__author__ = 'Kevin Breen http://techanarchy.net http://malwareconfig.com'
__version__ = '0.1'
__date__ = '2014/04/10'
#Standard Imports Go Here
import os
import sys
import string
from zipfile import ZipFile
from cStringIO import StringIO
from optparse import OptionParser
#Non Standard Imports
try:
from Crypto.Cipher import AES
except ImportError:
print "[+] Couldn't Import Cipher, try 'sudo pip install pycrypto'"
# Main Decode Function Goes Here
'''
data is a read of the file
Must return a python dict of values
'''
def run(data):
    """Extract the BlueBanana config from ``data`` (the raw file bytes).

    The sample is expected to be a ZIP archive containing ``config.txt``
    (the encrypted configuration). Returns a dict of config values.

    NOTE(review): if ``config.txt`` is absent, ``conFile`` is never bound
    and the ``if conFile`` line raises NameError — confirm callers only
    pass valid samples. Python 2 only (``cStringIO`` import at file top).
    """
    newZip = StringIO(data)
    with ZipFile(newZip) as zip:
        for name in zip.namelist(): # get all the file names
            if name == "config.txt": # this file contains the encrypted config
                conFile = zip.read(name)
    if conFile: # decrypt and parse the extracted blob
        confRaw = decryptConf(conFile)
        conf = configParse(confRaw)
    return conf
#Helper Functions Go Here
def DecryptAES(enckey, data):
    """Decrypt ``data`` with AES under ``enckey`` and return the plaintext.

    NOTE(review): ``AES.new`` is called without an explicit mode; PyCrypto
    defaults to ECB in that case — confirm this matches the RAT's scheme.
    """
    cipher = AES.new(enckey) # set the cipher
    return cipher.decrypt(data) # decrypt the data
def decryptConf(conFile):
    """Doubly decrypt the hex-encoded config blob (two AES passes).

    Python 2 only: ``str.decode('hex')`` does not exist on Python 3.
    """
    key1 = "15af8sd4s1c5s511"  # outer AES key (hard-coded in the RAT)
    key2 = "4e3f5a4c592b243f"  # inner AES key
    first = DecryptAES(key1, conFile.decode('hex'))
    # The last 16 bytes of the first pass are dropped before the second
    # hex-decode + decrypt round.
    second = DecryptAES(key2, first[:-16].decode('hex'))
    return second
def configParse(confRaw):
    """Split the decrypted config string into a dict of named fields.

    Python 2 semantics: ``filter`` on a str returns a str here, so
    ``.split`` works directly on the result.
    """
    config = {}
    # Keep only printable characters before splitting on the RAT delimiter.
    clean = filter(lambda x: x in string.printable, confRaw)
    list = clean.split("<separator>")  # NOTE: shadows the builtin ``list``
    config["Domain"] = list[0]
    config["Password"] = list[1]
    config["Port1"] = list[2]
    config["Port2"] = list[3]
    # Older samples only carry four fields; the install/jar names are
    # optional.
    if len(list) > 4:
        config["Install Name"] = list[4]
        config["Jar Name"] = list[5]
    return config
#Recursive Function Goes Here
# Main
if __name__ == "__main__":
parser = OptionParser(usage='usage: %prog inFile outConfig\n' + __description__, version='%prog ' + __version__)
parser.add_option("-r", "--recursive", action='store_true', default=False, help="Recursive Mode")
(options, args) = parser.parse_args()
# If we dont have args quit with help page
if len(args) > 0:
pass
else:
parser.print_help()
sys.exit()
# if we want a recursive extract run this function
if options.recursive == True:
print "[+] Sorry Not Here Yet Come Back Soon"
# If not recurisve try to open file
try:
print "[+] Reading file"
fileData = open(args[0], 'rb').read()
except:
print "[+] Couldn't Open File {0}".format(args[0])
#Run the config extraction
print "[+] Searching for Config"
config = run(fileData)
#If we have a config figure out where to dump it out.
if config == None:
print "[+] Config not found"
sys.exit()
#if you gave me two args im going to assume the 2nd arg is where you want to save the file
if len(args) == 2:
print "[+] Writing Config to file {0}".format(args[1])
with open(args[1], 'a') as outFile:
for key, value in sorted(config.iteritems()):
clean_value = filter(lambda x: x in string.printable, value)
outFile.write("Key: {0}\t Value: {1}\n".format(key,clean_value))
# if no seconds arg then assume you want it printing to screen
else:
print "[+] Printing Config to screen"
for key, value in sorted(config.iteritems()):
clean_value = filter(lambda x: x in string.printable, value)
print " [-] Key: {0}\t Value: {1}".format(key,clean_value)
print "[+] End of Config"
|
1ookup/RATDecoders
|
BlueBanana.py
|
Python
|
gpl-2.0
| 3,304 | 0.030266 |
#!/usr/bin/env python
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of minecraft-world-io.
#
# minecraft-world-io is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# minecraft-world-io is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with minecraft-world-io. If not, see <http://www.gnu.org/licenses/>.
#
import os
import unittest
from marook.minecraft.tag.entities import SkeletonParser
from marook.minecraft.tag.entities import SheepParser
class SkeletonParserTest(unittest.TestCase):
    """Parsing a dumped skeleton entity should succeed without raising."""

    def testParseSkeleton(self):
        dump_path = os.path.join('etc', 'dumps', 'skeleton.dump')
        with open(dump_path, 'r') as f:
            skeleton_parser = SkeletonParser()
            s = skeleton_parser.readEntity(f)
        # TODO check skeleton attributes
class SheepParserTest(unittest.TestCase):
    """Parsing a dumped sheep entity should succeed without raising."""

    def testParseSheep(self):
        dump_path = os.path.join('etc', 'dumps', 'sheep.dump')
        with open(dump_path, 'r') as f:
            sheep_parser = SheepParser()
            s = sheep_parser.readEntity(f)
        # TODO check sheep attributes
|
marook/minecraft-world-io
|
src/test/marook_test/minecraft_test/tag_test/test_entities.py
|
Python
|
gpl-3.0
| 1,426 | 0.002805 |
#!/usr/bin/python
import os, sys, shutil, collections
from optparse import OptionParser
# Fix for python 2
try:
input = raw_input
except NameError:
pass
def find_recursive(root, subpath, maxdepth=4):
    """Breadth-first search for a directory containing ``subpath``.

    Starting from ``root`` (plus any android-looking PATH entries, adjusted
    upward by len(subpath)-1 levels), returns the absolute path of the first
    directory ``d`` for which ``d/subpath...`` is an existing file, or None.
    Hidden/dotted directory names are never descended into.
    """
    pending = collections.deque([(root, 0)])
    if 'PATH' in os.environ:
        env_entries = os.environ['PATH'].split(':')
        up_levels = ['..'] * (len(subpath) - 1)
        # PATH candidates are tried first but never expanded further
        # (they are enqueued at maxdepth).
        pending.extendleft([(os.path.join(entry, *up_levels), maxdepth)
                            for entry in env_entries
                            if 'android' in entry.lower()])
    while pending:
        directory, depth = pending.popleft()
        if os.path.isfile(os.path.join(directory, *subpath)):
            return os.path.abspath(directory)
        if depth < maxdepth:
            for name in os.listdir(directory):
                child = os.path.join(directory, name)
                if os.path.isdir(child) and '.' not in name:
                    pending.append((child, depth + 1))
    return None
def read_local_properties():
    """Read sdk.dir / ndk.dir from omim/android/local.properties.

    Returns a ``(sdkDir, ndkDir)`` tuple; either entry is None when the
    file or the corresponding key is missing.
    """
    android_root = os.path.join(os.path.dirname(sys.argv[0]), '..', '..', 'android')
    props_file = os.path.join(android_root, 'local.properties')
    sdk_dir = None
    ndk_dir = None
    if os.path.exists(props_file):
        with open(props_file, 'r') as props:
            for raw_line in props:
                stripped = raw_line.strip()
                if '=' not in stripped:
                    continue
                # Same parsing as the original: everything between the first
                # and second '=' (values containing '=' are truncated).
                value = stripped.split('=')[1].strip()
                if stripped.startswith('sdk.dir'):
                    sdk_dir = value
                elif stripped.startswith('ndk.dir'):
                    ndk_dir = value
    return (sdk_dir, ndk_dir)
def query_path(title, option, default, subpath):
    """Interactively ask the user for a directory containing ``subpath``.

    The user may type "s" (while the hint is still offered) to auto-search
    under the home directory via :func:`find_recursive`; a successful search
    becomes the new default and the hint is removed. Returns the absolute
    path on success; prints an error and exits the process otherwise.

    NOTE(review): the ``option`` parameter is unused — confirm it is kept
    only for call-site symmetry.
    """
    default = '' if not default else os.path.abspath(default)
    searchHint = ', "s" to search'
    while True:
        path = input('Path to {0}{1} [{2}]:'.format(title, searchHint, default)) or default
        if len(searchHint) > 0 and path.lower().strip() == 's':
            found = find_recursive(os.path.expanduser('~'), subpath)
            if found:
                # Offer the discovered directory as the new default and stop
                # offering the search option.
                default = found
                searchHint = ''
        else:
            # Any non-"s" answer (or empty answer with a default) ends the
            # prompt loop.
            break
    test = os.path.join(path, *subpath)
    if path and os.path.isfile(test):
        return os.path.abspath(path)
    else:
        print('Could not find {0}, not an {1} path.'.format(test, title))
        sys.exit(1)
def write_local_properties(sdkDir, ndkDir):
    """Write local.properties under omim/android and mirror it into the
    subproject folders (YoPme, YoPme2, UnitTests)."""
    property_lines = [
        '# Autogenerated file',
        '# Do not add it to version control',
        'sdk.dir={0}'.format(sdkDir),
        'ndk.dir={0}'.format(ndkDir),
    ]
    content = ''.join(line + '\n' for line in property_lines)

    # Create omim/android/local.properties
    android_root = os.path.join(os.path.dirname(sys.argv[0]), '..', '..', 'android')
    props_file = os.path.join(android_root, 'local.properties')
    print('Writing {0}'.format(props_file))
    with open(props_file, 'w') as f:
        f.write(content)

    # Copy the file into each subproject, creating folders as needed.
    for folder in ['YoPme', 'YoPme2', 'UnitTests']:
        dest_folder = os.path.join(android_root, folder)
        if not os.path.exists(dest_folder):
            os.makedirs(dest_folder)
        dst = os.path.join(dest_folder, 'local.properties')
        print('Copying to {0}'.format(dst))
        shutil.copy(props_file, dst)
if __name__ == '__main__':
    # CLI: optionally take the SDK/NDK paths as flags; otherwise fall back
    # to previously saved values and/or interactive prompting.
    parser = OptionParser()
    parser.add_option('-s', '--sdk', help='Path to Android SDK')
    parser.add_option('-n', '--ndk', help='Path to Android NDK')
    options, _ = parser.parse_args()
    sdkDir = options.sdk
    ndkDir = options.ndk
    # Reuse any values from an existing local.properties when a flag is
    # missing.
    if not options.sdk or not options.ndk:
        sdkDirOld, ndkDirOld = read_local_properties()
        if not sdkDir:
            sdkDir = sdkDirOld
        if not ndkDir:
            ndkDir = ndkDirOld
    # Validate (and, if needed, interactively locate) both paths, then
    # persist them.
    sdkDir = query_path('Android SDK', options.sdk, sdkDir, ['platform-tools', 'adb'])
    ndkDir = query_path('Android NDK', options.ndk, ndkDir, ['ndk-build'])
    write_local_properties(sdkDir, ndkDir)
|
rokuz/omim
|
tools/android/set_up_android.py
|
Python
|
apache-2.0
| 3,533 | 0.016417 |
# -*- coding: utf-8 -*-
# Copyright © 2012-2022 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Copy static files into the output folder."""
import os
from nikola.plugin_categories import Task
from nikola import utils
class CopyFiles(Task):
    """Copy static files into the output folder."""

    name = "copy_files"

    def gen_tasks(self):
        """Copy static files into the output folder."""
        kw = {
            'files_folders': self.site.config['FILES_FOLDERS'],
            'output_folder': self.site.config['OUTPUT_FOLDER'],
            'filters': self.site.config['FILTERS'],
        }
        yield self.group_task()
        # Loop-invariant reads hoisted; per-task work stays in the loop.
        destination = kw['output_folder']
        active_filters = kw['filters']
        for source in kw['files_folders']:
            real_destination = os.path.join(destination,
                                            kw['files_folders'][source])
            for copy_task in utils.copy_tree(source, real_destination,
                                             link_cutoff=destination):
                copy_task['basename'] = self.name
                copy_task['uptodate'] = [utils.config_changed(
                    kw, 'nikola.plugins.task.copy_files')]
                yield utils.apply_filters(copy_task, active_filters,
                                          skip_ext=['.html'])
|
getnikola/nikola
|
nikola/plugins/task/copy_files.py
|
Python
|
mit
| 2,163 | 0.000463 |
import pdb
class TimingDiagram:
    """Prints an ASCII timing diagram of the channel edges in the active
    sequence of an XTSM object.

    Each printed row is one entry from the deduplicated edge list; each
    column is a distinct edge time.  A cell shows '^value' when the value
    rises, 'vvalue' when it falls or stays equal, and '|.......' when the
    channel has no edge at that time.  The first and last columns show the
    channel's initial and holding values.
    """

    def print_diagram(self, xtsm_object):
        """Collect edge timings from *xtsm_object* and print the diagram
        to stdout.

        Fix: removed a live ``pdb.set_trace()`` debugger breakpoint that
        halted every call, and dropped commented-out debug code.
        """
        seq = xtsm_object.XTSM.getActiveSequence()
        cMap = seq.getOwnerXTSM().getDescendentsByType("ChannelMap")[0]
        # Parser output node.  Use TimingProffer: the control arrays hold
        # what is actually coming out.
        seq.collectTimingProffers()
        edge_timings = seq.TimingProffer.data['Edge']

        class Edge:
            """One edge (group, channel, time, value, tag) plus the display
            metadata looked up from the channel map."""

            def __init__(self, timing_group, channel_number, time, value, tag,
                         name, initial_value, holding_value):
                self.timing_group = timing_group
                self.channel_number = channel_number
                self.time = time
                self.value = value
                self.tag = tag
                self.max = 0
                self.min = 0
                self.name = name
                self.holding_value = holding_value
                self.initial_value = initial_value

            def is_same(self, edge):
                """True when *edge* matches on group/channel/time/value/tag."""
                return ((self.timing_group == edge.timing_group) and
                        (self.channel_number == edge.channel_number) and
                        (self.time == edge.time) and
                        (self.value == edge.value) and
                        (self.tag == edge.tag))

        # Join each raw edge row with its channel-map entry to pick up the
        # channel name and the optional initial/holding values.
        edges = []
        longest_name = 0
        for edge in edge_timings:
            for channel in cMap.Channel:
                tgroup = int(channel.TimingGroup.PCDATA)
                tgroupIndex = int(channel.TimingGroupIndex.PCDATA)
                if tgroup == int(edge[0]) and tgroupIndex == int(edge[1]):
                    name = channel.ChannelName.PCDATA
                    try:
                        init_val = channel.InitialValue.PCDATA
                    except AttributeError:
                        init_val = 'None '
                    try:
                        hold_val = channel.HoldingValue.PCDATA
                    except AttributeError:
                        hold_val = 'None '
                    if len(name) > longest_name:
                        longest_name = len(name)
                    edges.append(Edge(edge[0], edge[1], edge[2], edge[3], edge[4],
                                      name, init_val, hold_val))

        # NOTE(review): is_same() also compares time/value/tag, so this
        # dedupes identical *edges*, not unique channels, despite the name.
        # Preserved as-is; confirm intent before changing.
        unique_group_channels = []
        for edge in edges:
            if not any(edge.is_same(ugc) for ugc in unique_group_channels):
                unique_group_channels.append(edge)

        # Debug dump of the raw edge table ordered by time (column 2).
        from operator import itemgetter
        edge_timings_by_group = sorted(edge_timings, key=itemgetter(2))
        edge_timings_by_group_list = []
        for edge in edge_timings_by_group:
            edge_timings_by_group_list.append(edge.tolist())
        for p in edge_timings_by_group_list:
            print(p)

        # One diagram column per distinct edge time (each time represented
        # by the first edge seen at that time).
        unique_times = []
        for edge in edges:
            if not any(edge.time == t.time for t in unique_times):
                unique_times.append(edge)

        for ugc in unique_group_channels:
            s = ugc.name.rjust(longest_name)
            current_edge = edges[0]
            previous_edge = edges[0]
            is_first = True
            for t in unique_times:
                is_found = False
                for edge in edges:
                    if (edge.timing_group == ugc.timing_group and
                            edge.channel_number == ugc.channel_number and
                            edge.time == t.time):
                        is_found = True
                        current_edge = edge
                if is_first:
                    s = s + '|' + str('%7s' % str(current_edge.initial_value))
                    is_first = False
                    # NOTE(review): previous_edge aliases edges[0] here, so
                    # this assignment mutates edges[0].value -- preserved
                    # as-is from the original.
                    previous_edge.value = current_edge.initial_value
                    if previous_edge.value == 'None ':
                        previous_edge.value = 0
                if is_found:
                    # '^' marks a rising value, 'v' a falling (or equal) one.
                    if current_edge.value > previous_edge.value:
                        s += '^' + str('%7s' % str(current_edge.value))
                    else:
                        s += 'v' + str('%7s' % str(current_edge.value))
                    previous_edge = current_edge
                else:
                    s += '|' + '.' * 7
            s = s + '|' + str('%7s' % str(current_edge.holding_value))
            print(s)

        # Footer row: the time axis, aligned with the columns above.
        s = "Time (ms)".rjust(longest_name) + '|' + str('%7s' % str("Initial"))
        for t in unique_times:
            s += '|' + str('%7s' % str(t.time))
        s = s + '|' + str('%7s' % str("Holding"))
        print(s)
|
gemelkelabs/timing_system_software
|
server_py_files/utilities/timing_diagram.py
|
Python
|
mit
| 5,170 | 0.00793 |
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_i18n

# Translation domain for this project's message catalogs.
DOMAIN = "neutron"

_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)

# The primary translation function using the well-known name "_"
_ = _translators.primary

# The contextual translation function using the name "_C"
_C = _translators.contextual_form

# The plural translation function using the name "_P"
_P = _translators.plural_form

# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical


def get_available_languages():
    """Return the languages that have translations for the neutron domain."""
    return oslo_i18n.get_available_languages(DOMAIN)
|
wolverineav/neutron
|
neutron/_i18n.py
|
Python
|
apache-2.0
| 1,355 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-24 07:00
from __future__ import unicode_literals
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
class Migration(migrations.Migration):
    """Auto-generated migration: drop ``HomePage.content`` and add a
    StreamField ``body`` with 'motto' (char) and 'paragraph' (rich text)
    block types."""

    dependencies = [
        ('home', '0004_auto_20170524_0608'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='homepage',
            name='content',
        ),
        migrations.AddField(
            model_name='homepage',
            name='body',
            field=wagtail.core.fields.StreamField((('motto', wagtail.core.blocks.CharBlock()), ('paragraph', wagtail.core.blocks.RichTextBlock())), blank=True),
        ),
    ]
|
cts-admin/cts
|
cts/home/migrations/0005_auto_20170524_0700.py
|
Python
|
gpl-3.0
| 708 | 0.001412 |
#!/usr/bin/env python
'''
Creates an html treemap of disk usage, using the Google Charts API
'''
import json
import os
import subprocess
import sys
def memoize(fn):
    """Decorator caching fn's results, keyed by the positional-args tuple."""
    cache = {}

    def wrapper(*args):
        # Compute on first sight of this argument tuple, then reuse.
        if args not in cache:
            cache[args] = fn(*args)
        return cache[args]

    return wrapper
@memoize
def get_folder_size(folder):
    """Return the total size in bytes of *folder* and everything below it.

    Results are memoized per path, so repeated queries (e.g. for the same
    parent directory) are answered from the cache.
    """
    size = os.path.getsize(folder)
    for entry in os.listdir(folder):
        entry_path = os.path.join(folder, entry)
        if os.path.isdir(entry_path):
            size += get_folder_size(entry_path)
        elif os.path.isfile(entry_path):
            size += os.path.getsize(entry_path)
    return size
def usage_iter(root):
    """Yield ``[label, parent_label, size]`` rows for *root* and each
    subfolder under it.

    Labels embed the byte size after a newline so identical path names
    remain distinct in the chart.  Folders that disappear or become
    unreadable mid-walk are skipped.
    """
    top = os.path.abspath(root)
    top_size = get_folder_size(top)
    yield ["{0}\n{1}".format(top, top_size), None, top_size]
    for parent, dirnames, _files in os.walk(top):
        for child in dirnames:
            child_path = os.path.join(parent, child)
            try:
                child_size = get_folder_size(child_path)
                parent_size = get_folder_size(parent)
            except OSError:
                continue
            yield ["{0}\n{1}".format(child_path, child_size),
                   "{0}\n{1}".format(parent, parent_size),
                   child_size]
def json_usage(root):
    """Return the usage rows for *root* as a JSON string.

    The first row is the column header expected by the Google Charts
    TreeMap API; the rest come from ``usage_iter``.
    """
    root = os.path.abspath(root)
    rows = [['Path', 'Parent', 'Usage']]
    # extend() accepts any iterable directly; the original wrapped
    # usage_iter() in a redundant generator expression.
    rows.extend(usage_iter(root))
    return json.dumps(rows)
def main(args):
    '''Print an HTML treemap of disk usage under args[0] to stdout,
    populated from json_usage() and rendered with the Google Charts
    TreeMap API.'''
    html = '''
<html>
  <head>
    <script type="text/javascript" src="https://www.google.com/jsapi"></script>
    <script type="text/javascript">
      google.load("visualization", "1", {packages:["treemap"]});
      google.setOnLoadCallback(drawChart);
      function drawChart() {
        // Create and populate the data table.
        var data = google.visualization.arrayToDataTable(%s);
        // Create and draw the visualization.
        var tree = new google.visualization.TreeMap(document.getElementById('chart_div'));
        tree.draw(data, { headerHeight: 15, fontColor: 'black' });
      }
    </script>
  </head>
  <body>
    <div id="chart_div" style="width: 900px; height: 500px;"></div>
    <p style="text-align: center">Click to descend. Right-click to ascend.</p>
  </body>
</html>
''' % json_usage(args[0])
    # print(x) with a single argument behaves identically under Python 2
    # and 3; the original "print html" statement was Python-2-only.
    print(html)
if __name__ == "__main__":
    # Default to scanning the current directory when no path is given.
    main(sys.argv[1:] or ['.'])
|
geekoftheweek/disk-treemap
|
treemap.py
|
Python
|
mit
| 2,734 | 0.001829 |
#!/usr/bin/python
"""Test of tree output using Firefox."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
sequence.append(PauseAction(3000))
sequence.append(KeyComboAction("<Alt>b"))
sequence.append(KeyComboAction("Return"))
sequence.append(KeyComboAction("Tab"))
sequence.append(KeyComboAction("Tab"))
sequence.append(KeyComboAction("Up"))
sequence.append(KeyComboAction("Up"))
sequence.append(KeyComboAction("Up"))
sequence.append(KeyComboAction("Tab"))
sequence.append(PauseAction(3000))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("<Shift>Tab"))
sequence.append(utils.AssertPresentationAction(
"1. Shift Tab for tree",
["BRAILLE LINE: 'Firefox application Library frame All Bookmarks expanded TREE LEVEL 1'",
" VISIBLE: 'All Bookmarks expanded TREE LEVE', cursor=1",
"SPEECH OUTPUT: 'All Bookmarks.'",
"SPEECH OUTPUT: 'expanded.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"2. Down Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Toolbar TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Toolbar TREE LEVEL 2', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Toolbar.'",
"SPEECH OUTPUT: 'tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"3. Down Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu collapsed TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu collapsed TREE LE', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu.'",
"SPEECH OUTPUT: 'collapsed.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"4. Basic Where Am I",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu collapsed TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu collapsed TREE LE', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu tree item.'",
"SPEECH OUTPUT: '2 of 3.'",
"SPEECH OUTPUT: 'collapsed tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Right"))
sequence.append(utils.AssertPresentationAction(
"5. Right Arrow to expand folder",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu expanded TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu expanded TREE LEV', cursor=1",
"SPEECH OUTPUT: 'expanded'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"6. Basic Where Am I",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu expanded TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu expanded TREE LEV', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu tree item.'",
"SPEECH OUTPUT: '2 of 3.'",
"SPEECH OUTPUT: 'expanded tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"7. Down Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame GNOME TREE LEVEL 3'",
" VISIBLE: 'GNOME TREE LEVEL 3', cursor=1",
"SPEECH OUTPUT: 'GNOME.'",
"SPEECH OUTPUT: 'tree level 3'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"8. Basic Where Am I",
["BRAILLE LINE: 'Firefox application Library frame GNOME TREE LEVEL 3'",
" VISIBLE: 'GNOME TREE LEVEL 3', cursor=1",
"SPEECH OUTPUT: 'GNOME tree item.'",
"SPEECH OUTPUT: '1 of 2.'",
"SPEECH OUTPUT: 'tree level 3'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"9. Up Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu expanded TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu expanded TREE LEV', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu.'",
"SPEECH OUTPUT: 'expanded.'",
"SPEECH OUTPUT: 'tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Left"))
sequence.append(utils.AssertPresentationAction(
"10. Left Arrow to collapse folder",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu collapsed TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu collapsed TREE LE', cursor=1",
"SPEECH OUTPUT: 'collapsed'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"11. Up Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Toolbar TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Toolbar TREE LEVEL 2', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Toolbar.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"12. Up Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame All Bookmarks expanded TREE LEVEL 1'",
" VISIBLE: 'All Bookmarks expanded TREE LEVE', cursor=1",
"SPEECH OUTPUT: 'All Bookmarks.'",
"SPEECH OUTPUT: 'expanded.'",
"SPEECH OUTPUT: 'tree level 1'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.AssertPresentationAction(
"13. Tab back to tree table",
["BRAILLE LINE: 'Firefox application Library frame tree table Bookmarks Toolbar table row TREE LEVEL 1'",
" VISIBLE: 'Bookmarks Toolbar table row TR', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Toolbar '"]))
sequence.append(KeyComboAction("<Alt>F4"))
sequence.append(utils.AssertionSummaryAction())
sequence.start()
|
GNOME/orca
|
test/keystrokes/firefox/ui_role_tree.py
|
Python
|
lgpl-2.1
| 5,957 | 0.002686 |
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitsend Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test ZMQ interface
#
from test_framework.test_framework import BitsendTestFramework
from test_framework.util import *
import zmq
import struct
class ZMQTest (BitsendTestFramework):
    """Functional test of the ZMQ notification interface.

    Subscribes to the "hashblock" and "hashtx" topics published by node0
    and checks that generated blocks and wallet transactions are announced
    with monotonically increasing per-topic sequence numbers.  Message
    ordering matters here, so the recv/assert sequence must not be
    reordered.
    """

    def __init__(self):
        super().__init__()
        self.num_nodes = 4
    # Class-level constant; setup_nodes and run_test read it via self.port.
    port = 28332

    def setup_nodes(self):
        """Start the nodes, with node0 publishing hashtx/hashblock over ZMQ,
        and subscribe to both topics before any blocks are generated."""
        self.zmqContext = zmq.Context()
        self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
        self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashblock")
        self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashtx")
        self.zmqSubSocket.connect("tcp://127.0.0.1:%i" % self.port)
        return start_nodes(self.num_nodes, self.options.tmpdir, extra_args=[
            ['-zmqpubhashtx=tcp://127.0.0.1:'+str(self.port), '-zmqpubhashblock=tcp://127.0.0.1:'+str(self.port)],
            [],
            [],
            []
            ])

    def run_test(self):
        """Generate blocks and a wallet tx, asserting each ZMQ notification.

        Multipart message layout: [topic, payload, 4-byte LE sequence].
        """
        self.sync_all()

        genhashes = self.nodes[0].generate(1)
        self.sync_all()

        print("listen...")
        # A generated block announces its coinbase tx first...
        msg = self.zmqSubSocket.recv_multipart()
        topic = msg[0]
        assert_equal(topic, b"hashtx")
        body = msg[1]
        nseq = msg[2]  # raw sequence bytes (unused; decoded below instead)
        msgSequence = struct.unpack('<I', msg[-1])[-1]
        assert_equal(msgSequence, 0) #must be sequence 0 on hashtx

        # ...followed by the block hash itself.
        msg = self.zmqSubSocket.recv_multipart()
        topic = msg[0]
        body = msg[1]
        msgSequence = struct.unpack('<I', msg[-1])[-1]
        assert_equal(msgSequence, 0) #must be sequence 0 on hashblock
        blkhash = bytes_to_hex_str(body)

        assert_equal(genhashes[0], blkhash) #blockhash from generate must be equal to the hash received over zmq

        # Generate 10 more blocks on node1; expect 2 messages per block
        # (hashtx + hashblock) and strictly increasing block sequences.
        n = 10
        genhashes = self.nodes[1].generate(n)
        self.sync_all()

        zmqHashes = []
        blockcount = 0
        for x in range(0,n*2):
            msg = self.zmqSubSocket.recv_multipart()
            topic = msg[0]
            body = msg[1]
            if topic == b"hashblock":
                zmqHashes.append(bytes_to_hex_str(body))
                msgSequence = struct.unpack('<I', msg[-1])[-1]
                assert_equal(msgSequence, blockcount+1)
                blockcount += 1

        for x in range(0,n):
            assert_equal(genhashes[x], zmqHashes[x]) #blockhash from generate must be equal to the hash received over zmq

        #test tx from a second node
        hashRPC = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
        self.sync_all()

        # now we should receive a zmq msg because the tx was broadcast
        msg = self.zmqSubSocket.recv_multipart()
        topic = msg[0]
        body = msg[1]
        hashZMQ = ""
        if topic == b"hashtx":
            hashZMQ = bytes_to_hex_str(body)
            msgSequence = struct.unpack('<I', msg[-1])[-1]
            assert_equal(msgSequence, blockcount+1)

        assert_equal(hashRPC, hashZMQ) #blockhash from generate must be equal to the hash received over zmq
if __name__ == '__main__':
    # Instantiate and hand control to the test framework's runner.
    ZMQTest().main()
|
madzebra/BitSend
|
qa/rpc-tests/zmq_test.py
|
Python
|
mit
| 3,224 | 0.006514 |
# Read a string and shift the letters K, O and E two places forward in
# the alphabet (K->M, O->Q, E->G), then print the transformed string.
string = input("What is your string?\n")
# Fix: the original called string.replace('K', 'M'[max]) -- indexing a
# one-character string with the builtin `max` raises TypeError -- and
# discarded every result.  str.replace returns a NEW string (strings are
# immutable), so the calls are chained and the result rebound.
string = string.replace('K', 'M').replace('O', 'Q').replace('E', 'G')
print(string)
|
AustinHartman/randomPrograms
|
stringRep.py
|
Python
|
gpl-3.0
| 148 | 0.006757 |
from enigma import eComponentScan, iDVBFrontend
from Components.NimManager import nimmanager as nimmgr
from Tools.Transponder import getChannelNumber
class ServiceScan:
	"""Drives an eComponentScan channel scan and mirrors its progress into
	the UI widgets (progress bar, status text, service list, LCD summary).

	A scan job is a list of passes (``scanList``); each pass has a tuner
	("feid"), flags and a transponder list.  State advances through
	Idle -> Running -> Done/Error per pass.
	"""

	# Scan state constants.
	Idle = 1
	Running = 2
	Done = 3
	Error = 4

	# Error-code -> user-visible message mapping used when state == Error.
	Errors = {
		0: _("error starting scanning"),
		1: _("error while scanning"),
		2: _("no resource manager"),
		3: _("no channel list")
	}

	def scanStatusChanged(self):
		"""Callback from eComponentScan: refresh all UI widgets and, when a
		pass finishes, chain into the next pass of scanList."""
		if self.state == self.Running:
			self.progressbar.setValue(self.scan.getProgress())
			self.lcd_summary.updateProgress(self.scan.getProgress())
			if self.scan.isDone():
				errcode = self.scan.getError()
				if errcode == 0:
					self.state = self.Done
					self.servicelist.listAll()
				else:
					self.state = self.Error
					self.errorcode = errcode
				self.network.setText("")
				self.transponder.setText("")
			else:
				result = self.foundServices + self.scan.getNumServices()
				percentage = self.scan.getProgress()
				if percentage > 99:
					percentage = 99
				#TRANSLATORS: The stb is performing a channel scan, progress percentage is printed in '%d' (and '%%' will show a single '%' symbol)
				message = ngettext("Scanning - %d%% completed", "Scanning - %d%% completed", percentage) % percentage
				message += ", "
				#TRANSLATORS: Intermediate scanning result, '%d' channel(s) have been found so far
				message += ngettext("%d channel found", "%d channels found", result) % result
				self.text.setText(message)
				# Describe the transponder currently being scanned, per
				# frontend type (satellite / cable / terrestrial).
				transponder = self.scan.getCurrentTransponder()
				network = ""
				tp_text = ""
				if transponder:
					tp_type = transponder.getSystem()
					if tp_type == iDVBFrontend.feSatellite:
						network = _("Satellite")
						tp = transponder.getDVBS()
						orb_pos = tp.orbital_position
						try:
							sat_name = str(nimmgr.getSatDescription(orb_pos))
						except KeyError:
							sat_name = ""
						if orb_pos > 1800: # west
							orb_pos = 3600 - orb_pos
							h = _("W")
						else:
							h = _("E")
						# Avoid repeating the position when it is already
						# embedded in the satellite's name.
						if ("%d.%d" % (orb_pos/10, orb_pos%10)) in sat_name:
							network = sat_name
						else:
							network = "%s %d.%d %s" % (sat_name, orb_pos / 10, orb_pos % 10, h)
						tp_text = { tp.System_DVB_S : "DVB-S", tp.System_DVB_S2 : "DVB-S2" }.get(tp.system, "")
						if tp_text == "DVB-S2":
							tp_text = "%s %s" % ( tp_text,
								{ tp.Modulation_Auto : "Auto", tp.Modulation_QPSK : "QPSK",
								tp.Modulation_8PSK : "8PSK", tp.Modulation_QAM16 : "QAM16" }.get(tp.modulation, ""))
						tp_text = "%s %d%c / %d / %s" % ( tp_text, tp.frequency/1000,
							{ tp.Polarisation_Horizontal : 'H', tp.Polarisation_Vertical : 'V', tp.Polarisation_CircularLeft : 'L',
							tp.Polarisation_CircularRight : 'R' }.get(tp.polarisation, ' '),
							tp.symbol_rate/1000,
							{ tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3",
							tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8",
							tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5",
							tp.FEC_9_10 : "9/10", tp.FEC_None : "NONE" }.get(tp.fec, ""))
					elif tp_type == iDVBFrontend.feCable:
						network = _("Cable")
						tp = transponder.getDVBC()
						tp_text = "DVB-C %s %d / %d / %s" %( { tp.Modulation_Auto : "AUTO",
							tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM32 : "QAM32",
							tp.Modulation_QAM64 : "QAM64", tp.Modulation_QAM128 : "QAM128",
							tp.Modulation_QAM256 : "QAM256" }.get(tp.modulation, ""),
							tp.frequency,
							tp.symbol_rate/1000,
							{ tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3",
							tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8",
							tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5", tp.FEC_9_10 : "9/10", tp.FEC_None : "NONE" }.get(tp.fec_inner, ""))
					elif tp_type == iDVBFrontend.feTerrestrial:
						network = _("Terrestrial")
						tp = transponder.getDVBT()
						channel = getChannelNumber(tp.frequency, self.scanList[self.run]["feid"])
						if channel:
							channel = _("CH") + "%s " % channel
						freqMHz = "%0.1f MHz" % (tp.frequency/1000000.)
						tp_text = "%s %s %s %s" %(
							{
								tp.System_DVB_T_T2 : "DVB-T/T2",
								tp.System_DVB_T : "DVB-T",
								tp.System_DVB_T2 : "DVB-T2"
							}.get(tp.system, ""),
							{
								tp.Modulation_QPSK : "QPSK",
								tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM64 : "QAM64",
								tp.Modulation_Auto : "AUTO", tp.Modulation_QAM256 : "QAM256"
							}.get(tp.modulation, ""),
							"%s%s" % (channel, freqMHz.replace(".0","")),
							{
								tp.Bandwidth_8MHz : "Bw 8MHz", tp.Bandwidth_7MHz : "Bw 7MHz", tp.Bandwidth_6MHz : "Bw 6MHz",
								tp.Bandwidth_Auto : "Bw Auto", tp.Bandwidth_5MHz : "Bw 5MHz",
								tp.Bandwidth_1_712MHz : "Bw 1.712MHz", tp.Bandwidth_10MHz : "Bw 10MHz"
							}.get(tp.bandwidth, ""))
					else:
						print "unknown transponder type in scanStatusChanged"
				self.network.setText(network)
				self.transponder.setText(tp_text)

		if self.state == self.Done:
			result = self.foundServices + self.scan.getNumServices()
			self.text.setText(ngettext("Scanning completed, %d channel found", "Scanning completed, %d channels found", result) % result)

		if self.state == self.Error:
			self.text.setText(_("ERROR - failed to scan (%s)!") % (self.Errors[self.errorcode]) )

		# When this pass is finished and more passes remain, roll the found
		# count forward and start the next pass.
		if self.state == self.Done or self.state == self.Error:
			if self.run != len(self.scanList) - 1:
				self.foundServices += self.scan.getNumServices()
				self.execEnd()
				self.run += 1
				self.execBegin()

	def __init__(self, progressbar, text, servicelist, passNumber, scanList, network, transponder, frontendInfo, lcd_summary):
		"""Store the UI widgets and the list of scan passes; scanning does
		not start until execBegin() is called."""
		self.foundServices = 0
		self.progressbar = progressbar
		self.text = text
		self.servicelist = servicelist
		self.passNumber = passNumber
		self.scanList = scanList
		self.frontendInfo = frontendInfo
		self.transponder = transponder
		self.network = network
		self.run = 0
		self.lcd_summary = lcd_summary

	def doRun(self):
		"""Create the eComponentScan for the current pass and feed it the
		pass's transponder list."""
		self.scan = eComponentScan()
		self.frontendInfo.frontend_source = lambda : self.scan.getFrontend()
		self.feid = self.scanList[self.run]["feid"]
		self.flags = self.scanList[self.run]["flags"]
		self.networkid = 0
		if self.scanList[self.run].has_key("networkid"):
			self.networkid = self.scanList[self.run]["networkid"]
		self.state = self.Idle
		self.scanStatusChanged()

		for x in self.scanList[self.run]["transponders"]:
			self.scan.addInitial(x)

	def updatePass(self):
		"""Show "pass x/y (Tuner n)" when the job has more than one pass."""
		size = len(self.scanList)
		if size > 1:
			self.passNumber.setText(_("pass") + " " + str(self.run + 1) + "/" + str(size) + " (" + _("Tuner") + " " + str(self.scanList[self.run]["feid"]) + ")")

	def execBegin(self):
		"""Hook up the scan callbacks and start the current pass."""
		self.doRun()
		self.updatePass()
		self.scan.statusChanged.get().append(self.scanStatusChanged)
		self.scan.newService.get().append(self.newService)
		self.servicelist.clear()
		self.state = self.Running
		err = self.scan.start(self.feid, self.flags, self.networkid)
		self.frontendInfo.updateFrontendData()
		if err:
			self.state = self.Error
			self.errorcode = 0
		self.scanStatusChanged()

	def execEnd(self):
		"""Detach the callbacks and release the scan object."""
		self.scan.statusChanged.get().remove(self.scanStatusChanged)
		self.scan.newService.get().remove(self.newService)
		if not self.isDone():
			print "*** warning *** scan was not finished!"

		del self.scan

	def isDone(self):
		"""True once the current pass has finished (successfully or not)."""
		return self.state == self.Done or self.state == self.Error

	def newService(self):
		"""Callback: append the most recently found service to the list and
		echo its name on the LCD."""
		newServiceName = self.scan.getLastServiceName()
		newServiceRef = self.scan.getLastServiceRef()
		self.servicelist.addItem((newServiceName, newServiceRef))
		self.lcd_summary.updateService(newServiceName)

	def destroy(self):
		pass
|
mrnamingo/vix4-34-enigma2-bcm
|
lib/python/Components/ServiceScan.py
|
Python
|
gpl-2.0
| 7,567 | 0.037531 |
import statistics
from datetime import datetime
class Benchmark():
    """Base class for simple timing benchmarks.

    Subclasses override ``benchmark()``; calling ``start()`` logs a banner
    and runs it.  ``n_runs`` is the number of iterations the subclass is
    expected to perform and is only used for progress reporting.
    """

    def __init__(self, n_runs: int = 5, print_checkpoint: bool = True):
        self.n_runs = n_runs
        # Stored for subclasses to consult; not read by this base class.
        self.print_checkpoint = print_checkpoint

    @staticmethod
    def log(message: str) -> None:
        """Print *message* prefixed with the current timestamp."""
        print('[%s] - %s' % (datetime.now(), message))

    def log_stats(self, times: list, unit: str = 'ms') -> None:
        """Log iteration progress plus summary statistics for *times*."""
        self.log(
            '[iteration %s/%s] %s' % (len(times), self.n_runs,
                                      self.format_stats(times, unit=unit)))

    @staticmethod
    def format_stats(times: list, unit: str) -> str:
        """Return a 'median/mean/stdev/max/min' summary line for *times*.

        Fix: ``statistics.stdev`` raises StatisticsError with fewer than
        two samples, which crashed the very first checkpoint; report 0.00
        for a single sample instead.
        """
        stdev = statistics.stdev(times) if len(times) > 1 else 0.0
        return 'median: %.2f%s, mean: %.2f%s, stdev: %.2f, max: %.2f%s, min: %.2f%s' % (
            statistics.median(times), unit, statistics.mean(times), unit,
            stdev, max(times), unit, min(times), unit)

    def start(self, suffix: str = None) -> None:
        """Log a start banner (optionally suffixed) and run benchmark()."""
        if suffix is None:
            suffix = '...'
        else:
            suffix = ': ' + suffix
        self.log('starting benchmark%s' % suffix)
        self.benchmark()

    def benchmark(self):
        """Subclass hook performing the actual measurements."""
        raise NotImplementedError('method benchmark() not implemented yet')
|
numerai/submission-criteria
|
tests/benchmark_base.py
|
Python
|
apache-2.0
| 1,174 | 0.000852 |
# Copyright (C) 2015 Twitter, Inc.
"""Container for all targeting related logic used by the Ads API SDK."""
from twitter_ads.http import Request
from twitter_ads.resource import resource_property, Resource, Persistence
from twitter_ads import API_VERSION
from twitter_ads.utils import FlattenParams
import json
class AudienceEstimate(Resource, Persistence):
    """Ads API ``audience_estimate`` resource: POSTs targeting criteria for
    an account and exposes the estimated audience size of that targeting."""

    PROPERTIES = {}

    RESOURCE = '/' + API_VERSION + '/accounts/{account_id}/audience_estimate'

    @classmethod
    @FlattenParams
    def load(klass, account, params):
        """POST *params* as a JSON body and return a populated instance.

        ``FlattenParams`` normalizes the keyword arguments into the dict
        serialized here.
        """
        resource = klass.RESOURCE.format(account_id=account.id)
        headers = {'Content-Type': 'application/json'}
        response = Request(account.client,
                           'post',
                           resource,
                           headers=headers,
                           body=json.dumps(params)).perform()
        return klass(account).from_response(response.body['data'])


# Expose the response payload's audience_size field as a resource property.
resource_property(AudienceEstimate, 'audience_size')
|
twitterdev/twitter-python-ads-sdk
|
twitter_ads/targeting.py
|
Python
|
mit
| 998 | 0 |
from __future__ import unicode_literals
def device_from_request(request):
    """
    Determines the device name from the request by first looking for an
    overriding cookie, and if not found then matching the user agent.
    Used at both the template level for choosing the template to load and
    also at the cache level as a cache key prefix.
    """
    from mezzanine.conf import settings
    try:
        # If a device was set via cookie, match available devices.
        for (device, _) in settings.DEVICE_USER_AGENTS:
            if device == request.COOKIES["mezzanine-device"]:
                return device
    except KeyError:
        # No "mezzanine-device" cookie set: fall back to user-agent
        # sniffing against DEVICE_USER_AGENTS.
        try:
            user_agent = request.META["HTTP_USER_AGENT"].lower()
        except KeyError:
            pass
        else:
            try:
                # Py2 compatibility: the header may arrive as bytes.
                user_agent = user_agent.decode("utf-8")
            except AttributeError:
                pass
            for (device, ua_strings) in settings.DEVICE_USER_AGENTS:
                for ua_string in ua_strings:
                    if ua_string.lower() in user_agent:
                        return device
    # No cookie match and no user-agent match: empty (default) device.
    return ""
def templates_for_device(request, templates):
    """
    Expand a template name (or list of names) into a device-aware lookup
    list: each name is preceded by its device-specific path and, when
    configured and different, the DEVICE_DEFAULT path.
    """
    from mezzanine.conf import settings
    if not isinstance(templates, (list, tuple)):
        templates = [templates]
    device = device_from_request(request)
    lookup_order = []
    for template_name in templates:
        if device:
            lookup_order.append("%s/%s" % (device, template_name))
        if settings.DEVICE_DEFAULT and settings.DEVICE_DEFAULT != device:
            lookup_order.append("%s/%s" % (settings.DEVICE_DEFAULT, template_name))
        # The undecorated name is always the final fallback.
        lookup_order.append(template_name)
    return lookup_order
|
TecnoSalta/bg
|
mezzanine/utils/device.py
|
Python
|
bsd-2-clause
| 2,013 | 0 |
from setuptools import setup

# All real package metadata lives in setup.cfg and is processed by pbr;
# auto_version="PBR" delegates version discovery to pbr as well.
setup(
    setup_requires=['pbr', ],
    pbr=True,
    auto_version="PBR",
)
|
rocktavious/pyversion
|
setup.py
|
Python
|
mit
| 107 | 0 |
#!/usr/bin/env python3
# encoding: utf-8
# === This file is part of Calamares - <http://github.com/calamares> ===
#
# Copyright 2014, Aurélien Gâteau <agateau@kde.org>
#
# Calamares is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Calamares is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Calamares. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import libcalamares
HEADER = """# /etc/fstab: static file system information.
#
# Use 'blkid' to print the universally unique identifier for a device; this may
# be used with UUID= as a more robust way to name devices that works even if
# disks are added and removed. See fstab(5).
#
# <file system> <mount point> <type> <options> <dump> <pass>"""
# Turn Parted filesystem names into fstab names
FS_MAP = {
"fat16": "vfat",
"fat32": "vfat",
"linuxswap": "swap",
}
def mkdir_p(path):
    """Create *path* and any missing parents; no-op if it already exists.

    Equivalent to ``mkdir -p``.  Fix: the original tested
    ``os.path.exists`` before calling ``os.makedirs``, a check-then-act
    race that crashes if the directory appears in between;
    ``exist_ok=True`` makes the call atomic and idempotent.
    """
    os.makedirs(path, exist_ok=True)
def is_ssd_disk(disk_name):
    """Report whether *disk_name* (e.g. "sda") is a non-rotational disk,
    according to the sysfs "rotational" queue flag."""
    rotational_flag = os.path.join("/sys/block", disk_name, "queue/rotational")
    if not os.path.exists(rotational_flag):
        # Should not happen unless sysfs changes, but better safe than
        # sorry: treat an unknown disk as rotational.
        return False
    with open(rotational_flag) as flag_file:
        return flag_file.read() == "0\n"
def disk_name_for_partition(partition):
    """Map a partition dict to its parent disk name ("/dev/sda1" -> "sda")
    by stripping the trailing partition number."""
    device_name = os.path.basename(partition["device"])
    # rstrip with a digit set removes exactly the trailing-digit run,
    # matching the original re.sub("[0-9]+$", "", ...) behavior.
    return device_name.rstrip("0123456789")
class FstabGenerator(object):
    """Writes the installed system's /etc/fstab (under root_mount_point)
    and creates the mount-point directories.

    Mount options come from the module config, keyed by filesystem name,
    with per-filesystem extras appended for partitions on SSDs.  When the
    root filesystem is on an SSD, /tmp is additionally mounted on tmpfs.
    """

    def __init__(self, partitions, root_mount_point, mount_options,
                 ssd_extra_mount_options):
        self.partitions = partitions
        self.root_mount_point = root_mount_point
        self.mount_options = mount_options
        self.ssd_extra_mount_options = ssd_extra_mount_options
        self.ssd_disks = set()
        self.root_is_ssd = False

    def run(self):
        """Execute the three phases; returns None (Calamares success)."""
        self.find_ssd_disks()
        self.generate_fstab()
        self.create_mount_points()
        return None

    def find_ssd_disks(self):
        """Record which disks backing our partitions are SSDs."""
        disks = {disk_name_for_partition(x) for x in self.partitions}
        self.ssd_disks = {x for x in disks if is_ssd_disk(x)}

    def generate_fstab(self):
        """Write <root_mount_point>/etc/fstab with one line per mountable
        partition, plus a tmpfs /tmp line when root is on an SSD."""
        # Create fstab
        mkdir_p(os.path.join(self.root_mount_point, "etc"))
        fstab_path = os.path.join(self.root_mount_point, "etc", "fstab")

        with open(fstab_path, "w") as fl:
            print(HEADER, file=fl)

            for partition in self.partitions:
                dct = self.generate_fstab_line_info(partition)
                if dct:
                    self.print_fstab_line(dct, file=fl)

            if self.root_is_ssd:
                # Mount /tmp on a tmpfs
                dct = dict(
                    device="tmpfs",
                    mount_point="/tmp",
                    fs="tmpfs",
                    options="defaults,noatime,mode=1777",
                    check=0,
                )
                self.print_fstab_line(dct, file=fl)

    def generate_fstab_line_info(self, partition):
        """Return the field dict for one fstab line, or None when the
        partition is neither mounted nor swap.

        fsck pass ("check") is 1 for /, 2 for other mount points, 0 for
        swap.  Note: root_is_ssd is decided here as a side effect while
        processing the "/" partition.
        """
        fs = partition["fs"]
        mount_point = partition["mountPoint"]
        disk_name = disk_name_for_partition(partition)
        is_ssd = disk_name in self.ssd_disks

        # Translate Parted's filesystem names to fstab's.
        fs = FS_MAP.get(fs, fs)

        if not mount_point and not fs == "swap":
            return None

        options = self.mount_options.get(fs, self.mount_options["default"])

        if is_ssd:
            extra = self.ssd_extra_mount_options.get(fs)

            if extra:
                options += "," + extra

        if mount_point == "/":
            check = 1
        elif mount_point:
            check = 2
        else:
            check = 0

        if mount_point == "/":
            self.root_is_ssd = is_ssd

        return dict(
            device="UUID=" + partition["uuid"],
            mount_point=mount_point or "none",
            fs=fs,
            options=options,
            check=check)

    def print_fstab_line(self, dct, file=None):
        """Format one fstab entry (columns aligned to the HEADER) and
        write it to *file*."""
        line = "{:41} {:<14} {:<7} {:<10} 0 {}".format(
            dct["device"],
            dct["mount_point"],
            dct["fs"],
            dct["options"],
            dct["check"])
        print(line, file=file)

    def create_mount_points(self):
        """Create every partition's mount point under root_mount_point."""
        for partition in self.partitions:
            if partition["mountPoint"]:
                mkdir_p(self.root_mount_point + partition["mountPoint"])
def run():
    """Calamares job entry point: read partition layout and mount options
    from global storage / job configuration, then write /etc/fstab and
    create the mount points via FstabGenerator."""
    gs = libcalamares.globalstorage
    conf = libcalamares.job.configuration
    partitions = gs.value("partitions")
    root_mount_point = gs.value("rootMountPoint")
    mount_options = conf["mountOptions"]
    # ssdExtraMountOptions is optional in the module configuration.
    ssd_extra_mount_options = conf.get("ssdExtraMountOptions", {})
    generator = FstabGenerator(partitions, root_mount_point,
                               mount_options, ssd_extra_mount_options)
    return generator.run()
|
maui-packages/calamares
|
src/modules/fstab/main.py
|
Python
|
gpl-3.0
| 5,216 | 0.000767 |
#!/bin/env python
# -*- python -*-
#
# Copyright 2003,2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from generate_utils import *
# ----------------------------------------------------------------
def make_gr_fir_sysconfig_generic_h ():
    """Generate gr_fir_sysconfig_generic.h: the class declaration with one
    create_* and one get_*_info virtual per FIR signature listed in
    fir_signatures (imported from generate_utils)."""
    out = open_and_log_name ('gr_fir_sysconfig_generic.h', 'w')
    if not out:
        # open_and_log_name already reported the failure; nothing to emit.
        return
    out.write (copyright)
    out.write (
'''
/*
* WARNING: This file is automatically generated by
* generate_gr_fir_sysconfig_generic.py.
*
* Any changes made to this file will be overwritten.
*/
#ifndef _GR_FIR_SYSCONFIG_GENERIC_H_
#define _GR_FIR_SYSCONFIG_GENERIC_H_
#include <gr_fir_sysconfig.h>
''')
    out.write (
'''
class gr_fir_sysconfig_generic : public gr_fir_sysconfig {
public:
''')
    # one factory declaration per signature
    for sig in fir_signatures:
        out.write ((' virtual gr_fir_%s *create_gr_fir_%s (const std::vector<%s> &taps);\n' %
                    (sig, sig, tap_type (sig))))
    out.write ('\n')
    # one info-query declaration per signature
    for sig in fir_signatures:
        out.write ((' virtual void get_gr_fir_%s_info (std::vector<gr_fir_%s_info> *info);\n' %
                    (sig, sig)))
    out.write (
'''
};
#endif /* _GR_FIR_SYSCONFIG_GENERIC_H_ */
''')
    out.close ()
# ----------------------------------------------------------------
def make_constructor (sig, out):
    """Emit the static factory function for one FIR signature into *out*."""
    template = '''
static gr_fir_%s *
make_gr_fir_%s (const std::vector<%s> &taps)
{
return new gr_fir_%s_generic (taps);
}
'''
    out.write (template % (sig, sig, tap_type (sig), sig))
def make_creator (sig, out):
    """Emit the create_gr_fir_<sig> member implementation into *out*."""
    template = '''
gr_fir_%s *
gr_fir_sysconfig_generic::create_gr_fir_%s (const std::vector<%s> &taps)
{
return make_gr_fir_%s (taps);
}
'''
    out.write (template % (sig, sig, tap_type (sig), sig))
def make_info (sig, out):
    """Emit the get_gr_fir_<sig>_info implementation for one FIR signature."""
    body = '''
void
gr_fir_sysconfig_generic::get_gr_fir_%s_info (std::vector<gr_fir_%s_info> *info)
{
info->resize (1);
(*info)[0].name = "generic";
(*info)[0].create = make_gr_fir_%s;
}
'''
    out.write (body % (sig, sig, sig))
# ----------------------------------------------------------------
def make_gr_fir_sysconfig_generic_cc ():
    """Generate gr_fir_sysconfig_generic.cc: per-signature static
    constructors, create_* members and get_*_info members, via the
    make_constructor/make_creator/make_info helpers."""
    out = open_and_log_name ('gr_fir_sysconfig_generic.cc', 'w')
    if not out:
        # open_and_log_name already reported the failure; nothing to emit.
        return
    out.write (copyright)
    out.write (
'''
/*
* WARNING: This file is automatically generated by
* generate_gr_fir_sysconfig_generic.py.
*
* Any changes made to this file will be overwritten.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <gr_fir_sysconfig_generic.h>
''')
    for sig in fir_signatures:
        out.write ('#include <gr_fir_%s_generic.h>\n' % (sig))
    out.write (
'''
/*
* ----------------------------------------------------------------
* static functions that serve as constructors returned by info
* ----------------------------------------------------------------
*/
''')
    for sig in fir_signatures:
        make_constructor (sig, out)
    out.write (
'''
/*
* ----------------------------------------------------------------
* return instances of the generic C++ versions of these classes.
* ----------------------------------------------------------------
*/
''')
    for sig in fir_signatures:
        make_creator (sig, out)
    out.write (
'''
/*
* Return info about available implementations.
*
* This is the bottom of the concrete hierarchy, so we set the
* size of the vector to 1, and install our info. Classes derived
* from us invoke us first, then append their own info.
*/
''')
    for sig in fir_signatures:
        make_info (sig, out)
    out.close ()
# ----------------------------------------------------------------
def generate ():
    """Generate both the header and the implementation file."""
    make_gr_fir_sysconfig_generic_h ()
    make_gr_fir_sysconfig_generic_cc ()

if __name__ == '__main__':
    generate ()
|
manojgudi/sandhi
|
modules/gr36/gnuradio-core/src/lib/filter/generate_gr_fir_sysconfig_generic.py
|
Python
|
gpl-3.0
| 4,373 | 0.011891 |
# -*- coding: utf-8 -*-
#-------------------------------------------------
#-- osm map importer
#--
#-- microelly 2016 v 0.4
#--
#-- GNU Lesser General Public License (LGPL)
#-------------------------------------------------
'''import data from openstreetmap'''
#http://api.openstreetmap.org/api/0.6/map?bbox=11.74182,50.16413,11.74586,50.16561
#http://api.openstreetmap.org/api/0.6/way/384013089
#http://api.openstreetmap.org/api/0.6/node/3873106739
#\cond
from geodat.say import *
import time, json, os
import sys
if sys.version_info[0] !=2:
from importlib import reload
import urllib.request
from say import *
import time, json, os
try:
import urllib2
except:
import urllib
import pivy
from pivy import coin
import geodat.my_xmlparser
reload (geodat.my_xmlparser)
import geodat.transversmercator
from geodat.transversmercator import TransverseMercator
import geodat.inventortools as inventortools
import geodat.xmltodict
from geodat.xmltodict import parse
#\endcond
#------------------------------
#
# microelly 2016 ..
#
#------------------------------
import time
## get the elevation height of a single point
def getHeight(b,l):
    '''Return the elevation of the point at latitude *b*, longitude *l*,
    queried from the Google elevation web service.

    The service's metre value is scaled by 1000 and rounded to 2 decimals
    (the rest of the module works in FreeCAD millimetres).  Retries up to
    4 times, sleeping 5 s between attempts, while the service answers
    OVER_QUERY_LIMIT.  Returns None when the service reports no result.
    '''
    source = ("https://maps.googleapis.com/maps/api/elevation/json?locations="
              + str(b) + ',' + str(l))
    ans = None
    for _attempt in range(4):
        try:
            response = urllib2.urlopen(source)           # Python 2
        except NameError:
            # urllib2 does not exist on Python 3; the module imported
            # urllib.request instead.  (The old bare `except:` here also
            # masked genuine network errors as a py3 fallback attempt.)
            response = urllib.request.urlopen(source)
        ans = response.read()
        # BUGFIX: the original tested `if ans.find("OVER_QUERY_LIMIT"):`.
        # str.find returns -1 (truthy!) when the marker is absent, so a
        # *successful* reply triggered 4 re-fetches and 20 s of sleeping;
        # on Python 3 bytes.find(str) raised TypeError outright.
        # `ans` is bytes on Python 3 and str on Python 2; a b"" literal
        # works with `in` on both.
        if b"OVER_QUERY_LIMIT" not in ans:
            break
        time.sleep(5)
    s = json.loads(ans)
    res = s['results']
    for r in res:
        # first (only) result wins; empty results fall through to None
        return round(r['elevation'] * 1000, 2)
## get the heights for a list of points
def getHeights(points):
    ''' get heights for a list of points

    points: list of [node-id, lat-string, lon-string] triples.
    Queries the Google elevation service in batches of up to 20 points and
    returns a dict mapping "lat lon" (both %0.7f-formatted) to the
    service's elevation value (presumably metres — confirm against the
    Elevation API documentation).
    '''
    i=0
    size=len(points)
    while i<size:
        # assemble one request URL carrying up to 20 "lat,lon|" pairs
        source="https://maps.googleapis.com/maps/api/elevation/json?locations="
        ii=0
        if i>0:
            time.sleep(1)  # throttle between consecutive batches
        while ii < 20 and i < size:
            p=points[i]
            ss= p[1]+','+p[2] + '|'
            source += ss
            i += 1
            ii += 1
        source += "60.0,10.0"  # dummy point closing the trailing '|'
        response = urllib.request.urlopen(source)
        ans=response.read()
        s=json.loads(ans)
        res=s['results']
        # NOTE(review): heights is re-created for every batch, so only the
        # final batch's results survive to the return below; it should
        # probably be initialised once before the outer while loop (and an
        # empty *points* list would make the return raise NameError) —
        # confirm against upstream before changing.
        heights= {}
        for r in res:
            key="%0.7f" %(r['location']['lat']) + " " + "%0.7f" %(r['location']['lng'])
            heights[key]=r['elevation']
    return heights
def organize():
    '''create groups for the different object types
    GRP_highways, GRP_building, GRP_landuse
    '''
    doc = App.activeDocument()
    highways = doc.addObject("App::DocumentObjectGroup", "GRP_highways")
    landuse = doc.addObject("App::DocumentObjectGroup", "GRP_landuse")
    buildings = doc.addObject("App::DocumentObjectGroup", "GRP_building")
    pathes = doc.addObject("App::DocumentObjectGroup", "GRP_pathes")
    # Sort every document object into its group by label prefix; all but
    # buildings are hidden afterwards.
    for obj in doc.Objects:
        label = obj.Label
        if label.startswith('building'):
            buildings.addObject(obj)
        if label.startswith('highway') or label.startswith('way'):
            highways.addObject(obj)
            obj.ViewObject.Visibility = False
        if label.startswith('landuse'):
            landuse.addObject(obj)
            obj.ViewObject.Visibility = False
        if label.startswith('w_'):
            pathes.addObject(obj)
            obj.ViewObject.Visibility = False
#---------------------
from geodat.say import *
import re
#fn='/home/thomas/.FreeCAD//geodat3/50.340722-11.232647-0.015'
#fn='/home/thomas/.FreeCAD/system.cfg'
debug=False
#--------------------
## core method to download and import the data
#
#def import_osm(b,l,bk,progressbar,status):
# import_osm2(b,l,bk,progressbar,status,False)
def import_osm2(b,l,bk,progressbar,status,elevation):
    '''Download (or load from cache) OpenStreetMap data around latitude
    *b* / longitude *l* and turn it into FreeCAD objects.

    bk: half edge length of the requested square, apparently in km (it is
        halved again below and scaled by ~1/111.3 deg/km) — callers pass
        slider-value/10; confirm the exact unit against the GUI.
    progressbar, status: optional Qt widgets used for user feedback.
    elevation: when True, query the Google elevation service per point.
    Returns True on success, False when the (legacy, disabled) download
    path failed.
    '''
    dialog=False
    debug=False
    if progressbar:
        progressbar.setValue(0)
    if status:
        status.setText("get data from openstreetmap.org ...")
        FreeCADGui.updateGui()
    content=''
    bk=0.5*bk
    # cache file name encodes centre coordinates and (halved) size
    dn=FreeCAD.ConfigGet("UserAppData") + "/geodat3/"
    fn=dn+str(b)+'-'+str(l)+'-'+str(bk)
    import os
    if not os.path.isdir(dn):
        os.makedirs(dn)
    try:
        say("I try to read data from cache file ... ")
        say(fn)
        f=open(fn,"r")
        content=f.read()
        # say(content)
        # raise Exception("to debug:force load from internet")
    except:
        # no usable cache: fetch the bounding box from the OSM API
        sayW("no cache file, so I connect to openstreetmap.org...")
        lk=bk #
        # ~1/111.3 degrees of latitude per km (and the same factor is
        # reused for longitude here)
        b1=b-bk/1113*10
        l1=l-lk/713*10
        b2=b+bk/1113*10
        l2=l+lk/713*10
        source='http://api.openstreetmap.org/api/0.6/map?bbox='+str(l1)+','+str(b1)+','+str(l2)+','+str(b2)
        say(source)
        import requests
        response = requests.get(source)
        data = response.text
        lines=response.text.split('\n')
        FreeCAD.t=response
        f=open(fn,"w")
        # f.write(response.text)
        if response.status_code == 200:
            # stream the reply into the cache file in 1 KiB chunks
            with open(fn, 'wb') as f:
                for chunk in response.iter_content(1024):
                    f.write(chunk)
        f.close()
    # print("huhu");return
    # Legacy urllib download path, deliberately disabled (if 0).
    if 0:
        try:
            say("read--")
            response = urllib.request.urlopen(source)
            #import ssl
            #ssl._create_default_https_context = ssl._create_unverified_context
            #response = urllib.request.urlopen(source)
            # import requests
            # response = requests.get(source)
            say(response)
            say("2huu")
            first=True
            content=''
            f=open(fn,"w")
            l=0
            z=0
            ct=0
            say("2wkkw")
            #say(response.text)
            # lines=response.text.split('\n')
            # say(len(lines))
            say("ll")
            # for line in lines:
            for line in response:
                print ("Y",line)
                if status:
                    if z>5000:
                        status.setText("read data ..." + str(l))
                        z=0
                        FreeCADGui.updateGui()
                l+=1
                z+=1
                if first:
                    # skip the first line (XML declaration)
                    first=False
                else:
                    content += line
                    f.write(line)
            f.close()
            if status:
                status.setText("FILE CLOSED ..." + str(l))
                FreeCADGui.updateGui()
            response.close()
        except:
            sayErr( "Fehler beim Lesen")
            if status:
                status.setText("got data from openstreetmap.org ...")
                FreeCADGui.updateGui()
            sayW("Beeenden - im zweiten versuch daten auswerten")
            return False
    # base elevation of the scene centre (0 when elevation is disabled)
    if elevation:
        baseheight=getHeight(b,l)
    else:
        baseheight=0
    if debug:
        say( "-------Data---------")
        say(content)
    if status:
        status.setText("parse data ...")
        FreeCADGui.updateGui()
    say("------------------------------")
    say(fn)
    # fn='/home/thomas/.FreeCAD//geodat3/50.340722-11.232647-0.015'
    say(fn)
    tree=geodat.my_xmlparser.getData(fn)
    # for element in tree.getiterator('node'):
    # say(element.params)
    # say("ways")
    # for element in tree.getiterator('way'):
    # say(element.params)
    # say("relations")
    # for element in tree.getiterator('relation'):
    # say(element.params)
    # Legacy xmltodict parsing path, deliberately disabled (if 0).
    if 0:
        try:
            sd=parse(content)
        except:
            sayexc("Problem parsing data - abort")
            status.setText("Problem parsing data - aborted, for details see Report view")
            return
        if debug: say(json.dumps(sd, indent=4))
    if status:
        status.setText("transform data ...")
        FreeCADGui.updateGui()
    relations=tree.getiterator('relation')
    nodes=tree.getiterator('node')
    ways=tree.getiterator('way')
    bounds=tree.getiterator('bounds')[0]
    # center of the scene
    minlat=float(bounds.params['minlat'])
    minlon=float(bounds.params['minlon'])
    maxlat=float(bounds.params['maxlat'])
    maxlon=float(bounds.params['maxlon'])
    tm=TransverseMercator()
    tm.lat=0.5*(minlat+maxlat)
    tm.lon=0.5*(minlon+maxlon)
    center=tm.fromGeographic(tm.lat,tm.lon)
    corner=tm.fromGeographic(minlat,minlon)
    size=[center[0]-corner[0],center[1]-corner[1]]
    # map all points to xy-plane
    points={}
    nodesbyid={}
    for n in nodes:
        nodesbyid[n.params['id']]=n
        ll=tm.fromGeographic(float(n.params['lat']),float(n.params['lon']))
        points[str(n.params['id'])]=FreeCAD.Vector(ll[0]-center[0],ll[1]-center[1],0.0)
    # say(points)
    # say("abbruch3 -hier daten uebernehmen !!");return
    # hack to catch deutsche umlaute
    # NOTE(review): beaustring iterates the enclosing variable `zz`, not
    # its `string` parameter.  Every call site below passes beaustring(zz),
    # so the two coincide in practice — confirm before reusing elsewhere.
    def beaustring(string):
        res=''
        for tk in zz:
            try:
                res += str(tk)
            except:
                # map the handful of German umlauts by code point,
                # everything else becomes '#'
                if ord(tk)==223:
                    res += 'ß'
                elif ord(tk)==246:
                    res += 'ö'
                elif ord(tk)==196:
                    res += 'Ä'
                elif ord(tk)==228:
                    res += 'ä'
                elif ord(tk)==242:
                    res += 'ü'
                else:
                    sayErr(["error sign",tk,ord(tk),string])
                    res +="#"
        return res
    if status:
        status.setText("create visualizations ...")
        FreeCADGui.updateGui()
    App.newDocument("OSM Map")
    say("Datei erzeugt")
    area=App.ActiveDocument.addObject("Part::Plane","area")
    obj = FreeCAD.ActiveDocument.ActiveObject
    say("grundflaeche erzeugt")
    # best-effort: add a directional light to the base plane's scenegraph
    try:
        viewprovider = obj.ViewObject
        root=viewprovider.RootNode
        myLight = coin.SoDirectionalLight()
        myLight.color.setValue(coin.SbColor(0,1,0))
        root.insertChild(myLight, 0)
        say("beleuchtung auf grundobjekt eingeschaltet")
    except:
        sayexc("Beleuchtung 272")
    # orthographic top-down camera, sized to the requested map extent
    cam='''#Inventor V2.1 ascii
OrthographicCamera {
viewportMapping ADJUST_CAMERA
orientation 0 0 -1.0001 0.001
nearDistance 0
farDistance 10000000000
aspectRatio 100
focalDistance 1
'''
    x=0
    y=0
    height=1000000
    height=200*bk*10000/0.6
    cam += '\nposition ' +str(x) + ' ' + str(y) + ' 999\n '
    cam += '\nheight ' + str(height) + '\n}\n\n'
    FreeCADGui.activeDocument().activeView().setCamera(cam)
    FreeCADGui.activeDocument().activeView().viewAxonometric()
    say("Kamera gesetzt")
    area.Length=size[0]*2
    area.Width=size[1]*2
    area.Placement=FreeCAD.Placement(FreeCAD.Vector(-size[0],-size[1],0.00),FreeCAD.Rotation(0.00,0.00,0.00,1.00))
    say("Area skaliert")
    wn=-1
    coways=len(ways)
    starttime=time.time()
    refresh=1000
    # main loop: one FreeCAD object (polygon + optional extrusion) per way
    for w in ways:
        wid=w.params['id']
        # say(w.params)
        # say("way content")
        # for c in w.content:
        # say(c)
        building=False
        landuse=False
        highway=False
        wn += 1
        # for testing: process only a subset of the ways
        #if wn <2000: continue
        nowtime=time.time()
        if wn!=0 and (nowtime-starttime)/wn > 0.5:
            say(("way ---- # " + str(wn) + "/" + str(coways) + " time per house: " + str(round((nowtime-starttime)/wn,2))))
        if progressbar:
            progressbar.setValue(int(0+100.0*wn/coways))
        st=""
        st2=""
        nr=""
        h=0
        ci=""
        # collect the tags that drive naming, colouring and height
        for t in w.getiterator('tag'):
            try:
                if debug: say(t)
                # say(t.params['k'])
                # say(t.params['v'])
                if str(t.params['k'])=='building':
                    building=True
                    if st == '':
                        st='building'
                if str(t.params['k'])=='landuse':
                    landuse=True
                    st=t.params['k']
                    nr=t.params['v']
                if str(t.params['k'])=='highway':
                    highway=True
                    st=t.params['k']
                if str(t.params['k'])=='addr:city':
                    ci=t.params['v']
                if str(t.params['k'])=='name':
                    zz=t.params['v']
                    nr=beaustring(zz)
                if str(t.params['k'])=='ref':
                    zz=t.params['v']
                    nr=beaustring(zz)+" /"
                if str(t.params['k'])=='addr:street':
                    zz=t.params['v']
                    st2=" "+beaustring(zz)
                if str(t.params['k'])=='addr:housenumber':
                    nr=str(t.params['v'])
                if str(t.params['k'])=='building:levels':
                    if h==0:
                        # 3 m (=3000 mm) per storey
                        h=int(str(t.params['v']))*1000*3
                if str(t.params['k'])=='building:height':
                    h=int(str(t.params['v']))*1000
            except:
                sayErr("unexpected error ######################################################")
        name=str(st) + " " + str(nr)
        name=str(st) + st2+ " " + str(nr)
        if name==' ':
            name='landuse xyz'
        if debug: say(("name ",name))
        #say(name,zz,nr,ci)
        #generate pointlist of the way
        polis=[]
        height=None
        llpoints=[]
        # say("get nodes",w)
        for n in w.getiterator('nd'):
            # say(n.params)
            m=nodesbyid[n.params['ref']]
            llpoints.append([n.params['ref'],m.params['lat'],m.params['lon']])
        if elevation:
            say("get heights for " + str(len(llpoints)))
            heights=getHeights(llpoints)
        for n in w.getiterator('nd'):
            p=points[str(n.params['ref'])]
            if building and elevation:
                if not height:
                    try:
                        # NOTE(review): uses `m` (the LAST node of the
                        # previous loop), not the current `n`, so the whole
                        # way gets one height — confirm this is intended.
                        height=heights[m.params['lat']+' '+m.params['lon']]*1000 - baseheight
                    except:
                        sayErr("---no height avaiable for " + m.params['lat']+' '+m.params['lon'])
                        height=0
                p.z=height
            polis.append(p)
        #create 2D map
        pp=Part.makePolygon(polis)
        Part.show(pp)
        z=App.ActiveDocument.ActiveObject
        z.Label="w_"+wid
        if name==' ':
            # unnamed way: small yellow solid extrusion
            g=App.ActiveDocument.addObject("Part::Extrusion",name)
            g.Base = z
            g.ViewObject.ShapeColor = (1.00,1.00,0.00)
            g.Dir = (0,0,10)
            g.Solid=True
            g.Label='way ex '
        if building:
            g=App.ActiveDocument.addObject("Part::Extrusion",name)
            g.Base = z
            g.ViewObject.ShapeColor = (1.00,1.00,1.00)
            if h==0:
                # no height tag: default to 10 m
                h=10000
            g.Dir = (0,0,h)
            g.Solid=True
            g.Label=name
            obj = FreeCAD.ActiveDocument.ActiveObject
            inventortools.setcolors2(obj)
        if landuse:
            # flat, colour-coded area
            g=App.ActiveDocument.addObject("Part::Extrusion",name)
            g.Base = z
            if nr == 'residential':
                g.ViewObject.ShapeColor = (1.00,.60,.60)
            elif nr == 'meadow':
                g.ViewObject.ShapeColor = (0.00,1.00,0.00)
            elif nr == 'farmland':
                g.ViewObject.ShapeColor = (.80,.80,.00)
            elif nr == 'forest':
                g.ViewObject.ShapeColor = (1.0,.40,.40)
            g.Dir = (0,0,0.1)
            g.Label=name
            g.Solid=True
        if highway:
            # thin blue line extrusion
            g=App.ActiveDocument.addObject("Part::Extrusion","highway")
            g.Base = z
            g.ViewObject.LineColor = (0.00,.00,1.00)
            g.ViewObject.LineWidth = 10
            g.Dir = (0,0,0.2)
            g.Label=name
        refresh += 1
        # emergency stop: touch /tmp/stop to abort the import loop
        if os.path.exists("/tmp/stop"):
            sayErr("notbremse gezogen")
            FreeCAD.w=w
            raise Exception("Notbremse Manager main loop")
        if refresh >3:
            # keep the GUI responsive every few ways
            FreeCADGui.updateGui()
            # FreeCADGui.SendMsgToActiveView("ViewFit")
            refresh=0
    FreeCAD.activeDocument().recompute()
    FreeCADGui.updateGui()
    FreeCAD.activeDocument().recompute()
    if status:
        status.setText("import finished.")
    if progressbar:
        progressbar.setValue(100)
    organize()
    endtime=time.time()
    say(("running time ", int(endtime-starttime), " count ways ", coways))
    return True
import FreeCAD,FreeCADGui
import WebGui
#import geodat.import_osm
#reload(geodat.import_osm)
'''
{
"error_message" : "You have exceeded your daily request quota for this API. We recommend registering for a key at the Google Developers Console: https://console.developers.google.com/",
"results" : [],
"status" : "OVER_QUERY_LIMIT"
}
'''
## the dialog layout as miki string
#
s6='''
#VerticalLayoutTab:
MainWindow:
#DockWidget:
VerticalLayout:
id:'main'
setFixedHeight: 600
setFixedWidth: 730
setFixedWidth: 654
move: PySide.QtCore.QPoint(3000,100)
HorizontalLayout:
setFixedHeight: 50
QtGui.QLabel:
setFixedWidth: 600
QtGui.QLabel:
setText:"C o n f i g u r a t i o n s"
setFixedHeight: 20
QtGui.QLineEdit:
setText:"50.340722, 11.232647"
# setText:"50.3736049,11.191643"
# setText:"50.3377879,11.2104096"
id: 'bl'
setFixedHeight: 20
textChanged.connect: app.getSeparator
QtGui.QLabel:
QtGui.QLabel:
setText:"S e p a r a t o r"
setFixedHeight: 20
QtGui.QLineEdit:
id:'sep'
setPlaceholderText:"Enter separators separated by symbol: | example: @|,|:"
setToolTip:"<nobr>Enter separators separated by symbol: |</nobr><br>example: @|,|:"
setFixedHeight: 20
QtGui.QPushButton:
setText:"Help"
setFixedHeight: 20
clicked.connect: app.showHelpBox
QtGui.QLabel:
QtGui.QPushButton:
setText:"Get Coordinates "
setFixedHeight: 20
clicked.connect: app.getCoordinate
QtGui.QLabel:
HorizontalLayout:
setFixedHeight: 50
QtGui.QLabel:
setFixedWidth: 150
QtGui.QLineEdit:
id:'lat'
setText:"50.340722"
setFixedWidth: 100
QtGui.QPushButton:
id:'swap'
setText:"swap"
setFixedWidth: 50
clicked.connect: app.swap
QtGui.QLineEdit:
id:'long'
setText:"11.232647"
setFixedWidth: 100
HorizontalLayout:
setFixedHeight: 50
QtGui.QLabel:
setFixedWidth: 155
QtGui.QLabel:
setText:"Latitude"
setFixedWidth: 100
QtGui.QLabel:
setFixedWidth: 50
QtGui.QLabel:
setText:"Longitude"
setFixedWidth: 100
QtGui.QLabel:
QtGui.QLabel:
QtGui.QCheckBox:
id:'elevation'
setText: 'Process Elevation Data'
QtGui.QLabel:
QtGui.QLabel:
setText:"Length of the Square 0 km ... 4 km, default 0.5 km "
QtGui.QLabel:
setText:"Distance is 0.5 km."
id: "showDistanceLabel"
QtGui.QSlider:
id:'s'
setFixedHeight: 20
setOrientation: PySide.QtCore.Qt.Orientation.Horizontal
setMinimum: 0
setMaximum: 40
setTickInterval: 1
setValue: 5
setTickPosition: QtGui.QSlider.TicksBothSides
valueChanged.connect: app.showDistanceOnLabel
QtGui.QLabel:
QtGui.QLabel:
id:'running'
setText:"R u n n i n g Please Wait "
setVisible: False
QtGui.QPushButton:
id:'runbl1'
setText: "Download values"
setFixedHeight: 20
clicked.connect: app.downloadData
setVisible: True
QtGui.QPushButton:
id:'runbl2'
setText: "Apply values"
setFixedHeight: 20
clicked.connect: app.applyData
setVisible: False
QtGui.QPushButton:
setText: "Show openstreet map in web browser"
clicked.connect: app.showMap
setFixedHeight: 20
QtGui.QLabel:
QtGui.QLabel:
setText:"P r e d e f i n e d L o c a t i o n s"
# QtGui.QLabel:
QtGui.QRadioButton:
setText: "Sonneberg Outdoor Inn"
clicked.connect: app.run_sternwarte
QtGui.QRadioButton:
setText: "Coburg university and school "
clicked.connect: app.run_co2
QtGui.QRadioButton:
setText: "Berlin Alexanderplatz/Haus des Lehrers"
clicked.connect: app.run_alex
QtGui.QRadioButton:
setText: "Berlin Spandau"
clicked.connect: app.run_spandau
QtGui.QRadioButton:
setText: "Paris Rue de Seine"
clicked.connect: app.run_paris
QtGui.QRadioButton:
setText: "Tokyo near tower"
clicked.connect: app.run_tokyo
QtGui.QLabel:
QtGui.QLabel:
setText:"P r o c e s s i n g:"
id: "status"
setFixedHeight: 20
QtGui.QLabel:
setText:"---"
id: "status"
QtGui.QProgressBar:
id: "progb"
setFixedHeight: 20
'''
## the gui backend
class MyApp(object):
    '''execution layer of the Gui'''

    def run(self,b,l):
        '''run(self,b,l) imports area with center coordinates latitude b, longitude l'''
        s=self.root.ids['s'].value()
        key="%0.7f" %(b) + "," + "%0.7f" %(l)
        self.root.ids['bl'].setText(key)
        import_osm2(b,l,float(s)/10,self.root.ids['progb'],self.root.ids['status'],False)

    def run_alex(self):
        '''imports Berlin Alexanderplatz'''
        self.run(52.52128,l=13.41646)

    def run_paris(self):
        '''imports Paris'''
        self.run(48.85167,2.33669)

    def run_tokyo(self):
        '''imports Tokyo near tower'''
        self.run(35.65905,139.74991)

    def run_spandau(self):
        '''imports Berlin Spandau'''
        self.run(52.508,13.18)

    def run_co2(self):
        '''imports Coburg Univerity and School'''
        self.run(50.2631171, 10.9483)

    def run_sternwarte(self):
        '''imports Sonneberg Neufang observatorium'''
        self.run(50.3736049,11.191643)

    def showHelpBox(self):
        '''pops up a message box describing the accepted input formats'''
        msg=PySide.QtGui.QMessageBox()
        msg.setText("<b>Help</b>")
        msg.setInformativeText("Import_osm map dialogue box can also accept links from following sites in addition to (latitude, longitude)<ul><li>OpenStreetMap</li><br>e.g. https://www.openstreetmap.org/#map=15/30.8611/75.8610<br><li>Google Maps</li><br>e.g. https://www.google.co.in/maps/@30.8611,75.8610,5z<br><li>Bing Map</li><br>e.g. https://www.bing.com/maps?osid=339f4dc6-92ea-4f25-b25c-f98d8ef9bc45&cp=30.8611~75.8610&lvl=17&v=2&sV=2&form=S00027<br><li>Here Map</li><br>e.g. https://wego.here.com/?map=30.8611,75.8610,15,normal<br><li>(latitude,longitude)</li><br>e.g. 30.8611,75.8610</ul><br>If in any case, the latitude & longitudes are estimated incorrectly, you can use different separators in separator box or can put latitude & longitude directly into their respective boxes.")
        msg.exec_()

    def showHelpBoxY(self):
        '''debug stub for the help button'''
        #self.run_sternwarte()
        say("showHelpBox called")

    def getSeparator(self):
        '''guess the coordinate separator from the pasted text and preset the separator field'''
        bl=self.root.ids['bl'].text()
        if bl.find('openstreetmap.org') != -1:
            self.root.ids['sep'].setText('/')
        elif bl.find('google.co') != -1:
            self.root.ids['sep'].setText('@|,')
        elif bl.find('bing.com') != -1:
            self.root.ids['sep'].setText('=|~|&')
        elif bl.find('wego.here.com') != -1:
            self.root.ids['sep'].setText('=|,')
        elif bl.find(',') != -1:
            self.root.ids['sep'].setText(',')
        elif bl.find(':') != -1:
            self.root.ids['sep'].setText(':')
        elif bl.find('/') != -1:
            self.root.ids['sep'].setText('/')

    def getCoordinate(self):
        '''split the pasted text on the separator(s) and copy the first two
        decimal numbers found into the lat/long fields'''
        sep=self.root.ids['sep'].text()
        bl=self.root.ids['bl'].text()
        import re
        spli=re.split(sep, bl)
        flag='0'  # '0' = lat not yet found, '1' = lat found, '2' = both found
        for x in spli:
            try:
                float(x)
                if x.find('.') != -1:
                    if flag=='0':
                        self.root.ids['lat'].setText(x)
                        flag='1'
                    elif flag=='1':
                        self.root.ids['long'].setText(x)
                        flag='2'
            except:
                # token is not a number: skip it
                flag=flag

    def swap(self):
        '''exchange the contents of the latitude and longitude fields'''
        tmp1=self.root.ids['lat'].text()
        tmp2=self.root.ids['long'].text()
        self.root.ids['long'].setText(tmp1)
        self.root.ids['lat'].setText(tmp2)

    def downloadData(self):
        '''download data from osm'''
        button=self.root.ids['runbl1']
        button.hide()
        br=self.root.ids['running']
        br.show()
        bl_disp=self.root.ids['lat'].text()
        b=float(bl_disp)
        bl_disp=self.root.ids['long'].text()
        l=float(bl_disp)
        s=self.root.ids['s'].value()
        elevation=self.root.ids['elevation'].isChecked()
        rc= import_osm2(float(b),float(l),float(s)/10,self.root.ids['progb'],self.root.ids['status'],elevation)
        # a False return means the download failed: offer "Apply values"
        # (re-parse the cached/partial data) instead of another download
        if not rc:
            button=self.root.ids['runbl2']
            button.show()
        else:
            button=self.root.ids['runbl1']
            button.show()
        br.hide()

    def applyData(self):
        '''apply downloaded or cached data to create the FreeCAD models'''
        button=self.root.ids['runbl2']
        button.hide()
        br=self.root.ids['running']
        br.show()
        bl_disp=self.root.ids['lat'].text()
        b=float(bl_disp)
        bl_disp=self.root.ids['long'].text()
        l=float(bl_disp)
        s=self.root.ids['s'].value()
        elevation=self.root.ids['elevation'].isChecked()
        import_osm2(float(b),float(l),float(s)/10,self.root.ids['progb'],self.root.ids['status'],elevation)
        button=self.root.ids['runbl1']
        button.show()
        br.hide()

    def showMap(self):
        '''open a webbrowser window and display the openstreetmap presentation of the area'''
        bl_disp=self.root.ids['lat'].text()
        b=float(bl_disp)
        bl_disp=self.root.ids['long'].text()
        l=float(bl_disp)
        # NOTE(review): s is read but never used in this method
        s=self.root.ids['s'].value()
        WebGui.openBrowser( "http://www.openstreetmap.org/#map=16/"+str(b)+'/'+str(l))

    def showDistanceOnLabel(self):
        '''mirror the size slider's value (in km) onto its label'''
        distance=self.root.ids['s'].value()
        showDistanceLabel=self.root.ids['showDistanceLabel']
        showDistanceLabel.setText('Distance is '+str(float(distance)/10)+'km.')
## the gui startup
def mydialog():
    ''' starts the gui dialog

    Builds a Miki GUI from the s6 layout string, wires it to a fresh
    MyApp instance and returns the Miki object.
    '''
    app=MyApp()
    import geodat.miki as miki
    reload(miki)
    miki=miki.Miki()
    miki.app=app
    app.root=miki
    miki.parse2(s6)
    miki.run(s6)
    return miki
def importOSM():
    '''workbench entry point: open the OSM import dialog'''
    mydialog()
'''
#-----------------
# verarbeiten
import xml.etree.ElementTree as ET
fn='/home/thomas/.FreeCAD//geodat3/50.340722-11.232647-0.015'
#tree = ET.parse(fn)
data_as_string=''<?xml version="1.0"?><data>
<country name="Liechtenstein">
<rank>1</rank>
<year>2008</year>
<gdppc>141100</gdppc>
<neighbor name="Austria" direction="E"/>
<neighbor name="Switzerland" direction="W"/>
</country>
<country name="Singapore">
<rank>4</rank>
<year>2011</year>
<gdppc>59900</gdppc>
<neighbor name="Malaysia" direction="N"/>
</country>
<country name="Panama">
<rank>68</rank>
<year>2011</year>
<gdppc>13600</gdppc>
<neighbor name="Costa Rica" direction="W"/>
<neighbor name="Colombia" direction="E"/>
</country>
</data>
''
root = ET.fromstring(data_as_string)
for element in tree.getiterator('node'):
print(element.attrib)
root = tree.getroot()
ET.dump(root)
for elem in root:
print (elem.tag,elem.attrib)
#----------------
'''
|
microelly2/geodata
|
geodat/import_osm.py
|
Python
|
lgpl-3.0
| 23,641 | 0.05022 |
#!/usr/bin/python
# @lint-avoid-python-3-compatibility-imports
#
# uobjnew Summarize object allocations in high-level languages.
# For Linux, uses BCC, eBPF.
#
# USAGE: uobjnew [-h] [-T TOP] [-v] {java,ruby,c} pid [interval]
#
# Copyright 2016 Sasha Goldshtein
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 25-Oct-2016 Sasha Goldshtein Created this.
from __future__ import print_function
import argparse
from bcc import BPF, USDT
from time import sleep
# Command-line interface.  `language` selects which USDT probes / uprobes
# to instrument; the optional `interval` switches from one-shot output to
# periodic reporting.
examples = """examples:
./uobjnew java 145 # summarize Java allocations in process 145
./uobjnew c 2020 1 # grab malloc() sizes and print every second
./uobjnew ruby 6712 -C 10 # top 10 Ruby types by number of allocations
./uobjnew ruby 6712 -S 10 # top 10 Ruby types by total size
"""
parser = argparse.ArgumentParser(
    description="Summarize object allocations in high-level languages.",
    formatter_class=argparse.RawDescriptionHelpFormatter,
    epilog=examples)
parser.add_argument("language", choices=["java", "ruby", "c"],
                    help="language to trace")
parser.add_argument("pid", type=int, help="process id to attach to")
parser.add_argument("interval", type=int, nargs='?',
                    help="print every specified number of seconds")
parser.add_argument("-C", "--top-count", type=int,
                    help="number of most frequently allocated types to print")
parser.add_argument("-S", "--top-size", type=int,
                    help="number of largest types by allocated bytes to print")
parser.add_argument("-v", "--verbose", action="store_true",
                    help="verbose mode: print the BPF program (for debugging purposes)")
args = parser.parse_args()

# Common BPF scaffolding: a hash keyed by allocation size (C/malloc
# tracing) or by type name (managed runtimes).
program = """
#include <linux/ptrace.h>
struct key_t {
#if MALLOC_TRACING
u64 size;
#else
char name[50];
#endif
};
struct val_t {
u64 total_size;
u64 num_allocs;
};
BPF_HASH(allocs, struct key_t, struct val_t);
""".replace("MALLOC_TRACING", "1" if args.language == "c" else "0")
usdt = USDT(pid=args.pid)
#
# Java: the object__alloc USDT probe carries class name and size.
#
if args.language == "java":
    program += """
int alloc_entry(struct pt_regs *ctx) {
struct key_t key = {};
struct val_t *valp, zero = {};
u64 classptr = 0, size = 0;
bpf_usdt_readarg(2, ctx, &classptr);
bpf_usdt_readarg(4, ctx, &size);
bpf_probe_read(&key.name, sizeof(key.name), (void *)classptr);
valp = allocs.lookup_or_init(&key, &zero);
valp->total_size += size;
valp->num_allocs += 1;
return 0;
}
"""
    usdt.enable_probe("object__alloc", "alloc_entry")
#
# Ruby: generic object__create plus dedicated, size-carrying probes for
# string/hash/array.
#
elif args.language == "ruby":
    create_template = """
int THETHING_alloc_entry(struct pt_regs *ctx) {
struct key_t key = { .name = "THETHING" };
struct val_t *valp, zero = {};
u64 size = 0;
bpf_usdt_readarg(1, ctx, &size);
valp = allocs.lookup_or_init(&key, &zero);
valp->total_size += size;
valp->num_allocs += 1;
return 0;
}
"""
    program += """
int object_alloc_entry(struct pt_regs *ctx) {
struct key_t key = {};
struct val_t *valp, zero = {};
u64 classptr = 0;
bpf_usdt_readarg(1, ctx, &classptr);
bpf_probe_read(&key.name, sizeof(key.name), (void *)classptr);
valp = allocs.lookup_or_init(&key, &zero);
valp->num_allocs += 1; // We don't know the size, unfortunately
return 0;
}
"""
    usdt.enable_probe("object__create", "object_alloc_entry")
    for thing in ["string", "hash", "array"]:
        program += create_template.replace("THETHING", thing)
        usdt.enable_probe("%s__create" % thing, "%s_alloc_entry" % thing)
#
# C: no USDT probes; a uprobe on libc malloc is attached below instead.
#
elif args.language == "c":
    program += """
int alloc_entry(struct pt_regs *ctx, size_t size) {
struct key_t key = {};
struct val_t *valp, zero = {};
key.size = size;
valp = allocs.lookup_or_init(&key, &zero);
valp->total_size += size;
valp->num_allocs += 1;
return 0;
}
"""
if args.verbose:
    print(usdt.get_text())
    print(program)
bpf = BPF(text=program, usdt_contexts=[usdt])
if args.language == "c":
    bpf.attach_uprobe(name="c", sym="malloc", fn_name="alloc_entry",
                      pid=args.pid)
exit_signaled = False
print("Tracing allocations in process %d (language: %s)... Ctrl-C to quit." %
      (args.pid, args.language or "none"))
while True:
    try:
        sleep(args.interval or 99999999)
    except KeyboardInterrupt:
        exit_signaled = True
        print()
    data = bpf["allocs"]
    # BUGFIX: the sort keys used `lambda (k, v): ...` tuple-parameter
    # syntax, which was removed in Python 3 (PEP 3113) and made the whole
    # script a SyntaxError there; index the (key, value) pair explicitly,
    # which behaves identically on Python 2.
    if args.top_count:
        data = sorted(data.items(), key=lambda kv: kv[1].num_allocs)
        data = data[-args.top_count:]
    elif args.top_size:
        data = sorted(data.items(), key=lambda kv: kv[1].total_size)
        data = data[-args.top_size:]
    else:
        data = sorted(data.items(), key=lambda kv: kv[1].total_size)
    print("%-30s %8s %12s" % ("TYPE", "# ALLOCS", "# BYTES"))
    for key, value in data:
        if args.language == "c":
            obj_type = "block size %d" % key.size
        else:
            obj_type = key.name
        print("%-30s %8d %12d" %
              (obj_type, value.num_allocs, value.total_size))
    # With an interval, clear and loop; otherwise (or after Ctrl-C) quit.
    if args.interval and not exit_signaled:
        bpf["allocs"].clear()
    else:
        exit()
|
mkacik/bcc
|
tools/uobjnew.py
|
Python
|
apache-2.0
| 5,131 | 0.000974 |
import pytest
from apispec import yaml_utils
def test_load_yaml_from_docstring():
    # f exists only to carry a docstring; everything after the "---"
    # marker is the YAML payload load_yaml_from_docstring should extract.
    def f():
        """
        Foo
        bar
        baz quux
        ---
        herp: 1
        derp: 2
        """
    result = yaml_utils.load_yaml_from_docstring(f.__doc__)
    assert result == {"herp": 1, "derp": 2}
@pytest.mark.parametrize("docstring", (None, "", "---"))
def test_load_yaml_from_docstring_empty_docstring(docstring):
    # None, an empty string, and a bare "---" marker all yield an empty dict.
    assert yaml_utils.load_yaml_from_docstring(docstring) == {}
@pytest.mark.parametrize("docstring", (None, "", "---"))
def test_load_operations_from_docstring_empty_docstring(docstring):
    # Same degenerate inputs as above, for the operations loader.
    assert yaml_utils.load_operations_from_docstring(docstring) == {}
def test_dict_to_yaml_unicode():
    # By default non-ASCII characters are escaped; with allow_unicode they
    # are emitted verbatim.
    assert yaml_utils.dict_to_yaml({"가": "나"}) == '"\\uAC00": "\\uB098"\n'
    assert yaml_utils.dict_to_yaml({"가": "나"}, {"allow_unicode": True}) == "가: 나\n"
|
marshmallow-code/apispec
|
tests/test_yaml_utils.py
|
Python
|
mit
| 908 | 0.001116 |
#!/usr/bin/env python
"""This is a game called Tetros made with Tkinter graphics (quite similar to Tetris)."""
# Import modules
from tkinter import *
from tkinter import filedialog
from tkinter import messagebox
from tkinter.ttk import *
import random
import math
import time
import cmath
import copy
import sys
import winsound
__author__ = "Advait Maybhate"
__copyright__ = "Copyright 2016, The Final Project"
__credits__ = [
"Jason Schattman",
"Huzaifa Arshad",
"Gaurav Iyer",
"Leon Fattakhov",
"Zach Chapman"]
__license__ = "GPL"
__version__ = "20"
__maintainer__ = "Advait Maybhate"
__status__ = "Stable Release"
# Create root in order to use tkinter
root = Tk()
Style().configure("TButton", padding=6, relief="flat", background="#33cc33")
root.title(string="Tetros")  # Title window with game name
instructions = Canvas(
    root,
    width=800,
    height=600,
    background="white")  # Make instructions canvas
# Make text box for user to enter speed at which tetrominoes should fall
eText = Combobox(root, font="Times 20 bold", values=["Easy - 0.5", "Medium - 0.3", "Hard - 0.1"])
# Make button for user to click in order to advance to the game screen
# (the lambdas defer the lookup of handlers defined later in the file
# until the button is actually clicked)
okayB = Button(
    root,
    text="Begin!",
    command=lambda: getDifficulty())
screen = Canvas(
    root,
    width=600,
    height=525,
    background="white")  # Make main game canvas
# Make button for quitting Tetros (present in the final game statistics screen)
quitB = Button(
    root,
    text="Quit Tetros",
    command=lambda: endAll())
restartB = Button(
    root,
    text="Restart Tetros",
    command=lambda: restart())
# Initialize variables and objects needed for the instructions screen
menubar = Menu(root)
menuB = Menu(menubar, tearoff=0)
menuB.add_command(label="Save Progress", command=lambda: save())
menuB.add_command(label="Load From File", command=lambda: loadSave())
menuB.add_command(label="Restart", command=lambda: restart())
menuB.add_command(label="Exit", command=lambda: exitB())
menubar.add_cascade(label="File", menu=menuB)
root.config(menu=menubar)
# NOTE(review): purpose unclear from this chunk — appears to be a
# module-level sentinel used later in the game; confirm before renaming.
string = -1
def exitB():
    """Handles the File->Exit menu item.

    If the game is already over, closes the application immediately;
    if a game is running, shows the statistics screen first.  If no game
    was ever started (qPressed undefined), just closes the application.
    """
    global qPressed
    try:
        if qPressed:
            endAll()        # Game already finished: close everything
        endGame()           # Show the final statistics screen
        qPressed = True
    except NameError:
        endAll()            # qPressed never set: no game to summarise
def setInitialValues():
    """Initializes the global game state used throughout a game of Tetros."""
    global length, clearedRows, blocks3d, blockCoords, blocks, paused, predictShape, qPressed, centres, colours, floor, counter, functions, s, score, scoreP, tetrisSong
    counter = -1       # Number of tetrominoes dropped so far
    length = 25        # Side length of a single block in pixels
    blockCoords = []   # Coordinates of every block on the board
    blocks = []        # Canvas polygon ids matching blockCoords
    qPressed = False   # True once q/Q/quit has been pressed
    centres = []       # Rotation centre of each tetromino
    colours = []       # Colour of each tetromino
    floor = 500        # y coordinate of the bottom of the Tetros box
    score = 0          # Current score
    scoreP = 0         # Canvas id of the score text on screen
    # Pick the background-song tempo to match the chosen difficulty s
    if s >= 0.2:
        tetrisSong = "0%.wav"
    elif 0.1 < s < 0.2:
        tetrisSong = "50%.wav"
    elif s == 0.1:
        tetrisSong = "100%.wav"
    elif 0.05 <= s < 0.1:
        tetrisSong = "150%.wav"
    else:
        tetrisSong = "200%.wav"
    # Tetromino factory functions, one per shape
    functions = [makei, makej, makel, makeo, makes, maket, makez]
    # Placeholder for the "next shape" preview (top right of the interface)
    predictShape = 0
    paused = False     # True while the game is paused
    clearedRows = 0    # Number of rows cleared so far
    blocks3d = PhotoImage(file="End.gif")  # Final game screen background image
def hexadecimal():
    """Returns a random hexadecimal colour string, e.g. "#3FA0C2".

    Each of the six digits is drawn uniformly from 0-9/A-F.  This replaces
    the original rejection sampling over ASCII codes 48-70 (skipping the
    punctuation range 58-64), which produced exactly the same uniform
    distribution over the 16 hex characters but was much harder to read.
    """
    return "#" + "".join(random.choice("0123456789ABCDEF") for _ in range(6))
# Developer overlays (only enable the grid while developing)
def overlay():
    """Draws a grid and/or dot overlay on the game canvas (dev aid)."""
    global gridOverlay, dotOverlay
    # Grid lines with coordinate labels; disabled in the shipped game
    gridOverlay = False
    if gridOverlay:
        step = 25  # Spacing between grid lines
        for gx in range(0, 600, step):
            screen.create_line(gx, 10, gx, 800, fill="black")
            # Label each vertical line with its x coordinate
            screen.create_text(gx, 0, text=str(gx), font="Times 8", anchor=N)
        for gy in range(0, 525, step):
            screen.create_line(20, gy, 800, gy, fill="black")
            # Label each horizontal line with its y coordinate
            screen.create_text(4, gy, text=str(gy), font="Times 8", anchor=W)
    # Small dots at every grid intersection; enabled in the shipped game
    dotOverlay = True
    if dotOverlay:
        step = 25  # Spacing between dots
        # Dot grid over the Tetros box
        for dx in range(25, 300, step):
            for dy in range(0, 525, step):
                screen.create_oval(dx - 1, dy - 1, dx + 1, dy + 1, fill="black")
        # Dot grid over the "Next Shape" box
        for dx in range(400, 525, step):
            for dy in range(125, 200, step):
                screen.create_oval(dx - 1, dy - 1, dx + 1, dy + 1, fill="black")
def rotatePoint(point, centre, thetaDegrees):
    """Rotates ``point`` around ``centre`` by ``thetaDegrees`` degrees.

    Uses complex-number multiplication: multiplying by e^(i*theta) rotates
    a point about the origin, so the pivot is subtracted first and added
    back afterwards.  Returns the rotated point as an [x, y] list.
    """
    theta = math.radians(thetaDegrees)
    rotation = cmath.exp(theta * 1j)          # Unit complex number for the turn
    pivot = complex(centre[0], centre[1])     # Centre of rotation as a complex
    moved = rotation * (complex(point[0], point[1]) - pivot) + pivot
    # Real part is the rotated x, imaginary part the rotated y
    return [moved.real, moved.imag]
def makeWholeCoords():
    """Clears the canvas and redraws every placed block from blockCoords."""
    global blockCoords, blocks, colours
    # Wipe everything so all objects are redrawn in their current state
    screen.delete(ALL)
    # Redraw every block of every placed tetromino; the last entry is the
    # currently falling piece, which is redrawn elsewhere.
    for tet in range(len(blockCoords) - 1):
        for blk in range(len(blockCoords[tet])):
            corners = [blockCoords[tet][blk][c] for c in range(4)]
            blocks[tet][blk] = screen.create_polygon(
                corners, fill=colours[tet], outline="black", width="2")
def rotatePolygon(polygon, centre, angleDegrees):
    """Rotates every vertex of ``polygon`` around ``centre`` in place.

    Returns the (mutated) polygon for convenience.
    """
    # Slice assignment keeps the mutation in place, as callers expect
    polygon[:] = [rotatePoint(vertex, centre, angleDegrees) for vertex in polygon]
    return polygon
def makeCoords(x, y):
    """Returns the four corners of a 25x25 block with top-left corner (x, y).

    Corner order is top-left, top-right, bottom-right, bottom-left.
    """
    corners = []
    for dx, dy in ((0, 0), (25, 0), (25, 25), (0, 25)):
        corners.append([x + dx, y + dy])
    return corners
def makePolygon(coords, colour):
    """Draws the four blocks of a tetromino and returns their canvas ids.

    ``coords`` is a list of four blocks, each a list of four [x, y] corner
    points (as produced by makeCoords); ``colour`` is the fill colour.
    The original copy-pasted block1..block4 calls are collapsed into a
    single comprehension — same four create_polygon calls, same order.
    """
    return [
        screen.create_polygon(c, fill=colour, outline="black", width="2")
        for c in coords
    ]
def makei(x, y, real=True):
    """Makes an I shape tetromino centred on (x, y).

    When ``real`` is True the piece becomes the active falling tetromino and
    its coordinates/canvas ids/centre/colour are appended to the global
    tracking lists; when False it is only drawn as the "Next Shape" preview
    and assigned to predictShape.  (The original duplicated the coordinate
    construction in both branches; it is now built once.)
    """
    global blockCoords, blocks, length, centres, colours, predictShape
    coords = [
        makeCoords(x - length * 2, y - length),
        makeCoords(x - length, y - length),
        makeCoords(x, y - length),
        makeCoords(x + length, y - length),
    ]
    if real:
        blockCoords.append(coords)                   # Track coordinates
        blocks.append(makePolygon(coords, "cyan"))   # Draw on screen
        centres.append([x, y])                       # Rotation centre
        colours.append("cyan")                       # Remember colour
    else:
        # Preview only: draw it and remember the canvas ids
        predictShape = makePolygon(coords, "cyan")
def makej(x, y, real=True):
    """Makes a J shape tetromino centred on (x, y).

    When ``real`` is True the piece becomes the active falling tetromino and
    its data is appended to the global tracking lists; when False it is only
    drawn as the "Next Shape" preview and assigned to predictShape.  (The
    original duplicated the coordinate construction in both branches.)
    """
    global blockCoords, blocks, length, centres, colours, predictShape
    coords = [
        makeCoords(x - length * 3 / 2, y - length * 3 / 2),
        makeCoords(x - length * 3 / 2, y - length / 2),
        makeCoords(x - length / 2, y - length / 2),
        makeCoords(x + length / 2, y - length / 2),
    ]
    if real:
        blockCoords.append(coords)                   # Track coordinates
        blocks.append(makePolygon(coords, "blue"))   # Draw on screen
        centres.append([x, y])                       # Rotation centre
        colours.append("blue")                       # Remember colour
    else:
        # Preview only: draw it and remember the canvas ids
        predictShape = makePolygon(coords, "blue")
def makel(x, y, real=True):
    """Makes an L shape tetromino centred on (x, y).

    When ``real`` is True the piece becomes the active falling tetromino and
    its data is appended to the global tracking lists; when False it is only
    drawn as the "Next Shape" preview and assigned to predictShape.  (The
    original duplicated the coordinate construction in both branches.)
    """
    global blockCoords, blocks, length, centres, colours, predictShape
    coords = [
        makeCoords(x - length * 3 / 2, y - length / 2),
        makeCoords(x - length / 2, y - length / 2),
        makeCoords(x + length / 2, y - length / 2),
        makeCoords(x + length / 2, y - length * 3 / 2),
    ]
    if real:
        blockCoords.append(coords)                     # Track coordinates
        blocks.append(makePolygon(coords, "orange"))   # Draw on screen
        centres.append([x, y])                         # Rotation centre
        colours.append("orange")                       # Remember colour
    else:
        # Preview only: draw it and remember the canvas ids
        predictShape = makePolygon(coords, "orange")
def makeo(x, y, real=True):
    """Makes an O shape tetromino centred on (x, y).

    When ``real`` is True the piece becomes the active falling tetromino and
    its data is appended to the global tracking lists; when False it is only
    drawn as the "Next Shape" preview and assigned to predictShape.  (The
    original duplicated the coordinate construction in both branches.)
    """
    global blockCoords, blocks, length, centres, colours, predictShape
    coords = [
        makeCoords(x - length, y - length),
        makeCoords(x, y - length),
        makeCoords(x - length, y),
        makeCoords(x, y),
    ]
    if real:
        blockCoords.append(coords)                     # Track coordinates
        blocks.append(makePolygon(coords, "yellow"))   # Draw on screen
        centres.append([x, y])                         # Rotation centre
        colours.append("yellow")                       # Remember colour
    else:
        # Preview only: draw it and remember the canvas ids
        predictShape = makePolygon(coords, "yellow")
def makes(x, y, real=True):
    """Makes an S shape tetromino centred on (x, y).

    When ``real`` is True the piece becomes the active falling tetromino and
    its data is appended to the global tracking lists; when False it is only
    drawn as the "Next Shape" preview and assigned to predictShape.  (The
    original duplicated the coordinate construction in both branches.)
    """
    global blockCoords, blocks, length, centres, colours, predictShape
    coords = [
        makeCoords(x - length * 3 / 2, y - length / 2),
        makeCoords(x - length / 2, y - length / 2),
        makeCoords(x - length / 2, y - length * 3 / 2),
        makeCoords(x + length / 2, y - length * 3 / 2),
    ]
    if real:
        blockCoords.append(coords)                    # Track coordinates
        blocks.append(makePolygon(coords, "green"))   # Draw on screen
        centres.append([x, y])                        # Rotation centre
        colours.append("green")                       # Remember colour
    else:
        # Preview only: draw it and remember the canvas ids
        predictShape = makePolygon(coords, "green")
def maket(x, y, real=True):
    """Makes a T shape tetromino centred on (x, y).

    When ``real`` is True the piece becomes the active falling tetromino and
    its data is appended to the global tracking lists; when False it is only
    drawn as the "Next Shape" preview and assigned to predictShape.  (The
    original duplicated the coordinate construction in both branches.)
    """
    global blockCoords, blocks, length, centres, colours, predictShape
    coords = [
        makeCoords(x - length * 3 / 2, y - length / 2),
        makeCoords(x - length / 2, y - length / 2),
        makeCoords(x - length / 2, y - length * 3 / 2),
        makeCoords(x + length / 2, y - length / 2),
    ]
    if real:
        blockCoords.append(coords)                      # Track coordinates
        blocks.append(makePolygon(coords, "magenta"))   # Draw on screen
        centres.append([x, y])                          # Rotation centre
        colours.append("magenta")                       # Remember colour
    else:
        # Preview only: draw it and remember the canvas ids
        predictShape = makePolygon(coords, "magenta")
def makez(x, y, real=True):
    """Makes a Z shape tetromino centred on (x, y).

    When ``real`` is True the piece becomes the active falling tetromino and
    its data is appended to the global tracking lists; when False it is only
    drawn as the "Next Shape" preview and assigned to predictShape.  (The
    original duplicated the coordinate construction in both branches.)
    """
    global blockCoords, blocks, length, centres, colours, predictShape
    coords = [
        makeCoords(x - length * 3 / 2, y - length * 3 / 2),
        makeCoords(x - length / 2, y - length * 3 / 2),
        makeCoords(x - length / 2, y - length / 2),
        makeCoords(x + length / 2, y - length / 2),
    ]
    if real:
        blockCoords.append(coords)                  # Track coordinates
        blocks.append(makePolygon(coords, "red"))   # Draw on screen
        centres.append([x, y])                      # Rotation centre
        colours.append("red")                       # Remember colour
    else:
        # Preview only: draw it and remember the canvas ids
        predictShape = makePolygon(coords, "red")
def makeScore():
    """Removes the old score text from the canvas and redraws the current score."""
    global score, scoreP, qPressed
    screen.delete(scoreP)   # Remove the previously drawn score
    if qPressed:
        return              # Game over: nothing more to draw
    # Draw the up-to-date score in the sidebar
    scoreP = screen.create_text(
        450, 200,
        text="Score: " + str(score),
        font="Times 20 bold")
def checkFloor():
    """Returns True when any corner of the falling tetromino sits on the floor line."""
    global blockCoords, floor
    falling = blockCoords[-1]   # The last entry is the falling tetromino
    return any(
        falling[blk][corner][1] == floor
        for blk in range(4)
        for corner in range(4))
def checkWalls():
    """Returns "left"/"right" if the falling tetromino crossed that wall, else None."""
    global blockCoords
    falling = blockCoords[-1]   # The last entry is the falling tetromino
    for blk in range(4):
        for corner in range(4):
            xCoord = falling[blk][corner][0]
            if xCoord < 50:     # Past the left wall
                return "left"
            if xCoord > 250:    # Past the right wall
                return "right"
def crash(coord=1, value=25):
    """Checks if tetromino is about to crash into another tetromino that has been placed.

    Tentatively shifts every corner of the falling tetromino by ``value``
    along axis ``coord`` (1 = y, the default downward step; 0 = x for
    sideways moves), tests that trial position against every placed block,
    then restores the original coordinates before returning.  Also returns
    True immediately if the piece is already on the floor.
    """
    global blockCoords, blocks
    hit = False  # Initialize the hit variable
    if checkFloor():  # If about to hit the floor then return True
        return True
    if len(blockCoords) > 1:  # If this is not the first tetromino
        # Add value to the x or y coordinate (depends on function parameters)
        # — this is a TRIAL move only; it is undone below
        for n in range(0, 4):
            for m in range(0, 4):
                blockCoords[-1][n][m][coord] += value
        # Check if any block in the falling tetromino has the same exact coordinates as any block that has already been placed
        # In other words, check if any block overlaps a block that has already
        # been placed
        for g in range(0, len(blockCoords) - 1):
            for w in range(0, len(blockCoords[g])):
                for v in range(0, 4):
                    if equalIgnoreOrder(blockCoords[g][w], blockCoords[-1][v]):
                        hit = True  # If it overlaps a block already placed then set hit to true
        # Return the coordinates to their original values (undo the trial
        # move; the caller decides whether to actually commit it)
        for j in range(0, 4):
            for k in range(0, 4):
                blockCoords[-1][j][k][coord] -= value
    # Return true if hit is true
    if hit:
        return True
    # If no conditions are met the return false
    return False
def getKey(item):
    """Returns element 1 of ``item`` (used as a sort key in checkRow)."""
    second = item[1]
    return second
def checkRow():
    """Checks if a row is to be cleared, if it is then it clears it.

    A row is full when 10 distinct blocks share the same top-edge y value.
    For every full row: the 10 blocks are removed, everything above falls
    down 25 px, a particle-explosion animation plays, the board is redrawn,
    and the score / cleared-row counters are updated.
    """
    global blockCoords, score, blocks, clearedRows
    ys = []  # Initialize list to keep track of y coordinates
    addKeys = []  # NOTE(review): appears unused in this function
    # Append all y coordinates from the blocks already placed to the ys list
    # (the top edge of each block identifies the row it occupies)
    for i in range(0, len(blockCoords) - 1):
        for u in range(0, len(blockCoords[i])):
            ys.append(min([blockCoords[i][u][0][1], blockCoords[i][u][1][
                1], blockCoords[i][u][2][1], blockCoords[i][u][3][1]]))
    # Make a dictionary of the frequency of y values
    yds = dict((i, ys.count(i)) for i in ys)
    for e in yds:  # Loop through yds (values are updated in place below)
        toDel = []  # Initialize list to keep track of blocks to delete
        explodeCentres = []  # Centre of each deleted block, for the animation
        if yds[e] == 10:  # 10 blocks in one row -> the row is full
            for p in range(0, len(blockCoords) - 1):
                for k in range(0, len(blockCoords[p])):
                    # Check if block is part of row being deleted using its top
                    # y coordinate
                    if e == min([blockCoords[p][k][0][1], blockCoords[p][k][1][
                            1], blockCoords[p][k][2][1], blockCoords[p][k][3][1]]):
                        centreY = e + 12.5  # Calculate the block's centre y coordinate
                        topX = min([blockCoords[p][k][0][0], blockCoords[p][k][1][0], blockCoords[p][
                            k][2][0], blockCoords[p][k][3][0]])  # Find its left x coordinate
                        centreX = topX + 12.5  # Calculate the block's centre x coordinate
                        # Add centre coordinates to explodeCentres array
                        explodeCentres.append([centreX, centreY])
                        toDel.append([p, k])  # Add its information to toDel
                    # Check if the block is above the row being deleted
                    # (screen y grows downward, so "above" means smaller y)
                    if e > min([blockCoords[p][k][0][1], blockCoords[p][k][1][
                            1], blockCoords[p][k][2][1], blockCoords[p][k][3][1]]):
                        for b in range(0, 4):
                            # Update dictionary value for that y coordinate key
                            # (decrease its frequency count by 1)
                            yds[blockCoords[p][k][b][1]] -= 1
                            # Make block fall down by 25 pixels
                            blockCoords[p][k][b][1] += 25
                            try:
                                # Update dictionary value for that y coordinate
                                # key (increase its frequency count by 1)
                                yds[blockCoords[p][k][b][1]] += 1
                            except KeyError:
                                pass  # If key is not in dictionary then pass
            # Create a deep copy of blockCoords (this one will not be edited to
            # avoid indexing errors)
            blockCoords2 = copy.deepcopy(blockCoords)
            # Sort toDel by the second value in each nested list
            toDel2 = sorted(toDel, key=getKey)
            # Reverse toDel2 (delete the highest indices first to avoid
            # indexing errors)
            toDel3 = toDel2[::-1]
            # Initialize variables for the mini explosions
            xP = []  # Array to store particle x coordinates
            yP = []  # Array to store particle y coordinates
            # Array to store particle objects (being displayed on screen)
            particles = []
            # The angle that they move in (relative to the point they explode
            # from)
            angles = []
            xSizes = []  # How wide the particles are (half of their width)
            ySizes = []  # How tall the particles are (half of their height)
            r = []  # Radius of particles (from the point they explode from)
            rSpeeds = []  # Speed of particles
            # Fill up the arrays for all 10 blocks being deleted
            for blockNum in range(0, 10):
                # Use temporary arrays so the entire list can be appended to
                # the main array later (better for indexing)
                rTemp = []
                xSizesTemp = []
                ySizesTemp = []
                rSpeedsTemp = []
                particlesTemp = []
                anglesTemp = []
                xPTemp = []
                yPTemp = []
                # Make information for 25 particles for each block
                for particleNum in range(0, 25):
                    xPTemp.append(0)
                    yPTemp.append(0)
                    dAngle = random.randint(1, 360)
                    rAngle = math.radians(dAngle)
                    anglesTemp.append(rAngle)
                    rTemp.append(random.randint(-15, 15))
                    xSizesTemp.append(random.randint(1, 4))
                    ySizesTemp.append(random.randint(1, 4))
                    rSpeedsTemp.append(random.randint(-15, 15))
                    # Re-roll zero speeds so every particle actually moves
                    while rSpeedsTemp[particleNum] == 0:
                        rSpeedsTemp[particleNum] = random.randint(-15, 15)
                    particlesTemp.append(0)
                # Append temporary arrays to the main array
                r.append(rTemp)
                xSizes.append(xSizesTemp)
                ySizes.append(ySizesTemp)
                rSpeeds.append(rSpeedsTemp)
                particles.append(particlesTemp)
                angles.append(anglesTemp)
                xP.append(xPTemp)
                yP.append(yPTemp)
            for w in range(0, 5):  # Show explosion for 5 frames
                for i in range(
                        0, 10):  # Loop through the 10 explosions, one for each block being deleted
                    for q in range(
                            0, 25):  # Loop through the particles in each explosion (25)
                        # Use trigonometry to calculate the x and y position of
                        # the specific particle
                        xP[i][q] = explodeCentres[i][0] + r[i][q] * math.cos(angles[i][q])
                        yP[i][q] = explodeCentres[i][1] - r[i][q] * math.sin(angles[i][q])
                        # Increase the particles radius by whatever speed it is
                        # going at
                        r[i][q] = r[i][q] + rSpeeds[i][q]
                        # Draw the particle on the screen
                        particles[i][q] = screen.create_oval(
                            xP[i][q] - xSizes[i][q],
                            yP[i][q] - ySizes[i][q],
                            xP[i][q] + xSizes[i][q],
                            yP[i][q] + ySizes[i][q],
                            fill=hexadecimal())
                # Update screen and sleep for a bit (for animation effect)
                screen.update()
                time.sleep(0.02)
            for i in range(
                    0, 10):  # Loop through the 10 explosions, one for each block being deleted
                for q in range(
                        0, 25):  # Loop through the particles in each explosion (25)
                    # Delete the particle (for animation effect)
                    screen.delete(particles[i][q])
            # Remove the specific block coordinates from blockCoords using
            # toDel3 and blockCoords2
            # NOTE(review): this loop variable shadows the particle-radius
            # list ``r`` above; the list is no longer needed at this point
            for r in range(0, len(toDel3)):
                blockCoords[
                    toDel3[r][0]].remove(
                    blockCoords2[
                        toDel3[r][0]][
                        toDel3[r][1]])
            makeWholeCoords()  # Update the graphics of all the blocks
            showNext()  # Show the next shape (it got deleted in makeWholeCoords)
            overlay()  # Show the dot overlay (it got deleted in makeWholeCoords)
            makeTetrisRectangle()  # Show the Tetros box (it got deleted in makeWholeCoords)
            sidebar()  # Show sidebar (it got deleted in makeWholeCoords)
            score += 20  # Increase score by 20
            clearedRows += 1  # Increase amount of rows cleared by 1
def endAll():
    """Tears down the game window, plays the exit sound, and terminates."""
    root.destroy()                                        # Close the UI
    winsound.PlaySound("SystemExit", winsound.SND_ALIAS)  # Farewell sound
    sys.exit()                                            # Stop the program
def endGame():
    """Replaces the playing field with the final-statistics screen.

    Shows the final score, the number of rows cleared, the Tetros logo,
    and the quit/restart buttons, then stops the main game loop by
    setting qPressed.
    """
    global qPressed, screen, tetros, clearedRows, score, blocks3d, restartB
    qPressed = True                                # Stop the main game loop
    screen.delete(ALL)                             # Clear the playing field
    screen.create_image(300, 300, image=blocks3d)  # Background picture
    # Final score on a white strip so it stays legible over the image
    screen.create_rectangle(-10, 225, 610, 275, fill="white", outline="white")
    screen.create_text(
        300, 250,
        text="Your final score is: " + str(score) + ". Thank you for playing Tetros!",
        font="Times 18 bold")
    # Rows-cleared message: correct singular/plural plus an optional compliment
    rowForm = " row. " if clearedRows == 1 else " rows. "
    comment = "Nice job!" if clearedRows > 0 else ""
    screen.create_rectangle(-10, 375, 610, 425, fill="white", outline="white")
    screen.create_text(
        300, 400,
        text="You cleared " + str(clearedRows) + rowForm + comment,
        font="Times 18 bold")
    screen.create_image(300, 100, image=tetros)    # Tetros logo
    # Quit and restart buttons
    screen.create_window(250, 470, window=quitB)
    screen.create_window(350, 470, window=restartB)
    screen.update()                                # Refresh the display
def equalIgnoreOrder(a, b):
    """True when a and b hold the same elements regardless of order.

    Works with unhashable elements (e.g. lists of coordinates) by matching
    and removing items one at a time from a scratch copy of b.
    """
    if len(a) != len(b):
        # Different sizes can never match
        return False
    remaining = list(b)  # Scratch copy so b itself is untouched
    for element in a:
        if element not in remaining:
            # Element of a has no unmatched partner in b
            return False
        remaining.remove(element)
    # Empty scratch list means a perfect one-to-one matching was found
    return not remaining
def makeTetrisRectangle():
    """Draws the border of the box the tetrominoes fall into."""
    screen.create_rectangle(
        25, 0, 275, 500, fill=None, outline="black", width="3")
def coverNext():
    """Blanks out the previous "next shape" preview with a white rectangle."""
    global length
    left = 450 - length * 2
    right = 450 + length * 2
    top = 150 - length
    bottom = 150 + length * 3 / 2
    # White-on-white polygon hides whatever preview was drawn before
    screen.create_polygon(
        left, top,
        right, top,
        right, bottom,
        left, bottom,
        fill="white",
        outline="white",
        width=2)
def showNext():
    """Draws the upcoming tetromino in the "Next Shape" preview box."""
    global pShapes, counter, functions, predictShape
    if qPressed:
        return  # Game over: no preview to draw
    coverNext()  # Erase the previous preview first
    screen.create_text(
        450, 100,
        text="Next Shape:",
        font="Times 20 bold")
    upcoming = pShapes[counter % 7 + 1]  # Name of the next shape in the bag
    target = "make" + upcoming           # Matching factory-function name
    for factory in functions:
        if factory.__name__ == target:
            # I and O pieces have different centres than the other shapes,
            # so they need different preview coordinates
            if upcoming in ("i", "o"):
                factory(450, 150, real=False)
            else:
                factory(462.5, 162.5, real=False)
            break  # Found the factory; no need to keep scanning
    overlay()  # Redraw dot overlay on top
def getRandomShape():
    """Draws a random tetromino above the screen (there are two hidden rows above the Tetros box).

    Uses a "7-bag" randomizer: pShapes always holds a shuffled permutation
    of the seven shape names; just before the bag runs out it is refilled
    with the previous bag's last shape kept first, so the "Next Shape"
    preview stays accurate across the boundary.  Ends the game if the
    freshly spawned piece cannot move.
    """
    global counter, pShapes, blocks, score, nextShape, functions
    # If this is not the first tetromino being created then make sure the current tetromino is updated according to its position in blockCoords
    # This is needed because blockCoords may have been updated and a crash may
    # have been detected but the graphical tetromino may not have been updated
    if len(blocks) > 1:
        for j in range(0, 4):
            screen.delete(blocks[-1][j])
        blocks[-1] = makePolygon(blockCoords[-1], colours[-1])
    # Increment counter
    counter += 1
    if counter == 0:  # If it is the first shape
        pShapes = ["i", "j", "l", "o", "s", "t", "z"]  # Possible shapes array
        random.shuffle(pShapes)  # Randomize pShapes array
        showNext()  # Show the next tetromino in the "next shape" area
    if (counter + 1) % 7 == 0:  # Just before reaching the last element of the pShapes list (every time it reaches the sixth element)
        # Create a temporary array of possible shapes
        tempShapes = ["i", "j", "l", "o", "s", "t", "z"]
        # Assign nextShape to the last element in the current pShapes array
        nextShape = pShapes[-1]
        # Remove the last element from the tempShapes array
        tempShapes.remove(nextShape)
        # Create a tempStart array that just has one element which is nextShape
        tempStart = [nextShape]
        random.shuffle(tempShapes)  # Randomize the tempShapes array
        # Add the tempShapes array to the end of the tempStart array
        tempStart.extend(tempShapes)
        pShapes = tempStart  # Assign pShapes to the tempStart array
        counter = 7  # Set counter to 7 (i.e. index 0 of the new bag)
    # Choose a shape from pShapes (using modulus to make sure that there are
    # no index errors)
    shape = pShapes[counter % 7]
    call = "make" + shape  # Call is a string that is a function name
    for p in range(0, len(functions)):  # Go through the list of functions
        if call == functions[
                p].__name__:  # If call matches with the current function's name
            # Depending on the shape, call the function with x and y
            # coordinates (I and O tetrominoes has different centres compared
            # to the rest)
            if shape == "i" or shape == "o":
                functions[p](150, -length * 2)
            else:
                functions[p](162.5, -length * 3 / 2)
    if crash():  # If the shape that has just spawned cannot move
        endGame()  # End the game
    else:
        score += 8  # Otherwise increase the score by 8 (2 points per block)
def animateShape():
    """Advances the falling tetromino one row, spawning the next piece when it lands."""
    global blockCoords, blocks, qPressed, centres, colours, floor, crashed
    checkRow()  # Clear any completed rows before moving
    if crash():
        # The piece cannot move further: lock it in and spawn the next one
        getRandomShape()
        showNext()
        return
    # Shift every corner of every block in the falling piece down one row
    for blk in range(4):
        for corner in range(4):
            blockCoords[-1][blk][corner][1] += 25
    centres[-1][1] += 25  # Keep the rotation centre in sync
    # Erase the previous frame and redraw the piece at its new position
    for blk in range(4):
        screen.delete(blocks[-1][blk])
    blocks[-1] = makePolygon(blockCoords[-1], colours[-1])
def ascendSky():
    """Plays the rewind effect: every tetromino rises off the top of the screen."""
    global blockCoords, blocks, centres, colours
    # Rewind sound plays asynchronously alongside the animation
    winsound.PlaySound("rewind.wav", winsound.SND_FILENAME | winsound.SND_ASYNC)
    try:
        for _ in range(0, 20):  # 20 animation frames
            # Move every stored corner up by one row
            for tetromino in blockCoords:
                for block in tetromino:
                    for corner in block:
                        corner[1] -= 25
            # Keep the rotation centres in sync
            for centre in centres:
                centre[1] -= 25
            # Erase the old frame...
            for tetromino in blocks:
                for blk in range(0, 4):
                    screen.delete(tetromino[blk])
            # ...and redraw everything at the new positions
            for idx in range(0, len(blocks)):
                blocks[idx] = makePolygon(blockCoords[idx], colours[idx])
            screen.update()
            time.sleep(0.1)
    except IndexError:
        pass  # Lists can be ragged mid-game; just stop the effect quietly
def makeInstructions():
    """Draws the images and instruction text on the welcome screen."""
    global instructions, tetros, background
    instructions.create_image(400, 300, image=background)  # Backdrop
    instructions.create_image(400, 100, image=tetros)      # Tetros logo
    # White panel so the text stays legible over the background image
    instructions.create_rectangle(40, 200, 750, 560, fill="white")
    # Headline (only line with a custom colour)
    instructions.create_text(
        400,
        230,
        text="WELCOME TO TETROS!",
        font="Times 20 bold underline",
        fill="#008855")
    # Remaining instruction lines as (y position, text, font) triples,
    # drawn top to bottom
    textLines = [
        (275, "INSTRUCTIONS", "Times 18 bold"),
        (300, "Objective: Try to fit as many tetrominoes as possible on the screen by clearing rows!", "Times 11 bold"),
        (325, "Press the UP key or X to rotate the tetromino clockwise.", "Times 11 bold"),
        (350, "Press Z to rotate the tetromino anti-clockwise.", "Times 11 bold"),
        (375, "Press the DOWN key to accelerate the current tetromino.", "Times 11 bold"),
        (400, "Press the LEFT or RIGHT keys to move the tetromino in their respective directions.", "Times 11 bold"),
        (425, "Press Q to quit the game.", "Times 11 bold"),
        (450, "Press P to pause/unpause the game.", "Times 11 bold"),
        (475, "Scoring: 8 points for a new tetromino, 20 points for a cleared row, and 1 point for accelerating the tetromino.", "Times 11 bold"),
        (500, "Tip: Accelerate a tetromino if you are confident it is in the right column for free points!", "Times 10 bold"),
        (525, "Enter speed at which tetrominoes should fall (in seconds) - must be non-negative:", "Times 14 bold underline"),
        (550, "Approximate difficulty for different speeds (in seconds): Easy - 0.5 Medium - 0.3 Hard - 0.1", "Times 12 bold"),
    ]
    for yPos, message, textFont in textLines:
        instructions.create_text(400, yPos, text=message, font=textFont)
def getDifficulty():
    """Read the difficulty entry and, when it holds a valid non-negative
    number, tear down the instructions screen and start the main game.

    Invalid or negative input is silently ignored so the player can retry.
    """
    global s, string
    string = eText.get()  # raw text typed into the entry widget
    # The on-screen difficulty labels are accepted verbatim as shortcuts;
    # their last two characters (".5", ".3", ".1") parse as the speed.
    if string in ("Easy - 0.5", "Medium - 0.3", "Hard - 0.1"):
        string = string[-2:]
    try:
        s = float(string)
    except ValueError:
        return  # not a number: do nothing and let the user correct it
    if s >= 0:
        # Remove the instructions UI (entry box, button, canvas) before
        # showing the game canvas.
        for widget in (eText, okayB, instructions):
            widget.destroy()
        screen.pack()
        screen.focus_set()
        root.after(1, runGame)
def restart():
    """Restart the game, playing the rising-tetromino animation when a
    finished board is still on screen.

    If no game has ever been started (the difficulty sentinel ``string``
    is still -1), a warning is shown and the function returns early.
    """
    global scoreP, qPressed
    if string == -1:
        # No game has been started yet, so there is nothing to restart.
        messagebox.showwarning(title='Restart Alert',
                               message='You currently do not have a game to restart')
        # Bug fix: previously execution fell through to the qPressed check
        # below, raising an uncaught NameError before any game existed.
        return
    if qPressed:
        # Game already over/quit: just clear the canvas and start fresh.
        try:
            screen.delete(ALL)
            root.after(1, runGame)  # schedule a fresh game
        except ValueError:
            # NOTE(review): ValueError guard kept from the original code;
            # unclear what raises it here -- confirm before removing.
            pass
    else:
        # Game still in progress: play the "ascend" animation first.
        try:
            ascendSky()
            screen.delete(ALL)
            root.after(1, runGame)
        except ValueError:
            pass
    screen.focus_set()
def sidebar():
    """Draw the side panel of the game screen: headline, control icons
    with their captions, the Tetros logo and the interface buttons."""
    global left, tetrosSmall, right, up, down, xImage, zImage, pImage, qImage, rImage
    # "CONTROLS" headline above the icon cluster.
    screen.create_text(440, 275, text="CONTROLS",
                       font="Times 13 bold underline")
    # Control-key icons, in the same draw order as before.
    icons = ((300, 375, left), (300, 325, right), (300, 425, up),
             (300, 475, down), (470, 375, pImage), (470, 425, qImage),
             (470, 325, zImage), (350, 425, xImage), (470, 475, rImage))
    for x, y, img in icons:
        screen.create_image(x, y, image=img)
    # Captions explaining what each control does.
    captions = ((535, 315, "Rotate"), (535, 335, "Anti-clockwise"),
                (535, 375, "Pause/Unpause"), (535, 425, "Quit Game"),
                (350, 375, "Move Left"), (360, 325, "Move Right"),
                (410, 415, "Rotate"), (410, 435, "Clockwise"),
                (385, 475, "Accelerate Tetromino"), (535, 475, "Restart Game"))
    for x, y, label in captions:
        screen.create_text(x, y, text=label, font="Times 10 bold")
    # Tetros logo at the top of the panel.
    screen.create_image(440, 45, image=tetrosSmall)
    # Redraw the pause/quit/restart interface buttons.
    interfaceButtons()
def coreGame():
    """Run the core animation loop until the game is paused or quit."""
    global s, paused, qPressed
    while True:
        if qPressed or paused:
            break  # stop looping once the player quits or pauses
        animateShape()   # move the falling tetromino one step
        makeScore()      # refresh the score display
        screen.update()  # repaint the canvas
        time.sleep(s)    # frame delay = speed chosen by the player
def keyDownHandler(event):
    """Handle every key press during the game.

    Up/X rotate the current tetromino clockwise and Z anti-clockwise
    (the rotation is undone if it would collide); Left/Right/Down move
    it; Q quits; P toggles pause; R restarts. Movement and rotation are
    ignored while the game is paused or the tetromino is about to land.

    :param event: tkinter key event; only ``event.keysym`` is used.
    """
    global blockCoords, blocks, paused, qPressed, centres, colours, score
    # Make sure game is not paused, tetromino is not about to crash and that
    # the key pressed is a rotation key
    if not paused and not crash() and (event.keysym == "Up" or event.keysym ==
            "x" or event.keysym == "X" or event.keysym == "z" or event.keysym == "Z"):
        # hit tracks whether the trial rotation collides with anything
        hit = False
        for i in range(0, 4):
            # Depending on the key pressed, rotate tetromino clockwise
            # (Up/X) or anti-clockwise (Z)
            if event.keysym == "z" or event.keysym == "Z":
                blockCoords[-1][i] = rotatePolygon(
                    blockCoords[-1][i], centres[-1], -90)
            else:
                blockCoords[-1][i] = rotatePolygon(
                    blockCoords[-1][i], centres[-1], 90)
        # Make sure that the rotated tetromino is not going through the floor
        # or any wall
        if not checkWalls() is None or checkFloor():
            hit = True
        # If this is not the first tetromino and hit is not already true then
        # check to see if rotating it will cause it to overlap already placed
        # blocks
        if len(blockCoords) > 1 and not hit:
            for i in range(0, len(blockCoords) - 1):
                for u in range(0, len(blockCoords[i])):
                    for g in range(0, 4):
                        # Check if the blocks are the same (order of points
                        # does not matter in this case)
                        if equalIgnoreOrder(
                                blockCoords[i][u], blockCoords[-1][g]):
                            hit = True
                            break  # Break (innermost loop) for efficiency
        if not hit:  # If rotating the tetromino will not cause errors
            # Delete previous tetromino from the screen
            for j in range(0, 4):
                screen.delete(blocks[-1][j])
            # Delete previous tetromino from the blocks array
            del blocks[-1]
            # Append the new tetromino to the blocks array (also creates new
            # tetromino on the screen)
            blocks.append(makePolygon(blockCoords[-1], colours[-1]))
        else:  # If rotating the tetromino will cause errors
            # Rotate tetromino back to its original position (opposite
            # direction of the trial rotation above)
            for i in range(0, 4):
                if event.keysym == "z" or event.keysym == "Z":
                    blockCoords[-1][i] = rotatePolygon(
                        blockCoords[-1][i], centres[-1], 90)
                else:
                    blockCoords[-1][i] = rotatePolygon(
                        blockCoords[-1][i], centres[-1], -90)
    # If game is not paused, the left arrow key has been pressed, the
    # tetromino will not crash if it is moved left, the tetromino is not about
    # to crash and the tetromino is not hitting the left wall
    elif not paused and event.keysym == "Left" and not crash(coord=0, value=-25) and not crash() and checkWalls() != "left":
        # Move the tetromino to the left by decreasing its x coordinates
        # (redrawing happens later in the animation loop)
        for i in range(0, 4):
            for e in range(0, 4):
                blockCoords[-1][i][e][0] -= 25
        # Update its centre
        centres[-1][0] -= 25
    # If game is not paused, the right arrow key has been pressed, the
    # tetromino will not crash if it is moved right, the tetromino is not
    # about to crash and the tetromino is not hitting the right wall
    elif not paused and event.keysym == "Right" and not crash(coord=0, value=25) and not crash() and checkWalls() != "right":
        # Move the tetromino to the right by increasing its x coordinates
        for i in range(0, 4):
            for e in range(0, 4):
                blockCoords[-1][i][e][0] += 25
        # Update its centre
        centres[-1][0] += 25
    # If the game is not paused, the down arrow key has been pressed and the
    # tetromino will not crash if it is moved down
    elif not paused and event.keysym == "Down" and not crash():
        # Move tetromino down by increasing its y coordinates
        for i in range(0, 4):
            for e in range(0, 4):
                blockCoords[-1][i][e][1] += 25
        # Update its centre
        centres[-1][1] += 25
        # Delete previous tetromino
        for j in range(0, 4):
            screen.delete(blocks[-1][j])
        # Update the blocks array and create the new tetromino on the screen
        blocks[-1] = makePolygon(blockCoords[-1], colours[-1])
        # Reward accelerating: increase the score by 1
        score += 1
    # If q was pressed: mark the game as quit and finish up
    elif event.keysym == "q" or event.keysym == "Q":
        qPressed = True
        endGame()  # Run the endGame procedure
    # If p was pressed: toggle pause (also resumes if already paused)
    elif event.keysym == "p" or event.keysym == "P":
        changePause()
    # If r was pressed: restart the game
    elif event.keysym == "r" or event.keysym == "R":
        restart()
def images():
    """Load every GIF asset once and bind each to its module-level name."""
    global tetros, background, left, right, tetrosSmall, up, down, xImage, zImage, pImage, qImage, rImage
    # Map each global name to its image file, then load them in one pass.
    asset_files = {
        "tetros": "Tetros.gif",
        "background": "background.gif",
        "left": "left.gif",
        "right": "right.gif",
        "tetrosSmall": "Tetros Small.gif",
        "up": "Up.gif",
        "down": "Down.gif",
        "xImage": "X.gif",
        "zImage": "Z.gif",
        "pImage": "P.gif",
        "qImage": "Q.gif",
        "rImage": "R.gif",
    }
    for name, filename in asset_files.items():
        globals()[name] = PhotoImage(file=filename)
def changePause():
    """Toggle the paused flag, showing/removing the "PAUSED" banner and
    resuming the core loop when unpausing."""
    global paused, pausedText
    if not paused:
        # Pausing: show the banner and keep keyboard focus on the canvas.
        pausedText = screen.create_text(150, 200, text='PAUSED',
                                        font='Times 20 bold')
        screen.focus_set()
        paused = True
        return
    # Unpausing: remove the banner, repaint, and re-enter the core loop.
    screen.delete(pausedText)
    screen.focus_set()
    screen.update()
    paused = False
    coreGame()
def callDifficulty(event):
    """<Return>-key callback for the difficulty entry box: delegates to
    getDifficulty(). The event object itself is unused."""
    getDifficulty()
def initialScreen():
    """Create the initial instructions screen, with the difficulty entry
    box and the start button embedded in the canvas."""
    images()  # Load image assets into module globals first
    makeInstructions()  # Draw the instructions canvas
    instructions.pack()  # Pack the instructions screen
    # Embed the difficulty text box in the instructions canvas
    eText_window = instructions.create_window(400, 580, window=eText)
    eText.focus_set()  # Focus so the user can type immediately
    # Embed the start button next to the text box
    okayB_window = instructions.create_window(600, 580, window=okayB)
def interfaceButtons():
    """Create the quit, pause and restart buttons on the game screen."""
    # (label, callback, x-position); all three buttons sit at y = 240.
    specs = (("Quit Game", endGame, 335),
             ("Pause Game", changePause, 435),
             ("Restart Game", restart, 535))
    for label, callback, x in specs:
        button = Button(root, text=label, command=callback)
        screen.create_window(x, 240, window=button)
def save():
    """Write the current game state to a user-chosen text file.

    Line-oriented format (must stay in sync with loadSave): cleared-row
    count, number of tetrominoes, one block-count line per tetromino,
    then the flattened coordinate list, centres, colours, counter,
    speed, score, song path, canvas ids and the upcoming-shape list.
    Shows a warning if called before a game exists (globals undefined).
    """
    global length, clearedRows, blocks3d, blockCoords, blocks, paused, predictShape, qPressed, centres, colours, floor, counter, functions, s, score, scoreP, tetrisSong, pShapes
    try:
        temp = blockCoords  # NameError here means no game is in progress
        path = filedialog.asksaveasfilename(
            defaultextension=".txt", filetypes=[
                ("TetrosSaveFile", ".txt")], title="Save game")
        try:
            sf = open(path, "w")
            sf.write(str(clearedRows)+"\n")
            sf.write(str(len(blockCoords))+"\n")
            # One line per tetromino: how many blocks it still has
            for i in range(0, len(blockCoords)):
                sf.write(str(len(blockCoords[i]))+"\n")
            sf.write(" ".join(map(str, blockCoords)))
            sf.write("\n")
            sf.write(" ".join(map(str, centres)))
            sf.write("\n")
            sf.write(" ".join(colours)+"\n")
            sf.write(str(counter)+"\n"+str(s)+"\n"+str(score)+"\n"+tetrisSong)
            sf.write("\n")
            sf.write(" ".join(map(str, blocks)))
            sf.write("\n")
            sf.write(str(pShapes))
        except FileNotFoundError:
            pass  # dialog cancelled (empty path)
    except NameError:
        messagebox.showwarning(title= 'Save Alert' , message='Sorry but you cannot save at this moment')
def turnList(l):
    """Convert the text form of a flat Python list of numbers (e.g.
    "[387.5, 112.5]") back into an actual list.

    Each element becomes an int when possible, otherwise a float.

    :param l: String produced by str() of a list of numbers.
    :return: List of ints/floats.

    Fix: the original hand-rolled parser crashed with ValueError on
    single-element ("[5]") and empty ("[]") inputs because it searched
    for a comma that was not there; splitting handles those cases.
    """
    stripped = l.replace("[", "").replace("]", "")
    result = []
    for part in stripped.split(","):
        part = part.strip()
        if not part:
            continue  # tolerate empty segments, e.g. from "[]"
        try:
            result.append(int(part))
        except ValueError:
            result.append(float(part))
    return result
def _applyLoadedGameData(lf):
    """Parse an open Tetros save file and restore every piece of game
    state into the module globals, then restart the background music.

    Save-file layout (must stay in sync with save()): cleared-row count,
    tetromino count, one block-count line per tetromino, then the
    flattened coordinate list, centres, colours, counter, speed, score,
    song path, canvas ids and the upcoming-shape list.

    :param lf: Open file object positioned at the start of the save data.
    """
    global clearedRows, blockCoords, blocks, centres, colours, counter, s, score, tetrisSong, pShapes
    llist = lf.read().splitlines()
    clearedRows = int(llist[0])
    # Rebuild the nested (initially empty) coordinate structure first.
    blockCoords = []
    curlen = int(llist[1])
    for i in range(0, curlen):
        shape = []
        for j in range(0, int(llist[i + 2])):
            shape.append([[], [], [], []])
        blockCoords.append(shape)
    # With the list punctuation stripped, the coordinate line is a
    # space-separated stream of numbers (empty tokens mark empty shapes).
    raw = llist[curlen + 2].replace(",", "").replace("[", "").replace("]", "")
    nums = raw.split(" ")
    cur = 0
    for a in range(0, len(blockCoords)):
        if nums[cur] == "":
            cur += 1  # fully-cleared tetromino: nothing to fill in
            continue
        for b in range(0, len(blockCoords[a])):
            for point in range(0, len(blockCoords[a][b])):
                # Each corner point is an (x, y) pair of consecutive numbers.
                blockCoords[a][b][point].append(float(nums[cur]))
                cur += 1
                blockCoords[a][b][point].append(float(nums[cur]))
                cur += 1
    centres = [turnList(part) for part in llist[curlen + 3].split("] [")]
    colours = llist[curlen + 4].split()
    counter = int(llist[curlen + 5])
    s = float(llist[curlen + 6])
    score = int(llist[curlen + 7])
    tetrisSong = llist[curlen + 8]
    winsound.PlaySound(tetrisSong, winsound.SND_FILENAME |
                       winsound.SND_ASYNC | winsound.SND_LOOP)  # loop the music
    blocks = [turnList(part) for part in llist[curlen + 9].split("] [")]
    # Upcoming shapes: pick the shape letters out of a str()-ed list.
    snext = llist[curlen + 10]
    pShapes = []
    for i in range(0, len(snext) - 2):
        if snext[i] == "[":
            pShapes.append(snext[i + 2])
        elif snext[i] == ",":
            pShapes.append(snext[i + 3])


def loadSave():
    """Load game data from a previous save file and resume play.

    Two situations are handled: a game is (or was) running, in which case
    only the state is swapped in; or the main game never started
    (detected via NameError on qPressed), in which case the instructions
    screen is torn down and the game screen set up first.

    Refactor: the save-file parsing, previously duplicated verbatim in
    both branches, now lives in _applyLoadedGameData().
    """
    global length, clearedRows, blocks3d, blockCoords, blocks, paused, predictShape, qPressed, centres, colours, floor, counter, functions, s, score, scoreP, tetrisSong, pShapes
    try:
        if not qPressed:  # NameError here -> game never started (see below)
            # Ask for the text file containing game data (returns its path)
            loadGame = filedialog.askopenfilename(
                defaultextension=".txt", filetypes=[
                    ("TetrosSaveFile", ".txt")], title="Load Game")
            try:
                lf = open(loadGame, "r")
                _applyLoadedGameData(lf)
                # Redraw everything for the restored state.
                makeWholeCoords()
                overlay()
                showNext()
                makeTetrisRectangle()
                sidebar()
            except FileNotFoundError:
                pass  # dialog cancelled
        else:
            # You cannot load a file after quitting the game
            messagebox.showwarning(title='Load Alert', message='Sorry but you cannot load a save file at this moment')
    except NameError:
        # qPressed undefined: the main game has not started yet, so tear
        # down the instructions screen and set the game screen up first.
        try:
            loadGame = filedialog.askopenfilename(
                defaultextension=".txt", filetypes=[
                    ("TetrosSaveFile", ".txt")], title="Load Game")
            lf = open(loadGame, "r")
            eText.destroy()
            okayB.destroy()
            instructions.destroy()
            screen.pack()
            screen.focus_set()
            s = 0
            setInitialValues()  # set up initial values before overwriting them
            _applyLoadedGameData(lf)
            # Redraw everything for the restored state and start playing.
            makeWholeCoords()
            overlay()
            showNext()
            makeTetrisRectangle()
            sidebar()
            coreGame()
        except FileNotFoundError:
            pass
def runGame():
    """Initialize a fresh game (state, board, first shape, sidebar,
    music) and then enter the core animation loop."""
    global qPressed, s, paused
    setInitialValues()     # reset all game-state globals
    makeTetrisRectangle()  # draw the playing-field box
    overlay()              # draw the dot overlay grid
    getRandomShape()       # spawn the first tetromino in the hidden top rows
    sidebar()              # draw the control side panel
    winsound.PlaySound(tetrisSong, winsound.SND_FILENAME |
                       winsound.SND_ASYNC | winsound.SND_LOOP)  # loop the music
    # The animation loop here was an exact copy of coreGame()'s body;
    # call it instead of duplicating the loop.
    coreGame()
# Build the instructions screen; entering a valid speed there eventually
# starts the main runGame procedure.
initialScreen()
# Pressing <Return> in the difficulty entry behaves like clicking "Begin!".
eText.bind("<Return>", callDifficulty)
# Every in-game key press is routed through keyDownHandler, which decides
# what action (if any) to take.
screen.bind("<Key>", keyDownHandler)
# Hand control over to tkinter's event loop.
root.mainloop()
|
Advait-M/Tetros
|
src/Tetros.py
|
Python
|
gpl-3.0
| 64,805 | 0.001836 |
import pygame.time
class Animation:
    """A simple looped, frame-by-frame animation over an art asset's
    states, advanced manually by calling play() every tick.
    """

    def __init__(self, sprite):
        """Create an empty animation bound to *sprite*.

        :param sprite: Asset sprite whose state the animation will drive.
        :return: An empty animation instance.
        """
        self.sprite = sprite  # sprite this animation controls
        self.frames = []      # list of (state, duration-in-ms) tuples
        self.current = 0      # index of the frame currently shown
        self.duration = 0     # ticks when the frame appeared (0 = stopped)

    def add_frame(self, state, duration):
        """Append a frame to the animation.

        :param state: State the art asset should be put in.
        :param duration: How long to show the frame, in milliseconds.
        """
        self.frames.append((state, duration))

    def is_playing(self):
        """Return True while the animation is actively playing."""
        return self.duration != 0

    def play(self):
        """Drive the animation: start it if idle, keep the sprite on the
        current frame, and advance (looping) once the frame has been
        shown for its full duration. Similar to a sprite's update().
        """
        if not self.duration:
            # First call: time-stamp the start of the opening frame.
            self.duration = pygame.time.get_ticks()
        state, frame_time = self.frames[self.current]
        self.sprite.set_state(state)
        # Advance once the current frame has outlived its duration.
        elapsed = pygame.time.get_ticks() - self.duration
        if elapsed > frame_time:
            self.current += 1
            self.duration = pygame.time.get_ticks()
            if self.current == len(self.frames):
                self.current = 0  # loop back to the first frame

    def invalidate(self):
        """Re-apply the current frame's state to the sprite.

        Internal routine to call after the animation flow was manually
        altered.
        """
        state = self.frames[self.current][0]
        self.sprite.set_state(state)

    def stop(self):
        """Stop playback and rewind to the first frame (no-op if idle)."""
        if not self.is_playing():
            return
        self.current = 0
        self.duration = 0
        self.invalidate()
|
EricHripko/TheQuestOfTin
|
tqot/animation.py
|
Python
|
gpl-3.0
| 2,600 | 0.000385 |
#!/usr/bin/env python
import pantilthat
import time
import sys
import math
import servo_ranges
def tick():
    """Sleep 10 ms between incremental servo commands (keeps the sweep
    loops below from stepping faster than the servos can follow)."""
    time.sleep(0.010)
class Shelf(object):
    """One physical shelf: how many records it holds and the servo
    angles that span it."""

    def __init__(self, num, start, end, tilt):
        self.count = num        # number of records on the shelf
        self.pan_start = start  # pan angle at the first record (degrees, +)
        self.pan_end = end      # pan angle at the last record (degrees, -)
        self.tilt_pos = tilt    # tilt angle for the whole shelf (degrees)

    def map_pos_to_angles(self, pos):
        """Map a 1-based record position to a pan angle in degrees.

        Uses a naive linear interpolation across the shelf's pan range,
        which works well enough in practice. (An earlier attempt based
        on physical distance/record-thickness measurements behaved
        poorly and was removed.)

        :param pos: Record position, 1..count.
        :return: Pan angle as an int; 0 for out-of-range positions.
        """
        if pos <= 0 or pos > self.count:
            return 0
        pan_range = abs(self.pan_start) + abs(self.pan_end)
        incr = float(pan_range) / self.count
        return int(self.pan_start - pos * incr)
# Shelf layout for this installation, top to bottom:
# Shelf(record count, pan angle at one edge, pan angle at the other, tilt).
# Angles were measured by hand for this particular setup.
max_shelves = 5
shelves = [
    Shelf(42, 24, -29, -68),
    Shelf(68, 24, -28, -40),
    Shelf(80, 26, -25, 0),
    Shelf(88, 25, -26, +40),
    Shelf(68, 26, -26, +65)
]
# sanity checks: exactly two CLI arguments (shelf id and record position)
if len(sys.argv) != 3:
    print "Usage: <shelf id> <shelf pos>\n"
    exit()
# setup: configure servo pulse ranges before issuing any commands
servo_ranges.calibrate()
# read the servos' last commanded position so we can sweep smoothly from it
orig_pan = pantilthat.get_pan()
orig_tilt = pantilthat.get_tilt()
print "found pan: %i; tilt: %i" % (orig_pan, orig_tilt)
# get args: shelf id is 1-based on the command line
in_id = int(sys.argv[1])
in_id = (in_id - 1) % max_shelves  # convert to C array notation (0-based)
in_pos = int(sys.argv[2])
print "searching: %i %i" % (in_id, in_pos)
# find the target angles for the requested record
new_pan = shelves[in_id].map_pos_to_angles(in_pos)
new_tilt = shelves[in_id].tilt_pos
# debug
print "output: %i %i" % (new_pan, new_tilt)
#exit()
# start the pointer laser (PWM mode so brightness can be set)
pantilthat.light_mode(pantilthat.PWM)
pantilthat.brightness(128)
# sweep the pan one degree at a time towards the target
pan = orig_pan
pan_incr = 1 if new_pan > orig_pan else -1
while pan != new_pan:
    pan = pan + pan_incr
    #print pan
    pantilthat.pan(pan)
    tick()
# then sweep the tilt the same way
tilt = orig_tilt
tilt_incr = 1 if new_tilt > orig_tilt else -1
while tilt != new_tilt:
    tilt = tilt + tilt_incr
    #print tilt
    pantilthat.tilt(tilt)
    tick()
# the servos are imprecise, so wiggle the pan in a small sine "dance"
# around the target to mask the positioning inaccuracy
a = 0.
while a < (12 * math.pi):
    a += math.pi / 20.
    r = int(math.sin(a) * 5.)
    pantilthat.pan(new_pan + r)
    time.sleep(0.005)
# wait a few seconds so the servos finish moving before they are
# automatically shut down on exit
print "waiting:"
for t in range(0, 3):
    time.sleep(1)
    print "."
# turn off the laser on the way out
pantilthat.brightness(0)
|
valentingalea/vinyl-shelf-finder
|
pantilthat/finder.py
|
Python
|
mit
| 2,511 | 0.024691 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def install(country=None):
    """Insert the default fixture records for a new site: item groups,
    HR types (deduction/earning/leave/employment), departments,
    designations, territory and customer-group roots, supplier types,
    UOMs, payment modes, activity and industry types, and the default
    incoming email accounts.

    :param country: Optional country name; used for the Address Template
        record and to label the cheque payment mode "Check" for the
        United States.
    """
    records = [
        # address template
        {'doctype':"Address Template", "country": country},

        # item group
        {'doctype': 'Item Group', 'item_group_name': _('All Item Groups'),
            'is_group': 'Yes', 'parent_item_group': ''},
        {'doctype': 'Item Group', 'item_group_name': _('Products'),
            'is_group': 'No', 'parent_item_group': _('All Item Groups'), "show_in_website": 1 },
        {'doctype': 'Item Group', 'item_group_name': _('Raw Material'),
            'is_group': 'No', 'parent_item_group': _('All Item Groups') },
        {'doctype': 'Item Group', 'item_group_name': _('Services'),
            'is_group': 'No', 'parent_item_group': _('All Item Groups') },
        {'doctype': 'Item Group', 'item_group_name': _('Sub Assemblies'),
            'is_group': 'No', 'parent_item_group': _('All Item Groups') },
        {'doctype': 'Item Group', 'item_group_name': _('Consumable'),
            'is_group': 'No', 'parent_item_group': _('All Item Groups') },

        # deduction type
        {'doctype': 'Deduction Type', 'name': _('Income Tax'), 'description': _('Income Tax'), 'deduction_name': _('Income Tax')},

        # earning type
        {'doctype': 'Earning Type', 'name': _('Basic'), 'description': _('Basic'), 'earning_name': _('Basic'), 'taxable': 'Yes'},

        # expense claim type
        {'doctype': 'Expense Claim Type', 'name': _('Calls'), 'expense_type': _('Calls')},
        {'doctype': 'Expense Claim Type', 'name': _('Food'), 'expense_type': _('Food')},
        {'doctype': 'Expense Claim Type', 'name': _('Medical'), 'expense_type': _('Medical')},
        {'doctype': 'Expense Claim Type', 'name': _('Others'), 'expense_type': _('Others')},
        {'doctype': 'Expense Claim Type', 'name': _('Travel'), 'expense_type': _('Travel')},

        # leave type
        {'doctype': 'Leave Type', 'leave_type_name': _('Casual Leave'), 'name': _('Casual Leave'), 'is_encash': 1, 'is_carry_forward': 1, 'max_days_allowed': '3', },
        {'doctype': 'Leave Type', 'leave_type_name': _('Compensatory Off'), 'name': _('Compensatory Off'), 'is_encash': 0, 'is_carry_forward': 0, },
        {'doctype': 'Leave Type', 'leave_type_name': _('Sick Leave'), 'name': _('Sick Leave'), 'is_encash': 0, 'is_carry_forward': 0, },
        {'doctype': 'Leave Type', 'leave_type_name': _('Privilege Leave'), 'name': _('Privilege Leave'), 'is_encash': 0, 'is_carry_forward': 0, },
        {'doctype': 'Leave Type', 'leave_type_name': _('Leave Without Pay'), 'name': _('Leave Without Pay'), 'is_encash': 0, 'is_carry_forward': 0, 'is_lwp':1},

        # Employment Type
        {'doctype': 'Employment Type', 'employee_type_name': _('Full-time')},
        {'doctype': 'Employment Type', 'employee_type_name': _('Part-time')},
        {'doctype': 'Employment Type', 'employee_type_name': _('Probation')},
        {'doctype': 'Employment Type', 'employee_type_name': _('Contract')},
        {'doctype': 'Employment Type', 'employee_type_name': _('Commission')},
        {'doctype': 'Employment Type', 'employee_type_name': _('Piecework')},
        {'doctype': 'Employment Type', 'employee_type_name': _('Intern')},
        {'doctype': 'Employment Type', 'employee_type_name': _('Apprentice')},

        # Department
        {'doctype': 'Department', 'department_name': _('Accounts')},
        {'doctype': 'Department', 'department_name': _('Marketing')},
        {'doctype': 'Department', 'department_name': _('Sales')},
        {'doctype': 'Department', 'department_name': _('Purchase')},
        {'doctype': 'Department', 'department_name': _('Operations')},
        {'doctype': 'Department', 'department_name': _('Production')},
        {'doctype': 'Department', 'department_name': _('Dispatch')},
        {'doctype': 'Department', 'department_name': _('Customer Service')},
        {'doctype': 'Department', 'department_name': _('Human Resources')},
        {'doctype': 'Department', 'department_name': _('Management')},
        {'doctype': 'Department', 'department_name': _('Quality Management')},
        {'doctype': 'Department', 'department_name': _('Research & Development')},
        {'doctype': 'Department', 'department_name': _('Legal')},

        # Designation
        {'doctype': 'Designation', 'designation_name': _('CEO')},
        {'doctype': 'Designation', 'designation_name': _('Manager')},
        {'doctype': 'Designation', 'designation_name': _('Analyst')},
        {'doctype': 'Designation', 'designation_name': _('Engineer')},
        {'doctype': 'Designation', 'designation_name': _('Accountant')},
        {'doctype': 'Designation', 'designation_name': _('Secretary')},
        {'doctype': 'Designation', 'designation_name': _('Associate')},
        {'doctype': 'Designation', 'designation_name': _('Administrative Officer')},
        {'doctype': 'Designation', 'designation_name': _('Business Development Manager')},
        {'doctype': 'Designation', 'designation_name': _('HR Manager')},
        {'doctype': 'Designation', 'designation_name': _('Project Manager')},
        {'doctype': 'Designation', 'designation_name': _('Head of Marketing and Sales')},
        {'doctype': 'Designation', 'designation_name': _('Software Developer')},
        {'doctype': 'Designation', 'designation_name': _('Designer')},
        {'doctype': 'Designation', 'designation_name': _('Assistant')},
        {'doctype': 'Designation', 'designation_name': _('Researcher')},

        # territory: root node only
        {'doctype': 'Territory', 'territory_name': _('All Territories'), 'is_group': 'Yes', 'name': _('All Territories'), 'parent_territory': ''},

        # customer group: root plus default leaf groups
        {'doctype': 'Customer Group', 'customer_group_name': _('All Customer Groups'), 'is_group': 'Yes', 'name': _('All Customer Groups'), 'parent_customer_group': ''},
        {'doctype': 'Customer Group', 'customer_group_name': _('Individual'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')},
        {'doctype': 'Customer Group', 'customer_group_name': _('Commercial'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')},
        {'doctype': 'Customer Group', 'customer_group_name': _('Non Profit'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')},
        {'doctype': 'Customer Group', 'customer_group_name': _('Government'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')},

        # supplier type
        {'doctype': 'Supplier Type', 'supplier_type': _('Services')},
        {'doctype': 'Supplier Type', 'supplier_type': _('Local')},
        {'doctype': 'Supplier Type', 'supplier_type': _('Raw Material')},
        {'doctype': 'Supplier Type', 'supplier_type': _('Electrical')},
        {'doctype': 'Supplier Type', 'supplier_type': _('Hardware')},
        {'doctype': 'Supplier Type', 'supplier_type': _('Pharmaceutical')},
        {'doctype': 'Supplier Type', 'supplier_type': _('Distributor')},

        # Sales Person: root node only
        {'doctype': 'Sales Person', 'sales_person_name': _('Sales Team'), 'is_group': "Yes", "parent_sales_person": ""},

        # UOM
        {'uom_name': _('Unit'), 'doctype': 'UOM', 'name': _('Unit'), "must_be_whole_number": 1},
        {'uom_name': _('Box'), 'doctype': 'UOM', 'name': _('Box'), "must_be_whole_number": 1},
        {'uom_name': _('Kg'), 'doctype': 'UOM', 'name': _('Kg')},
        {'uom_name': _('Nos'), 'doctype': 'UOM', 'name': _('Nos'), "must_be_whole_number": 1},
        {'uom_name': _('Pair'), 'doctype': 'UOM', 'name': _('Pair'), "must_be_whole_number": 1},
        {'uom_name': _('Set'), 'doctype': 'UOM', 'name': _('Set'), "must_be_whole_number": 1},
        {'uom_name': _('Hour'), 'doctype': 'UOM', 'name': _('Hour')},
        {'uom_name': _('Minute'), 'doctype': 'UOM', 'name': _('Minute')},

        # Mode of Payment ("Check" is the US spelling of "Cheque")
        {'doctype': 'Mode of Payment', 'mode_of_payment': 'Check' if country=="United States" else _('Cheque')},
        {'doctype': 'Mode of Payment', 'mode_of_payment': _('Cash')},
        {'doctype': 'Mode of Payment', 'mode_of_payment': _('Credit Card')},
        {'doctype': 'Mode of Payment', 'mode_of_payment': _('Wire Transfer')},
        {'doctype': 'Mode of Payment', 'mode_of_payment': _('Bank Draft')},

        # Activity Type
        {'doctype': 'Activity Type', 'activity_type': _('Planning')},
        {'doctype': 'Activity Type', 'activity_type': _('Research')},
        {'doctype': 'Activity Type', 'activity_type': _('Proposal Writing')},
        {'doctype': 'Activity Type', 'activity_type': _('Execution')},
        {'doctype': 'Activity Type', 'activity_type': _('Communication')},

        # Industry Type
        {'doctype': 'Industry Type', 'industry': _('Accounting')},
        {'doctype': 'Industry Type', 'industry': _('Advertising')},
        {'doctype': 'Industry Type', 'industry': _('Aerospace')},
        {'doctype': 'Industry Type', 'industry': _('Agriculture')},
        {'doctype': 'Industry Type', 'industry': _('Airline')},
        {'doctype': 'Industry Type', 'industry': _('Apparel & Accessories')},
        {'doctype': 'Industry Type', 'industry': _('Automotive')},
        {'doctype': 'Industry Type', 'industry': _('Banking')},
        {'doctype': 'Industry Type', 'industry': _('Biotechnology')},
        {'doctype': 'Industry Type', 'industry': _('Broadcasting')},
        {'doctype': 'Industry Type', 'industry': _('Brokerage')},
        {'doctype': 'Industry Type', 'industry': _('Chemical')},
        {'doctype': 'Industry Type', 'industry': _('Computer')},
        {'doctype': 'Industry Type', 'industry': _('Consulting')},
        {'doctype': 'Industry Type', 'industry': _('Consumer Products')},
        {'doctype': 'Industry Type', 'industry': _('Cosmetics')},
        {'doctype': 'Industry Type', 'industry': _('Defense')},
        {'doctype': 'Industry Type', 'industry': _('Department Stores')},
        {'doctype': 'Industry Type', 'industry': _('Education')},
        {'doctype': 'Industry Type', 'industry': _('Electronics')},
        {'doctype': 'Industry Type', 'industry': _('Energy')},
        {'doctype': 'Industry Type', 'industry': _('Entertainment & Leisure')},
        {'doctype': 'Industry Type', 'industry': _('Executive Search')},
        {'doctype': 'Industry Type', 'industry': _('Financial Services')},
        {'doctype': 'Industry Type', 'industry': _('Food, Beverage & Tobacco')},
        {'doctype': 'Industry Type', 'industry': _('Grocery')},
        {'doctype': 'Industry Type', 'industry': _('Health Care')},
        {'doctype': 'Industry Type', 'industry': _('Internet Publishing')},
        {'doctype': 'Industry Type', 'industry': _('Investment Banking')},
        {'doctype': 'Industry Type', 'industry': _('Legal')},
        {'doctype': 'Industry Type', 'industry': _('Manufacturing')},
        {'doctype': 'Industry Type', 'industry': _('Motion Picture & Video')},
        {'doctype': 'Industry Type', 'industry': _('Music')},
        {'doctype': 'Industry Type', 'industry': _('Newspaper Publishers')},
        {'doctype': 'Industry Type', 'industry': _('Online Auctions')},
        {'doctype': 'Industry Type', 'industry': _('Pension Funds')},
        {'doctype': 'Industry Type', 'industry': _('Pharmaceuticals')},
        {'doctype': 'Industry Type', 'industry': _('Private Equity')},
        {'doctype': 'Industry Type', 'industry': _('Publishing')},
        {'doctype': 'Industry Type', 'industry': _('Real Estate')},
        {'doctype': 'Industry Type', 'industry': _('Retail & Wholesale')},
        {'doctype': 'Industry Type', 'industry': _('Securities & Commodity Exchanges')},
        {'doctype': 'Industry Type', 'industry': _('Service')},
        {'doctype': 'Industry Type', 'industry': _('Soap & Detergent')},
        {'doctype': 'Industry Type', 'industry': _('Software')},
        {'doctype': 'Industry Type', 'industry': _('Sports')},
        {'doctype': 'Industry Type', 'industry': _('Technology')},
        {'doctype': 'Industry Type', 'industry': _('Telecommunications')},
        {'doctype': 'Industry Type', 'industry': _('Television')},
        {'doctype': 'Industry Type', 'industry': _('Transportation')},
        {'doctype': 'Industry Type', 'industry': _('Venture Capital')},

        # default incoming email accounts
        {'doctype': "Email Account", "email_id": "sales@example.com", "append_to": "Lead"},
        {'doctype': "Email Account", "email_id": "support@example.com", "append_to": "Issue"},
        {'doctype': "Email Account", "email_id": "jobs@example.com", "append_to": "Job Applicant"}
    ]

    from frappe.modules import scrub
    for r in records:
        doc = frappe.new_doc(r.get("doctype"))
        doc.update(r)

        # ignore mandatory fields for root (tree) records, which have no parent
        parent_link_field = ("parent_" + scrub(doc.doctype))
        if doc.meta.get_field(parent_link_field) and not doc.get(parent_link_field):
            doc.ignore_mandatory = True

        doc.insert()
|
gangadharkadam/v5_erp
|
erpnext/setup/page/setup_wizard/install_fixtures.py
|
Python
|
agpl-3.0
| 12,019 | 0.018055 |
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import, print_function
import os
import re
import sys
# Check Sphinx version
import sphinx
# NOTE(review): lexicographic string comparison, not a real version parse --
# adequate for the 1.x versions targeted here, but verify if the minimum
# version is ever bumped.
if sphinx.__version__ < "1.0.1":
    raise RuntimeError("Sphinx 1.0.1 or newer required")
# Minimum Sphinx version declared to the build system.
needs_sphinx = '1.0'
# -----------------------------------------------------------------------------
# General configuration
# -----------------------------------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# Make the repository's local sphinxext/ directory importable for extensions.
sys.path.insert(0, os.path.abspath('../sphinxext'))
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.pngmath', 'numpydoc',
              'sphinx.ext.intersphinx', 'sphinx.ext.coverage',
              'sphinx.ext.doctest', 'sphinx.ext.autosummary',
              'matplotlib.sphinxext.plot_directive']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# General substitutions.
project = 'NumPy'
copyright = '2008-2009, The Scipy community'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
import numpy
# The short X.Y version (including .devXXXX, rcX, b1 suffixes if present)
# Drop the micro component (1.2.3 -> 1.2) while keeping any suffix.
version = re.sub(r'(\d+\.\d+)\.\d+(.*)', r'\1\2', numpy.__version__)
# Truncate anything that follows a ".devNNNN" tag (e.g. local git suffixes).
version = re.sub(r'(\.dev\d+).*?$', r'\1', version)
# The full version, including alpha/beta/rc tags.
release = numpy.__version__
print("%s %s" % (version, release))
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
# unused_docs = []
# The reST default role (used for this markup: `text`) to use for all documents.
default_role = "autolink"
# List of directories, relative to source directories, that shouldn't be searched
# for source files.
exclude_dirs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = False
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -----------------------------------------------------------------------------
# HTML output
# -----------------------------------------------------------------------------
# The theme lives in a git submodule one directory above this conf.py.
themedir = os.path.join(os.pardir, 'scipy-sphinx-theme', '_theme')
if not os.path.isdir(themedir):
    raise RuntimeError("Get the scipy-sphinx-theme first, "
                       "via git submodule init && git submodule update")
html_theme = 'scipy'
html_theme_path = [themedir]
# 'tags' is injected into conf.py by Sphinx itself; the 'scipyorg' tag is
# set on the command line (sphinx-build -t scipyorg) for scipy.org builds.
if 'scipyorg' in tags:
    # Build for the scipy.org web
    html_theme_options = {
        "edit_link": True,
        "sidebar": "right",
        "scipy_org_logo": True,
        "rootlinks": [("http://scipy.org/", "Scipy.org"),
                      ("http://docs.scipy.org/", "Docs")]
    }
else:
    # Default build
    html_theme_options = {
        "edit_link": False,
        "sidebar": "left",
        "scipy_org_logo": False,
        "rootlinks": []
    }
html_sidebars = {'index': 'indexsidebar.html'}
html_additional_pages = {
    'index': 'indexcontent.html',
}
html_title = "%s v%s Manual" % (project, version)
html_static_path = ['_static']
html_last_updated_fmt = '%b %d, %Y'
html_use_modindex = True
html_copy_source = False
html_domain_indices = False
html_file_suffix = '.html'
htmlhelp_basename = 'numpy'
# Options for the (deprecated) pngmath extension used to render formulas.
pngmath_use_preview = True
pngmath_dvipng_args = ['-gamma', '1.5', '-D', '96', '-bg', 'Transparent']
# -----------------------------------------------------------------------------
# LaTeX output
# -----------------------------------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
# Shared author credit for all generated manuals.
_stdauthor = 'Written by the NumPy community'
latex_documents = [
    ('reference/index', 'numpy-ref.tex', 'NumPy Reference',
     _stdauthor, 'manual'),
    ('user/index', 'numpy-user.tex', 'NumPy User Guide',
     _stdauthor, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
latex_preamble = r'''
\usepackage{amsmath}
\DeclareUnicodeCharacter{00A0}{\nobreakspace}
% In the parameters section, place a newline after the Parameters
% header
\usepackage{expdlist}
\let\latexdescription=\description
\def\description{\latexdescription{}{} \breaklabel}
% Make Examples/etc section headers smaller and more compact
\makeatletter
\titleformat{\paragraph}{\normalsize\py@HeaderFamily}%
{\py@TitleColor}{0em}{\py@TitleColor}{\py@NormalColor}
\titlespacing*{\paragraph}{0pt}{1ex}{0pt}
\makeatother
% Fix footer/header
\renewcommand{\chaptermark}[1]{\markboth{\MakeUppercase{\thechapter.\ #1}}{}}
\renewcommand{\sectionmark}[1]{\markright{\MakeUppercase{\thesection.\ #1}}}
'''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
latex_use_modindex = False
# -----------------------------------------------------------------------------
# Texinfo output
# -----------------------------------------------------------------------------
texinfo_documents = [
    ("contents", 'numpy', 'NumPy Documentation', _stdauthor, 'NumPy',
     "NumPy: array processing for numbers, strings, records, and objects.",
     'Programming',
     1),
]
# -----------------------------------------------------------------------------
# Intersphinx configuration
# -----------------------------------------------------------------------------
# Cross-project references to the Python, SciPy and matplotlib documentation.
intersphinx_mapping = {
    'python': ('https://docs.python.org/dev', None),
    'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
    'matplotlib': ('http://matplotlib.org', None)
}
# -----------------------------------------------------------------------------
# NumPy extensions
# -----------------------------------------------------------------------------
# If we want to do a phantom import from an XML file for all autodocs
phantom_import_file = 'dump.xml'
# Make numpydoc to generate plots for example sections
numpydoc_use_plots = True
# -----------------------------------------------------------------------------
# Autosummary
# -----------------------------------------------------------------------------
import glob
# Generate stub pages for every reference/*.rst document.
autosummary_generate = glob.glob("reference/*.rst")
# -----------------------------------------------------------------------------
# Coverage checker
# -----------------------------------------------------------------------------
# Patterns below are regexes; the triple-quoted-string-then-split() idiom
# yields one pattern per whitespace-separated token.
coverage_ignore_modules = r"""
""".split()
coverage_ignore_functions = r"""
test($|_) (some|all)true bitwise_not cumproduct pkgload
generic\.
""".split()
coverage_ignore_classes = r"""
""".split()
coverage_c_path = []
coverage_c_regexes = {}
coverage_ignore_c_items = {}
# -----------------------------------------------------------------------------
# Plots
# -----------------------------------------------------------------------------
# Prepended to every plot directive: fixed RNG seed keeps examples deterministic.
plot_pre_code = """
import numpy as np
np.random.seed(0)
"""
plot_include_source = True
plot_formats = [('png', 100), 'pdf']
import math
# Golden ratio; figures default to a 3*phi x 3 inch golden rectangle below.
phi = (math.sqrt(5) + 1) / 2
plot_rcparams = {
    'font.size': 8,
    'axes.titlesize': 8,
    'axes.labelsize': 8,
    'xtick.labelsize': 8,
    'ytick.labelsize': 8,
    'legend.fontsize': 8,
    'figure.figsize': (3 * phi, 3),
    'figure.subplot.bottom': 0.2,
    'figure.subplot.left': 0.2,
    'figure.subplot.right': 0.9,
    'figure.subplot.top': 0.85,
    'figure.subplot.wspace': 0.4,
    'text.usetex': False,
}
# -----------------------------------------------------------------------------
# Source code links
# -----------------------------------------------------------------------------
import inspect
from os.path import relpath, dirname
# Prefer Sphinx's builtin linkcode extension; fall back to numpydoc's copy.
# The for/else runs the else branch only if no candidate imported successfully.
for name in ['sphinx.ext.linkcode', 'numpydoc.linkcode']:
    try:
        __import__(name)
        extensions.append(name)
        break
    except ImportError:
        pass
else:
    print("NOTE: linkcode extension not found -- no links to source generated")
def linkcode_resolve(domain, info):
"""
Determine the URL corresponding to Python object
"""
if domain != 'py':
return None
modname = info['module']
fullname = info['fullname']
submod = sys.modules.get(modname)
if submod is None:
return None
obj = submod
for part in fullname.split('.'):
try:
obj = getattr(obj, part)
except:
return None
try:
fn = inspect.getsourcefile(obj)
except:
fn = None
if not fn:
return None
try:
source, lineno = inspect.getsourcelines(obj)
except:
lineno = None
if lineno:
linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1)
else:
linespec = ""
fn = relpath(fn, start=dirname(numpy.__file__))
if 'dev' in numpy.__version__:
return "http://github.com/numpy/numpy/blob/master/numpy/%s%s" % (
fn, linespec)
else:
return "http://github.com/numpy/numpy/blob/v%s/numpy/%s%s" % (
numpy.__version__, fn, linespec)
|
DailyActie/Surrogate-Model
|
01-codes/numpy-master/doc/source/conf.py
|
Python
|
mit
| 9,985 | 0.001202 |
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import copy
from ansible import constants as C
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils.network.cloudengine.ce import ce_provider_spec
from ansible.module_utils.network.common.utils import load_provider
# Reuse the CLI's global 'display' logger when running under ansible's
# command-line entry point; otherwise create a private Display instance.
try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()
class ActionModule(_ActionModule):
    """
    Action plugin for CloudEngine (ce) network modules.

    For ``transport: cli`` it replaces the task's declared ``local``
    connection with a persistent ``network_cli`` connection, opens (or
    reuses) the backing socket, normalizes the CLI prompt context, and then
    delegates to the standard ``normal`` action plugin.
    """

    def run(self, tmp=None, task_vars=None):
        """Validate the connection, set up transport, and run the module.

        Returns the module result dict; on misconfiguration returns a
        ``failed`` result instead of raising.
        """
        if self._play_context.connection != 'local':
            return dict(
                failed=True,
                msg='invalid connection specified, expected connection=local, '
                    'got %s' % self._play_context.connection
            )

        provider = load_provider(ce_provider_spec, self._task.args)
        transport = provider['transport'] or 'cli'

        display.vvvv('connection transport is %s' % transport, self._play_context.remote_addr)

        if transport == 'cli':
            # Clone the play context and re-target it at a network_cli
            # connection, letting provider values override the task's own
            # connection settings where present.
            pc = copy.deepcopy(self._play_context)
            pc.connection = 'network_cli'
            pc.network_os = 'ce'
            pc.remote_addr = provider['host'] or self._play_context.remote_addr
            pc.port = int(provider['port'] or self._play_context.port or 22)
            pc.remote_user = provider['username'] or self._play_context.connection_user
            pc.password = provider['password'] or self._play_context.password
            pc.timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT)

            # BUGFIX: dict.update() returns None, so the previous
            # `self._task.args['provider'] = provider.update(...)` stored
            # None in the task args. Update first, then assign the dict.
            provider.update(
                host=pc.remote_addr,
                port=pc.port,
                username=pc.remote_user,
                password=pc.password,
                ssh_keyfile=pc.private_key_file
            )
            self._task.args['provider'] = provider

            display.vvv('using connection plugin %s' % pc.connection, pc.remote_addr)
            connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
            socket_path = connection.run()
            display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
            if not socket_path:
                return {'failed': True,
                        'msg': 'unable to open shell. Please see: ' +
                               'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}

            # make sure we are in the right cli context which should be
            # enable mode and not config module
            conn = Connection(socket_path)
            out = conn.get_prompt()
            # A prompt ending in ']' means the device is still in a config
            # view; keep sending 'exit' until back at the top-level prompt.
            while to_text(out, errors='surrogate_then_replace').strip().endswith(']'):
                display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
                conn.send_command('exit')
                out = conn.get_prompt()

            task_vars['ansible_socket'] = socket_path

        # make sure a transport value is set in args
        self._task.args['transport'] = transport

        result = super(ActionModule, self).run(tmp, task_vars)
        return result
|
kbrebanov/ansible
|
lib/ansible/plugins/action/ce.py
|
Python
|
gpl-3.0
| 3,950 | 0.002025 |
from pathlib import Path
from django.utils import timezone
import factory
from photonix.accounts.models import User
from photonix.photos.models import Library, LibraryUser, Photo, PhotoFile, Tag, PhotoTag, Task
class UserFactory(factory.django.DjangoModelFactory):
    """Build a test ``User`` with every onboarding/setup flag completed."""
    class Meta:
        model = User
    username = 'test'
    email = 'test@example.com'
    # NOTE(review): 'persional' matches the model's (misspelled) field name;
    # renaming it only here would break the factory.
    has_config_persional_info = True
    has_created_library = True
    has_configured_importing = True
    has_configured_image_analysis = True
class LibraryFactory(factory.django.DjangoModelFactory):
    """Build a ``Library`` with a unique name and all classifiers enabled."""
    class Meta:
        model = Library
    # Sequence guarantees unique names across instances within a test run.
    name = factory.Sequence(lambda n: f'Test Library {n}')
    classification_color_enabled = True
    classification_location_enabled = True
    classification_style_enabled = True
    classification_object_enabled = True
    classification_face_enabled = True
    setup_stage_completed = True
class LibraryUserFactory(factory.django.DjangoModelFactory):
    """Link a ``User`` to a ``Library`` as its owner."""
    class Meta:
        model = LibraryUser
    library = factory.SubFactory(LibraryFactory)
    user = factory.SubFactory(UserFactory)
    owner = True
class PhotoFactory(factory.django.DjangoModelFactory):
    """Build a ``Photo`` attached to a freshly created ``Library``."""
    class Meta:
        model = Photo
    library = factory.SubFactory(LibraryFactory)
class PhotoFileFactory(factory.django.DjangoModelFactory):
    """Build a ``PhotoFile`` pointing at the bundled ``snow.jpg`` fixture."""
    class Meta:
        model = PhotoFile
    photo = factory.SubFactory(PhotoFactory)
    # Resolve the fixture path relative to this file so tests work from any CWD.
    path = str(Path(__file__).parent / 'photos' / 'snow.jpg')
    mimetype = 'image/jpeg'
    bytes = 1000
    # LazyAttribute defers evaluation to build time, so each instance gets the
    # timestamp of its own creation rather than module-import time.
    file_modified_at = factory.LazyAttribute(lambda o: timezone.now())
class TagFactory(factory.django.DjangoModelFactory):
    """Build a ``Tag`` with a unique name in a fresh ``Library``."""
    class Meta:
        model = Tag
    library = factory.SubFactory(LibraryFactory)
    name = factory.Sequence(lambda n: f'Tag {n}')
class PhotoTagFactory(factory.django.DjangoModelFactory):
    """Attach a ``Tag`` to a ``Photo`` (both created on demand)."""
    class Meta:
        model = PhotoTag
    photo = factory.SubFactory(PhotoFactory)
    tag = factory.SubFactory(TagFactory)
class TaskFactory(factory.django.DjangoModelFactory):
    """Build a style-classification ``Task``."""
    class Meta:
        model = Task
    type = 'classify.style'
    # 'P' presumably means pending -- confirm against Task.status choices.
    status = 'P'
    library = factory.SubFactory(LibraryFactory)
|
damianmoore/photo-manager
|
tests/factories.py
|
Python
|
agpl-3.0
| 2,156 | 0.000464 |
import numpy as np
import torch
import torch.nn as nn
from itertools import product
from torch.nn import functional as F
#import pytorch_fft.fft as fft
# def laplace():
# return np.array([[0.25, 0.5, 0.25], [0.5, -3.0, 0.5], [0.25, 0.5, 0.25]]).astype(np.float32)[None, None, ...]
def laplace():
    """Return the 3x3 discrete Laplacian kernel shaped (1, 1, 3, 3) for conv2d."""
    kernel = np.array([[0, -1, 0],
                       [-1, 4, -1],
                       [0, -1, 0]], dtype=np.float32)
    # Leading singleton out/in-channel axes make it directly usable as a
    # conv2d weight tensor.
    return kernel.reshape(1, 1, 3, 3)
def laplace3d():
    """Return the 3x3x3 discrete Laplacian kernel shaped (1, 1, 3, 3, 3) for conv3d."""
    kernel = np.zeros((3, 3, 3), dtype=np.float32)
    kernel[1, 1, 1] = -6.0
    # Each of the six face-adjacent neighbours of the centre gets weight 1.
    for axis in range(3):
        index = [1, 1, 1]
        for end in (0, 2):
            index[axis] = end
            kernel[tuple(index)] = 1.0
    return kernel[None, None, ...]
#def fft_smooth(grad, factor=1/4):
# """
# Tones down the gradient with (1/f)**(2 * factor) filter in the Fourier domain.
# Equivalent to low-pass filtering in the spatial domain.
#
# `grad` is an at least 2D CUDA Tensor, where the last two dimensions are treated
# as images to apply smoothening transformation.
#
# `factor` controls the strength of the fall off.
# """
# h, w = grad.size()[-2:]
# tw = np.minimum(np.arange(0, w), np.arange(w, 0, -1), dtype=np.float32)#[-(w+2)//2:]
# th = np.minimum(np.arange(0, h), np.arange(h, 0, -1), dtype=np.float32)
# t = 1 / np.maximum(1.0, (tw[None,:] ** 2 + th[:,None] ** 2) ** (factor))
# F = torch.Tensor(t / t.mean()).cuda()
# rp, ip = fft.fft2(grad.data, torch.zeros_like(grad.data))
# return Variable(fft.ifft2(rp * F, ip * F)[0])
class Laplace(nn.Module):
    """
    2D Laplacian filter applied via ``conv2d`` to a stack of single-channel
    maps.
    """
    def __init__(self, padding=0):
        super().__init__()
        self._padding = padding
        # Registered as a buffer: follows the module across devices and is
        # serialized with it, but is never updated by the optimizer.
        self.register_buffer('filter', torch.from_numpy(laplace()))
    def forward(self, x):
        # No bias term: output is the pure curvature response.
        return F.conv2d(input=x, weight=self.filter, bias=None,
                        padding=self._padding)
class Laplace3d(nn.Module):
    """
    3D Laplacian filter applied via ``conv3d`` to a stack of single-channel
    volumes.
    """
    def __init__(self):
        super().__init__()
        # Fixed (non-trainable) kernel registered as a buffer so it moves
        # with the module and is included in the state dict.
        self.register_buffer('filter', torch.from_numpy(laplace3d()))
    def forward(self, x):
        return F.conv3d(input=x, weight=self.filter, bias=None)
class LaplaceL2(nn.Module):
    """
    Squared-Laplacian (L2) penalty on the spatial profile of 2D conv
    weights; small values mean spatially smooth filters.
    """
    def __init__(self, padding=0):
        super().__init__()
        self.laplace = Laplace(padding=padding)
    def forward(self, x, weights=None):
        out_ch, in_ch, kh, kw = x.size()
        scale = 1.0 if weights is None else weights
        # Collapse the channel axes so every kernel slice is filtered as an
        # independent single-channel image, then restore the original shape.
        flat = x.view(out_ch * in_ch, 1, kh, kw)
        curvature = self.laplace(flat).view(out_ch, in_ch, kh, kw)
        return (curvature.pow(2) * scale).mean() / 2
class LaplaceL23d(nn.Module):
    """
    Squared-Laplacian (L2) penalty on the profile of 3D conv weights.
    """
    def __init__(self):
        super().__init__()
        self.laplace = Laplace3d()
    def forward(self, x):
        out_ch, in_ch, kt, kh, kw = x.size()
        # One single-channel volume per (out, in) kernel slice.
        flat = x.view(out_ch * in_ch, 1, kt, kh, kw)
        return self.laplace(flat).pow(2).mean() / 2
class FlatLaplaceL23d(nn.Module):
    """
    Squared-Laplacian (L2) penalty, applied spatially (2D), for 3D conv
    weights whose temporal extent is a single step.
    """
    def __init__(self):
        super().__init__()
        self.laplace = Laplace()
    def forward(self, x):
        out_ch, in_ch, kt, kh, kw = x.size()
        assert kt == 1, 'time dimension must be one'
        # The singleton time axis is folded away; each kernel slice becomes
        # one single-channel 2D image.
        flat = x.view(out_ch * in_ch, 1, kh, kw)
        return self.laplace(flat).pow(2).mean() / 2
class LaplaceL1(nn.Module):
    """
    Absolute-Laplacian (L1) penalty on the spatial profile of 2D conv
    weights.
    """
    def __init__(self, padding=0):
        super().__init__()
        self.laplace = Laplace(padding=padding)
    def forward(self, x):
        out_ch, in_ch, kh, kw = x.size()
        flat = x.view(out_ch * in_ch, 1, kh, kw)
        return self.laplace(flat).abs().mean()
|
atlab/attorch
|
attorch/regularizers.py
|
Python
|
mit
| 3,598 | 0.002779 |
# -*- coding: utf-8 -*-
#
#
# Author: Guewen Baconnier
# Copyright 2010-2012 Camptocamp SA
# Copyright (C) 2011 Akretion Sébastien BEAU <sebastien.beau@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Addon manifest: metadata read by the OpenERP module loader.
{
    "name": "Immediately Usable Stock Quantity",
    "version": "1.0",
    "depends": ["product", "stock", ],
    "author": "Camptocamp",
    "license": "AGPL-3",
    "description": """
Compute the immediately usable stock.
Immediately usable is computed : Quantity on Hand - Outgoing Stock.
""",
    # NOTE(review): URL looks like a leftover from an account-module template;
    # verify it points at the intended page.
    "website": "http://tinyerp.com/module_account.html",
    "category": "Generic Modules/Stock",
    "data": ["product_view.xml",
             ],
    "active": False,
    # Module is flagged not installable in this state.
    'installable': False
}
|
Therp/stock-logistics-warehouse
|
__unported__/stock_available_immediately/__openerp__.py
|
Python
|
agpl-3.0
| 1,363 | 0 |
from organise import app

if __name__ == "__main__":
    # Start the development server only when this file is executed directly
    # (e.g. `python run.py`), not when it is imported by a WSGI server or test.
    app.run()
|
msanatan/organise
|
run.py
|
Python
|
mit
| 36 | 0 |
#
# Copyright (C) 2006, 2013 Red Hat, Inc.
# Copyright (C) 2006 Daniel P. Berrange <berrange@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
# pylint: disable=E0611
from gi.repository import GLib
from gi.repository import GObject
from gi.repository import Gtk
# pylint: enable=E0611
import logging
import re
import Queue
import threading
import libvirt
from virtinst import util
from virtManager import packageutils
from virtManager.about import vmmAbout
from virtManager.baseclass import vmmGObject
from virtManager.clone import vmmCloneVM
from virtManager.connect import vmmConnect
from virtManager.connection import vmmConnection
from virtManager.preferences import vmmPreferences
from virtManager.manager import vmmManager
from virtManager.migrate import vmmMigrateDialog
from virtManager.details import vmmDetails
from virtManager.asyncjob import vmmAsyncJob
from virtManager.create import vmmCreate
from virtManager.host import vmmHost
from virtManager.error import vmmErrorDialog
from virtManager.systray import vmmSystray
from virtManager.delete import vmmDeleteDialog
# Enable this to get a report of leaked objects on app shutdown
# gtk3/pygobject has issues here as of Fedora 18
debug_ref_leaks = False
# Page indices used when asking for a specific tab of the VM details window.
DETAILS_PERF = 1
DETAILS_CONFIG = 2
DETAILS_CONSOLE = 3
# Priorities for entries on the connection tick PriorityQueue; the lower
# number (PRIO_HIGH == 1) is dequeued first.
(PRIO_HIGH,
 PRIO_LOW) = range(1, 3)
class vmmEngine(vmmGObject):
__gsignals__ = {
"conn-added": (GObject.SignalFlags.RUN_FIRST, None, [object]),
"conn-removed": (GObject.SignalFlags.RUN_FIRST, None, [str]),
}
def __init__(self):
vmmGObject.__init__(self)
self.windowConnect = None
self.windowPreferences = None
self.windowAbout = None
self.windowCreate = None
self.windowManager = None
self.windowMigrate = None
self.conns = {}
self.err = vmmErrorDialog()
self.timer = None
self.last_timeout = 0
self.systray = None
self.delete_dialog = None
self.application = Gtk.Application(
application_id="com.redhat.virt-manager",
flags=0)
self.application.connect("activate", self._activate)
self._appwindow = Gtk.Window()
self._tick_counter = 0
self._tick_thread_slow = False
self._tick_thread = threading.Thread(name="Tick thread",
target=self._handle_tick_queue,
args=())
self._tick_thread.daemon = True
self._tick_queue = Queue.PriorityQueue(100)
self.inspection = None
self._create_inspection_thread()
# Counter keeping track of how many manager and details windows
# are open. When it is decremented to 0, close the app or
# keep running in system tray if enabled
self.windows = 0
# Public bits set by virt-manager cli
self.skip_autostart = False
self.uri_at_startup = None
self.uri_cb = None
self.show_manager_window = True
self.init_systray()
self.add_gconf_handle(
self.config.on_stats_update_interval_changed(self.reschedule_timer))
self.add_gconf_handle(
self.config.on_view_system_tray_changed(self.system_tray_changed))
self.schedule_timer()
self.load_stored_uris()
self._tick_thread.start()
self.tick()
def _activate(self, ignore):
if self.show_manager_window:
self.show_manager()
else:
self.get_manager()
self.application.add_window(self._appwindow)
if self.uri_at_startup:
conn = self.make_conn(self.uri_at_startup)
self.register_conn(conn, skip_config=True)
if conn and self.uri_cb:
conn.connect_opt_out("resources-sampled", self.uri_cb)
self.connect_to_uri(self.uri_at_startup)
if not self.skip_autostart:
self.autostart_conns()
def init_systray(self):
if self.systray:
return
self.systray = vmmSystray(self)
self.systray.connect("action-toggle-manager", self._do_toggle_manager)
self.systray.connect("action-suspend-domain", self._do_suspend_domain)
self.systray.connect("action-resume-domain", self._do_resume_domain)
self.systray.connect("action-run-domain", self._do_run_domain)
self.systray.connect("action-shutdown-domain", self._do_shutdown_domain)
self.systray.connect("action-reboot-domain", self._do_reboot_domain)
self.systray.connect("action-destroy-domain", self._do_destroy_domain)
self.systray.connect("action-reset-domain", self._do_reset_domain)
self.systray.connect("action-save-domain", self._do_save_domain)
self.systray.connect("action-show-domain", self._do_show_vm)
self.systray.connect("action-migrate-domain", self._do_show_migrate)
self.systray.connect("action-delete-domain", self._do_delete_domain)
self.systray.connect("action-clone-domain", self._do_show_clone)
self.systray.connect("action-exit-app", self.exit_app)
def system_tray_changed(self, *ignore):
systray_enabled = self.config.get_view_system_tray()
if self.windows == 0 and not systray_enabled:
# Show the manager so that the user can control the application
self.show_manager()
def add_default_conn(self, manager):
# Only add default if no connections are currently known
if self.config.get_conn_uris():
return
self.timeout_add(1000, self._add_default_conn, manager)
def _add_default_conn(self, manager):
# Manager fail message
msg = _("Could not detect a default hypervisor. Make\n"
"sure the appropriate virtualization packages\n"
"are installed (kvm, qemu, libvirt, etc.), and\n"
"that libvirtd is running.\n\n"
"A hypervisor connection can be manually\n"
"added via File->Add Connection")
logging.debug("Determining default libvirt URI")
ret = None
try:
libvirt_packages = self.config.libvirt_packages
packages = self.config.hv_packages + libvirt_packages
ret = packageutils.check_packagekit(manager, manager.err, packages)
except:
logging.exception("Error talking to PackageKit")
if ret:
tryuri = "qemu:///system"
else:
tryuri = vmmConnect.default_uri(always_system=True)
if tryuri is None:
manager.set_startup_error(msg)
return
warnmsg = _("The 'libvirtd' service will need to be started.\n\n"
"After that, virt-manager will connect to libvirt on\n"
"the next application start up.")
# Do the initial connection in an idle callback, so the
# packagekit async dialog has a chance to go away
def idle_connect():
do_start = packageutils.start_libvirtd()
connected = self.connect_to_uri(tryuri,
autoconnect=True, do_start=do_start)
if not connected and do_start:
manager.err.ok(_("Libvirt service must be started"), warnmsg)
self.idle_add(idle_connect)
def load_stored_uris(self):
uris = self.config.get_conn_uris()
if not uris:
return
logging.debug("About to connect to uris %s", uris)
for uri in uris:
conn = self.make_conn(uri)
self.register_conn(conn, skip_config=True)
def autostart_conns(self):
"""
We serialize conn autostart, so polkit/ssh-askpass doesn't spam
"""
queue = Queue.Queue()
auto_conns = [uri for uri in self.conns
if self.conns[uri]["conn"].get_autoconnect()]
def add_next_to_queue():
if not auto_conns:
queue.put(None)
else:
queue.put(auto_conns.pop(0))
def state_change_cb(conn):
if (conn.get_state() == conn.STATE_ACTIVE or
conn.get_state() == conn.STATE_INACTIVE):
add_next_to_queue()
conn.disconnect_by_func(state_change_cb)
def connect(uri):
self.connect_to_uri(uri)
def handle_queue():
while True:
uri = queue.get()
if uri is None:
return
if uri not in self.conns:
add_next_to_queue()
continue
conn = self.conns[uri]["conn"]
conn.connect("state-changed", state_change_cb)
self.idle_add(connect, uri)
add_next_to_queue()
thread = threading.Thread(name="Autostart thread",
target=handle_queue, args=())
thread.daemon = True
thread.start()
def _do_vm_removed(self, conn, vmuuid):
hvuri = conn.get_uri()
if vmuuid not in self.conns[hvuri]["windowDetails"]:
return
self.conns[hvuri]["windowDetails"][vmuuid].cleanup()
del(self.conns[hvuri]["windowDetails"][vmuuid])
def _do_conn_changed(self, conn):
if (conn.get_state() == conn.STATE_ACTIVE or
conn.get_state() == conn.STATE_CONNECTING):
return
hvuri = conn.get_uri()
for vmuuid in self.conns[hvuri]["windowDetails"].keys():
self.conns[hvuri]["windowDetails"][vmuuid].cleanup()
del(self.conns[hvuri]["windowDetails"][vmuuid])
if (self.windowCreate and
self.windowCreate.conn and
self.windowCreate.conn.get_uri() == hvuri):
self.windowCreate.close()
def reschedule_timer(self, *args, **kwargs):
ignore = args
ignore = kwargs
self.schedule_timer()
def schedule_timer(self):
interval = self.config.get_stats_update_interval() * 1000
if self.timer is not None:
self.remove_gobject_timeout(self.timer)
self.timer = None
self.timer = self.timeout_add(interval, self.tick)
def _add_obj_to_tick_queue(self, obj, isprio, **kwargs):
if self._tick_queue.full():
if not self._tick_thread_slow:
logging.debug("Tick is slow, not running at requested rate.")
self._tick_thread_slow = True
return
self._tick_counter += 1
self._tick_queue.put((isprio and PRIO_HIGH or PRIO_LOW,
self._tick_counter,
obj, kwargs))
def _schedule_priority_tick(self, conn, kwargs):
self._add_obj_to_tick_queue(conn, True, **kwargs)
def tick(self):
for uri in self.conns.keys():
conn = self.conns[uri]["conn"]
self._add_obj_to_tick_queue(conn, False,
stats_update=True, pollvm=True)
return 1
def _handle_tick_queue(self):
while True:
ignore1, ignore2, obj, kwargs = self._tick_queue.get()
self._tick_single_conn(obj, kwargs)
self._tick_queue.task_done()
return 1
def _tick_single_conn(self, conn, kwargs):
e = None
try:
conn.tick(**kwargs)
except KeyboardInterrupt:
raise
except Exception, e:
pass
if e is None:
return
from_remote = getattr(libvirt, "VIR_FROM_REMOTE", None)
from_rpc = getattr(libvirt, "VIR_FROM_RPC", None)
sys_error = getattr(libvirt, "VIR_ERR_SYSTEM_ERROR", None)
dom = -1
code = -1
if isinstance(e, libvirt.libvirtError):
dom = e.get_error_domain()
code = e.get_error_code()
if (dom in [from_remote, from_rpc] and
code in [sys_error]):
logging.exception("Could not refresh connection %s",
conn.get_uri())
logging.debug("Closing connection since libvirtd "
"appears to have stopped")
else:
error_msg = _("Error polling connection '%s': %s") \
% (conn.get_uri(), e)
self.idle_add(lambda: self.err.show_err(error_msg))
self.idle_add(conn.close)
def increment_window_counter(self, src):
ignore = src
self.windows += 1
logging.debug("window counter incremented to %s", self.windows)
def decrement_window_counter(self, src):
self.windows -= 1
logging.debug("window counter decremented to %s", self.windows)
if self._can_exit():
# Defer this to an idle callback, since we can race with
# a vmmDetails window being deleted.
self.idle_add(self.exit_app, src)
def _can_exit(self):
# Don't exit if system tray is enabled
return (self.windows <= 0 and
self.systray and
not self.systray.is_visible())
def _cleanup(self):
self.err = None
if self.inspection:
self.inspection.cleanup()
self.inspection = None
if self.timer is not None:
GLib.source_remove(self.timer)
if self.systray:
self.systray.cleanup()
self.systray = None
self.get_manager()
if self.windowManager:
self.windowManager.cleanup()
self.windowManager = None
if self.windowPreferences:
self.windowPreferences.cleanup()
self.windowPreferences = None
if self.windowAbout:
self.windowAbout.cleanup()
self.windowAbout = None
if self.windowConnect:
self.windowConnect.cleanup()
self.windowConnect = None
if self.windowCreate:
self.windowCreate.cleanup()
self.windowCreate = None
if self.windowMigrate:
self.windowMigrate.cleanup()
self.windowMigrate = None
if self.delete_dialog:
self.delete_dialog.cleanup()
self.delete_dialog = None
# Do this last, so any manually 'disconnected' signals
# take precedence over cleanup signal removal
for uri in self.conns:
self.cleanup_conn(uri)
self.conns = {}
    def exit_app(self, src):
        """Shut the whole application down: run cleanup, optionally report
        leaked tracked objects, kill any nested GTK main loops, and detach
        the app window. 'src' is the UI object initiating the exit."""
        if self.err is None:
            # Already in cleanup
            return
        self.cleanup()
        # debug_ref_leaks is a module-level flag; when set, every tracked
        # object still registered in the config is reported as a leak.
        if debug_ref_leaks:
            objs = self.config.get_objects()
            # Engine will always appear to leak
            objs.remove(self.object_key)
            if src.object_key in objs:
                # UI that initiates the app exit will always appear to leak
                objs.remove(src.object_key)
            for name in objs:
                logging.debug("Leaked %s", name)
        logging.debug("Exiting app normally.")
        # We need this if there are any asyncdialog fobjs running
        if Gtk.main_level():
            logging.debug("%s other gtk main loops running, killing them.",
                          Gtk.main_level())
            for ignore in range(Gtk.main_level()):
                Gtk.main_quit()
        self.application.remove_window(self._appwindow)
    def _create_inspection_thread(self):
        """Start the libguestfs inspection helper thread (if the local
        install supports it) and forward conn add/remove events to it."""
        logging.debug("libguestfs inspection support: %s",
                      self.config.support_inspection)
        if not self.config.support_inspection:
            return
        # Imported lazily so virt-manager runs without the inspection deps.
        from virtManager.inspection import vmmInspection
        self.inspection = vmmInspection()
        self.inspection.start()
        self.connect("conn-added", self.inspection.conn_added)
        self.connect("conn-removed", self.inspection.conn_removed)
        return
    def make_conn(self, uri, probe=False):
        """Return the vmmConnection for 'uri', creating and registering it
        (plus its per-connection window bookkeeping) on first use.

        'probe' marks connections created speculatively from the connect
        dialog; connect errors then offer to forget the connection.
        """
        conn = self._check_conn(uri)
        if conn:
            return conn
        conn = vmmConnection(uri)
        # Per-connection state: the conn object plus lazily created windows.
        self.conns[uri] = {
            "conn": conn,
            "windowHost": None,
            "windowDetails": {},
            "windowClone": None,
            "probeConnection": probe
        }
        conn.connect("vm-removed", self._do_vm_removed)
        conn.connect("state-changed", self._do_conn_changed)
        conn.connect("connect-error", self._connect_error)
        conn.connect("priority-tick", self._schedule_priority_tick)
        return conn
def register_conn(self, conn, skip_config=False):
# if `skip_config' then the connection is only showed in the ui and
# not added to the config.
if not skip_config and conn.get_uri() not in \
(self.config.get_conn_uris() or []):
self.config.add_conn(conn.get_uri())
self.emit("conn-added", conn)
def connect_to_uri(self, uri, autoconnect=None, do_start=True, probe=False):
try:
conn = self.make_conn(uri, probe=probe)
self.register_conn(conn)
if autoconnect is not None:
conn.set_autoconnect(bool(autoconnect))
if do_start:
conn.open()
else:
try:
conn.open()
except:
return None
return conn
except Exception:
logging.exception("Error connecting to %s", uri)
return None
def cleanup_conn(self, uri):
try:
if self.conns[uri]["windowHost"]:
self.conns[uri]["windowHost"].cleanup()
if self.conns[uri]["windowClone"]:
self.conns[uri]["windowClone"].cleanup()
details = self.conns[uri]["windowDetails"]
for win in details.values():
win.cleanup()
self.conns[uri]["conn"].cleanup()
except:
logging.exception("Error cleaning up conn in engine")
def remove_conn(self, src, uri):
ignore = src
self.cleanup_conn(uri)
del(self.conns[uri])
self.emit("conn-removed", uri)
self.config.remove_conn(uri)
    def connect(self, name, callback, *args):
        """Wrap vmmGObject.connect: a new "conn-added" subscriber is
        immediately replayed every already-registered connection, so it
        cannot miss connections added before it subscribed."""
        handle_id = vmmGObject.connect(self, name, callback, *args)
        if name == "conn-added":
            for uri in self.conns.keys():
                self.emit("conn-added",
                          self.conns[uri]["conn"])
        return handle_id
def _check_conn(self, uri):
conn = self.conns.get(uri)
if conn:
return conn["conn"]
return None
def _lookup_conn(self, uri):
conn = self._check_conn(uri)
if not conn:
raise RuntimeError(_("Unknown connection URI %s") % uri)
return conn
    def _connect_error(self, conn, errmsg, tb, warnconsole):
        """Handler for a connection's "connect-error" signal.

        Builds a user-facing message plus a hint tailored to the failure
        (remote netcat/ssh-askpass problems, missing local libvirtd, Xen
        setup), then shows it. For probed connections the dialog becomes a
        yes/no "remember this connection?" question.
        """
        errmsg = errmsg.strip(" \n")
        tb = tb.strip(" \n")
        hint = ""
        show_errmsg = True
        if conn.is_remote():
            logging.debug(conn.get_transport())
            # nc without -U support: the remote netcat can't do UNIX sockets.
            if re.search(r"nc: .* -- 'U'", tb):
                hint += _("The remote host requires a version of netcat/nc\n"
                          "which supports the -U option.")
                show_errmsg = False
            elif (conn.get_transport()[0] == "ssh" and
                  re.search(r"ssh-askpass", tb)):
                # Offer to install the askpass package via PackageKit, then
                # retry opening the connection.
                if self.config.askpass_package:
                    ret = packageutils.check_packagekit(
                                            None,
                                            self.err,
                                            self.config.askpass_package)
                    if ret:
                        conn.open()
                        return
                hint += _("You need to install openssh-askpass or "
                          "similar\nto connect to this host.")
                show_errmsg = False
            else:
                hint += _("Verify that the 'libvirtd' daemon is running\n"
                          "on the remote host.")
        elif conn.is_xen():
            hint += _("Verify that:\n"
                      " - A Xen host kernel was booted\n"
                      " - The Xen service has been started")
        else:
            if warnconsole:
                hint += _("Could not detect a local session: if you are \n"
                          "running virt-manager over ssh -X or VNC, you \n"
                          "may not be able to connect to libvirt as a \n"
                          "regular user. Try running as root.")
                show_errmsg = False
            elif re.search(r"libvirt-sock", tb):
                hint += _("Verify that the 'libvirtd' daemon is running.")
                show_errmsg = False
        probe_connection = self.conns[conn.get_uri()]["probeConnection"]
        msg = _("Unable to connect to libvirt.")
        if show_errmsg:
            msg += "\n\n%s" % errmsg
        if hint:
            msg += "\n\n%s" % hint
        msg = msg.strip("\n")
        details = msg
        details += "\n\n"
        details += "Libvirt URI is: %s\n\n" % conn.get_uri()
        details += tb
        if probe_connection:
            msg += "\n\n%s" % _("Would you still like to remember this connection?")
        title = _("Virtual Machine Manager Connection Failure")
        if probe_connection:
            remember_connection = self.err.show_err(msg, details, title,
                    buttons=Gtk.ButtonsType.YES_NO,
                    dialog_type=Gtk.MessageType.QUESTION, modal=True)
            if remember_connection:
                self.conns[conn.get_uri()]["probeConnection"] = False
            else:
                # User declined: reopen the connect dialog and forget the
                # failed probe connection.
                self.idle_add(self._do_edit_connect, self.windowManager, conn)
        else:
            if self._can_exit():
                # Last window: show modally, then exit the app.
                self.err.show_err(msg, details, title, modal=True)
                self.idle_add(self.exit_app, conn)
            else:
                self.err.show_err(msg, details, title)
####################
# Dialog launchers #
####################
    def _do_show_about(self, src):
        """Show (lazily creating) the About dialog."""
        try:
            if self.windowAbout is None:
                self.windowAbout = vmmAbout()
            self.windowAbout.show()
        except Exception, e:
            src.err.show_err(_("Error launching 'About' dialog: %s") % str(e))
def _get_preferences(self):
if self.windowPreferences:
return self.windowPreferences
obj = vmmPreferences()
self.windowPreferences = obj
return self.windowPreferences
    def _do_show_preferences(self, src):
        """Show the Preferences dialog, parented on the caller's window."""
        try:
            self._get_preferences().show(src.topwin)
        except Exception, e:
            src.err.show_err(_("Error launching preferences: %s") % str(e))
    def _get_host_dialog(self, uri):
        """Return the Host details window for 'uri', lazily creating it and
        wiring its action signals back into the engine."""
        if self.conns[uri]["windowHost"]:
            return self.conns[uri]["windowHost"]
        con = self._lookup_conn(uri)
        obj = vmmHost(con)
        obj.connect("action-exit-app", self.exit_app)
        obj.connect("action-view-manager", self._do_show_manager)
        obj.connect("action-restore-domain", self._do_restore_domain)
        # Keep the global window count in sync for exit tracking.
        obj.connect("host-opened", self.increment_window_counter)
        obj.connect("host-closed", self.decrement_window_counter)
        self.conns[uri]["windowHost"] = obj
        return self.conns[uri]["windowHost"]
    def _do_show_host(self, src, uri):
        """Show the Host details window for 'uri'."""
        try:
            self._get_host_dialog(uri).show()
        except Exception, e:
            src.err.show_err(_("Error launching host dialog: %s") % str(e))
    def _get_connect_dialog(self):
        """Return the single "Open Connection" dialog, lazily creating it."""
        if self.windowConnect:
            return self.windowConnect
        def completed(src, uri, autoconnect):
            # User confirmed: try to connect, probing so failures offer to
            # forget the connection again.
            ignore = src
            return self.connect_to_uri(uri, autoconnect, probe=True)
        def cancelled(src):
            # With no connections configured at all, cancelling the dialog
            # means there is nothing left to show -- exit.
            if len(self.conns.keys()) == 0:
                self.exit_app(src)
        obj = vmmConnect()
        obj.connect("completed", completed)
        obj.connect("cancelled", cancelled)
        self.windowConnect = obj
        return self.windowConnect
    def _do_show_connect(self, src, reset_state=True):
        """Show the Open Connection dialog; 'reset_state' clears any
        previously entered values."""
        try:
            self._get_connect_dialog().show(src.topwin, reset_state)
        except Exception, e:
            src.err.show_err(_("Error launching connect dialog: %s") % str(e))
    def _do_edit_connect(self, src, connection):
        """Reopen the connect dialog (without resetting its state) after a
        failed probe connection, then drop the failed connection whether or
        not showing the dialog succeeded."""
        try:
            self._do_show_connect(src, False)
        finally:
            self.remove_conn(None, connection.get_uri())
    def _get_details_dialog(self, uri, uuid):
        """Return the VM details window for (uri, uuid), lazily creating it
        and wiring every domain action signal back into the engine."""
        if uuid in self.conns[uri]["windowDetails"]:
            return self.conns[uri]["windowDetails"][uuid]
        con = self._lookup_conn(uri)
        obj = vmmDetails(con.get_vm(uuid))
        obj.connect("action-save-domain", self._do_save_domain)
        obj.connect("action-destroy-domain", self._do_destroy_domain)
        obj.connect("action-reset-domain", self._do_reset_domain)
        obj.connect("action-suspend-domain", self._do_suspend_domain)
        obj.connect("action-resume-domain", self._do_resume_domain)
        obj.connect("action-run-domain", self._do_run_domain)
        obj.connect("action-shutdown-domain", self._do_shutdown_domain)
        obj.connect("action-reboot-domain", self._do_reboot_domain)
        obj.connect("action-exit-app", self.exit_app)
        obj.connect("action-view-manager", self._do_show_manager)
        obj.connect("action-migrate-domain", self._do_show_migrate)
        obj.connect("action-delete-domain", self._do_delete_domain)
        obj.connect("action-clone-domain", self._do_show_clone)
        # Keep the global window count in sync for exit tracking.
        obj.connect("details-opened", self.increment_window_counter)
        obj.connect("details-closed", self.decrement_window_counter)
        self.conns[uri]["windowDetails"][uuid] = obj
        return self.conns[uri]["windowDetails"][uuid]
def _find_vm_by_id(self, uri, domstr):
vms = self.conns[uri]["conn"].vms
if domstr in vms:
return domstr
for vm in vms.values():
if domstr.isdigit():
if int(domstr) == vm.get_id():
return vm.get_uuid()
elif domstr == vm.get_name():
return vm.get_uuid()
    def _show_vm_helper(self, src, uri, domstr, page=None, forcepage=False):
        """Open the details window for the VM identified by 'domstr'
        (UUID, numeric id, or name), optionally jumping to a specific page
        (one of the module-level DETAILS_* constants)."""
        try:
            uuid = self._find_vm_by_id(uri, domstr)
            if not uuid:
                # This will only happen if --show-* option was used during
                # virt-manager launch and an invalid UUID is passed.
                # The error message must be sync otherwise the user will not
                # know why the application ended.
                self.err.show_err("%s does not have VM '%s'" %
                    (uri, domstr), modal=True)
                return
            details = self._get_details_dialog(uri, uuid)
            if forcepage or not details.is_visible():
                if page == DETAILS_PERF:
                    details.activate_performance_page()
                elif page == DETAILS_CONFIG:
                    details.activate_config_page()
                elif page == DETAILS_CONSOLE:
                    details.activate_console_page()
                elif page is None:
                    details.activate_default_page()
            details.show()
        except Exception, e:
            src.err.show_err(_("Error launching details: %s") % str(e))
        finally:
            # CLI --show-* flows may have no other window open; if so the
            # failed/finished show should end the app.
            if self._can_exit():
                self.idle_add(self.exit_app, src)
def _do_show_vm(self, src, uri, uuid):
self._show_vm_helper(src, uri, uuid)
    def get_manager(self):
        """Return the main Manager window, lazily creating it and wiring
        all of its action signals into the engine (and the engine's
        conn-added/removed signals back into it)."""
        if self.windowManager:
            return self.windowManager
        obj = vmmManager()
        obj.connect("action-suspend-domain", self._do_suspend_domain)
        obj.connect("action-resume-domain", self._do_resume_domain)
        obj.connect("action-run-domain", self._do_run_domain)
        obj.connect("action-shutdown-domain", self._do_shutdown_domain)
        obj.connect("action-reboot-domain", self._do_reboot_domain)
        obj.connect("action-destroy-domain", self._do_destroy_domain)
        obj.connect("action-reset-domain", self._do_reset_domain)
        obj.connect("action-save-domain", self._do_save_domain)
        obj.connect("action-migrate-domain", self._do_show_migrate)
        obj.connect("action-delete-domain", self._do_delete_domain)
        obj.connect("action-clone-domain", self._do_show_clone)
        obj.connect("action-show-domain", self._do_show_vm)
        obj.connect("action-show-preferences", self._do_show_preferences)
        obj.connect("action-show-create", self._do_show_create)
        obj.connect("action-show-about", self._do_show_about)
        obj.connect("action-show-host", self._do_show_host)
        obj.connect("action-show-connect", self._do_show_connect)
        obj.connect("action-exit-app", self.exit_app)
        # Keep the global window count in sync for exit tracking.
        obj.connect("manager-opened", self.increment_window_counter)
        obj.connect("manager-closed", self.decrement_window_counter)
        obj.connect("remove-conn", self.remove_conn)
        obj.connect("add-default-conn", self.add_default_conn)
        # Replay existing connections into the manager (see connect()).
        self.connect("conn-added", obj.add_conn)
        self.connect("conn-removed", obj.remove_conn)
        self.windowManager = obj
        return self.windowManager
def _do_toggle_manager(self, ignore):
manager = self.get_manager()
if manager.is_visible():
manager.close()
else:
manager.show()
    def _do_show_manager(self, src):
        """Show the main Manager window. 'src' may be None (CLI startup
        path), in which case errors propagate instead of being dialogged."""
        try:
            manager = self.get_manager()
            manager.show()
        except Exception, e:
            if not src:
                raise
            src.err.show_err(_("Error launching manager: %s") % str(e))
def _get_create_dialog(self):
if self.windowCreate:
return self.windowCreate
obj = vmmCreate(self)
obj.connect("action-show-domain", self._do_show_vm)
self.windowCreate = obj
return self.windowCreate
def _do_show_create(self, src, uri):
try:
self._get_create_dialog().show(src.topwin, uri)
except Exception, e:
src.err.show_err(_("Error launching manager: %s") % str(e))
    def _do_show_migrate(self, src, uri, uuid):
        """Show the migrate dialog for the given VM, reusing one shared
        dialog instance and pointing it at the new VM each time."""
        try:
            conn = self._lookup_conn(uri)
            vm = conn.get_vm(uuid)
            if not self.windowMigrate:
                self.windowMigrate = vmmMigrateDialog(vm, self)
            self.windowMigrate.set_state(vm)
            self.windowMigrate.show(src.topwin)
        except Exception, e:
            src.err.show_err(_("Error launching migrate dialog: %s") % str(e))
    def _do_show_clone(self, src, uri, uuid):
        """Show the clone dialog for the given VM; one clone window is
        cached per connection and re-targeted at the new original VM."""
        con = self._lookup_conn(uri)
        orig_vm = con.get_vm(uuid)
        clone_window = self.conns[uri]["windowClone"]
        try:
            if clone_window is None:
                clone_window = vmmCloneVM(orig_vm)
                self.conns[uri]["windowClone"] = clone_window
            else:
                clone_window.set_orig_vm(orig_vm)
            clone_window.show(src.topwin)
        except Exception, e:
            src.err.show_err(_("Error setting clone parameters: %s") % str(e))
##########################################
# Window launchers from virt-manager cli #
##########################################
def show_manager(self):
self._do_show_manager(None)
def show_host_summary(self, uri):
self._do_show_host(self.get_manager(), uri)
def show_domain_creator(self, uri):
self.show_manager()
self._do_show_create(self.get_manager(), uri)
    def show_domain_console(self, uri, uuid):
        """CLI entry point: open the VM details window on its console page."""
        self.idle_add(self._show_vm_helper, self.get_manager(), uri, uuid,
                      page=DETAILS_CONSOLE, forcepage=True)
    def show_domain_editor(self, uri, uuid):
        """CLI entry point: open the VM details window on its config page."""
        self.idle_add(self._show_vm_helper, self.get_manager(), uri, uuid,
                      page=DETAILS_CONFIG, forcepage=True)
    def show_domain_performance(self, uri, uuid):
        """CLI entry point: open the VM details window on its perf page."""
        self.idle_add(self._show_vm_helper, self.get_manager(), uri, uuid,
                      page=DETAILS_PERF, forcepage=True)
#######################################
# Domain actions run/destroy/save ... #
#######################################
def _do_save_domain(self, src, uri, uuid):
conn = self._lookup_conn(uri)
vm = conn.get_vm(uuid)
managed = bool(vm.managedsave_supported)
if not managed and conn.is_remote():
src.err.val_err(_("Saving virtual machines over remote "
"connections is not supported with this "
"libvirt version or hypervisor."))
return
if not src.err.chkbox_helper(self.config.get_confirm_poweroff,
self.config.set_confirm_poweroff,
text1=_("Are you sure you want to save '%s'?" % vm.get_name())):
return
path = None
if not managed:
path = src.err.browse_local(
conn, _("Save Virtual Machine"),
dialog_type=Gtk.FileChooserAction.SAVE,
browse_reason=self.config.CONFIG_DIR_SAVE)
if not path:
return
_cancel_cb = None
if vm.getjobinfo_supported:
_cancel_cb = (self._save_cancel, vm)
def cb(asyncjob):
vm.save(path, meter=asyncjob.get_meter())
def finish_cb(error, details):
if error is not None:
error = _("Error saving domain: %s") % error
src.err.show_err(error, details=details)
progWin = vmmAsyncJob(cb, [],
finish_cb, [],
_("Saving Virtual Machine"),
_("Saving virtual machine memory to disk "),
src.topwin, cancel_cb=_cancel_cb)
progWin.run()
    def _save_cancel(self, asyncjob, vm):
        """Cancel callback for the save-domain async job: abort the
        in-flight libvirt job and mark the async job as canceled."""
        logging.debug("Cancelling save job")
        if not vm:
            return
        try:
            vm.abort_job()
        except Exception, e:
            logging.exception("Error cancelling save job")
            asyncjob.show_warning(_("Error cancelling save job: %s") % str(e))
            return
        asyncjob.job_canceled = True
        return
    def _do_restore_domain(self, src, uri):
        """Restore a VM from a previously saved memory image chosen via a
        file browser; only supported for local connections."""
        conn = self._lookup_conn(uri)
        if conn.is_remote():
            src.err.val_err(_("Restoring virtual machines over remote "
                              "connections is not yet supported"))
            return
        path = src.err.browse_local(
            conn, _("Restore Virtual Machine"),
            browse_reason=self.config.CONFIG_DIR_RESTORE)
        if not path:
            return
        vmmAsyncJob.simple_async_noshow(conn.restore, [path], src,
                                        _("Error restoring domain"))
def _do_destroy_domain(self, src, uri, uuid):
conn = self._lookup_conn(uri)
vm = conn.get_vm(uuid)
if not src.err.chkbox_helper(
self.config.get_confirm_forcepoweroff,
self.config.set_confirm_forcepoweroff,
text1=_("Are you sure you want to force poweroff '%s'?" %
vm.get_name()),
text2=_("This will immediately poweroff the VM without "
"shutting down the OS and may cause data loss.")):
return
logging.debug("Destroying vm '%s'", vm.get_name())
vmmAsyncJob.simple_async_noshow(vm.destroy, [], src,
_("Error shutting down domain"))
def _do_suspend_domain(self, src, uri, uuid):
conn = self._lookup_conn(uri)
vm = conn.get_vm(uuid)
if not src.err.chkbox_helper(self.config.get_confirm_pause,
self.config.set_confirm_pause,
text1=_("Are you sure you want to pause '%s'?" %
vm.get_name())):
return
logging.debug("Pausing vm '%s'", vm.get_name())
vmmAsyncJob.simple_async_noshow(vm.suspend, [], src,
_("Error pausing domain"))
def _do_resume_domain(self, src, uri, uuid):
conn = self._lookup_conn(uri)
vm = conn.get_vm(uuid)
logging.debug("Unpausing vm '%s'", vm.get_name())
vmmAsyncJob.simple_async_noshow(vm.resume, [], src,
_("Error unpausing domain"))
    def _do_run_domain(self, src, uri, uuid):
        """Start the VM. If it has a saved memory image, restore from it
        (with progress UI); on restore failure, offer to discard the saved
        state and do a regular start instead."""
        conn = self._lookup_conn(uri)
        vm = conn.get_vm(uuid)
        logging.debug("Starting vm '%s'", vm.get_name())
        if vm.hasSavedImage():
            def errorcb(error, details):
                # This is run from the main thread
                res = src.err.show_err(
                    _("Error restoring domain") + ": " + error,
                    details=details,
                    text2=_(
                        "The domain could not be restored. Would you like\n"
                        "to remove the saved state and perform a regular\n"
                        "start up?"),
                    dialog_type=Gtk.MessageType.WARNING,
                    buttons=Gtk.ButtonsType.YES_NO,
                    modal=True)
                if not res:
                    return
                try:
                    vm.removeSavedImage()
                    # Retry: without the saved image this takes the regular
                    # startup path below.
                    self._do_run_domain(src, uri, uuid)
                except Exception, e:
                    src.err.show_err(_("Error removing domain state: %s")
                                     % str(e))
            # VM will be restored, which can take some time, so show progress
            title = _("Restoring Virtual Machine")
            text = _("Restoring virtual machine memory from disk")
            vmmAsyncJob.simple_async(vm.startup, [], src,
                                     title, text, "", errorcb=errorcb)
        else:
            # Regular startup
            errorintro  = _("Error starting domain")
            vmmAsyncJob.simple_async_noshow(vm.startup, [], src, errorintro)
def _do_shutdown_domain(self, src, uri, uuid):
conn = self._lookup_conn(uri)
vm = conn.get_vm(uuid)
if not src.err.chkbox_helper(self.config.get_confirm_poweroff,
self.config.set_confirm_poweroff,
text1=_("Are you sure you want to poweroff '%s'?" %
vm.get_name())):
return
logging.debug("Shutting down vm '%s'", vm.get_name())
vmmAsyncJob.simple_async_noshow(vm.shutdown, [], src,
_("Error shutting down domain"))
def _do_reboot_domain(self, src, uri, uuid):
conn = self._lookup_conn(uri)
vm = conn.get_vm(uuid)
if not src.err.chkbox_helper(self.config.get_confirm_poweroff,
self.config.set_confirm_poweroff,
text1=_("Are you sure you want to reboot '%s'?" %
vm.get_name())):
return
logging.debug("Rebooting vm '%s'", vm.get_name())
def reboot_cb():
no_support = False
reboot_err = None
try:
vm.reboot()
except Exception, reboot_err:
no_support = util.is_error_nosupport(reboot_err)
if not no_support:
raise RuntimeError(_("Error rebooting domain: %s" %
str(reboot_err)))
if not no_support:
return
# Reboot isn't supported. Let's try to emulate it
logging.debug("Hypervisor doesn't support reboot, let's fake it")
try:
vm.manual_reboot()
except:
logging.exception("Could not fake a reboot")
# Raise the original error message
raise RuntimeError(_("Error rebooting domain: %s" %
str(reboot_err)))
vmmAsyncJob.simple_async_noshow(reboot_cb, [], src, "")
def _do_reset_domain(self, src, uri, uuid):
conn = self._lookup_conn(uri)
vm = conn.get_vm(uuid)
if not src.err.chkbox_helper(
self.config.get_confirm_forcepoweroff,
self.config.set_confirm_forcepoweroff,
text1=_("Are you sure you want to force reset '%s'?" %
vm.get_name()),
text2=_("This will immediately reset the VM without "
"shutting down the OS and may cause data loss.")):
return
logging.debug("Resetting vm '%s'", vm.get_name())
vmmAsyncJob.simple_async_noshow(vm.reset, [], src,
_("Error resetting domain"))
def _do_delete_domain(self, src, uri, uuid):
conn = self._lookup_conn(uri)
vm = conn.get_vm(uuid)
if not self.delete_dialog:
self.delete_dialog = vmmDeleteDialog()
self.delete_dialog.show(vm, src.topwin)
|
aurex-linux/virt-manager
|
virtManager/engine.py
|
Python
|
gpl-2.0
| 41,820 | 0.001124 |
# vim:fileencoding=utf-8:noet
try:
import vim
except ImportError:
vim = object() # NOQA
from powerline.bindings.vim import getbufvar
from powerline.segments.vim import window_cached
@window_cached
def ctrlp(pl, side):
	'''
	Return segments for one side of the CtrlP statusline.

	Dispatches to ``ctrlp_stl_<side>_<type>`` using buffer-local variables
	set by the CtrlP vim plugin; ``side`` is ``left`` or ``right`` and the
	type is ``main`` or ``prog``.

	Highlight groups used: ``ctrlp.regex`` or ``background``, ``ctrlp.prev`` or ``background``, ``ctrlp.item`` or ``file_name``, ``ctrlp.next`` or ``background``, ``ctrlp.marked`` or ``background``, ``ctrlp.focus`` or ``background``, ``ctrlp.byfname`` or ``background``, ``ctrlp.progress`` or ``file_name``, ``ctrlp.progress`` or ``file_name``.
	'''
	ctrlp_type = getbufvar('%', 'powerline_ctrlp_type')
	ctrlp_args = getbufvar('%', 'powerline_ctrlp_args')
	return globals()['ctrlp_stl_{0}_{1}'.format(side, ctrlp_type)](pl, *ctrlp_args)
def ctrlp_stl_left_main(pl, focus, byfname, regex, prev, item, next, marked):
	'''
	Segments for the left side of the main CtrlP statusline: optional
	regex-mode marker, previous/current/next search modes, and the marked
	count when any files are marked.

	Highlight groups used: ``ctrlp.regex`` or ``background``, ``ctrlp.prev`` or ``background``, ``ctrlp.item`` or ``file_name``, ``ctrlp.next`` or ``background``, ``ctrlp.marked`` or ``background``.
	'''
	# CtrlP passes the marked count wrapped in decoration; strip it.
	marked = marked[2:-1]
	segments = []
	if int(regex):
		segments.append({
			'contents': 'regex',
			'highlight_group': ['ctrlp.regex', 'background'],
		})
	segments.append({
		'contents': prev + ' ',
		'highlight_group': ['ctrlp.prev', 'background'],
		'draw_inner_divider': True,
		'priority': 40,
	})
	segments.append({
		'contents': item,
		'highlight_group': ['ctrlp.item', 'file_name'],
		'draw_inner_divider': True,
		'width': 10,
		'align': 'c',
	})
	segments.append({
		'contents': ' ' + next,
		'highlight_group': ['ctrlp.next', 'background'],
		'draw_inner_divider': True,
		'priority': 40,
	})
	if marked != '-':
		segments.append({
			'contents': marked,
			'highlight_group': ['ctrlp.marked', 'background'],
			'draw_inner_divider': True,
		})
	return segments
def ctrlp_stl_right_main(pl, focus, byfname, regex, prev, item, next, marked):
	'''
	Segments for the right side of the main CtrlP statusline: the focus
	indicator and the by-filename search-mode flag.

	Highlight groups used: ``ctrlp.focus`` or ``background``, ``ctrlp.byfname`` or ``background``.
	'''
	return [
		{
			'contents': focus,
			'highlight_group': ['ctrlp.focus', 'background'],
			'draw_inner_divider': True,
			'priority': 50,
		},
		{
			'contents': byfname,
			'highlight_group': ['ctrlp.byfname', 'background'],
			'priority': 50,
		},
	]
def ctrlp_stl_left_prog(pl, progress):
	'''
	Left side while CtrlP is still scanning: a static loading marker.

	Highlight groups used: ``ctrlp.progress`` or ``file_name``.
	'''
	segment = {
		'contents': 'Loading...',
		'highlight_group': ['ctrlp.progress', 'file_name'],
	}
	return [segment]
def ctrlp_stl_right_prog(pl, progress):
	'''
	Right side while CtrlP is still scanning: the raw progress string.

	Highlight groups used: ``ctrlp.progress`` or ``file_name``.
	'''
	segment = {
		'contents': progress,
		'highlight_group': ['ctrlp.progress', 'file_name'],
	}
	return [segment]
|
keelerm84/powerline
|
powerline/segments/plugin/ctrlp.py
|
Python
|
mit
| 2,722 | 0.031227 |
#!/usr/bin/env python3
import os
from construct import Adapter, Const, GreedyBytes, Int32ul, Struct, this
from .common import ZeroString, PreallocatedArray, test_folder, mkdir_p
from .encryption import EncryptedBlock
# Cipher parameters passed to EncryptedBlock for the file-table section.
RES_ENCRYPTION = 23, 9782, 3391, 31
# One directory entry: fixed 16-byte zero-terminated name plus the entry's
# absolute offset and size within the archive.
# noinspection PyPep8,PyUnresolvedReferences
ResourceEntry = Struct(
    "name" / ZeroString(16),
    "offset" / Int32ul,
    "size" / Int32ul
)
# Raw archive layout: entry count, an encrypted table preallocated for 150
# entries, a magic marker, then all file payloads back-to-back.
# noinspection PyPep8,PyUnresolvedReferences
RawResourceFile = Struct(
    "files_num" / Int32ul,
    "file_table" / EncryptedBlock(RES_ENCRYPTION,
                                  PreallocatedArray(150, this.files_num, ResourceEntry)),
    Const(Int32ul, 0x1490ff),
    "raw_data" / GreedyBytes
)
class ResourceFileAdapter(Adapter):
    """
    Discards header and returns a dict of {file_name: file_data}
    """
    HEADER_SIZE = 3608  # 4 + 150 * 24 + 4
    def _decode(self, obj, context):
        # Table offsets are absolute within the archive, but raw_data starts
        # right after the header -- shift them back by HEADER_SIZE.
        shift = self.HEADER_SIZE
        return {f.name: obj.raw_data[f.offset - shift:f.offset + f.size - shift]
                for f in obj.file_table}
    def _encode(self, files, context):
        # Lay the payloads out back-to-back after the header, rebuilding the
        # directory with absolute offsets as _decode expects.
        file_table = []
        raw_data = b""
        last_offset = self.HEADER_SIZE
        for file_name, file_data in files.items():
            file_table.append({"name": file_name,
                               "offset": last_offset,
                               "size": len(file_data)})
            last_offset += len(file_data)
            raw_data += file_data
        return {"files_num": len(files),
                "file_table": file_table,
                "raw_data": raw_data}
# Public construct: parses/builds a whole .res archive as {name: bytes}.
ResourceFile = ResourceFileAdapter(RawResourceFile)
def unpack_res(file_path, dir_path):
    """Extract every file from the .res archive at *file_path* into
    *dir_path* (created if missing)."""
    # Bug fix: the archive is binary -- opening in text mode would decode
    # (and on python3 typically fail or corrupt) the bytes before construct
    # gets to parse them. Also close the handle deterministically.
    with open(file_path, "rb") as f:
        data = f.read()
    res = ResourceFile.parse(data)
    mkdir_p(dir_path)
    for file_name, file_data in res.items():
        with open(os.path.join(dir_path, file_name), "wb") as f:
            f.write(file_data)
def pack_res(dir_path, file_path):
    """Bundle the files directly inside *dir_path* (non-recursive) into a
    .res archive written to *file_path*."""
    res_content = {}
    for root, dirs, files in os.walk(dir_path):
        for name in files:
            # Bug fix: payloads are binary; text mode would decode them and
            # break ResourceFile.build, which concatenates bytes.
            with open(os.path.join(root, name), "rb") as f:
                res_content[name] = f.read()
        # Only the top level of dir_path is packed.
        break
    with open(file_path, "wb") as f:
        f.write(ResourceFile.build(res_content))
if __name__ == "__main__":
    import sys
    # CLI: round-trip test every .res file under the folder in argv[1].
    test_folder(sys.argv[1], ".res", ResourceFile)
|
domi-id/across
|
across/res.py
|
Python
|
mit
| 2,378 | 0.002103 |
class Comp1Plugin(object):
    """Dummy test plugin; exposes only a version identifier."""
    def __init__(self):
        # Tests only inspect this attribute.
        self.version = '1.4'
class Comp2Plugin(object):
    """Second dummy test plugin; exposes only a version identifier."""
    def __init__(self):
        # Tests only inspect this attribute.
        self.version = '1.4'
|
DailyActie/Surrogate-Model
|
01-codes/OpenMDAO-Framework-dev/openmdao.test/src/openmdao/test/plugins/foo2/foo.py
|
Python
|
mit
| 162 | 0 |
from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
    """An election candidate, typically scraped from an upstream source."""
    name = models.CharField(blank=False, max_length=255)
    # Identifier of this person in the upstream data source, if any.
    remote_id = models.CharField(blank=True, max_length=255, null=True)
    source_url = models.URLField(blank=True, null=True)
    source_name = models.CharField(blank=True, max_length=100)
    image_url = models.URLField(blank=True, null=True)
    elections = models.ManyToManyField(Election)
    parties = models.ManyToManyField(Party, through='PartyMemberships')
    constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
    @property
    def current_party(self):
        # NOTE(review): returns the PartyMemberships row (not a Party), and
        # implicitly None when no open membership exists -- callers must
        # handle both; confirm the name matches caller expectations.
        parties = self.partymemberships_set.filter(membership_end=None)
        if parties:
            return parties[0]
    @property
    def current_election(self):
        # Assumes exactly one active election exists; raises IndexError
        # otherwise -- TODO confirm that invariant holds in the data.
        return self.elections.filter(active=True)[0]
    @property
    def current_constituency(self):
        # Constituency this person stands in for the current election; same
        # single-result assumption (IndexError if missing) as above.
        return self.constituencies.filter(
            personconstituencies__election=self.current_election)[0]
    def __unicode__(self):
        return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
    """Through model for Person.parties: one party membership over a date
    range; membership_end stays NULL while the membership is current."""
    person = models.ForeignKey(Person)
    party = models.ForeignKey(Party)
    membership_start = models.DateField()
    membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
    """Through model for Person.constituencies: which constituency a person
    stood in for a given election."""
    person = models.ForeignKey(Person)
    constituency = models.ForeignKey(Constituency)
    election = models.ForeignKey(Election)
|
JustinWingChungHui/electionleaflets
|
electionleaflets/apps/people/models.py
|
Python
|
mit
| 1,603 | 0.001871 |
# Package entry point: `python -m bot` starts the bot server.
from bot.server import main
main()
|
fedorlol/Tolyan
|
bot/__main__.py
|
Python
|
gpl-3.0
| 36 | 0 |
import math
class segment_tree():
    """Min segment tree over a list, built from nested dict nodes.

    Each node is {'value': subtree minimum, 'left': child, 'right': child};
    leaves have None children. The backing list stays in self.a and is kept
    in sync by update().
    """
    def __init__(self, a):
        self.a = a
        self.root = self.build(0, len(a) - 1)

    def build(self, lo, hi):
        # Recursively construct the node covering a[lo..hi].
        if lo == hi:
            return {'value': self.a[lo], 'left': None, 'right': None}
        mid = int((lo + hi) / 2)
        left_child = self.build(lo, mid)
        right_child = self.build(mid + 1, hi)
        return {
            'value': min(left_child['value'], right_child['value']),
            'left': left_child,
            'right': right_child,
        }

    def update(self, node, index, new_value, left, right):
        # Point-assign a[index] = new_value, fixing mins on the way back up.
        if left == right:
            self.a[index] = new_value
            node['value'] = new_value
            return
        mid = int((left + right) / 2)
        if left <= index <= mid:
            self.update(node['left'], index, new_value, left, mid)
        else:
            self.update(node['right'], index, new_value, mid + 1, right)
        node['value'] = min(node['left']['value'], node['right']['value'])

    def query(self, root, start, end, left, right):
        # Min over a[start..end]; [left, right] is the span of 'root'.
        if start > right or end < left:
            return float('inf')
        if start <= left and right <= end:
            return root['value']
        mid = int((left + right) / 2)
        return min(self.query(root['left'], start, end, left, mid),
                   self.query(root['right'], start, end, mid + 1, right))
# Driver: first line "n q", second line the n array values, then q lines of
# "<op> <l> <r>". Op 'q' prints min(a[l..r]) with 1-based inclusive bounds;
# any other op is a point update: both numbers are decremented, then right+1
# is written, i.e. "u i v" sets a[i-1] = v.
n,q = input().split(' ')
n,q = int(n),int(q)
a = list(map(int,input().split(' ')))
s = segment_tree(a)
for i in range(q):
    query,left,right = input().split(' ')
    left,right = int(left)-1,int(right)-1
    if query == 'q':
        print(s.query(s.root,left,right,0,n-1))
    else:
        s.update(s.root,left,right+1,0,n-1)
|
saisankargochhayat/algo_quest
|
hackerearth/segment_tree_problem.py
|
Python
|
apache-2.0
| 1,757 | 0.036426 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Weibull bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import bijector
__all__ = [
"Weibull",
]
class Weibull(bijector.Bijector):
  """Compute `Y = g(X) = 1 - exp((-X / scale) ** concentration), X >= 0`.

  This bijector maps inputs from `[0, inf]` to `[0, 1]`. The inverse of the
  bijector applied to a uniform random variable `X ~ U(0, 1) gives back a
  random variable with the
  [Weibull distribution](https://en.wikipedia.org/wiki/Weibull_distribution):

  ```none
  Y ~ Weibull(scale, concentration)
  pdf(y; scale, concentration, y >= 0) = (concentration / scale) * (
    y / scale) ** (concentration - 1) * exp(
      -(y / scale) ** concentration)
  ```

  (Docstring fix: the density above previously had scale/concentration in
  each other's places; this is the standard Weibull pdf with scale `l` and
  shape `k`.)
  """

  def __init__(self,
               scale=1.,
               concentration=1.,
               validate_args=False,
               name="weibull"):
    """Instantiates the `Weibull` bijector.

    Args:
      scale: Positive Float-type `Tensor` that is the same dtype and is
        broadcastable with `concentration`.
        This is `l` in `Y = g(X) = 1 - exp((-x / l) ** k)`.
      concentration: Positive Float-type `Tensor` that is the same dtype and is
        broadcastable with `scale`.
        This is `k` in `Y = g(X) = 1 - exp((-x / l) ** k)`.
      validate_args: Python `bool` indicating whether arguments should be
        checked for correctness.
      name: Python `str` name given to ops managed by this object.
    """
    self._graph_parents = []
    self._name = name
    self._validate_args = validate_args
    with self._name_scope("init", values=[scale, concentration]):
      self._scale = ops.convert_to_tensor(scale, name="scale")
      self._concentration = ops.convert_to_tensor(
          concentration, name="concentration")
      check_ops.assert_same_float_dtype([self._scale, self._concentration])
      if validate_args:
        # Positivity checks become graph dependencies of the parameters.
        self._scale = control_flow_ops.with_dependencies([
            check_ops.assert_positive(
                self._scale,
                message="Argument scale was not positive")
        ], self._scale)
        self._concentration = control_flow_ops.with_dependencies([
            check_ops.assert_positive(
                self._concentration,
                message="Argument concentration was not positive")
        ], self._concentration)

    super(Weibull, self).__init__(
        forward_min_event_ndims=0,
        validate_args=validate_args,
        name=name)

  @property
  def scale(self):
    """The `l` in `Y = g(X) = 1 - exp((-x / l) ** k)`."""
    return self._scale

  @property
  def concentration(self):
    """The `k` in `Y = g(X) = 1 - exp((-x / l) ** k)`."""
    return self._concentration

  def _forward(self, x):
    x = self._maybe_assert_valid_x(x)
    # -expm1(-z) == 1 - exp(-z), computed accurately for small z.
    return -math_ops.expm1(-((x / self.scale) ** self.concentration))

  def _inverse(self, y):
    y = self._maybe_assert_valid_y(y)
    # Inverse CDF: scale * (-log(1 - y)) ** (1 / concentration).
    return self.scale * (-math_ops.log1p(-y)) ** (1 / self.concentration)

  def _inverse_log_det_jacobian(self, y):
    y = self._maybe_assert_valid_y(y)
    return (
        -math_ops.log1p(-y) +
        (1 / self.concentration - 1) * math_ops.log(-math_ops.log1p(-y)) +
        math_ops.log(self.scale / self.concentration))

  def _forward_log_det_jacobian(self, x):
    x = self._maybe_assert_valid_x(x)
    return (
        -(x / self.scale) ** self.concentration +
        (self.concentration - 1) * math_ops.log(x) +
        math_ops.log(self.concentration) +
        -self.concentration * math_ops.log(self.scale))

  def _maybe_assert_valid_x(self, x):
    # Forward domain is x >= 0.
    if not self.validate_args:
      return x
    is_valid = check_ops.assert_non_negative(
        x,
        message="Forward transformation input must be at least 0.")
    return control_flow_ops.with_dependencies([is_valid], x)

  def _maybe_assert_valid_y(self, y):
    # Inverse domain is 0 <= y <= 1.
    if not self.validate_args:
      return y
    is_positive = check_ops.assert_non_negative(
        y, message="Inverse transformation input must be greater than 0.")
    less_than_one = check_ops.assert_less_equal(
        y, constant_op.constant(1., y.dtype),
        message="Inverse transformation input must be less than or equal to 1.")
    return control_flow_ops.with_dependencies([is_positive, less_than_one], y)
|
yanchen036/tensorflow
|
tensorflow/contrib/distributions/python/ops/bijectors/weibull.py
|
Python
|
apache-2.0
| 5,266 | 0.003608 |
# -*- coding: utf-8 -*-
"""
zang.inboundxml.elements.play
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Module containing `Play` inbound xml element
"""
from zang.inboundxml.elements.base_node import BaseNode
class Play(BaseNode):
    """InboundXML `<Play>` element referencing an audio resource at `url`.

    Attributes:
        url: Location of the audio resource (required, must not be None).
        loop: Optional repeat count, or None.
    """

    _allowedContentClass = ()

    def __init__(self, url, loop=None):
        """Creates a `Play` element.

        Args:
            url: Audio resource location; must not be None.
            loop: Optional repeat count.

        Raises:
            TypeError: If `url` is None.
        """
        # Route through the property setter so the None-check lives in
        # exactly one place instead of being duplicated here.
        self.url = url
        self.loop = loop
        self._content = None

    @property
    def url(self):
        """The audio resource location."""
        return self._value

    @url.setter
    def url(self, value):
        if value is None:
            raise TypeError
        self._value = value
|
jaymin-panchal/zang-python
|
zang/inboundxml/elements/play.py
|
Python
|
mit
| 599 | 0 |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from zope.interface import implements
from twisted.internet import defer
from twisted.trial import unittest
from twisted.words.protocols.jabber import sasl, sasl_mechanisms, xmlstream, jid
from twisted.words.xish import domish
NS_XMPP_SASL = 'urn:ietf:params:xml:ns:xmpp-sasl'
class DummySASLMechanism(object):
    """
    Dummy SASL mechanism.

    Returns the initial response it was constructed with, remembers the
    most recent challenge and always answers challenges with an empty
    response.

    @ivar challenge: Last received challenge.
    @type challenge: C{unicode}.
    @ivar initialResponse: Initial response to be returned when requested
                           via C{getInitialResponse} or C{None}.
    @type initialResponse: C{unicode}
    """
    implements(sasl_mechanisms.ISASLMechanism)
    challenge = None
    name = "DUMMY"

    def __init__(self, initialResponse):
        self.initialResponse = initialResponse

    def getInitialResponse(self):
        """
        Hand back the canned initial response.
        """
        return self.initialResponse

    def getResponse(self, challenge):
        """
        Record C{challenge} and reply with an empty string.
        """
        self.challenge = challenge
        return ""
class DummySASLInitiatingInitializer(sasl.SASLInitiatingInitializer):
    """
    Dummy SASL Initializer for initiating entities.
    This hardwires the SASL mechanism to L{DummySASLMechanism}, that is
    instantiated with the value of C{initialResponse}.
    @ivar initialResponse: The initial response to be returned by the
        dummy SASL mechanism or C{None}.
    @type initialResponse: C{unicode}.
    """
    initialResponse = None
    def setMechanism(self):
        """
        Install L{DummySASLMechanism}, bypassing normal mechanism selection.
        """
        self.mechanism = DummySASLMechanism(self.initialResponse)
class SASLInitiatingInitializerTest(unittest.TestCase):
    """
    Tests for L{sasl.SASLInitiatingInitializer}
    """
    def setUp(self):
        # Capture outgoing stanzas in self.output instead of a transport.
        self.output = []
        self.authenticator = xmlstream.Authenticator()
        self.xmlstream = xmlstream.XmlStream(self.authenticator)
        self.xmlstream.send = self.output.append
        self.xmlstream.connectionMade()
        # Feed a stream header so the stream is in a negotiable state.
        self.xmlstream.dataReceived("<stream:stream xmlns='jabber:client' "
                        "xmlns:stream='http://etherx.jabber.org/streams' "
                        "from='example.com' id='12345' version='1.0'>")
        self.init = DummySASLInitiatingInitializer(self.xmlstream)
    def test_onFailure(self):
        """
        Test that the SASL error condition is correctly extracted.
        """
        failure = domish.Element(('urn:ietf:params:xml:ns:xmpp-sasl',
                                  'failure'))
        failure.addElement('not-authorized')
        self.init._deferred = defer.Deferred()
        self.init.onFailure(failure)
        self.assertFailure(self.init._deferred, sasl.SASLAuthError)
        self.init._deferred.addCallback(lambda e:
                                        self.assertEqual('not-authorized',
                                                         e.condition))
        return self.init._deferred
    def test_sendAuthInitialResponse(self):
        """
        Test starting authentication with an initial response.
        """
        self.init.initialResponse = "dummy"
        self.init.start()
        auth = self.output[0]
        self.assertEqual(NS_XMPP_SASL, auth.uri)
        self.assertEqual('auth', auth.name)
        self.assertEqual('DUMMY', auth['mechanism'])
        # 'ZHVtbXk=' is the base64 encoding of 'dummy'.
        self.assertEqual('ZHVtbXk=', str(auth))
    def test_sendAuthNoInitialResponse(self):
        """
        Test starting authentication without an initial response.
        """
        self.init.initialResponse = None
        self.init.start()
        auth = self.output[0]
        self.assertEqual('', str(auth))
    def test_sendAuthEmptyInitialResponse(self):
        """
        Test starting authentication where the initial response is empty.
        """
        self.init.initialResponse = ""
        self.init.start()
        auth = self.output[0]
        # An empty (but present) initial response is sent as '='.
        self.assertEqual('=', str(auth))
    def test_onChallenge(self):
        """
        Test receiving a challenge message.
        """
        d = self.init.start()
        challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
        # 'bXkgY2hhbGxlbmdl' is the base64 encoding of 'my challenge'.
        challenge.addContent('bXkgY2hhbGxlbmdl')
        self.init.onChallenge(challenge)
        self.assertEqual('my challenge', self.init.mechanism.challenge)
        self.init.onSuccess(None)
        return d
    def test_onChallengeEmpty(self):
        """
        Test receiving an empty challenge message.
        """
        d = self.init.start()
        challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
        self.init.onChallenge(challenge)
        self.assertEqual('', self.init.mechanism.challenge)
        self.init.onSuccess(None)
        return d
    def test_onChallengeIllegalPadding(self):
        """
        Test receiving a challenge message with illegal padding.
        """
        d = self.init.start()
        challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
        challenge.addContent('bXkg=Y2hhbGxlbmdl')
        self.init.onChallenge(challenge)
        self.assertFailure(d, sasl.SASLIncorrectEncodingError)
        return d
    def test_onChallengeIllegalCharacters(self):
        """
        Test receiving a challenge message with illegal characters.
        """
        d = self.init.start()
        challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
        challenge.addContent('bXkg*Y2hhbGxlbmdl')
        self.init.onChallenge(challenge)
        self.assertFailure(d, sasl.SASLIncorrectEncodingError)
        return d
    def test_onChallengeMalformed(self):
        """
        Test receiving a malformed challenge message.
        """
        d = self.init.start()
        challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
        # A single character cannot be valid base64.
        challenge.addContent('a')
        self.init.onChallenge(challenge)
        self.assertFailure(d, sasl.SASLIncorrectEncodingError)
        return d
class SASLInitiatingInitializerSetMechanismTest(unittest.TestCase):
    """
    Test for L{sasl.SASLInitiatingInitializer.setMechanism}.
    """
    def setUp(self):
        # Capture outgoing stanzas in self.output instead of a transport.
        self.output = []
        self.authenticator = xmlstream.Authenticator()
        self.xmlstream = xmlstream.XmlStream(self.authenticator)
        self.xmlstream.send = self.output.append
        self.xmlstream.connectionMade()
        self.xmlstream.dataReceived("<stream:stream xmlns='jabber:client' "
                        "xmlns:stream='http://etherx.jabber.org/streams' "
                        "from='example.com' id='12345' version='1.0'>")
        self.init = sasl.SASLInitiatingInitializer(self.xmlstream)
    def _setMechanism(self, name):
        """
        Set up the XML Stream to have a SASL feature with the given mechanism.
        """
        feature = domish.Element((NS_XMPP_SASL, 'mechanisms'))
        feature.addElement('mechanism', content=name)
        self.xmlstream.features[(feature.uri, feature.name)] = feature
        self.init.setMechanism()
        return self.init.mechanism.name
    def test_anonymous(self):
        """
        Test setting ANONYMOUS as the authentication mechanism.
        """
        # No password: ANONYMOUS is the only acceptable mechanism.
        self.authenticator.jid = jid.JID('example.com')
        self.authenticator.password = None
        name = "ANONYMOUS"
        self.assertEqual(name, self._setMechanism(name))
    def test_plain(self):
        """
        Test setting PLAIN as the authentication mechanism.
        """
        self.authenticator.jid = jid.JID('test@example.com')
        self.authenticator.password = 'secret'
        name = "PLAIN"
        self.assertEqual(name, self._setMechanism(name))
    def test_digest(self):
        """
        Test setting DIGEST-MD5 as the authentication mechanism.
        """
        self.authenticator.jid = jid.JID('test@example.com')
        self.authenticator.password = 'secret'
        name = "DIGEST-MD5"
        self.assertEqual(name, self._setMechanism(name))
    def test_notAcceptable(self):
        """
        Test using an unacceptable SASL authentication mechanism.
        """
        self.authenticator.jid = jid.JID('test@example.com')
        self.authenticator.password = 'secret'
        self.assertRaises(sasl.SASLNoAcceptableMechanism,
                          self._setMechanism, 'SOMETHING_UNACCEPTABLE')
    def test_notAcceptableWithoutUser(self):
        """
        Test using an unacceptable SASL authentication mechanism with no JID.
        """
        self.authenticator.jid = jid.JID('example.com')
        self.authenticator.password = 'secret'
        self.assertRaises(sasl.SASLNoAcceptableMechanism,
                          self._setMechanism, 'SOMETHING_UNACCEPTABLE')
|
skycucumber/Messaging-Gateway
|
webapp/venv/lib/python2.7/site-packages/twisted/words/test/test_jabbersasl.py
|
Python
|
gpl-2.0
| 8,748 | 0.002743 |
if __name__ == '__main__':
a = int(raw_input())
b = int(raw_input())
print a + b
print a - b
print a * b
|
LuisUrrutia/hackerrank
|
python/introduction/python-arithmetic-operators.py
|
Python
|
mit
| 126 | 0.007937 |
"""
Manila configuration - file ``/etc/manila/manila.conf``
=======================================================
The Manila configuration file is a standard '.ini' file and this parser uses
the ``IniConfigFile`` class to read it.
Sample configuration::
[DEFAULT]
osapi_max_limit = 1000
osapi_share_base_URL = <None>
use_forwarded_for = false
api_paste_config = api-paste.ini
state_path = /var/lib/manila
scheduler_topic = manila-scheduler
share_topic = manila-share
share_driver = manila.share.drivers.generic.GenericShareDriver
enable_v1_api = false
enable_v2_api = false
[cors]
allowed_origin = <None>
allow_credentials = true
expose_headers = Content-Type,Cache-Control,Content-Language,Expires,Last-Modified,Pragma
allow_methods = GET,POST,PUT,DELETE,OPTIONS
allow_headers = Content-Type,Cache-Control,Content-Language,Expires,Last-Modified,Pragma
Examples:
>>> conf = shared[ManilaConf]
>>> conf.sections()
['DEFAULT', 'cors']
>>> 'cors' in conf
True
>>> conf.has_option('DEFAULT', 'share_topic')
True
>>> conf.get("DEFAULT", "share_topic")
"manila-share"
>>> conf.get("DEFAULT", "enable_v2_api")
"false"
>>> conf.getboolean("DEFAULT", "enable_v2_api")
False
>>> conf.getint("DEFAULT", "osapi_max_limit")
1000
"""
from .. import parser, IniConfigFile
from insights.specs import manila_conf
@parser(manila_conf)
class ManilaConf(IniConfigFile):
    """
    Manila configuration parser class, based on the ``IniConfigFile`` class.

    All parsing behaviour is inherited from ``IniConfigFile``; this class
    only exists to bind the ``manila_conf`` spec via the ``@parser``
    decorator.
    """
    pass
|
wcmitchell/insights-core
|
insights/parsers/manila_conf.py
|
Python
|
apache-2.0
| 1,598 | 0.001252 |
#!/usr/bin/env python
"""
This module reads all Gerber and Excellon files and stores the
data for each job.
--------------------------------------------------------------------
This program is licensed under the GNU General Public License (GPL)
Version 3. See http://www.fsf.org for details of the license.
Rugged Circuits LLC
http://ruggedcircuits.com/gerbmerge
"""
import sys
import re
import string
import __builtin__
import copy
import types
import aptable
import config
import makestroke
import amacro
import geometry
import util
# Parsing Gerber/Excellon files is currently very brittle. A more robust
# RS274X/Excellon parser would be a good idea and allow this program to work
# robustly with more than just Eagle CAM files.
# Reminder to self:
#
# D01 -- move and draw line with exposure on
# D02 -- move with exposure off
# D03 -- flash aperture
# Patterns for Gerber RS274X file interpretation
apdef_pat = re.compile(r'^%AD(D\d+)([^*$]+)\*%$') # Aperture definition
apmdef_pat = re.compile(r'^%AM([^*$]+)\*$') # Aperture macro definition
comment_pat = re.compile(r'G0?4[^*]*\*') # Comment (GerbTool comment omits the 0)
tool_pat = re.compile(r'(D\d+)\*') # Aperture selection
gcode_pat = re.compile(r'G(\d{1,2})\*?') # G-codes
drawXY_pat = re.compile(r'X([+-]?\d+)Y([+-]?\d+)D0?([123])\*') # Drawing command
drawX_pat = re.compile(r'X([+-]?\d+)D0?([123])\*') # Drawing command, Y is implied
drawY_pat = re.compile(r'Y([+-]?\d+)D0?([123])\*') # Drawing command, X is implied
format_pat = re.compile(r'%FS(L|T)?(A|I)(N\d+)?(X\d\d)(Y\d\d)\*%') # Format statement
layerpol_pat = re.compile(r'^%LP[CD]\*%') # Layer polarity (D=dark, C=clear)
# Circular interpolation drawing commands (from Protel)
cdrawXY_pat = re.compile(r'X([+-]?\d+)Y([+-]?\d+)I([+-]?\d+)J([+-]?\d+)D0?([123])\*')
cdrawX_pat = re.compile(r'X([+-]?\d+)I([+-]?\d+)J([+-]?\d+)D0?([123])\*') # Y is implied
cdrawY_pat = re.compile(r'Y([+-]?\d+)I([+-]?\d+)J([+-]?\d+)D0?([123])\*') # X is implied
# Lines matching any pattern below are recognized but deliberately skipped
# by the Gerber parser.
IgnoreList = ( \
  # These are for Eagle, and RS274X files in general
  re.compile(r'^%OFA0B0\*%$'),
  re.compile(r'^%IPPOS\*%'),
  re.compile(r'^%AMOC8\*$'), # Eagle's octagon defined by macro with a $1 parameter
  re.compile(r'^5,1,8,0,0,1\.08239X\$1,22\.5\*$'), # Eagle's octagon, 22.5 degree rotation
  re.compile(r'^5,1,8,0,0,1\.08239X\$1,0\.0\*$'), # Eagle's octagon, 0.0 degree rotation
  re.compile(r'^\*?%$'),
  re.compile(r'^M0?2\*$'),
  # These additional ones are for Orcad Layout, PCB, Protel, etc.
  re.compile(r'\*'), # Empty statement
  re.compile(r'^%IN.*\*%'),
  re.compile(r'^%ICAS\*%'), # Not in RS274X spec.
  re.compile(r'^%MOIN\*%'),
  re.compile(r'^%ASAXBY\*%'),
  re.compile(r'^%AD\*%'), # GerbTool empty aperture definition
  re.compile(r'^%LN.*\*%') # Layer name
  )
# Patterns for Excellon interpretation
xtool_pat = re.compile(r'^(T\d+)$') # Tool selection
xydraw_pat = re.compile(r'^X([+-]?\d+)Y([+-]?\d+)$') # Plunge command
xdraw_pat = re.compile(r'^X([+-]?\d+)$') # Plunge command, repeat last Y value
ydraw_pat = re.compile(r'^Y([+-]?\d+)$') # Plunge command, repeat last X value
xtdef_pat = re.compile(r'^(T\d+)(?:F\d+)?(?:S\d+)?C([0-9.]+)$') # Tool+diameter definition with optional
                                                                # feed/speed (for Protel)
xtdef2_pat = re.compile(r'^(T\d+)C([0-9.]+)(?:F\d+)?(?:S\d+)?$') # Tool+diameter definition with optional
                                                                 # feed/speed at the end (for OrCAD)
xzsup_pat = re.compile(r'^INCH,([LT])Z$') # Leading/trailing zeros INCLUDED
# Excellon statements that are recognized but intentionally skipped.
XIgnoreList = ( \
  re.compile(r'^%$'),
  re.compile(r'^M30$'), # End of job
  re.compile(r'^M48$'), # Program header to first %
  re.compile(r'^M72$') # Inches
  )
# A Job is a single input board. It is expected to have:
# - a board outline file in RS274X format
# - several (at least one) Gerber files in RS274X format
# - a drill file in Excellon format
#
# The board outline and Excellon filenames must be given separately.
# The board outline file determines the extents of the job.
class Job:
    def __init__(self, name):
        """Create an empty job named ``name``.

        All data fields start empty; they are filled in later by
        parseGerber() and parseExcellon().
        """
        self.name = name
        # Minimum and maximum (X,Y) absolute co-ordinates encountered
        # in GERBER data only (not Excellon). Note that coordinates
        # are stored in hundred-thousandsths of an inch so 9999999 is 99.99999
        # inches.
        self.maxx = self.maxy = -9999999 # in the case all coordinates are < 0, this will prevent maxx and maxy from defaulting to 0
        self.minx = self.miny = 9999999
        # Aperture translation table relative to GAT. This dictionary
        # has as each key a layer name for the job. Each key's value
        # is itself a dictionary where each key is an aperture in the file.
        # The value is the key in the GAT. Example:
        #   apxlat['TopCopper']['D10'] = 'D12'
        #   apxlat['TopCopper']['D11'] = 'D15'
        #   apxlat['BottomCopper']['D10'] = 'D15'
        self.apxlat = {}
        # Aperture macro translation table relative to GAMT. This dictionary
        # has as each key a layer name for the job. Each key's value
        # is itself a dictionary where each key is an aperture macro name in the file.
        # The value is the key in the GAMT. Example:
        #   apxlat['TopCopper']['THD10X'] = 'M1'
        #   apxlat['BottomCopper']['AND10'] = 'M5'
        self.apmxlat = {}
        # Commands are one of:
        #   A. strings for:
        #      - aperture changes like "D12"
        #      - G-code commands like "G36"
        #      - RS-274X commands like "%LPD*%" that begin with '%'
        #   B. (X,Y,D) triples comprising X,Y integers in the range 0 through 999999
        #      and draw commands that are either D01, D02, or D03. The character
        #      D in the triple above is the integer 1, 2, or 3.
        #   C. (X,Y,I,J,D,s) 6-tuples comprising X,Y,I,J integers in the range 0 through 999999
        #      and D as with (X,Y,D) triples. The 's' integer is non-zero to indicate that
        #      the (I,J) tuple is a SIGNED offset (for multi-quadrant circular interpolation)
        #      else the tuple is unsigned.
        #
        # This variable is, as for apxlat, a dictionary keyed by layer name.
        self.commands = {}
        # This dictionary stores all GLOBAL apertures actually needed by this
        # layer, i.e., apertures specified prior to draw commands. The dictionary
        # is indexed by layer name, and each dictionary entry is a list of aperture
        # code strings, like 'D12'. This dictionary helps us to figure out the
        # minimum number of apertures that need to be written out in the Gerber
        # header of the merged file. Once again, the list of apertures refers to
        # GLOBAL aperture codes in the GAT, not ones local to this layer.
        self.apertures = {}
        # Excellon commands are grouped by tool number in a dictionary.
        # This is to help sorting all jobs and writing out all plunge
        # commands for a single tool.
        #
        # The key to this dictionary is the full tool name, e.g., T03
        # as a string. Each command is an (X,Y) integer tuple.
        self.xcommands = {}
        # This is a dictionary mapping LOCAL tool names (e.g., T03) to diameters
        # in inches for THIS JOB. This dictionary will be initially empty
        # for old-style Excellon files with no embedded tool sizes. The
        # main program will construct this dictionary from the global tool
        # table in this case, once all jobs have been read in.
        self.xdiam = {}
        # This is a mapping from tool name to diameter for THIS JOB
        self.ToolList = None
        # How many times to replicate this job if using auto-placement
        self.Repeat = 1
        # How many decimal digits of precision there are in the Excellon file.
        # A value greater than 0 overrides the global ExcellonDecimals setting
        # for this file, allowing jobs with different Excellon decimal settings
        # to be combined.
        self.ExcellonDecimals = 0 # 0 means global value prevails
def width_in(self):
"Return width in INCHES"
return float(self.maxx-self.minx)*0.00001
def height_in(self):
"Return height in INCHES"
return float(self.maxy-self.miny)*0.00001
def jobarea(self):
return self.width_in()*self.height_in()
def maxdimension(self):
return max(self.width_in(),self.height_in())
def mincoordinates(self):
"Return minimum X and Y coordinate"
return self.minx, self.miny
    def fixcoordinates(self, x_shift, y_shift):
        "Add x_shift and y_shift to all coordinates in the job"
        # NOTE: the shifts are given in Gerber units (hundred-thousandths of
        # an inch).  Excellon data is stored in ten-thousandths, which is why
        # the Excellon loop below divides the shifts by 10.
        # Shift maximum and minimum coordinates
        self.minx += x_shift
        self.maxx += x_shift
        self.miny += y_shift
        self.maxy += y_shift
        # Shift all commands
        for layer, command in self.commands.iteritems():
            # Loop through each command in each layer
            for index in range( len(command) ):
                c = command[index]
                # Shift X and Y coordinate of command
                if type(c) == types.TupleType: ## ensure that command is of type tuple
                    command_list = list(c) ## convert tuple to list
                    if (type( command_list[0] ) == types.IntType) \
                    and (type( command_list[1] ) == types.IntType): ## ensure that first two elemenst are integers
                        command_list[0] += x_shift
                        command_list[1] += y_shift
                    command[index] = tuple(command_list) ## convert list back to tuple
            self.commands[layer] = command ## set modified command
        # Shift all excellon commands
        for tool, command in self.xcommands.iteritems():
            # Loop through each command in each layer
            for index in range( len(command) ):
                c = command[index]
                # Shift X and Y coordinate of command
                command_list = list(c) ## convert tuple to list
                if ( type( command_list[0] ) == types.IntType ) \
                and ( type( command_list[1] ) == types.IntType ): ## ensure that first two elemenst are integers
                    command_list[0] += x_shift / 10
                    command_list[1] += y_shift / 10
                command[index] = tuple(command_list) ## convert list back to tuple
            self.xcommands[tool] = command ## set modified command
    def parseGerber(self, fullname, layername, updateExtents = 0):
        """Read RS274X Gerber data from ``fullname`` into layer ``layername``.

        Populates self.commands/self.apertures/self.apxlat/self.apmxlat for
        the layer, translating local aperture and aperture-macro codes via
        the global aperture table (config.GAT) and global aperture macro
        table (config.GAMT).  When ``updateExtents`` is true, the job's
        min/max X/Y extents are updated from the draw commands encountered.
        """
        GAT = config.GAT
        GAMT = config.GAMT
        # First construct reverse GAT/GAMT, mapping definition to code
        RevGAT = config.buildRevDict(GAT) # RevGAT[hash] = aperturename
        RevGAMT = config.buildRevDict(GAMT) # RevGAMT[hash] = aperturemacroname
        #print 'Reading data from %s ...' % fullname
        fid = file(fullname, 'rt')
        currtool = None
        self.apxlat[layername] = {}
        self.apmxlat[layername] = {}
        self.commands[layername] = []
        self.apertures[layername] = []
        # These divisors are used to scale (X,Y) co-ordinates. We store
        # everything as integers in hundred-thousandths of an inch (i.e., M.5
        # format). If we get something in M.4 format, we must multiply by
        # 10. If we get something in M.6 format we must divide by 10, etc.
        x_div = 1.0
        y_div = 1.0
        # Drawing commands can be repeated with X or Y omitted if they are
        # the same as before. These variables store the last X/Y value as
        # integers in hundred-thousandths of an inch.
        last_x = last_y = 0
        # Last modal G-code. Some G-codes introduce "modes", such as circular interpolation
        # mode, and we want to remember what mode we're in. We're interested in:
        #   G01 -- linear interpolation, cancels all circular interpolation modes
        #   G36 -- Turn on polygon area fill
        #   G37 -- Turn off polygon area fill
        last_gmode = 1 # G01 by default, linear interpolation
        # We want to know whether to do signed (G75) or unsigned (G74) I/J offsets. These
        # modes are independent of G01/G02/G03, e.g., Protel will issue multiple G03/G01
        # codes all in G75 mode.
        #   G74 -- Single-quadrant circular interpolation (disables multi-quadrant interpolation)
        #          G02/G03 codes set clockwise/counterclockwise arcs in a single quadrant only
        #          using X/Y/I/J commands with UNSIGNED (I,J).
        #   G75 -- Multi-quadrant circular interpolation --> X/Y/I/J with signed (I,J)
        #          G02/G03 codes set clockwise/counterclockwise arcs in all 4 quadrants
        #          using X/Y/I/J commands with SIGNED (I,J).
        circ_signed = True # Assume G75...make sure this matches canned header we write out
        # If the very first flash/draw is a shorthand command (i.e., without an Xxxxx or Yxxxx)
        # component then we don't really "see" the first point X00000Y00000. To account for this
        # we use the following Boolean flag as well as the isLastShorthand flag during parsing
        # to manually insert the point X000000Y00000 into the command stream.
        firstFlash = True
        for line in fid:
            # Get rid of CR characters (0x0D) and leading/trailing blanks
            line = string.replace(line, '\x0D', '').strip()
            # Old location of format_pat search. Now moved down into the sub-line parse loop below.
            # RS-274X statement? If so, echo it. Currently, only the "LP" statement is expected
            # (from Protel, of course). These will be distinguished from D-code and G-code
            # commands by the fact that the first character of the string is '%'.
            match = layerpol_pat.match(line)
            if match:
                self.commands[layername].append(line)
                continue
            # See if this is an aperture definition, and if so, map it.
            match = apdef_pat.match(line)
            if match:
                if currtool:
                    raise RuntimeError, "File %s has an aperture definition that comes after drawing commands." % fullname
                A = aptable.parseAperture(line, self.apmxlat[layername])
                if not A:
                    raise RuntimeError, "Unknown aperture definition in file %s" % fullname
                hash = A.hash()
                if not RevGAT.has_key(hash):
                    #print line
                    #print self.apmxlat
                    #print RevGAT
                    raise RuntimeError, 'File %s has aperture definition "%s" not in global aperture table.' % (fullname, hash)
                # This says that all draw commands with this aperture code will
                # be replaced by aperture self.apxlat[layername][code].
                self.apxlat[layername][A.code] = RevGAT[hash]
                continue
            # Ignore %AMOC8* from Eagle for now as it uses a macro parameter, which
            # is not yet supported in GerbMerge.
            if line[:7]=='%AMOC8*':
                continue
            # See if this is an aperture macro definition, and if so, map it.
            M = amacro.parseApertureMacro(line,fid)
            if M:
                if currtool:
                    raise RuntimeError, "File %s has an aperture macro definition that comes after drawing commands." % fullname
                hash = M.hash()
                if not RevGAMT.has_key(hash):
                    raise RuntimeError, 'File %s has aperture macro definition not in global aperture macro table:\n%s' % (fullname, hash)
                # This says that all aperture definition commands that reference this macro name
                # will be replaced by aperture macro name self.apmxlat[layername][macroname].
                self.apmxlat[layername][M.name] = RevGAMT[hash]
                continue
            # From this point on we may have more than one match on this line, e.g.:
            #   G54D11*X22400Y22300D02*X22500Y22200D01*
            sub_line = line
            while sub_line:
                # Handle "comment" G-codes first
                match = comment_pat.match(sub_line)
                if match:
                    sub_line = sub_line[match.end():]
                    continue
                # See if this is a format statement, and if so, map it. In version 1.3 this was moved down
                # from the line-only parse checks above (see comment) to handle OrCAD lines like
                # G74*%FSLAN2X34Y34*%
                match = format_pat.match(sub_line) # Used to be format_pat.search
                if match:
                    sub_line = sub_line[match.end():]
                    for item in match.groups():
                        if item is None: continue # Optional group didn't match
                        if item[0] in "LA": # omit leading zeroes and absolute co-ordinates
                            continue
                        if item[0]=='T': # omit trailing zeroes
                            raise RuntimeError, "Trailing zeroes not supported in RS274X files"
                        if item[0]=='I': # incremental co-ordinates
                            raise RuntimeError, "Incremental co-ordinates not supported in RS274X files"
                        if item[0]=='N': # Maximum digits for N* commands...ignore it
                            continue
                        if item[0]=='X': # M.N specification for X-axis.
                            fracpart = int(item[2])
                            x_div = 10.0**(5-fracpart)
                        if item[0]=='Y': # M.N specification for Y-axis.
                            fracpart = int(item[2])
                            y_div = 10.0**(5-fracpart)
                    continue
                # Parse and interpret G-codes
                match = gcode_pat.match(sub_line)
                if match:
                    sub_line = sub_line[match.end():]
                    gcode = int(match.group(1))
                    # Determine if this is a G-Code that should be ignored because it has no effect
                    # (e.g., G70 specifies "inches" which is already in effect).
                    if gcode in [54, 70, 90]:
                        continue
                    # Determine if this is a G-Code that we have to emit because it matters.
                    if gcode in [1, 2, 3, 36, 37, 74, 75]:
                        self.commands[layername].append("G%02d" % gcode)
                        # Determine if this is a G-code that sets a new mode
                        if gcode in [1, 36, 37]:
                            last_gmode = gcode
                        # Remember last G74/G75 code so we know whether to do signed or unsigned I/J
                        # offsets.
                        if gcode==74:
                            circ_signed = False
                        elif gcode==75:
                            circ_signed = True
                        continue
                    raise RuntimeError, "G-Code 'G%02d' is not supported" % gcode
                # See if this is a tool change (aperture change) command
                match = tool_pat.match(sub_line)
                if match:
                    currtool = match.group(1)
                    # Protel likes to issue random D01, D02, and D03 commands instead of aperture
                    # codes. We can ignore D01 because it simply means to move to the current location
                    # while drawing. Well, that's drawing a point. We can ignore D02 because it means
                    # to move to the current location without drawing. Truly pointless. We do NOT want
                    # to ignore D03 because it implies a flash. Protel very inefficiently issues a D02
                    # move to a location without drawing, then a single-line D03 to flash. However, a D02
                    # terminates a polygon in G36 mode, so keep D02's in this case.
                    if currtool=='D01' or (currtool=='D02' and (last_gmode != 36)):
                        sub_line = sub_line[match.end():]
                        continue
                    if (currtool == 'D03') or (currtool=='D02' and (last_gmode == 36)):
                        self.commands[layername].append(currtool)
                        sub_line = sub_line[match.end():]
                        continue
                    # Map it using our translation table
                    if not self.apxlat[layername].has_key(currtool):
                        raise RuntimeError, 'File %s has tool change command "%s" with no corresponding translation' % (fullname, currtool)
                    currtool = self.apxlat[layername][currtool]
                    # Add it to the list of things to write out
                    self.commands[layername].append(currtool)
                    # Add it to the list of all apertures needed by this layer
                    self.apertures[layername].append(currtool)
                    # Move on to next match, if any
                    sub_line = sub_line[match.end():]
                    continue
                # Is it a simple draw command?
                I = J = None # For circular interpolation drawing commands
                match = drawXY_pat.match(sub_line)
                isLastShorthand = False # By default assume we don't make use of last_x and last_y
                if match:
                    x, y, d = map(__builtin__.int, match.groups())
                else:
                    match = drawX_pat.match(sub_line)
                    if match:
                        x, d = map(__builtin__.int, match.groups())
                        y = last_y
                        isLastShorthand = True # Indicate we're making use of last_x/last_y
                    else:
                        match = drawY_pat.match(sub_line)
                        if match:
                            y, d = map(__builtin__.int, match.groups())
                            x = last_x
                            isLastShorthand = True # Indicate we're making use of last_x/last_y
                # Maybe it's a circular interpolation draw command with IJ components
                if match is None:
                    match = cdrawXY_pat.match(sub_line)
                    if match:
                        x, y, I, J, d = map(__builtin__.int, match.groups())
                    else:
                        match = cdrawX_pat.match(sub_line)
                        if match:
                            x, I, J, d = map(__builtin__.int, match.groups())
                            y = last_y
                            isLastShorthand = True # Indicate we're making use of last_x/last_y
                        else:
                            match = cdrawY_pat.match(sub_line)
                            if match:
                                y, I, J, d = map(__builtin__.int, match.groups())
                                x = last_x
                                isLastShorthand = True # Indicate we're making use of last_x/last_y
                if match:
                    if currtool is None:
                        # It's OK if this is an exposure-off movement command (specified with D02).
                        # It's also OK if we're in the middle of a G36 polygon fill as we're only defining
                        # the polygon extents.
                        if (d != 2) and (last_gmode != 36):
                            raise RuntimeError, 'File %s has draw command %s with no aperture chosen' % (fullname, sub_line)
                    # Save last_x/y BEFORE scaling to 2.5 format else subsequent single-ordinate
                    # flashes (e.g., Y with no X) will be scaled twice!
                    last_x = x
                    last_y = y
                    # Corner case: if this is the first flash/draw and we are using shorthand (i.e., missing Xxxx
                    # or Yxxxxx) then prepend the point X0000Y0000 into the commands as it is actually the starting
                    # point of our layer. We prepend the command X0000Y0000D02, i.e., a move to (0,0) without drawing.
                    if (isLastShorthand and firstFlash):
                        self.commands[layername].append((0,0,2))
                        if updateExtents:
                            self.minx = min(self.minx,0)
                            self.maxx = max(self.maxx,0)
                            self.miny = min(self.miny,0)
                            self.maxy = max(self.maxy,0)
                    x = int(round(x*x_div))
                    y = int(round(y*y_div))
                    if I is not None:
                        I = int(round(I*x_div))
                        J = int(round(J*y_div))
                        self.commands[layername].append((x,y,I,J,d,circ_signed))
                    else:
                        self.commands[layername].append((x,y,d))
                    firstFlash = False
                    # Update dimensions...this is complicated for circular interpolation commands
                    # that span more than one quadrant. For now, we ignore this problem since users
                    # should be using a border layer to indicate extents.
                    if updateExtents:
                        if x < self.minx: self.minx = x
                        if x > self.maxx: self.maxx = x
                        if y < self.miny: self.miny = y
                        if y > self.maxy: self.maxy = y
                    # Move on to next match, if any
                    sub_line = sub_line[match.end():]
                    continue
                # If it's none of the above, it had better be on our ignore list.
                for pat in IgnoreList:
                    match = pat.match(sub_line)
                    if match:
                        break
                else:
                    raise RuntimeError, 'File %s has uninterpretable line:\n %s' % (fullname, line)
                sub_line = sub_line[match.end():]
            # end while still things to match on this line
        # end of for each line in file
        fid.close()
        if 0:
            print layername
            print self.commands[layername]
def parseExcellon(self, fullname):
    """Parse an Excellon drill file.

    Fills self.xdiam (tool code -> diameter in inches) and self.xcommands
    (tool code -> list of (x, y) plunge positions stored in 2.4 units,
    i.e., ten-thousandths of an inch).
    """
    #print 'Reading data from %s ...' % fullname

    fid = file(fullname, 'rt')
    currtool = None
    suppress_leading = True     # Suppress leading zeros by default, equivalent to 'INCH,TZ'

    # We store Excellon X/Y data in ten-thousandths of an inch. If the Config
    # option ExcellonDecimals is not 4, we must adjust the values read from the
    # file by a divisor to convert to ten-thousandths. This is only used in
    # leading-zero suppression mode. In trailing-zero suppression mode, we must
    # trailing-zero-pad all input integers to M+N digits (e.g., 6 digits for 2.4 mode)
    # specified by the 'zeropadto' variable.
    if self.ExcellonDecimals > 0:
        divisor = 10.0**(4 - self.ExcellonDecimals)
        zeropadto = 2+self.ExcellonDecimals
    else:
        divisor = 10.0**(4 - config.Config['excellondecimals'])
        zeropadto = 2+config.Config['excellondecimals']

    # Protel takes advantage of optional X/Y components when the previous one is the same,
    # so we have to remember them.
    last_x = last_y = 0

    # Helper function to convert X/Y strings into integers in units of
    # ten-thousandth of an inch. 'divisor' and 'zeropadto' are bound as
    # defaults; 'suppress_leading' is read through the closure so it tracks
    # INCH,LZ / INCH,TZ directives seen later in the file.
    def xln2tenthou(L, divisor=divisor, zeropadto=zeropadto):
        V = []
        for s in L:
            if not suppress_leading:
                s = s + '0'*(zeropadto-len(s))
            V.append(int(round(int(s)*divisor)))
        return tuple(V)

    for line in fid.xreadlines():
        # Get rid of CR characters
        line = string.replace(line, '\x0D', '')

        # Protel likes to embed comment lines beginning with ';'
        if line[0]==';':
            continue

        # Check for leading/trailing zeros included ("INCH,LZ" or "INCH,TZ")
        match = xzsup_pat.match(line)
        if match:
            if match.group(1)=='L':
                # LZ --> Leading zeros INCLUDED
                suppress_leading = False
            else:
                # TZ --> Trailing zeros INCLUDED
                suppress_leading = True
            continue

        # See if a tool is being defined. First try to match with tool name+size
        match = xtdef_pat.match(line)           # xtdef_pat and xtdef2_pat expect tool name and diameter
        if match is None:                       # but xtdef_pat expects optional feed/speed between T and C
            match = xtdef2_pat.match(line)      # and xtdef_2pat expects feed/speed at the end
        if match:
            currtool, diam = match.groups()
            try:
                diam = float(diam)
            except:
                raise RuntimeError, "File %s has illegal tool diameter '%s'" % (fullname, diam)

            # Canonicalize tool number because Protel (of course) sometimes specifies it
            # as T01 and sometimes as T1. We canonicalize to T01.
            currtool = 'T%02d' % int(currtool[1:])

            if self.xdiam.has_key(currtool):
                raise RuntimeError, "File %s defines tool %s more than once" % (fullname, currtool)
            self.xdiam[currtool] = diam
            continue

        # Didn't match TxxxCyyy. It could be a tool change command 'Tdd'.
        match = xtool_pat.match(line)
        if match:
            currtool = match.group(1)

            # Canonicalize tool number because Protel (of course) sometimes specifies it
            # as T01 and sometimes as T1. We canonicalize to T01.
            currtool = 'T%02d' % int(currtool[1:])

            # Diameter will be obtained from embedded tool definition, local tool list or if not found, the global tool list
            try:
                diam = self.xdiam[currtool]
            except:
                if self.ToolList:
                    try:
                        diam = self.ToolList[currtool]
                    except:
                        raise RuntimeError, "File %s uses tool code %s that is not defined in the job's tool list" % (fullname, currtool)
                else:
                    try:
                        diam = config.DefaultToolList[currtool]
                    except:
                        #print config.DefaultToolList
                        raise RuntimeError, "File %s uses tool code %s that is not defined in default tool list" % (fullname, currtool)

            self.xdiam[currtool] = diam
            continue

        # Plunge command? Try combined X/Y first, then X-only (Y repeats),
        # then Y-only (X repeats) -- Protel omits unchanged coordinates.
        match = xydraw_pat.match(line)
        if match:
            x, y = xln2tenthou(match.groups())
        else:
            match = xdraw_pat.match(line)
            if match:
                x = xln2tenthou(match.groups())[0]
                y = last_y
            else:
                match = ydraw_pat.match(line)
                if match:
                    y = xln2tenthou(match.groups())[0]
                    x = last_x

        if match:
            if currtool is None:
                raise RuntimeError, 'File %s has plunge command without previous tool selection' % fullname

            try:
                self.xcommands[currtool].append((x,y))
            except KeyError:
                self.xcommands[currtool] = [(x,y)]

            last_x = x
            last_y = y
            continue

        # It had better be an ignorable
        for pat in XIgnoreList:
            if pat.match(line):
                break
        else:
            raise RuntimeError, 'File %s has uninterpretable line:\n  %s' % (fullname, line)
    # NOTE(review): unlike the Gerber parser above, 'fid' is never closed here
    # (the Gerber parser calls fid.close()); the handle is released only when
    # garbage-collected -- confirm whether an explicit close was intended.
def hasLayer(self, layername):
    """Return True if Gerber drawing commands were parsed for the given layer name."""
    # 'in' replaces dict.has_key(), which is deprecated in Python 2 and
    # removed in Python 3; behavior is identical.
    return layername in self.commands
def writeGerber(self, fid, layername, Xoff, Yoff):
    "Write out the data such that the lower-left corner of this job is at the given (X,Y) position, in inches"

    # Maybe we don't have this layer
    if not self.hasLayer(layername): return

    # First convert given inches to 2.5 co-ordinates
    X = int(round(Xoff/0.00001))
    Y = int(round(Yoff/0.00001))

    # Now calculate displacement for each position so that we end up at specified origin
    DX = X - self.minx
    DY = Y - self.miny

    # Rock and roll. First, write out a dummy flash using code D02
    # (exposure off). This prevents an unintentional draw from the end
    # of one job to the beginning of the next when a layer is repeated
    # due to panelizing.
    fid.write('X%07dY%07dD02*\n' % (X, Y))
    for cmd in self.commands[layername]:
        if type(cmd) is types.TupleType:
            # Drawing command: 3-tuple is (X, Y, D-code); 6-tuple is a
            # circular-interpolation command (X, Y, I, J, D-code, signed).
            if len(cmd)==3:
                x, y, d = cmd
                fid.write('X%07dY%07dD%02d*\n' % (x+DX, y+DY, d))
            else:
                x, y, I, J, d, s = cmd
                fid.write('X%07dY%07dI%07dJ%07dD%02d*\n' % (x+DX, y+DY, I, J, d))   # I,J are relative
        else:
            # It's an aperture change, G-code, or RS274-X command that begins with '%'. If
            # it's an aperture code, the aperture has already been translated
            # to the global aperture table during the parse phase.
            if cmd[0]=='%':
                fid.write('%s\n' % cmd)     # The command already has a * in it (e.g., "%LPD*%")
            else:
                fid.write('%s*\n' % cmd)
def findTools(self, diameter):
    "Find the tools, if any, with the given diameter in inches. There may be more than one!"
    # A job may define several tool codes with the same physical diameter;
    # collect every matching code in dictionary iteration order.
    return [tool for tool, diam in self.xdiam.items() if diam == diameter]
def writeExcellon(self, fid, diameter, Xoff, Yoff):
    "Write out the data such that the lower-left corner of this job is at the given (X,Y) position, in inches"

    # First convert given inches to 2.4 co-ordinates. Note that Gerber is 2.5 (as of GerbMerge 1.2)
    # and our internal Excellon representation is 2.4 as of GerbMerge
    # version 0.91. We use X,Y to calculate DX,DY in 2.4 units (i.e., with a
    # resolution of 0.0001".
    X = int(round(Xoff/0.00001))    # First work in 2.5 format to match Gerber
    Y = int(round(Yoff/0.00001))

    # Now calculate displacement for each position so that we end up at specified origin
    DX = X - self.minx
    DY = Y - self.miny

    # Now round down to 2.4 format
    DX = int(round(DX/10.0))
    DY = int(round(DY/10.0))

    ltools = self.findTools(diameter)

    # Excellon output format depends on the configured zero-suppression style.
    if config.Config['excellonleadingzeros']:
        fmtstr = 'X%06dY%06d\n'
    else:
        fmtstr = 'X%dY%d\n'

    # Boogie: emit every plunge position for every tool of this diameter.
    for ltool in ltools:
        if self.xcommands.has_key(ltool):
            for cmd in self.xcommands[ltool]:
                x, y = cmd
                fid.write(fmtstr % (x+DX, y+DY))
def writeDrillHits(self, fid, diameter, toolNum, Xoff, Yoff):
    """Write a drill hit pattern. diameter is tool diameter in inches, while toolNum is
    an integer index into strokes.DrillStrokeList"""

    # First convert given inches to 2.5 co-ordinates
    X = int(round(Xoff/0.00001))
    Y = int(round(Yoff/0.00001))

    # Now calculate displacement for each position so that we end up at specified origin
    DX = X - self.minx
    DY = Y - self.miny

    # Do NOT round down to 2.4 format. These drill hits are in Gerber 2.5 format, not
    # Excellon plunge commands.
    ltools = self.findTools(diameter)

    for ltool in ltools:
        if self.xcommands.has_key(ltool):
            for cmd in self.xcommands[ltool]:
                x, y = cmd
                # x/y are stored in 2.4 Excellon units; multiply by 10 to
                # reach the 2.5 Gerber units expected by makestroke.
                makestroke.drawDrillHit(fid, 10*x+DX, 10*y+DY, toolNum)
def aperturesAndMacros(self, layername):
    "Return dictionaries whose keys are all necessary aperture names and macro names for this layer"

    GAT=config.GAT

    if self.apertures.has_key(layername):
        # Keys-only dictionaries are used as sets (values are all None).
        apdict = {}.fromkeys(self.apertures[layername])
        # For Macro apertures, dimx holds the macro name (see GAT usage below).
        apmlist = [GAT[ap].dimx for ap in self.apertures[layername] if GAT[ap].apname=='Macro']
        apmdict = {}.fromkeys(apmlist)

        return apdict, apmdict
    else:
        return {}, {}
def makeLocalApertureCode(self, layername, AP):
    "Find or create a layer-specific aperture code to represent the global aperture given"
    # self.apxlat[layername] maps local D-codes -> global aperture codes.
    # If AP's global code is not yet represented, mint the next unused local
    # D-code for it. Nothing is done (and nothing returned) if it already is.
    if AP.code not in self.apxlat[layername].values():
        lastCode = aptable.findHighestApertureCode(self.apxlat[layername].keys())
        localCode = 'D%d' % (lastCode+1)
        self.apxlat[layername][localCode] = AP.code
def inBorders(self, x, y):
    """Return True when point (x, y) lies within this job's extents, borders included."""
    # Chained comparisons are equivalent to the four and-ed range tests.
    return self.minx <= x <= self.maxx and self.miny <= y <= self.maxy
def trimGerberLayer(self, layername):
    "Modify drawing commands that are outside job dimensions"

    newcmds = []
    lastInBorders = True
    lastx, lasty, lastd = self.minx, self.miny, 2   # (minx,miny,exposure off)
    bordersRect = (self.minx, self.miny, self.maxx, self.maxy)
    lastAperture = None

    for cmd in self.commands[layername]:
        if type(cmd) == types.TupleType:
            # It is a data command: tuple (X, Y, D), all integers, or (X, Y, I, J, D), all integers.
            if len(cmd)==3:
                x, y, d = cmd
                # I=J=None   # In case we support circular interpolation in the future
            else:
                # We don't do anything with circular interpolation for now, so just issue
                # the command and be done with it.
                # x, y, I, J, d, s = cmd
                newcmds.append(cmd)
                continue

            newInBorders = self.inBorders(x,y)

            # Flash commands are easy (for now). If they're outside borders,
            # ignore them. There's no need to consider the previous command.
            #
            # What should we do if the flash is partially inside and partially
            # outside the border? Ideally, define a macro that constructs the
            # part of the flash that is inside the border. Practically, you've
            # got to be kidding.
            #
            # Actually, it's not that tough for rectangle apertures. We identify
            # the intersection rectangle of the aperture and the bounding box,
            # determine the new rectangular aperture required along with the
            # new flash point, add the aperture to the GAT if necessary, and
            # make the change. Spiffy.
            #
            # For circular interpolation commands, it's definitely harder since
            # we have to construct arcs that are a subset of the original arc.
            #
            # For polygon fills, we similarly have to break up the polygon into
            # sub-polygons that are contained within the allowable extents.
            #
            # Both circular interpolation and polygon fills are a) uncommon,
            # and b) hard to handle. The current version of GerbMerge does not
            # handle these cases.
            if d==3:
                # NOTE(review): lastAperture may still be None here if no
                # aperture change preceded the first flash -- confirm inputs
                # always select an aperture first.
                if lastAperture.isRectangle():
                    apertureRect = lastAperture.rectangleAsRect(x,y)

                    if geometry.isRect1InRect2(apertureRect, bordersRect):
                        newcmds.append(cmd)
                    else:
                        newRect = geometry.intersectExtents(apertureRect, bordersRect)

                        if newRect:
                            newRectWidth = geometry.rectWidth(newRect)
                            newRectHeight = geometry.rectHeight(newRect)
                            newX, newY = geometry.rectCenter(newRect)

                            # We arbitrarily remove all flashes that lead to rectangles
                            # with a width or length less than 1 mil (10 Gerber units).
                            # Should we make this configurable?
                            if min(newRectWidth, newRectHeight) >= 10:
                                # Construct an Aperture that is a Rectangle of dimensions (newRectWidth,newRectHeight)
                                newAP = aptable.Aperture(aptable.Rectangle, 'D??', \
                                          util.gerb2in(newRectWidth), util.gerb2in(newRectHeight))
                                global_code = aptable.findOrAddAperture(newAP)

                                # We need an unused local aperture code to correspond to this newly-created global one.
                                self.makeLocalApertureCode(layername, newAP)

                                # Make sure to indicate that the new aperture is one that is used by this layer
                                if global_code not in self.apertures[layername]:
                                    self.apertures[layername].append(global_code)

                                # Switch to new aperture code, flash new aperture, switch back to previous aperture code
                                newcmds.append(global_code)
                                newcmds.append((newX, newY, 3))
                                newcmds.append(lastAperture.code)
                            else:
                                pass    # Ignore this flash...area in common is too thin
                        else:
                            pass        # Ignore this flash...no area in common
                elif self.inBorders(x, y):
                    # Aperture is not a rectangle and its center is somewhere within our
                    # borders. Flash it and ignore part outside borders (for now).
                    newcmds.append(cmd)
                else:
                    pass    # Ignore this flash

            # If this is a exposure off command, then it doesn't matter what the
            # previous command is. This command just updates the (X,Y) position
            # and sets the start point for a line draw to a new location.
            elif d==2:
                if self.inBorders(x, y):
                    newcmds.append(cmd)

            else:
                # This is an exposure on (draw line) command. Now things get interesting.
                # Regardless of what the last command was (draw, exposure off, flash), we
                # are planning on drawing a visible line using the current aperture from
                # the (lastx,lasty) position to the new (x,y) position. The cases are:
                #   A: (lastx,lasty) is outside borders, (x,y) is outside borders.
                #      (lastx,lasty) have already been eliminated. Just update (lastx,lasty)
                #      with new (x,y) and remove the new command too. There is one case which
                #      may be of concern, and that is when the line defined by (lastx,lasty)-(x,y)
                #      actually crosses through the job. In this case, we have to draw the
                #      partial line (x1,y1)-(x2,y2) where (x1,y1) and (x2,y2) lie on the
                #      borders. We will add 3 commands:
                #         X(x1)Y(y1)D02   # exposure off
                #         X(x2)Y(y2)D01   # exposure on
                #         X(x)Y(y)D02     # exposure off
                #
                #   B: (lastx,lasty) is outside borders, (x,y) is inside borders.
                #      We have to find the intersection of the line (lastx,lasty)-(x,y)
                #      with the borders and draw only the line segment (x1,y1)-(x,y):
                #         X(x1)Y(y1)D02   # exposure off
                #         X(x)Y(y)D01     # exposure on
                #
                #   C: (lastx,lasty) is inside borders, (x,y) is outside borders.
                #      We have to find the intersection of the line (lastx,lasty)-(x,y)
                #      with the borders and draw only the line segment (lastx,lasty)-(x1,y1):
                #      then update to the new position:
                #         X(x1)Y(y1)D01   # exposure on
                #         X(x)Y(y)D02     # exposure off
                #
                #   D: (lastx,lasty) is inside borders, (x,y) is inside borders. This is
                #      the most common and simplest case...just copy the command over:
                #         X(x)Y(y)D01     # exposure on
                #
                # All of the above are for linear interpolation. Circular interpolation
                # is ignored for now.
                if lastInBorders and newInBorders:      # Case D
                    newcmds.append(cmd)
                else:
                    # segmentXbox() returns a list of 0, 1, or 2 points describing the intersection
                    # points of the segment (lastx,lasty)-(x,y) with the box defined
                    # by lower-left corner (minx,miny) and upper-right corner (maxx,maxy).
                    pointsL = geometry.segmentXbox((lastx,lasty), (x,y), (self.minx,self.miny), (self.maxx,self.maxy))

                    if len(pointsL)==0:     # Case A, no intersection
                        # Both points are outside the box and there is no overlap with box.
                        d = 2   # Command is effectively removed since newcmds wasn't extended.
                                # Ensure "last command" is exposure off to reflect this.
                    elif len(pointsL)==1:   # Cases B and C
                        pt1 = pointsL[0]
                        if newInBorders:    # Case B
                            newcmds.append((pt1[0], pt1[1], 2))     # Go to intersection point, exposure off
                            newcmds.append(cmd)                     # Go to destination point, exposure on
                        else:               # Case C
                            newcmds.append((pt1[0], pt1[1], 1))     # Go to intersection point, exposure on
                            newcmds.append((x, y, 2))               # Go to destination point, exposure off
                            d = 2   # Make next 'lastd' represent exposure off
                    else:                   # Case A, two points of intersection
                        pt1 = pointsL[0]
                        pt2 = pointsL[1]
                        newcmds.append((pt1[0], pt1[1], 2))         # Go to first intersection point, exposure off
                        newcmds.append((pt2[0], pt2[1], 1))         # Draw to second intersection point, exposure on
                        newcmds.append((x, y, 2))                   # Go to destination point, exposure off
                        d = 2   # Make next 'lastd' represent exposure off

            # Remember this command's end state for the next iteration.
            # NOTE(review): 'lastd' is assigned but never read in this method.
            lastx, lasty, lastd = x, y, d
            lastInBorders = newInBorders
        else:
            # It's a string indicating an aperture change, G-code, or RS-274X
            # command (e.g., "D13", "G75", "%LPD*%")
            newcmds.append(cmd)
            if cmd[0]=='D' and int(cmd[1:])>=10:    # Don't interpret D01, D02, D03
                lastAperture = config.GAT[cmd]

    self.commands[layername] = newcmds
def trimGerber(self):
    """Clip the Gerber drawing commands of every layer to this job's extents."""
    for name in self.commands.keys():
        self.trimGerberLayer(name)
def trimExcellon(self):
    "Remove plunge commands that are outside job dimensions"
    keys = self.xcommands.keys()
    for toolname in keys:
        # Remember Excellon is 2.4 format while Gerber data is 2.5 format,
        # hence the factor of 10 before testing against the job borders.
        validList = [(x,y) for x,y in self.xcommands[toolname] if self.inBorders(10*x,10*y)]
        if validList:
            self.xcommands[toolname] = validList
        else:
            # No hits survive for this tool: drop both its command list and
            # its diameter entry so the tool disappears from the job.
            del self.xcommands[toolname]
            del self.xdiam[toolname]
# This class encapsulates a Job object, providing absolute
# positioning information.
class JobLayout:
    def __init__(self, job):
        # Wrapped Job; (x, y) is the absolute lower-left placement in inches,
        # unset until setPosition() is called.
        self.job = job
        self.x = None
        self.y = None

    def canonicalize(self):     # Must return a JobLayout object as a list
        return [self]

    def writeGerber(self, fid, layername):
        # Position must have been assigned before any output is written.
        assert self.x is not None
        self.job.writeGerber(fid, layername, self.x, self.y)

    def aperturesAndMacros(self, layername):
        return self.job.aperturesAndMacros(layername)

    def writeExcellon(self, fid, diameter):
        assert self.x is not None
        self.job.writeExcellon(fid, diameter, self.x, self.y)

    def writeDrillHits(self, fid, diameter, toolNum):
        assert self.x is not None
        self.job.writeDrillHits(fid, diameter, toolNum, self.x, self.y)

    def writeCutLines(self, fid, drawing_code, X1, Y1, X2, Y2):
        """Draw a board outline using the given aperture code"""
        # Returns non-zero (truthy) when coordinate x is NOT on panel edge X,
        # comparing at 1-mil resolution.
        def notEdge(x, X):
            return round(abs(1000*(x-X)))

        assert self.x and self.y

        radius = config.GAT[drawing_code].dimx/2.0

        # Start at lower-left, proceed clockwise
        x = self.x - radius
        y = self.y - radius

        left = notEdge(self.x, X1)
        right = notEdge(self.x+self.width_in(), X2)
        bot = notEdge(self.y, Y1)
        top = notEdge(self.y+self.height_in(), Y2)

        # Outline corners, inflated by the drawing aperture's radius.
        BL = ((x), (y))
        TL = ((x), (y+self.height_in()+2*radius))
        TR = ((x+self.width_in()+2*radius), (y+self.height_in()+2*radius))
        BR = ((x+self.width_in()+2*radius), (y))

        # Pull each side that coincides with a panel edge back in by the
        # aperture diameter so the stroke does not extend past the panel.
        if not left:
            BL = (BL[0]+2*radius, BL[1])
            TL = (TL[0]+2*radius, TL[1])
        if not top:
            TL = (TL[0], TL[1]-2*radius)
            TR = (TR[0], TR[1]-2*radius)
        if not right:
            TR = (TR[0]-2*radius, TR[1])
            BR = (BR[0]-2*radius, BR[1])
        if not bot:
            BL = (BL[0], BL[1]+2*radius)
            BR = (BR[0], BR[1]+2*radius)

        BL = (util.in2gerb(BL[0]), util.in2gerb(BL[1]))
        TL = (util.in2gerb(TL[0]), util.in2gerb(TL[1]))
        TR = (util.in2gerb(TR[0]), util.in2gerb(TR[1]))
        BR = (util.in2gerb(BR[0]), util.in2gerb(BR[1]))

        # The "if 1 or ..." construct draws all four sides of the job. By
        # removing the 1 from the expression, only the sides that do not
        # correspond to panel edges are drawn. The former is probably better
        # since panels tend to have a little slop from the cutting operation
        # and it's easier to just cut it smaller when there's a cut line.
        # The way it is now with "if 1 or....", much of this function is
        # unnecessary. Heck, we could even just use the boardoutline layer
        # directly.
        if 1 or left:
            fid.write('X%07dY%07dD02*\n' % BL)
            fid.write('X%07dY%07dD01*\n' % TL)
        if 1 or top:
            if not left: fid.write('X%07dY%07dD02*\n' % TL)
            fid.write('X%07dY%07dD01*\n' % TR)
        if 1 or right:
            if not top: fid.write('X%07dY%07dD02*\n' % TR)
            fid.write('X%07dY%07dD01*\n' % BR)
        if 1 or bot:
            if not right: fid.write('X%07dY%07dD02*\n' % BR)
            fid.write('X%07dY%07dD01*\n' % BL)

    def setPosition(self, x, y):
        self.x=x
        self.y=y

    def width_in(self):
        return self.job.width_in()

    def height_in(self):
        return self.job.height_in()

    def drillhits(self, diameter):
        """Return the total number of drill hits of the given diameter."""
        tools = self.job.findTools(diameter)
        total = 0
        for tool in tools:
            try:
                total += len(self.job.xcommands[tool])
            except:
                pass    # Tool defined but has no plunge commands
        return total

    def jobarea(self):
        return self.job.jobarea()
def rotateJob(job, degrees = 90, firstpass = True):
    """Create a new job from an existing one, rotating by specified degrees in 90 degree passes"""
    GAT = config.GAT
    GAMT = config.GAMT
    ##print "rotating job:", job.name, degrees, firstpass

    # Only the first pass renames the job; recursive passes (below) keep the
    # already-suffixed name.
    if firstpass:
        if degrees == 270:
            J = Job(job.name+'*rotated270')
        elif degrees == 180:
            J = Job(job.name+'*rotated180')
        else:
            J = Job(job.name+'*rotated90')
    else:
        J = Job(job.name)

    # Keep the origin (lower-left) in the same place; width and height swap.
    J.maxx = job.minx + job.maxy-job.miny
    J.maxy = job.miny + job.maxx-job.minx
    J.minx = job.minx
    J.miny = job.miny

    RevGAT = config.buildRevDict(GAT)       # RevGAT[hash] = aperturename
    RevGAMT = config.buildRevDict(GAMT)     # RevGAMT[hash] = aperturemacroname

    # Keep list of tool diameters and default tool list
    J.xdiam = job.xdiam
    J.ToolList = job.ToolList
    J.Repeat = job.Repeat

    # D-code translation table is the same, except we have to rotate
    # those apertures which have an orientation: rectangles, ovals, and macros.
    ToolChangeReplace = {}
    for layername in job.apxlat.keys():
        J.apxlat[layername] = {}
        for ap in job.apxlat[layername].keys():
            code = job.apxlat[layername][ap]
            A = GAT[code]

            if A.apname in ('Circle', 'Octagon'):
                # This aperture is fine. Copy it over.
                J.apxlat[layername][ap] = code
                continue

            # Must rotate the aperture
            APR = A.rotated(RevGAMT)

            # Does it already exist in the GAT?
            hash = APR.hash()
            try:
                # Yup...add it to apxlat
                newcode = RevGAT[hash]
            except KeyError:
                # Must add new aperture to GAT
                newcode = aptable.addToApertureTable(APR)

                # Rebuild RevGAT
                #RevGAT = config.buildRevDict(GAT)
                RevGAT[hash] = newcode

            J.apxlat[layername][ap] = newcode

            # Must also replace all tool change commands from
            # old code to new command.
            ToolChangeReplace[code] = newcode

    # Now we copy commands, rotating X,Y positions.
    # Rotations will occur counterclockwise about the
    # point (minx,miny). Then, we shift to the right
    # by the height so that the lower-left point of
    # the rotated job continues to be (minx,miny).
    #
    # We also have to take aperture change commands and
    # replace them with the new aperture code if we have
    # a rotation.
    offset = job.maxy-job.miny
    for layername in job.commands.keys():
        J.commands[layername] = []
        J.apertures[layername] = []
        for cmd in job.commands[layername]:
            # Is it a drawing command?
            if type(cmd) is types.TupleType:
                if len(cmd)==3:
                    x, y, d = map(__builtin__.int, cmd)
                    II=JJ=None
                else:
                    x, y, II, JJ, d, signed = map(__builtin__.int, cmd)     # J is already used as Job object
            else:
                # No, must be a string indicating aperture change, G-code, or RS274-X command.
                if cmd[0] in ('G', '%'):
                    # G-codes and RS274-X commands are just copied verbatim and not affected by rotation
                    J.commands[layername].append(cmd)
                    continue

                # It's a D-code. See if we need to replace aperture changes with a rotated aperture.
                # But only for D-codes >= 10.
                if int(cmd[1:]) < 10:
                    J.commands[layername].append(cmd)
                    continue

                try:
                    newcmd = ToolChangeReplace[cmd]
                    J.commands[layername].append(newcmd)
                    J.apertures[layername].append(newcmd)
                except KeyError:
                    J.commands[layername].append(cmd)
                    J.apertures[layername].append(cmd)
                continue

            # (X,Y) --> (-Y,X) effects a 90-degree counterclockwise shift
            # Adding 'offset' to -Y maintains the lower-left origin of (minx,miny).
            newx = -(y - job.miny) + job.minx + offset
            newy = (x-job.minx) + job.miny

            # For circular interpolation commands, (I,J) components are always relative
            # so we do not worry about offsets, just reverse their sense, i.e., I becomes J
            # and J becomes I. For 360-degree circular interpolation, I/J are signed and we
            # must map (I,J) --> (-J,I).
            if II is not None:
                if signed:
                    J.commands[layername].append((newx, newy, -JJ, II, d, signed))
                else:
                    J.commands[layername].append((newx, newy, JJ, II, d, signed))
            else:
                J.commands[layername].append((newx,newy,d))

        # Debugging aid, normally disabled.
        if 0:
            print job.minx, job.miny, offset
            print layername
            print J.commands[layername]

    # Finally, rotate drills. Offset is in hundred-thousandths (2.5) while Excellon
    # data is in 2.4 format.
    for tool in job.xcommands.keys():
        J.xcommands[tool] = []

        for x,y in job.xcommands[tool]:
            newx = -(10*y - job.miny) + job.minx + offset
            newy = (10*x - job.minx) + job.miny

            newx = int(round(newx/10.0))
            newy = int(round(newy/10.0))

            J.xcommands[tool].append((newx,newy))

    # Rotate some more if required, one 90-degree pass at a time.
    degrees -= 90
    if degrees > 0:
        return rotateJob(J, degrees, False)
    else:
        ##print "rotated:", J.name
        return J
|
fightingwalrus/gerbmerge
|
gerbmerge/jobs.py
|
Python
|
gpl-3.0
| 52,296 | 0.015049 |
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
source("../../shared/qtcreator.py")
import random
from datetime import date
def __platformToBeRunToday__():
return (('Linux'), ('Darwin'), ('Microsoft', 'Windows'))[date.today().day % 3]
# Be careful with Pastebin.Com, there are only 10 pastes per 24h
# for all machines using the same IP-address like you.
skipPastingToPastebinCom = platform.system() not in __platformToBeRunToday__()

# Display names of the code-pasting services exercised by this test; these
# strings must match the entries of the protocol combo box in the UI.
NAME_KDE = "Paste.KDE.Org"
NAME_PBCA = "Pastebin.Ca"
NAME_PBCOM = "Pastebin.Com"
NAME_PCXYZ = "Pastecode.Xyz"

# Sentinel exception message used to signal (and then ignore) server-side failures.
serverProblems = "Server side problems."
def invalidPasteId(protocol):
    """Return the placeholder paste id used when pasting to *protocol* failed."""
    # Paste.KDE.Org uses None as its "no id" marker; every other service uses -1.
    return None if protocol == NAME_KDE else -1
def closeHTTPStatusAndPasterDialog(protocol, pasterDialog):
    """Close an HTTP status message box (if present) and the paster dialog.

    Returns True when the message box reported 'Service Unavailable'
    (server-side problem), False otherwise.
    """
    # BUGFIX: 'text' must exist before the try block. If waitForObject()
    # raises before the assignment, the trailing test.log() call below used
    # to fail with a NameError instead of returning False.
    text = ''
    try:
        mBoxStr = "{type='QMessageBox' unnamed='1' visible='1' windowTitle?='%s *'}" % protocol
        mBox = waitForObject(mBoxStr, 1000)
        text = str(mBox.text)
        # close message box and paster window
        clickButton("{type='QPushButton' text='Cancel' visible='1' window=%s}" % mBoxStr)
        clickButton("{type='QPushButton' text='Cancel' visible='1' window='%s'}" % pasterDialog)
        if 'Service Unavailable' in text:
            test.warning(text)
            return True
    except:
        t,v = sys.exc_info()[:2]
        test.warning("An exception occurred in closeHTTPStatusAndPasterDialog(): %s(%s)"
                     % (str(t), str(v)))
    test.log("Closed dialog without expected error.", text)
    return False
def pasteFile(sourceFile, protocol):
    """Paste *sourceFile* to the code-pasting service named by *protocol*.

    Returns a (pasteId, description, pastedText) triple. pasteId is the
    service's invalid-id marker on failure, or None when the daily post
    limit was hit. Raises Exception(serverProblems) on server-side errors.
    """
    # Restores the editor and closes all documents after the paste attempt.
    def resetFiles():
        clickButton(waitForObject(":*Qt Creator.Clear_QToolButton"))
        invokeMenuItem('File', 'Revert "main.cpp" to Saved')
        clickButton(waitForObject(":Revert to Saved.Proceed_QPushButton"))
        snooze(1) # "Close All" might be disabled
        invokeMenuItem("File", "Close All")

    aut = currentApplicationContext()
    invokeMenuItem("File", "Open File or Project...")
    selectFromFileDialog(sourceFile)
    editor = waitForObject(":Qt Creator_CppEditor::Internal::CPPEditorWidget")
    jumpToFirstLine(editor)
    # Timestamp comment makes every paste unique.
    typeLines(editor, "// tst_codepasting %s" % datetime.utcnow())
    sourceText = editor.plainText
    invokeMenuItem("Tools", "Code Pasting", "Paste Snippet...")
    selectFromCombo(":Send to Codepaster.protocolBox_QComboBox", protocol)
    pasteEditor = waitForObject(":stackedWidget.plainTextEdit_QPlainTextEdit")
    test.compare(pasteEditor.plainText, sourceText, "Verify that dialog shows text from the editor")
    description = "Description %s" % datetime.utcnow()
    type(waitForObject(":uiDescription_QLineEdit"), description)
    typeLines(pasteEditor, "// tst_codepasting %s" % datetime.utcnow())
    pastedText = str(pasteEditor.plainText)
    expiry = waitForObject(":Send to Codepaster.qt_spinbox_lineedit_QLineEdit")
    expiryDays = random.randint(1, 10)
    replaceEditorContent(expiry, "%d" % expiryDays)
    test.log("Using expiry of %d days." % expiryDays)
    # make sure to read all former errors (they won't get read twice)
    aut.readStderr()
    clickButton(waitForObject(":Send to Codepaster.Paste_QPushButton"))
    try:
        outputWindow = waitForObject(":Qt Creator_Core::OutputWindow")
        waitFor("'https://' in str(outputWindow.plainText)", 20000)
        output = str(outputWindow.plainText).splitlines()[-1]
    except:
        output = ""
    if closeHTTPStatusAndPasterDialog(protocol, ':Send to Codepaster_CodePaster::PasteView'):
        resetFiles()
        raise Exception(serverProblems)

    stdErrOut = aut.readStderr()
    match = re.search("^%s protocol error: (.*)$" % protocol, stdErrOut, re.MULTILINE)
    if match:
        pasteId = invalidPasteId(protocol)
        if "Internal Server Error" in match.group(1):
            test.warning("Server Error - trying to continue...")
        else:
            test.fail("%s protocol error: %s" % (protocol, match.group(1)))
    elif output.strip() == "":
        pasteId = invalidPasteId(protocol)
    elif "FAIL:There was an error communicating with the database" in output:
        resetFiles()
        raise Exception(serverProblems)
    elif "Post limit, maximum pastes per 24h reached" in output:
        test.warning("Maximum pastes per day exceeded.")
        pasteId = None
    else:
        # Success: the paste URL's last path component is the paste id.
        pasteId = output.rsplit("/", 1)[1]
    resetFiles()
    return pasteId, description, pastedText
def fetchSnippet(protocol, description, pasteId, skippedPasting):
    """Fetch a previously pasted snippet via the Fetch Snippet dialog.

    Returns the paste id actually fetched, or -1 when the dialog could not
    be operated (or, when pasteId was -1, when no matching entry was found).
    """
    foundSnippet = True
    invokeMenuItem("Tools", "Code Pasting", "Fetch Snippet...")
    selectFromCombo(":PasteSelectDialog.protocolBox_QComboBox", protocol)
    try:
        pasteModel = waitForObject(":PasteSelectDialog.listWidget_QListWidget").model()
    except:
        closeHTTPStatusAndPasterDialog(protocol, ':PasteSelectDialog_CodePaster::PasteSelectDialog')
        return -1
    waitFor("pasteModel.rowCount() > 1", 20000)
    # The id may not be listed yet right after pasting; refresh once.
    if (not skippedPasting and not protocol == NAME_PBCA
        and not any(map(lambda str:pasteId in str, dumpItems(pasteModel)))):
        test.warning("Fetching too fast for server of %s - waiting 3s and trying to refresh." % protocol)
        snooze(3)
        clickButton("{text='Refresh' type='QPushButton' unnamed='1' visible='1' "
                    "window=':PasteSelectDialog_CodePaster::PasteSelectDialog'}")
        waitFor("pasteModel.rowCount() == 1", 1000)
        waitFor("pasteModel.rowCount() > 1", 20000)
    if protocol == NAME_PBCA:
        # Pastebin.Ca truncates descriptions in its listing.
        description = description[:32]
    if pasteId == -1:
        # No id is known: locate the entry by its description instead.
        try:
            pasteLine = filter(lambda str:description in str, dumpItems(pasteModel))[0]
            pasteId = pasteLine.split(" ", 1)[0]
        except:
            test.fail("Could not find description line in list of pastes from %s" % protocol)
            clickButton(waitForObject(":PasteSelectDialog.Cancel_QPushButton"))
            return pasteId
    else:
        try:
            pasteLine = filter(lambda str:pasteId in str, dumpItems(pasteModel))[0]
            if protocol in (NAME_PBCA, NAME_PBCOM):
                test.verify(description in pasteLine,
                            "Verify that line in list of pastes contains the description")
        except:
            if protocol == NAME_PBCA:
                test.xfail("%s does not list the pasted snippet on time" % NAME_PBCA)
            elif not skippedPasting:
                test.fail("Could not find id '%s' in list of pastes from %s" % (pasteId, protocol))
            foundSnippet = False
            # Fall back to typing the id directly into the paste edit field.
            replaceEditorContent(waitForObject(":PasteSelectDialog.pasteEdit_QLineEdit"), pasteId)
    if foundSnippet:
        # Escape dots: the item lookup treats the text as a wildcard pattern.
        pasteLine = pasteLine.replace(".", "\\.")
        waitForObjectItem(":PasteSelectDialog.listWidget_QListWidget", pasteLine)
        clickItem(":PasteSelectDialog.listWidget_QListWidget", pasteLine, 5, 5, 0, Qt.LeftButton)
    clickButton(waitForObject(":PasteSelectDialog.OK_QPushButton"))
    return pasteId
def main():
    """Paste and re-fetch a snippet on each supported code-pasting service,
    verifying the round-trip, then check pasting of an editor selection."""
    startQC()
    if not startedWithoutPluginError():
        return
    protocolsToTest = [NAME_KDE, NAME_PBCA, NAME_PBCOM, NAME_PCXYZ]
    sourceFile = os.path.join(os.getcwd(), "testdata", "main.cpp")
    # make sure General Messages is open
    openGeneralMessages()
    clickButton(waitForObject(":*Qt Creator.Clear_QToolButton"))
    for protocol in protocolsToTest:
        with TestSection(protocol):
            skippedPasting = True
            description = "Paste from 2017-05-11"
            # Some services are not pasted to live: a known pre-pasted
            # snippet is fetched instead.
            if protocol == NAME_KDE:
                pasteId = "pysjk6n2i"
                pastedText = readFile(os.path.join(os.getcwd(), "testdata", "main-prepasted.cpp"))
            elif skipPastingToPastebinCom and protocol == NAME_PBCOM:
                pasteId = "8XHP0ZgH"
                pastedText = readFile(os.path.join(os.getcwd(), "testdata", "main-prepasted.cpp"))
            else:
                skippedPasting = False
                try:
                    pasteId, description, pastedText = pasteFile(sourceFile, protocol)
                except Exception as e:
                    if e.message == serverProblems:
                        test.warning("Ignoring server side issues")
                        continue
                    else: # if it was not our own exception re-raise
                        raise e
            if not pasteId:
                message = "Could not get id of paste to %s" % protocol
                if protocol == NAME_PBCOM:
                    test.log("%s, using prepasted file instead" % message)
                    skippedPasting = True
                    pasteId = "8XHP0ZgH"
                    pastedText = readFile(os.path.join(os.getcwd(),
                                                       "testdata", "main-prepasted.cpp"))
                else:
                    test.fatal(message)
                    continue
            pasteId = fetchSnippet(protocol, description, pasteId, skippedPasting)
            if pasteId == -1:
                continue
            filenameCombo = waitForObject(":Qt Creator_FilenameQComboBox")
            waitFor("not filenameCombo.currentText.isEmpty()", 20000)
            try:
                editor = waitForObject(":Qt Creator_CppEditor::Internal::CPPEditorWidget")
            except:
                outputWindow = waitForObject(":Qt Creator_Core::OutputWindow")
                test.fail("Could not find editor with snippet", str(outputWindow.plainText))
                clickButton(waitForObject(":*Qt Creator.Clear_QToolButton"))
                continue
            test.compare(filenameCombo.currentText, "%s: %s" % (protocol, pasteId), "Verify title of editor")
            # These services strip a single trailing newline from the paste.
            if protocol in (NAME_KDE, NAME_PBCOM) and pastedText.endswith("\n"):
                pastedText = pastedText[:-1]
            test.compare(editor.plainText, pastedText, "Verify that pasted and fetched texts are the same")
            invokeMenuItem("File", "Close All")
    # Finally, verify that an editor selection pre-fills the paste dialog.
    invokeMenuItem("File", "Open File or Project...")
    selectFromFileDialog(sourceFile)
    editor = waitForObject(":Qt Creator_CppEditor::Internal::CPPEditorWidget")
    jumpToFirstLine(editor)
    markText(editor, "Down", 7)
    # QString QTextCursor::selectedText () const:
    # "Note: If the selection obtained from an editor spans a line break, the text will contain a
    # Unicode U+2029 paragraph separator character instead of a newline \n character."
    selectedText = str(editor.textCursor().selectedText()).replace(unichr(0x2029), "\n")
    invokeMenuItem("Tools", "Code Pasting", "Paste Snippet...")
    test.compare(waitForObject(":stackedWidget.plainTextEdit_QPlainTextEdit").plainText,
                 selectedText, "Verify that dialog shows selected text from the editor")
    clickButton(waitForObject(":Send to Codepaster.Cancel_QPushButton"))
    invokeMenuItem("File", "Exit")
|
sailfish-sdk/sailfish-qtcreator
|
tests/system/suite_tools/tst_codepasting/test.py
|
Python
|
gpl-3.0
| 11,988 | 0.004922 |
# Copyright (c) 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from murano.dsl import dsl_types
from murano.dsl import exceptions
from murano.tests.unit.dsl.foundation import object_model as om
from murano.tests.unit.dsl.foundation import test_case
class TestStatics(test_case.DslTestCase):
    """Tests for static methods and static properties of MuranoPL classes.

    Each test drives a method of the ``test.TestStatics`` MuranoPL class
    through the DSL runner and checks the returned value.
    """

    def setUp(self):
        super(TestStatics, self).setUp()
        # staticProperty2 is deliberately given an invalid value here:
        # static properties are presumably excluded when the object model
        # is loaded, which test_static_properties_excluded_from_object_model
        # verifies.
        model = om.Object('test.TestStatics', staticProperty2='INVALID')
        self._runner = self.new_runner(model)

    def test_call_static_method_on_object(self):
        result = self._runner.testCallStaticMethodOnObject()
        self.assertEqual(123, result)

    def test_call_static_method_on_class_name(self):
        result = self._runner.testCallStaticMethodOnClassName()
        self.assertEqual(123, result)

    def test_call_static_method_on_class_name_with_ns(self):
        result = self._runner.testCallStaticMethodOnClassNameWithNs()
        self.assertEqual(123, result)

    def test_call_static_method_from_another_method(self):
        result = self._runner.testCallStaticMethodFromAnotherMethod()
        self.assertEqual(123 * 5, result)

    def test_static_this(self):
        result = self._runner.testStaticThis()
        self.assertIsInstance(result, dsl_types.MuranoTypeReference)

    def test_no_access_to_instance_properties(self):
        self.assertRaises(
            exceptions.NoPropertyFound,
            self._runner.testNoAccessToInstanceProperties)

    def test_access_static_property_from_instance_method(self):
        result = self._runner.testAccessStaticPropertyFromInstanceMethod()
        self.assertEqual('xxx', result)

    def test_access_static_property_from_static_method(self):
        result = self._runner.testAccessStaticPropertyFromStaticMethod()
        self.assertEqual('xxx', result)

    def test_modify_static_property_using_dollar(self):
        result = self._runner.testModifyStaticPropertyUsingDollar()
        self.assertEqual('qq', result)

    def test_modify_static_property_using_this(self):
        result = self._runner.testModifyStaticPropertyUsingThis()
        self.assertEqual('qq', result)

    def test_modify_static_property_using_class_name(self):
        result = self._runner.testModifyStaticPropertyUsingClassName()
        self.assertEqual('qq', result)

    def test_modify_static_property_using_ns_class_name(self):
        result = self._runner.testModifyStaticPropertyUsingNsClassName()
        self.assertEqual('qq', result)

    def test_modify_static_property_using_type_func(self):
        result = self._runner.testModifyStaticPropertyUsingTypeFunc()
        self.assertEqual('qq', result)

    def test_property_is_static(self):
        self.assertEqual('qq', self._runner.testPropertyIsStatic())

    def test_static_properties_excluded_from_object_model(self):
        result = self._runner.testStaticPropertisNotLoaded()
        self.assertEqual('staticProperty', result)

    def test_type_is_singleton(self):
        self.assertTrue(self._runner.testTypeIsSingleton())

    def test_static_property_inheritance(self):
        result = self._runner.testStaticPropertyInheritance()
        self.assertEqual('baseStaticProperty' * 3, result)

    def test_static_property_override(self):
        expected = [
            'conflictingStaticProperty-child',
            'conflictingStaticProperty-child',
            'conflictingStaticProperty-base',
            'conflictingStaticProperty-child',
            'conflictingStaticProperty-base'
        ]
        self.assertEqual(expected, self._runner.testStaticPropertyOverride())

    def test_type_info_of_type(self):
        self.assertTrue(self._runner.testTypeinfoOfType())
|
olivierlemasle/murano
|
murano/tests/unit/dsl/test_statics.py
|
Python
|
apache-2.0
| 4,079 | 0 |
# Natural Language Toolkit: Paradigm Visualisation
#
# Copyright (C) 2005 University of Melbourne
# Author: Will Hardy
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
# Front end to a Python implementation of David
# Penton's paradigm visualisation model.
# Author:
#
# Run: To run, first load a paradigm using
# >>> a = paradigm('paradigm.xml')
# And run the system to produce output
# >>> a.show('table(one, two, three)')
#
# Other methods:
# demo() # a quick demonstration
# a.setFormat('html') # output is formatted as HTML
# a.setFormat('text') # output is formatted as plain text
# a.setOutput('filename') # output is sent to filename
# a.setOutput('term') # output is sent to terminal
from xml.dom.ext.reader import Sax2
from paradigmquery import ParadigmQuery
import re, os
class Paradigm(object):
"""
Paradigm visualisation class
*Usage*
Simple usage of the system would be:
>>> from paradigm import Paradigm
>>> p = Paradigm('german.xml')
>>> p.show('table(case, gender/number, content)')
Here, a table is generated in HTML format and sent to the file ``output.html``.
The table can be viewed in a browser, and is updated for every new query.
A more advanced usage of the system is show below.
The user simply creates a paradigm p, changes the output format and location,
and calls a dedicated prompt to enter the query:
>>> from paradigm import Paradigm
>>> p = Paradigm('german.xml')
>>> p.setFormat('html')
>>> p.setOutput('test.html')
>>> p.setCSS('simple.css')
>>> p.prompt()
> table(case, gender/number, content)
Please note, however, that plain text tables have not yet been implemented.
"""
def __init__(self, p_filename):
"""
Load the given paradigm
p_filename is a string representing the filename of a paradigm xml file
"""
# Store input paradigm filename
self.loadParadigm(p_filename)
# set default values (text output, to terminal)
self.format = "html"
self.output = "output.html"
self.css = "simple.css"
def prompt(self):
"""
Changes to a dedicated prompt
Type 'exit' or 'quit' to exit
"""
s = ""
while s != "exit":
s = "exit"
try: s = raw_input(">")
except EOFError:
print s
if s == "exit":
return
if s == "quit":
return
if s:
while s[-1] in "!.": s = s[:-1]
self.show(s)
def show(self, p_string):
"""
Process and display the given query
"""
try:
# parse the query
parse = ParadigmQuery(p_string)
except:
print "Could not parse query."
return
try:
# Fetch the parsed tree and make presentation
result = Sentence(self, parse.getTree())
# Check that a presentation actually exists
if result == None:
raise Error
except:
print "Sorry, no result can be returned"
return
try:
# Print HTML output if format is set, otherwise plain text
if self.format == "html":
output = '<html>\n'
# Include CSS if we need to
if self.css <> None:
output += '<link rel="stylesheet" href="'
output += self.css
output += '" type="text/css" media="screen" />\n'
output += '<body>'
output += "<table cellspacing=\"0\" cellpadding=\"0\">"
output += result.getHTML()
output += "</table>\n"
output += '</body></html>\n'
else:
output = result.getText()
except:
output = None
print "--no output--"
return
# Print to terminal if output is set, otherwise to file
if self.output == "term":
print output
else:
print "Output written to file:", self.output
f = open(self.output, 'w')
f.write(output)
# Return happily
return
def setFormat(self, p_string=None):
"""
Set the output format: "html" or "text"
"""
# Default value
if p_string == None:
p_string = "text"
# set to html if requested, otherwise text
if p_string == "html":
self.format = "html"
elif p_string == "text":
self.format = "text"
else:
print "Unknown format:", p_string
print "Valid formats are: text, html"
print "Setting format = text"
self.format = "text"
def setCSS(self, p_string=None):
"""
Set the file location for a Cascading Stylesheet: None or filename
This allows for simple formatting
"""
if p_string <> None:
print "Using CSS file:", p_string
self.output = p_string
def setOutput(self, p_string=None):
"""
Set the output location: "term" or filename
"""
# Default
if p_string == None:
p_string = "term"
# set to term if requested, otherwise filename
if p_string == "term":
print "Directing output to terminal"
else:
print "Directing output to file:", p_string
self.output = p_string
def loadParadigm(self, p_filename ):
"""
Load the given paradigm (XML file)
Attributes are stored in self.attributes
Data are stored in self.data
They can be accessed as follows:
self.attributes['gender'] # list of genders
self.data[6]['gender'] # gender for the sixth data object
self.data[6]['content'] # content for the sixth data object
"""
from nodebox_linguistics_extended.parser.nltk_lite.corpora import get_basedir
basedir = get_basedir()
# Look for the file
try_filename = os.path.join(get_basedir(), "paradigms", p_filename)
try:
f = open(try_filename)
p_filename = try_filename
except IOError:
print "Cannot find file"
return None
f.close()
# These variables will be set by this method
self.attributes = {} # A new dictionary
self.data = [] # A new list
# XML admin: create Reader object, parse document
reader = Sax2.Reader()
doc = reader.fromStream(p_filename)
# Cycle through the given attributes and add them to self.attributes
# for <name> in <attributes>
attributes = doc.getElementsByTagName('attributes')[0]
for name in attributes.getElementsByTagName('name'):
# Setup a list of attribute values
tmp_list = []
# for each value under name, store in list
for value in name.getElementsByTagName('value'):
tmp_list.append(value.getAttribute('value'))
# Store list of values in dictionary
self.attributes[name.getAttribute('name')] = tmp_list
# Cycle through data objects and add them to self.data
# for <form> in <paradigm>
forms = doc.getElementsByTagName('paradigm')[0]
for form in forms.getElementsByTagName('form'):
# Initialise a temporary dictionary
tmp_dict = {}
for value in form.getElementsByTagName('attribute'):
tmp_dict[value.getAttribute('name')] = value.getAttribute('value')
# Add the new dictionary to the data list
self.data.append(tmp_dict)
# Talk to the user
print "Paradigm information successfully loaded from file:", p_filename
# State the number and print out a list of attributes
print " "*4 + str(len(self.attributes)) + " attributes imported:",
for att in self.attributes:
print att,
print
# State the number of paradigm objects imported
print " "*4 + str(len(self.data)) + " paradigm objects imported."
return
class Sentence(object):
    """
    Manages any operation.

    Inspects the type tag of the parse-tree node it is given and delegates
    every call to the matching handler (Domain, Hierarchy, Table, or a
    nested Sentence for an 'O' wrapper node).
    """
    def __init__(self, p_paradigm, p_tree):
        """
        p_paradigm is the given paradigm (attributes and data)
        p_tree is the query tree
        """
        # store parameters
        self.paradigm = p_paradigm
        self.tree = p_tree
        # discover the type
        self.type = self.getType(self.tree)
        # Handle each possible type by wrapping the right handler in
        # self.item; all public methods below just forward to it
        if self.type == 'O':
            # 'O' wraps a single child node: recurse into it
            self.item = Sentence(self.paradigm, self.tree[0])
        if self.type == 'D':
            self.item = Domain(self.paradigm, self.tree)
        if self.type == 'H':
            self.item = Hierarchy(self.paradigm, self.tree)
        if self.type == 'T':
            self.item = Table(self.paradigm, self.tree)
    def getList(self):
        """
        Returns values in the form of a list
        """
        if self.tree == None:
            return None
        return self.item.getList()
    def getHTML(self):
        """
        Returns values in html (table) form
        """
        return self.item.getHTML()
    def getHorizontalHTML(self,p_parentSpan=1):
        """
        Returns values in html (table) form, laid out horizontally
        """
        return self.item.getHorizontalHTML(p_parentSpan)
    def getText(self):
        """
        Returns values in plain text form
        """
        return self.item.getText()
    def getConditions(self):
        """
        Return a list of conditions for each combination (cell)
        """
        return self.item.getConditions()
    def getMaxWidth(self):
        """
        Returns the width in number of characters
        """
        return self.item.getMaxWidth()
    def getSpan(self):
        """
        Returns the span (required for "rowspan" and "colspan" HTML attributes)
        """
        return self.item.getSpan()
    def getDepth(self):
        """
        Get the depth
        """
        return self.item.getDepth()
    def getType(self, p_tree=None):
        """
        Determine the type of the current node of the tree
        This need not be overridden
        """
        if p_tree == None:
            p_tree = self.tree
        # The type tag is the second character of the tree's string
        # representation, e.g. "(D case)" -> 'D'
        return str(p_tree)[1:2]
class Domain(Sentence):
    """
    Manages a domain operation: the list of values of a single attribute.
    Provides: Domain(paradigm,tree)
    """
    def __init__(self, p_paradigm, p_tree):
        """
        p_paradigm is the given paradigm (attributes and data)
        p_tree is the query tree
        """
        self.paradigm = p_paradigm
        # Validate that this is a domain
        assert self.getType(p_tree) == 'D'
        # Store the attribute
        self.attribute = p_tree[0]
        self.error = None
        # Check that the requested attribute is available
        # NOTE(review): on failure only self.error is set and printed;
        # later method calls will still raise KeyError -- confirm intent
        try:
            self.paradigm.attributes[self.attribute]
        except KeyError:
            self.error = "I couldn't find this attribute: " + self.attribute
            print self.error
    def __getitem__(self, p_index):
        # Index directly into this attribute's list of values
        return self.paradigm.attributes[self.attribute][p_index]
    def getList(self):
        """
        Return the domain in list form
        """
        return self.paradigm.attributes[self.attribute]
    def getHTML(self):
        """
        Return html for this domain: one table row per value
        """
        ret_string = ""
        for item in self.getList():
            ret_string += "<tr><td>" + item + "</td></tr>"
        return ret_string
    def getHorizontalHTML(self,p_parentSpan=1):
        """
        Return a horizontal html table: one row, cells repeated
        p_parentSpan times to line up under the parent's columns
        """
        ret_string = ""
        for item in self.getList():
            ret_string += "<td>" + item + "</td>"
        return "<tr>" + ret_string*p_parentSpan + "</tr>"
    def getText(self):
        """
        Return text for this domain: one value per line
        """
        ret_string = ""
        for item in self.getList():
            ret_string += item + "\n"
        return ret_string
    def getConditions(self):
        """
        Return a list of conditions for each combination (cell):
        one single-entry {attribute: value} dict per value
        """
        ret_conds = []
        for item in self.getList():
            new = {self.attribute: item}
            ret_conds.append(new)
        return ret_conds
    def getMaxWidth(self):
        """
        Get max width (chars) over all values, for display purposes
        """
        max_width = 0
        for item in self.getList():
            if max_width < len(item):
                max_width = len(item)
        return max_width
    def getSpan(self):
        """
        Get the span of this domain (number of elements)
        """
        return len(self.getList())
    def getDepth(self):
        """
        Get the depth of this domain (always one!)
        """
        return 1
class Hierarchy(Sentence):
    """
    Manages a hierarchy operation: a root Domain whose every value is
    paired with the whole leaf sub-presentation (root/leaf nesting).
    Provides: Hierarchy(paradigm,tree)
    """
    def __init__(self, p_paradigm, p_tree):
        """
        p_paradigm is the given paradigm (attributes and data)
        p_tree is the tree representation of this part of the query (Tree)
        """
        self.paradigm = p_paradigm
        self.error = None
        self.tree = p_tree
        # Validate that this is a Hierarchy
        assert self.getType(p_tree) == 'H'
        # Validate that the root is a Domain
        assert self.getType(p_tree[0]) == 'D'
        # Set the root and the leaf
        self.root = Domain(self.paradigm, p_tree[0])
        self.leaf = Sentence(self.paradigm, p_tree[1])
    def getList(self):
        """
        Return the hierarchy in list form: the cartesian product
        of root values and leaf values as [root, leaf] pairs
        """
        # Get child lists
        rootList = self.root.getList()
        leafList = self.leaf.getList()
        # Combine lists into an array
        ret_val = []
        for item_root in rootList:
            for item_leaf in leafList:
                ret_val.append([item_root,item_leaf])
        return ret_val
    def getHTML(self):
        """
        Return a html table for this hierarchy
        """
        ret_string = ""
        for index in range(len(self.root.getList())):
            # Drop the leaf's leading "<tr>" (first 4 characters) so its
            # first row can share the row opened for the root cell below
            leafCells = self.leaf.getHTML()[4:]
            ret_string += "<tr><td rowspan=\"" + str(self.leaf.getSpan()) + "\">" + self.root[index] \
                + "</td>" + leafCells
        return ret_string
    def getHorizontalHTML(self,p_parentSpan=1):
        """
        Return a horizontal html table
        """
        ret_string = ""
        # Add a new cell for each root item
        for index in range(len(self.root.getList())):
            ret_string += "<td colspan=\"" + str(self.leaf.getSpan()) + "\">" \
                + self.root[index] + "</td>"
        # Recursively get the horizontalHTML from the leaf children
        leafCells = self.leaf.getHorizontalHTML(p_parentSpan*len(self.root.getList()))
        # Return the new row and the leaf cells
        return "<tr>" + ret_string*p_parentSpan + "</tr>" + leafCells
    def getText(self):
        """
        Return text for this hierarchy
        """
        ret_string = ""
        # Lengths for rendering display
        max_width_root = self.root.getMaxWidth()
        max_width_leaf = self.leaf.getMaxWidth()
        # add root string and call getText() for leaf node
        # (newlines in the leaf node need to have whitespace added)
        for index in range(len(self.root.getList())):
            ret_string += self.root[index].ljust(max_width_root) + " " \
                + self.leaf.getText().ljust(max_width_leaf).replace('\n',"\n" \
                + " "*(max_width_root+1)) + "\n"
        # Remove any blank lines and return the string
        re_blank = re.compile('\n[ ]+\n')
        return re_blank.sub('\n',ret_string)
    def getConditions(self):
        """
        Return a list of conditions for each combination (cell)
        """
        ret_conds = []
        # For each root item
        for item_r in self.root.getList():
            # for each leaf condition (a fresh list of dicts per call,
            # so adding the root's condition below is safe)
            for cond_l in self.leaf.getConditions():
                # Add the root node's condition
                cond_l[self.root.attribute] = item_r
                # Append this to the return list of conditions
                ret_conds.append(cond_l)
        # Return our list
        return ret_conds
    def getMaxWidth(self):
        """
        Return the maximum width (in chars) this hierarchy will take up:
        root column + separator space + leaf width
        """
        return self.root.getMaxWidth() + self.leaf.getMaxWidth() + 1
    def getDepth(self):
        """
        Get the depth of this hierarchy
        """
        return 1 + self.leaf.getDepth()
    def getSpan(self):
        """
        Get the span (for HTML tables) of this hierarchy
        """
        return self.root.getSpan() * self.leaf.getSpan()
class Table(Sentence):
    """
    Manages a table operation: horizontal header x vertical header,
    with data cells selected by the combined conditions of both.
    Provides: Table(paradigm,tree)
    """
    def __init__(self, p_paradigm, p_tree):
        """
        p_paradigm is the given paradigm (attributes and data)
        p_tree is the tree representation of this part of the query (Tree)
        """
        self.paradigm = p_paradigm
        self.error = None
        self.tree = p_tree
        # Validate that this is a Table
        assert self.getType(p_tree) == 'T'
        # Set the table arguments: columns, rows, and the cell contents
        self.horizontal = Sentence(self.paradigm, p_tree[0])
        self.vertical = Sentence(self.paradigm, p_tree[1])
        self.cells = Sentence(self.paradigm, p_tree[2])
    def getList(self):
        """
        Return the table (cells) in list form
        """
        # NOTE(review): always returns an empty list -- looks unimplemented
        ret_val = []
        return ret_val
    def getHTML(self):
        """
        Return a html table for this table operation
        """
        # Start with the dead cell (the empty top-left corner spanning
        # the vertical header's width and the horizontal header's height)
        dead_cell = "<tr><td colspan=\"" + str(self.vertical.getDepth()) \
            + "\" rowspan=\"" + str(self.horizontal.getDepth()) \
            + "\"></td>"
        # Insert horizontal header, dropping its leading "<tr>" so it
        # shares the row opened by the dead cell
        # NOTE(review): replace('td','th') rewrites every 'td' substring,
        # including any inside header text -- confirm labels never
        # contain "td"
        horizontal_header = self.horizontal.getHorizontalHTML()[4:].replace('td','th')
        # Get the vertical header
        vertical_header = self.vertical.getHTML().replace('td','th')
        str_cells = ""
        # Reset conditions
        conditions = {}
        # get a list of conditions for the row
        conditions_v = self.vertical.getConditions()
        # for each row
        for cond_v in conditions_v:
            str_cells += "<tr>"
            # get a list of conditions for the row
            conditions_h = self.horizontal.getConditions()
            # For each column
            for cond_h in conditions_h:
                # Get the data for this cell, given the hori and vert conditions
                cell_data = self.getData(self.cells.tree, dictJoin(cond_v,cond_h))
                # Add the cell
                str_cells += "<td>" + cell_data + "</td>"
            # End the row
            str_cells += "</tr>"
        # VERTICAL HEADER INCLUSION
        # Split rows into a list
        vertical_header_rows = vertical_header.split('</tr>')
        cell_rows = str_cells.replace('<tr>','').split('</tr>')
        # Join two lists: each header row is glued to its row of cells
        zipped = zip(vertical_header_rows, cell_rows)
        str_zipped = ""
        for (header,cells) in zipped:
            if header <> '':
                str_zipped += header + cells + "</tr>\n"
        # Return all the elements
        return dead_cell + horizontal_header + str_zipped
    def getHorizontalHTML(self,p_parentSpan=1):
        """
        Return a horizontal html table (not supported for tables)
        """
        print "?: getHorizontalHTML() called on a table."
        return None
    def getText(self):
        """
        Return text for this table (not implemented)
        """
        print "?: getText() for a table? HAHAHAHAHA"
        print "call setFormat('html') if you want to run queries like that"
        return
    def getConditions(self):
        """
        Return conditions for this table (not supported)
        """
        print "?: getConditions() called on a table. I don't think so."
        return None
    def getMaxWidth(self):
        """
        Return the maximum width this table could take up.
        ... I hope you're not trying to nest tables ...
        """
        return self.cells.getMaxWidth() + self.vertical.getMaxWidth() + 1
    def getSpan(self):
        """
        Return span for this table (not supported)
        """
        print "WTF: getSpan() called on a table."
        return None
    def getData(self, p_return, p_attDict):
        """
        Retrieve data that matches the given list of attributes
        Returns (an HTML) string of values that match.
        p_return is a tree pointing to the key of the value to include in the return
        p_attDict is a dictionary of conditions.
        """
        output = []
        return_key = p_return.leaves()[0]
        # For each data object in the paradigm
        for datum in self.paradigm.data:
            inc = True
            # For each given attribute requirement
            for att in p_attDict.keys():
                # If the data object fails the requirement do not include
                if datum[att] != p_attDict[att]:
                    inc = False
                    break
            # If it passed all the tests, include it
            if inc == True:
                output.append(datum[return_key])
        # Return what we found (make sure this is a string)
        if len(output) == 1:
            return output[0]
        else:
            # Multiple (or zero) matches: wrap them in a nested HTML table
            # (Obviously this will have to change for text output)
            ret_str = "<table>"
            for item in output:
                ret_str += "<tr><td>" + item + "</td></tr>"
            ret_str += "</table>"
            return ret_str
def dictJoin(dict1,dict2):
    """
    Merge dict1 into dict2 and return dict2.

    On any key overlap the value from dict1 wins (it overwrites the
    entry in dict2 -- just make sure this doesn't happen).  Note that
    dict2 is modified in place.
    """
    dict2.update(dict1)
    return dict2
def demo():
    """
    Quick demonstration: load german.xml, print a domain and a hierarchy
    as text to the terminal, then write a table to demo.html.
    """
    # Print the query
    print """
================================================================================
Load: Paradigm(file)
================================================================================
"""
    print
    print ">>> a = Paradigm('german.xml')"
    print
    a = Paradigm('german.xml')
    print
    print ">>> a.setOutput('term')"
    print
    a.setOutput('term')
    print
    print ">>> a.setFormat('text')"
    print
    a.setFormat('text')
    # Print a domain
    print """
================================================================================
Domain: case
================================================================================
"""
    print
    print ">>> a.show('case')"
    print
    a.show('case')
    # Print a hierarchy
    print """
================================================================================
Hierarchy: case/gender
================================================================================
"""
    print
    print ">>> a.show('case/gender')"
    print
    a.show('case/gender')
    # Print a table (tables need HTML output, so switch to a file)
    print """
================================================================================
Table: table(case/number,gender,content)
================================================================================
"""
    print
    print ">>> a.setOutput('demo.html')"
    print
    a.setOutput('demo.html')
    print
    print ">>> a.setFormat('html')"
    print
    a.setFormat('html')
    print
    print ">>> a.show('table(case/number,gender,content)')"
    print
    a.show('table(case/number,gender,content)')
    # Some space
    print
# When run as a script, execute the demonstration.
if __name__ == '__main__':
    demo()
|
RensaProject/nodebox_linguistics_extended
|
nodebox_linguistics_extended/parser/nltk_lite/contrib/paradigm.py
|
Python
|
gpl-2.0
| 24,313 | 0.00473 |
# pylint: disable=W0611
# coding: utf-8
'''
Window
======
Core class for creating the default Kivy window. Kivy supports only one window
per application: please don't try to create more than one.
'''
__all__ = ('Keyboard', 'WindowBase', 'Window')
from os.path import join, exists
from os import getcwd
from kivy.core import core_select_lib
from kivy.clock import Clock
from kivy.config import Config
from kivy.logger import Logger
from kivy.base import EventLoop, stopTouchApp
from kivy.modules import Modules
from kivy.event import EventDispatcher
from kivy.properties import ListProperty, ObjectProperty, AliasProperty, \
NumericProperty, OptionProperty, StringProperty, BooleanProperty
from kivy.utils import platform, reify, deprecated
from kivy.context import get_current_context
from kivy.uix.behaviors import FocusBehavior
from kivy.setupconfig import USE_SDL2
from kivy.graphics.transformation import Matrix
# late import
VKeyboard = None
android = None
Animation = None
class Keyboard(EventDispatcher):
    '''Keyboard interface that is returned by
    :meth:`WindowBase.request_keyboard`. When you request a keyboard,
    you'll get an instance of this class. Whatever the keyboard input is
    (system or virtual keyboard), you'll receive events through this
    instance.

    :Events:
        `on_key_down`: keycode, text, modifiers
            Fired when a new key is pressed down
        `on_key_up`: keycode
            Fired when a key is released (up)

    Here is an example of how to request a Keyboard in accordance with the
    current configuration:

    .. include:: ../../examples/widgets/keyboardlistener.py
        :literal:
    '''
    # Keycodes mapping, between str <-> int. These keycodes are
    # currently taken from pygame.key. But when a new provider will be
    # used, it must do the translation to these keycodes too.
    keycodes = {
        # specials keys
        'backspace': 8, 'tab': 9, 'enter': 13, 'rshift': 303, 'shift': 304,
        'alt': 308, 'rctrl': 306, 'lctrl': 305,
        'super': 309, 'alt-gr': 307, 'compose': 311, 'pipe': 310,
        'capslock': 301, 'escape': 27, 'spacebar': 32, 'pageup': 280,
        'pagedown': 281, 'end': 279, 'home': 278, 'left': 276, 'up':
        273, 'right': 275, 'down': 274, 'insert': 277, 'delete': 127,
        'numlock': 300, 'print': 144, 'screenlock': 145, 'pause': 19,
        # a-z keys
        'a': 97, 'b': 98, 'c': 99, 'd': 100, 'e': 101, 'f': 102, 'g': 103,
        'h': 104, 'i': 105, 'j': 106, 'k': 107, 'l': 108, 'm': 109, 'n': 110,
        'o': 111, 'p': 112, 'q': 113, 'r': 114, 's': 115, 't': 116, 'u': 117,
        'v': 118, 'w': 119, 'x': 120, 'y': 121, 'z': 122,
        # 0-9 keys
        '0': 48, '1': 49, '2': 50, '3': 51, '4': 52,
        '5': 53, '6': 54, '7': 55, '8': 56, '9': 57,
        # numpad
        'numpad0': 256, 'numpad1': 257, 'numpad2': 258, 'numpad3': 259,
        'numpad4': 260, 'numpad5': 261, 'numpad6': 262, 'numpad7': 263,
        'numpad8': 264, 'numpad9': 265, 'numpaddecimal': 266,
        'numpaddivide': 267, 'numpadmul': 268, 'numpadsubstract': 269,
        'numpadadd': 270, 'numpadenter': 271,
        # F1-15
        'f1': 282, 'f2': 283, 'f3': 284, 'f4': 285, 'f5': 286, 'f6': 287,
        'f7': 288, 'f8': 289, 'f9': 290, 'f10': 291, 'f11': 292, 'f12': 293,
        'f13': 294, 'f14': 295, 'f15': 296,
        # other keys
        '(': 40, ')': 41,
        '[': 91, ']': 93,
        '{': 123, '}': 125,
        ':': 58, ';': 59,
        '=': 61, '+': 43,
        '-': 45, '_': 95,
        '/': 47, '*': 42,
        '?': 47,
        '`': 96, '~': 126,
        '´': 180, '¦': 166,
        '\\': 92, '|': 124,
        '"': 34, "'": 39,
        ',': 44, '.': 46,
        '<': 60, '>': 62,
        '@': 64, '!': 33,
        '#': 35, '$': 36,
        '%': 37, '^': 94,
        '&': 38, '¬': 172,
        '¨': 168, '…': 8230,
        'ù': 249, 'à': 224,
        'é': 233, 'è': 232,
    }

    __events__ = ('on_key_down', 'on_key_up', 'on_textinput')

    def __init__(self, **kwargs):
        super(Keyboard, self).__init__()

        #: Window which the keyboard is attached too
        self.window = kwargs.get('window', None)

        #: Callback that will be called when the keyboard is released
        self.callback = kwargs.get('callback', None)

        #: Target that have requested the keyboard
        self.target = kwargs.get('target', None)

        #: VKeyboard widget, if allowed by the configuration
        self.widget = kwargs.get('widget', None)

    def on_key_down(self, keycode, text, modifiers):
        '''Default handler for the `on_key_down` event (no-op).'''
        pass

    def on_key_up(self, keycode):
        '''Default handler for the `on_key_up` event (no-op).'''
        pass

    def on_textinput(self, text):
        '''Default handler for the `on_textinput` event (no-op).'''
        pass

    def release(self):
        '''Call this method to release the current keyboard.
        This will ensure that the keyboard is no longer attached to your
        callback.'''
        if self.window:
            self.window.release_keyboard(self.target)

    def _on_window_textinput(self, instance, text):
        # Forward system-window text input as an `on_textinput` event
        return self.dispatch('on_textinput', text)

    def _on_window_key_down(self, instance, keycode, scancode, text,
                            modifiers):
        # Translate the numeric keycode to a (int, name) pair for listeners
        keycode = (keycode, self.keycode_to_string(keycode))
        if text == '\x04':
            # '\x04' (EOT / Ctrl-D): toggles the soft-keyboard height
            # instead of being dispatched as a key press -- NOTE(review):
            # presumably a platform-specific signal; confirm source
            Window.trigger_keyboard_height()
            return
        return self.dispatch('on_key_down', keycode, text, modifiers)

    def _on_window_key_up(self, instance, keycode, *largs):
        keycode = (keycode, self.keycode_to_string(keycode))
        return self.dispatch('on_key_up', keycode)

    def _on_vkeyboard_key_down(self, instance, keycode, text, modifiers):
        # Virtual keyboard may deliver only text; derive the keycode name
        if keycode is None:
            keycode = text.lower()
        keycode = (self.string_to_keycode(keycode), keycode)
        return self.dispatch('on_key_down', keycode, text, modifiers)

    def _on_vkeyboard_key_up(self, instance, keycode, text, modifiers):
        if keycode is None:
            keycode = text
        keycode = (self.string_to_keycode(keycode), keycode)
        return self.dispatch('on_key_up', keycode)

    def _on_vkeyboard_textinput(self, instance, text):
        return self.dispatch('on_textinput', text)

    def string_to_keycode(self, value):
        '''Convert a string to a keycode number according to the
        :attr:`Keyboard.keycodes`. If the value is not found in the
        keycodes, it will return -1.
        '''
        return Keyboard.keycodes.get(value, -1)

    def keycode_to_string(self, value):
        '''Convert a keycode number to a string according to the
        :attr:`Keyboard.keycodes`. If the value is not found in the
        keycodes, it will return ''.
        '''
        # Reverse lookup: scan the values list for the matching key name
        keycodes = list(Keyboard.keycodes.values())
        if value in keycodes:
            return list(Keyboard.keycodes.keys())[keycodes.index(value)]
        return ''
class WindowBase(EventDispatcher):
'''WindowBase is an abstract window widget for any window implementation.
:Parameters:
`borderless`: str, one of ('0', '1')
Set the window border state. Check the
:mod:`~kivy.config` documentation for a
more detailed explanation on the values.
`fullscreen`: str, one of ('0', '1', 'auto', 'fake')
Make the window fullscreen. Check the
:mod:`~kivy.config` documentation for a
more detailed explanation on the values.
`width`: int
Width of the window.
`height`: int
Height of the window.
`minimum_width`: int
Minimum width of the window (only works for sdl2 window provider).
`minimum_height`: int
Minimum height of the window (only works for sdl2 window provider).
:Events:
`on_motion`: etype, motionevent
Fired when a new :class:`~kivy.input.motionevent.MotionEvent` is
dispatched
`on_touch_down`:
Fired when a new touch event is initiated.
`on_touch_move`:
Fired when an existing touch event changes location.
`on_touch_up`:
Fired when an existing touch event is terminated.
`on_draw`:
Fired when the :class:`Window` is being drawn.
`on_flip`:
Fired when the :class:`Window` GL surface is being flipped.
`on_rotate`: rotation
Fired when the :class:`Window` is being rotated.
`on_close`:
Fired when the :class:`Window` is closed.
`on_request_close`:
Fired when the event loop wants to close the window, or if the
escape key is pressed and `exit_on_escape` is `True`. If a function
bound to this event returns `True`, the window will not be closed.
If the the event is triggered because of the keyboard escape key,
the keyword argument `source` is dispatched along with a value of
`keyboard` to the bound functions.
.. versionadded:: 1.9.0
`on_cursor_enter`:
Fired when the cursor enters the window.
.. versionadded:: 1.9.1
`on_cursor_leave`:
Fired when the cursor leaves the window.
.. versionadded:: 1.9.1
`on_minimize`:
Fired when the window is minimized.
.. versionadded:: 1.9.2
`on_maximize`:
Fired when the window is maximized.
.. versionadded:: 1.9.2
`on_restore`:
Fired when the window is restored.
.. versionadded:: 1.9.2
`on_hide`:
Fired when the window is hidden.
.. versionadded:: 1.9.2
`on_show`:
Fired when when the window is shown.
.. versionadded:: 1.9.2
`on_keyboard`: key, scancode, codepoint, modifier
Fired when the keyboard is used for input.
.. versionchanged:: 1.3.0
The *unicode* parameter has been deprecated in favor of
codepoint, and will be removed completely in future versions.
`on_key_down`: key, scancode, codepoint, modifier
Fired when a key pressed.
.. versionchanged:: 1.3.0
The *unicode* parameter has been deprecated in favor of
codepoint, and will be removed completely in future versions.
`on_key_up`: key, scancode, codepoint
Fired when a key is released.
.. versionchanged:: 1.3.0
The *unicode* parameter has be deprecated in favor of
codepoint, and will be removed completely in future versions.
`on_dropfile`: str
Fired when a file is dropped on the application.
`on_memorywarning`:
Fired when the platform have memory issue (iOS / Android mostly)
You can listen to this one, and clean whatever you can.
.. versionadded:: 1.9.0
'''
__instance = None
__initialized = False
_fake_fullscreen = False
_density = 1
# private properties
_size = ListProperty([0, 0])
_modifiers = ListProperty([])
_rotation = NumericProperty(0)
_clearcolor = ObjectProperty([0, 0, 0, 1])
_focus = BooleanProperty(True)
children = ListProperty([])
'''List of the children of this window.
:attr:`children` is a :class:`~kivy.properties.ListProperty` instance and
defaults to an empty list.
Use :meth:`add_widget` and :meth:`remove_widget` to manipulate the list of
children. Don't manipulate the list directly unless you know what you are
doing.
'''
parent = ObjectProperty(None, allownone=True)
'''Parent of this window.
:attr:`parent` is a :class:`~kivy.properties.ObjectProperty` instance and
defaults to None. When created, the parent is set to the window itself.
You must take care of it if you are doing a recursive check.
'''
icon = StringProperty()
    def _get_modifiers(self):
        # Internal getter backing the read-only ``modifiers`` alias property.
        return self._modifiers
    modifiers = AliasProperty(_get_modifiers, None)
    '''List of keyboard modifiers currently active.
    '''
    def _get_size(self):
        # Internal getter backing the ``size`` alias property: the window
        # size adjusted for pixel density, the soft keyboard (in 'resize'
        # softinput mode) and the current rotation.
        r = self._rotation
        w, h = self._size
        if self._density != 1:
            # High-density display: ask the native window for the GL size
            w, h = self._win._get_gl_size()
        if self.softinput_mode == 'resize':
            # Reserve vertical space for the on-screen keyboard
            h -= self.keyboard_height
        if r in (0, 180):
            return w, h
        # 90/270 degree rotation swaps width and height
        return h, w
def _set_size(self, size):
if self._size != size:
r = self._rotation
if r in (0, 180):
self._size = size
else:
self._size = size[1], size[0]
self.dispatch('on_resize', *size)
return True
else:
return False
    minimum_width = NumericProperty(0)
    '''The minimum width to restrict the window to.
    .. versionadded:: 1.9.1
    :attr:`minimum_width` is a :class:`~kivy.properties.NumericProperty` and
    defaults to 0.
    '''
    minimum_height = NumericProperty(0)
    '''The minimum height to restrict the window to.
    .. versionadded:: 1.9.1
    :attr:`minimum_height` is a :class:`~kivy.properties.NumericProperty` and
    defaults to 0.
    '''
    size = AliasProperty(_get_size, _set_size, bind=('_size', ))
    '''Get the rotated size of the window. If :attr:`rotation` is set, then the
    size will change to reflect the rotation.
    '''
    def _get_clearcolor(self):
        """Getter for the :attr:`clearcolor` alias property."""
        return self._clearcolor
def _set_clearcolor(self, value):
if value is not None:
if type(value) not in (list, tuple):
raise Exception('Clearcolor must be a list or tuple')
if len(value) != 4:
raise Exception('Clearcolor must contain 4 values')
self._clearcolor = value
    clearcolor = AliasProperty(_get_clearcolor, _set_clearcolor,
                               bind=('_clearcolor', ))
    '''Color used to clear the window.
    ::
        from kivy.core.window import Window
        # red background color
        Window.clearcolor = (1, 0, 0, 1)
        # don't clear background at all
        Window.clearcolor = None
    .. versionchanged:: 1.7.2
        The clearcolor default value is now: (0, 0, 0, 1).
    '''
    # make some property read-only
    def _get_width(self):
        """Rotated, density-aware window width in pixels."""
        _size = self._size
        if self._density != 1:
            _size = self._win._get_gl_size()
        r = self._rotation
        if r == 0 or r == 180:
            return _size[0]
        return _size[1]
    width = AliasProperty(_get_width, None, bind=('_rotation', '_size'))
    '''Rotated window width.
    :attr:`width` is a read-only :class:`~kivy.properties.AliasProperty`.
    '''
    def _get_height(self):
        '''Rotated window height'''
        r = self._rotation
        _size = self._size
        if self._density != 1:
            _size = self._win._get_gl_size()
        # in 'resize' mode the soft keyboard occupies part of the height
        kb = self.keyboard_height if self.softinput_mode == 'resize' else 0
        if r == 0 or r == 180:
            return _size[1] - kb
        return _size[0] - kb
    height = AliasProperty(_get_height, None, bind=('_rotation', '_size'))
    '''Rotated window height.
    :attr:`height` is a read-only :class:`~kivy.properties.AliasProperty`.
    '''
    def _get_center(self):
        """Center point of the rotated window."""
        return self.width / 2., self.height / 2.
    center = AliasProperty(_get_center, None, bind=('width', 'height'))
    '''Center of the rotated window.
    :attr:`center` is a :class:`~kivy.properties.AliasProperty`.
    '''
    def _get_rotation(self):
        """Getter for the :attr:`rotation` alias property."""
        return self._rotation
    def _set_rotation(self, x):
        """Set the content rotation; only 0/90/180/270 are accepted.
        Dispatches ``on_resize`` and ``on_rotate`` once initialized."""
        x = int(x % 360)
        if x == self._rotation:
            return
        if x not in (0, 90, 180, 270):
            raise ValueError('can rotate only 0, 90, 180, 270 degrees')
        self._rotation = x
        if not self.initialized:
            # window not created yet: nothing to redraw or dispatch
            return
        self.dispatch('on_resize', *self.size)
        self.dispatch('on_rotate', x)
    rotation = AliasProperty(_get_rotation, _set_rotation,
                             bind=('_rotation', ))
    '''Get/set the window content rotation. Can be one of 0, 90, 180, 270
    degrees.
    '''
softinput_mode = OptionProperty('', options=(
'', 'below_target', 'pan', 'scale', 'resize'))
'''This specifies the behavior of window contents on display of the soft
keyboard on mobile platforms. It can be one of '', 'pan', 'scale',
'resize' or 'below_target'. Their effects are listed below.
+----------------+-------------------------------------------------------+
| Value | Effect |
+================+=======================================================+
| '' | The main window is left as is, allowing you to use |
| | the :attr:`keyboard_height` to manage the window |
| | contents manually. |
+----------------+-------------------------------------------------------+
| 'pan' | The main window pans, moving the bottom part of the |
| | window to be always on top of the keyboard. |
+----------------+-------------------------------------------------------+
| 'resize' | The window is resized and the contents scaled to fit |
| | the remaining space. |
+----------------+-------------------------------------------------------+
| 'below_target' | The window pans so that the current target TextInput |
| | widget requesting the keyboard is presented just above|
| | the soft keyboard. |
+----------------+-------------------------------------------------------+
:attr:`softinput_mode` is an :class:`~kivy.properties.OptionProperty` and
defaults to `None`.
.. versionadded:: 1.9.0
.. versionchanged:: 1.9.1
The 'below_target' option was added.
'''
_keyboard_changed = BooleanProperty(False)
_kheight = NumericProperty(0)
    def _animate_content(self):
        '''Animate content to IME height.
        '''
        kargs = self.keyboard_anim_args
        # Animation is imported lazily to avoid importing it at module load
        global Animation
        if not Animation:
            from kivy.animation import Animation
        Animation.cancel_all(self)
        Animation(
            _kheight = self.keyboard_height + self.keyboard_padding,
            d=kargs['d'], t=kargs['t']).start(self)
    def _upd_kbd_height(self, *kargs):
        """Signal that the keyboard height changed and animate the content."""
        self._keyboard_changed = not self._keyboard_changed
        self._animate_content()
    def _get_ios_kheight(self):
        """Soft keyboard height on iOS (via the `ios` module)."""
        import ios
        return ios.get_kheight()
    def _get_android_kheight(self):
        """Soft keyboard height on Android (via the `android` module)."""
        if USE_SDL2:  # Placeholder until the SDL2 bootstrap supports this
            return 0
        global android
        if not android:
            import android
        return android.get_keyboard_height()
    def _get_kheight(self):
        """Platform dispatcher for the soft keyboard height (0 on desktop)."""
        if platform == 'android':
            return self._get_android_kheight()
        if platform == 'ios':
            return self._get_ios_kheight()
        return 0
keyboard_height = AliasProperty(_get_kheight, None,
bind=('_keyboard_changed',), cached=True)
'''Returns the height of the softkeyboard/IME on mobile platforms.
Will return 0 if not on mobile platform or if IME is not active.
.. versionadded:: 1.9.0
:attr:`keyboard_height` is a read-only
:class:`~kivy.properties.AliasProperty` and defaults to 0.
'''
keyboard_anim_args = {'t': 'in_out_quart', 'd': .5}
'''The attributes for animating softkeyboard/IME.
`t` = `transition`, `d` = `duration`. Will have no effect on desktops.
.. versionadded:: 1.9.2
:attr:`keyboard_anim_args` is a dict with values
't': 'in_out_quart', 'd': `.5`.
'''
keyboard_padding = NumericProperty(0)
'''The padding to have between the softkeyboard/IME & target
or bottom of window. Will have no effect on desktops.
.. versionadded:: 1.9.2
:attr:`keyboard_padding` is a
:class:`~kivy.properties.NumericProperty` and defaults to 0.
'''
    def _set_system_size(self, size):
        """Setter for :attr:`system_size`: stores the raw (unrotated) size."""
        self._size = size
    def _get_system_size(self):
        """Raw window size; in 'resize' softinput mode the keyboard height
        is subtracted from the height."""
        if self.softinput_mode == 'resize':
            return self._size[0], self._size[1] - self.keyboard_height
        return self._size
    system_size = AliasProperty(
        _get_system_size,
        _set_system_size,
        bind=('_size', ))
    '''Real size of the window ignoring rotation.
    '''
    def _get_effective_size(self):
        '''On density=1 and non-ios displays, return system_size, else
        return scaled / rotated size.
        Used by MouseMotionEvent.update_graphics() and WindowBase.on_motion().
        '''
        w, h = self.system_size
        if platform == 'ios' or self._density != 1:
            w, h = self.size
        return w, h
    borderless = BooleanProperty(False)
    '''When set to True, this property removes the window border/decoration.
    Check the :mod:`~kivy.config` documentation for a more detailed
    explanation on the values.
    .. versionadded:: 1.9.0
    :attr:`borderless` is a :class:`~kivy.properties.BooleanProperty` and
    defaults to False.
    '''
    fullscreen = OptionProperty(False, options=(True, False, 'auto', 'fake'))
    '''This property sets the fullscreen mode of the window. Available options
    are: True, False, 'auto' and 'fake'. Check the :mod:`~kivy.config`
    documentation for more detailed explanations on these values.
    fullscreen is an :class:`~kivy.properties.OptionProperty` and defaults to
    `False`.
    .. versionadded:: 1.2.0
    .. note::
        The 'fake' option has been deprecated, use the :attr:`borderless`
        property instead.
    '''
    mouse_pos = ObjectProperty([0, 0])
    '''2d position of the mouse within the window.
    .. versionadded:: 1.2.0
    '''
    show_cursor = BooleanProperty(True)
    '''Set whether or not the cursor is shown on the window.
    .. versionadded:: 1.9.1
    :attr:`show_cursor` is a :class:`~kivy.properties.BooleanProperty` and
    defaults to True.
    '''
    def _get_focus(self):
        """Getter for the read-only :attr:`focus` alias property."""
        return self._focus
    focus = AliasProperty(_get_focus, None, bind=('_focus',))
    '''Check whether or not the window currently has focus.
    .. versionadded:: 1.9.1
    :attr:`focus` is a read-only :class:`~kivy.properties.AliasProperty` and
    defaults to True.
    '''
    def _set_cursor_state(self, value):
        """No-op hook; window providers implement the actual cursor change."""
        pass
    @property
    def __self__(self):
        # lets the window be used where a widget proxy's __self__ is expected
        return self
    # window position / decoration state
    top = NumericProperty(None, allownone=True)
    left = NumericProperty(None, allownone=True)
    position = OptionProperty('auto', options=['auto', 'custom'])
    # root render context and canvas, created in create_window()
    render_context = ObjectProperty(None)
    canvas = ObjectProperty(None)
    title = StringProperty('Kivy')
    # Clock trigger (created in __init__) that recreates the window when a
    # relevant property changes; see _bind_create_window().
    trigger_create_window = None
    __events__ = (
        'on_draw', 'on_flip', 'on_rotate', 'on_resize', 'on_close',
        'on_minimize', 'on_maximize', 'on_restore', 'on_hide', 'on_show',
        'on_motion', 'on_touch_down', 'on_touch_move', 'on_touch_up',
        'on_mouse_down', 'on_mouse_move', 'on_mouse_up', 'on_keyboard',
        'on_key_down', 'on_key_up', 'on_textinput', 'on_dropfile',
        'on_request_close', 'on_cursor_enter', 'on_cursor_leave',
        'on_joy_axis', 'on_joy_hat', 'on_joy_ball',
        'on_joy_button_down', 'on_joy_button_up', 'on_memorywarning')
    def __new__(cls, **kwargs):
        # singleton: always return the one shared window instance
        if cls.__instance is None:
            cls.__instance = EventDispatcher.__new__(cls)
        return cls.__instance
    def __init__(self, **kwargs):
        """Initialize the (singleton) window.

        Any keyword not supplied is filled in from the ``graphics`` section
        of the Config; the actual window is then created and registered
        with the event loop and modules.
        """
        force = kwargs.pop('force', False)
        # don't init window 2 times,
        # except if force is specified
        if WindowBase.__instance is not None and not force:
            return
        self.initialized = False
        self._is_desktop = Config.getboolean('kivy', 'desktop')
        # create a trigger for update/create the window when one of window
        # property changes
        self.trigger_create_window = Clock.create_trigger(
            self.create_window, -1)
        # Create a trigger for updating the keyboard height
        self.trigger_keyboard_height = Clock.create_trigger(
            self._upd_kbd_height, .5)
        self.bind(_kheight=lambda *args: self.update_viewport())
        # set the default window parameter according to the configuration
        if 'borderless' not in kwargs:
            kwargs['borderless'] = Config.getboolean('graphics', 'borderless')
        if 'fullscreen' not in kwargs:
            fullscreen = Config.get('graphics', 'fullscreen')
            if fullscreen not in ('auto', 'fake'):
                fullscreen = fullscreen.lower() in ('true', '1', 'yes')
            kwargs['fullscreen'] = fullscreen
        if 'width' not in kwargs:
            kwargs['width'] = Config.getint('graphics', 'width')
        if 'height' not in kwargs:
            kwargs['height'] = Config.getint('graphics', 'height')
        if 'minimum_width' not in kwargs:
            kwargs['minimum_width'] = Config.getint('graphics',
                                                    'minimum_width')
        if 'minimum_height' not in kwargs:
            kwargs['minimum_height'] = Config.getint('graphics',
                                                     'minimum_height')
        if 'rotation' not in kwargs:
            kwargs['rotation'] = Config.getint('graphics', 'rotation')
        if 'position' not in kwargs:
            kwargs['position'] = Config.getdefault('graphics', 'position',
                                                   'auto')
        # an explicit top/left switches the window to 'custom' positioning
        if 'top' in kwargs:
            kwargs['position'] = 'custom'
            kwargs['top'] = kwargs['top']
        else:
            kwargs['top'] = Config.getint('graphics', 'top')
        if 'left' in kwargs:
            kwargs['position'] = 'custom'
            kwargs['left'] = kwargs['left']
        else:
            kwargs['left'] = Config.getint('graphics', 'left')
        kwargs['_size'] = (kwargs.pop('width'), kwargs.pop('height'))
        if 'show_cursor' not in kwargs:
            kwargs['show_cursor'] = Config.getboolean('graphics',
                                                      'show_cursor')
        super(WindowBase, self).__init__(**kwargs)
        # bind all the properties that need to recreate the window
        self._bind_create_window()
        self.bind(size=self.trigger_keyboard_height,
                  rotation=self.trigger_keyboard_height)
        self.bind(softinput_mode=lambda *dt: self.update_viewport(),
                  keyboard_height=lambda *dt: self.update_viewport())
        self.bind(show_cursor=lambda *dt: self._set_cursor_state(dt[1]))
        # init privates
        self._system_keyboard = Keyboard(window=self)
        self._keyboards = {'system': self._system_keyboard}
        self._vkeyboard_cls = None
        self.children = []
        self.parent = self
        # before creating the window
        import kivy.core.gl  # NOQA
        # configure the window
        self.create_window()
        # attach modules + listener event
        EventLoop.set_window(self)
        Modules.register_window(self)
        EventLoop.add_event_listener(self)
        # manage keyboard(s)
        self.configure_keyboards()
        # assign the default context of the widget creation
        if not hasattr(self, '_context'):
            self._context = get_current_context()
        # mark as initialized
        self.initialized = True
def _bind_create_window(self):
for prop in (
'fullscreen', 'borderless', 'position', 'top',
'left', '_size', 'system_size'):
self.bind(**{prop: self.trigger_create_window})
def _unbind_create_window(self):
for prop in (
'fullscreen', 'borderless', 'position', 'top',
'left', '_size', 'system_size'):
self.unbind(**{prop: self.trigger_create_window})
    @deprecated
    def toggle_fullscreen(self):
        '''Toggle between fullscreen and windowed mode.
        .. deprecated:: 1.9.0
            Use :attr:`fullscreen` instead.
        '''
        # no-op: kept only for backward compatibility
        pass
    def maximize(self):
        '''Maximizes the window. This method should be used on desktop
        platforms only.
        .. versionadded:: 1.9.0
        .. note::
            This feature requires the SDL2 window provider and is currently only
            supported on desktop platforms.
        '''
        # base implementation just warns; providers override this
        Logger.warning('Window: maximize() is not implemented in the current '
                       'window provider.')
    def minimize(self):
        '''Minimizes the window. This method should be used on desktop
        platforms only.
        .. versionadded:: 1.9.0
        .. note::
            This feature requires the SDL2 window provider and is currently only
            supported on desktop platforms.
        '''
        Logger.warning('Window: minimize() is not implemented in the current '
                       'window provider.')
    def restore(self):
        '''Restores the size and position of a maximized or minimized window.
        This method should be used on desktop platforms only.
        .. versionadded:: 1.9.0
        .. note::
            This feature requires the SDL2 window provider and is currently only
            supported on desktop platforms.
        '''
        Logger.warning('Window: restore() is not implemented in the current '
                       'window provider.')
    def hide(self):
        '''Hides the window. This method should be used on desktop
        platforms only.
        .. versionadded:: 1.9.0
        .. note::
            This feature requires the SDL2 window provider and is currently only
            supported on desktop platforms.
        '''
        Logger.warning('Window: hide() is not implemented in the current '
                       'window provider.')
    def show(self):
        '''Shows the window. This method should be used on desktop
        platforms only.
        .. versionadded:: 1.9.0
        .. note::
            This feature requires the SDL2 window provider and is currently only
            supported on desktop platforms.
        '''
        Logger.warning('Window: show() is not implemented in the current '
                       'window provider.')
    def raise_window(self):
        '''Raise the window. This method should be used on desktop
        platforms only.
        .. versionadded:: 1.9.1
        .. note::
            This feature requires the SDL2 window provider and is currently only
            supported on desktop platforms.
        '''
        Logger.warning('Window: raise_window is not implemented in the current '
                       'window provider.')
    def close(self):
        '''Close the window'''
        # no-op here; window providers implement the actual close
        pass
    def create_window(self, *largs):
        '''Will create the main window and configure it.
        .. warning::
            This method is called automatically at runtime. If you call it, it
            will recreate a RenderContext and Canvas. This means you'll have a
            new graphics tree, and the old one will be unusable.
            This method exist to permit the creation of a new OpenGL context
            AFTER closing the first one. (Like using runTouchApp() and
            stopTouchApp()).
            This method has only been tested in a unittest environment and
            is not suitable for Applications.
            Again, don't use this method unless you know exactly what you are
            doing!
        '''
        # just to be sure, if the trigger is set, and if this method is
        # manually called, unset the trigger
        self.trigger_create_window.cancel()
        # ensure the window creation will not be called twice
        if platform in ('android', 'ios'):
            self._unbind_create_window()
        if not self.initialized:
            # first creation: init GL and build the root graphics tree
            from kivy.core.gl import init_gl
            init_gl()
            # create the render context and canvas, only the first time.
            from kivy.graphics import RenderContext, Canvas
            self.render_context = RenderContext()
            self.canvas = Canvas()
            self.render_context.add(self.canvas)
        else:
            # if we get initialized more than once, then reload opengl state
            # after the second time.
            # XXX check how it's working on embed platform.
            if platform == 'linux' or Window.__class__.__name__ == 'WindowSDL':
                # on linux, it's safe for just sending a resize.
                self.dispatch('on_resize', *self.system_size)
            else:
                # on other platform, window are recreated, we need to reload.
                from kivy.graphics.context import get_context
                get_context().reload()
                Clock.schedule_once(lambda x: self.canvas.ask_update(), 0)
                self.dispatch('on_resize', *self.system_size)
        # ensure the gl viewport is correct
        self.update_viewport()
    def on_flip(self):
        '''Flip between buffers (event)'''
        self.flip()
    def flip(self):
        '''Flip between buffers'''
        # no-op here; window providers perform the actual buffer swap
        pass
    def _update_childsize(self, instance, value):
        # property-change callback: re-layout only the changed child
        self.update_childsize([instance])
def add_widget(self, widget, canvas=None):
'''Add a widget to a window'''
widget.parent = self
self.children.insert(0, widget)
canvas = self.canvas.before if canvas == 'before' else \
self.canvas.after if canvas == 'after' else self.canvas
canvas.add(widget.canvas)
self.update_childsize([widget])
widget.bind(
pos_hint=self._update_childsize,
size_hint=self._update_childsize,
size=self._update_childsize,
pos=self._update_childsize)
def remove_widget(self, widget):
'''Remove a widget from a window
'''
if not widget in self.children:
return
self.children.remove(widget)
if widget.canvas in self.canvas.children:
self.canvas.remove(widget.canvas)
elif widget.canvas in self.canvas.after.children:
self.canvas.after.remove(widget.canvas)
elif widget.canvas in self.canvas.before.children:
self.canvas.before.remove(widget.canvas)
widget.parent = None
widget.unbind(
pos_hint=self._update_childsize,
size_hint=self._update_childsize,
size=self._update_childsize,
pos=self._update_childsize)
    def clear(self):
        '''Clear the window with the background color'''
        # XXX FIXME use late binding
        from kivy.graphics.opengl import glClearColor, glClear, \
            GL_COLOR_BUFFER_BIT, GL_DEPTH_BUFFER_BIT, GL_STENCIL_BUFFER_BIT
        cc = self._clearcolor
        # a clearcolor of None means "don't clear at all"
        if cc is not None:
            glClearColor(*cc)
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT |
                    GL_STENCIL_BUFFER_BIT)
def set_title(self, title):
'''Set the window title.
.. versionadded:: 1.0.5
'''
self.title = title
def set_icon(self, filename):
'''Set the icon of the window.
.. versionadded:: 1.0.5
'''
self.icon = filename
def to_widget(self, x, y, initial=True, relative=False):
return (x, y)
def to_window(self, x, y, initial=True, relative=False):
return (x, y)
def _apply_transform(self, m):
return m
    def get_window_matrix(self, x=0, y=0):
        """Return a new :class:`Matrix` translated by (x, y)."""
        m = Matrix()
        m.translate(x, y, 0)
        return m
def get_root_window(self):
return self
def get_parent_window(self):
return self
def get_parent_layout(self):
return None
    def on_draw(self):
        """Default draw handler: clear the window then render the canvas."""
        self.clear()
        self.render_context.draw()
    def on_motion(self, etype, me):
        '''Event called when a Motion Event is received.
        :Parameters:
            `etype`: str
                One of 'begin', 'update', 'end'
            `me`: :class:`~kivy.input.motionevent.MotionEvent`
                The Motion Event currently dispatched.
        '''
        if me.is_touch:
            # scale the raw event into window coordinates, accounting for
            # rotation and the soft keyboard mode/height
            w, h = self._get_effective_size()
            me.scale_for_screen(w, h, rotation=self._rotation,
                                smode=self.softinput_mode,
                                kheight=self.keyboard_height)
            if etype == 'begin':
                self.dispatch('on_touch_down', me)
            elif etype == 'update':
                self.dispatch('on_touch_move', me)
            elif etype == 'end':
                self.dispatch('on_touch_up', me)
                # let focusable widgets react after the touch is released
                FocusBehavior._handle_post_on_touch_up(me)
def on_touch_down(self, touch):
'''Event called when a touch down event is initiated.
.. versionchanged:: 1.9.0
The touch `pos` is now transformed to window coordinates before
this method is called. Before, the touch `pos` coordinate would be
`(0, 0)` when this method was called.
'''
for w in self.children[:]:
if w.dispatch('on_touch_down', touch):
return True
def on_touch_move(self, touch):
'''Event called when a touch event moves (changes location).
.. versionchanged:: 1.9.0
The touch `pos` is now transformed to window coordinates before
this method is called. Before, the touch `pos` coordinate would be
`(0, 0)` when this method was called.
'''
for w in self.children[:]:
if w.dispatch('on_touch_move', touch):
return True
def on_touch_up(self, touch):
'''Event called when a touch event is released (terminated).
.. versionchanged:: 1.9.0
The touch `pos` is now transformed to window coordinates before
this method is called. Before, the touch `pos` coordinate would be
`(0, 0)` when this method was called.
'''
for w in self.children[:]:
if w.dispatch('on_touch_up', touch):
return True
    def on_resize(self, width, height):
        '''Event called when the window is resized.'''
        # recompute GL viewport + matrices for the new size
        self.update_viewport()
    def update_viewport(self):
        """Recompute the GL viewport and projection/modelview matrices.

        Takes density, rotation and the softinput (soft keyboard) mode into
        account, then asks for a canvas redraw and re-layouts the children.
        """
        from kivy.graphics.opengl import glViewport
        from kivy.graphics.transformation import Matrix
        from math import radians
        w, h = self.system_size
        if self._density != 1:
            w, h = self.size
        smode = self.softinput_mode
        target = self._system_keyboard.target
        # top (window y) of the widget currently requesting the keyboard
        targettop = max(0, target.to_window(0, target.y)[1]) if target else 0
        kheight = self._kheight
        w2, h2 = w / 2., h / 2.
        r = radians(self.rotation)
        x, y = 0, 0
        _h = h
        if smode == 'pan':
            y = kheight
        elif smode == 'below_target':
            # pan only as much as needed to expose the target widget
            y = 0 if kheight < targettop else (kheight - targettop)
        if smode == 'scale':
            _h -= kheight
        # prepare the viewport
        glViewport(x, y, w, _h)
        # do projection matrix
        projection_mat = Matrix()
        projection_mat.view_clip(0.0, w, 0.0, h, -1.0, 1.0, 0)
        self.render_context['projection_mat'] = projection_mat
        # do modelview matrix
        modelview_mat = Matrix().translate(w2, h2, 0)
        modelview_mat = modelview_mat.multiply(Matrix().rotate(r, 0, 0, 1))
        w, h = self.size
        w2, h2 = w / 2., h / 2.
        modelview_mat = modelview_mat.multiply(Matrix().translate(-w2, -h2, 0))
        self.render_context['modelview_mat'] = modelview_mat
        # redraw canvas
        self.canvas.ask_update()
        # and update childs
        self.update_childsize()
def update_childsize(self, childs=None):
width, height = self.size
if childs is None:
childs = self.children
for w in childs:
shw, shh = w.size_hint
if shw and shh:
w.size = shw * width, shh * height
elif shw:
w.width = shw * width
elif shh:
w.height = shh * height
for key, value in w.pos_hint.items():
if key == 'x':
w.x = value * width
elif key == 'right':
w.right = value * width
elif key == 'y':
w.y = value * height
elif key == 'top':
w.top = value * height
elif key == 'center_x':
w.center_x = value * width
elif key == 'center_y':
w.center_y = value * height
def screenshot(self, name='screenshot{:04d}.png'):
'''Save the actual displayed image in a file
'''
i = 0
path = None
if name != 'screenshot{:04d}.png':
_ext = name.split('.')[-1]
name = ''.join((name[:-(len(_ext) + 1)], '{:04d}.', _ext))
while True:
i += 1
path = join(getcwd(), name.format(i))
if not exists(path):
break
return path
    def on_rotate(self, rotation):
        '''Event called when the screen has been rotated.
        '''
        pass
    def on_close(self, *largs):
        '''Event called when the window is closed'''
        # detach the window from the modules and the event loop
        Modules.unregister_window(self)
        EventLoop.remove_event_listener(self)
    def on_minimize(self, *largs):
        '''Event called when the window is minimized.
        .. versionadded:: 1.9.2
        .. note::
            This feature requires the SDL2 window provider.
        '''
        pass
    def on_maximize(self, *largs):
        '''Event called when the window is maximized.
        .. versionadded:: 1.9.2
        .. note::
            This feature requires the SDL2 window provider.
        '''
        pass
    def on_restore(self, *largs):
        '''Event called when the window is restored.
        .. versionadded:: 1.9.2
        .. note::
            This feature requires the SDL2 window provider.
        '''
        pass
    def on_hide(self, *largs):
        '''Event called when the window is hidden.
        .. versionadded:: 1.9.2
        .. note::
            This feature requires the SDL2 window provider.
        '''
        pass
    def on_show(self, *largs):
        '''Event called when the window is shown.
        .. versionadded:: 1.9.2
        .. note::
            This feature requires the SDL2 window provider.
        '''
        pass
    def on_request_close(self, *largs, **kwargs):
        '''Event called before we close the window. If a bound function returns
        `True`, the window will not be closed. If the the event is triggered
        because of the keyboard escape key, the keyword argument `source` is
        dispatched along with a value of `keyboard` to the bound functions.
        .. warning::
            When the bound function returns True the window will not be closed,
            so use with care because the user would not be able to close the
            program, even if the red X is clicked.
        '''
        pass
    def on_cursor_enter(self, *largs):
        '''Event called when the cursor enters the window.
        .. versionadded:: 1.9.1
        .. note::
            This feature requires the SDL2 window provider.
        '''
        pass
    def on_cursor_leave(self, *largs):
        '''Event called when the cursor leaves the window.
        .. versionadded:: 1.9.1
        .. note::
            This feature requires the SDL2 window provider.
        '''
        pass
    def on_mouse_down(self, x, y, button, modifiers):
        '''Event called when a mouse button is pressed'''
        pass
    def on_mouse_move(self, x, y, modifiers):
        '''Event called when the mouse is moved with buttons pressed'''
        pass
    def on_mouse_up(self, x, y, button, modifiers):
        '''Event called when a mouse button is released'''
        pass
    def on_joy_axis(self, stickid, axisid, value):
        '''Event called when a joystick has a stick or other axis moved
        .. versionadded:: 1.9.0'''
        pass
    def on_joy_hat(self, stickid, hatid, value):
        '''Event called when a joystick has a hat/dpad moved
        .. versionadded:: 1.9.0'''
        pass
    def on_joy_ball(self, stickid, ballid, value):
        '''Event called when a joystick has a ball moved
        .. versionadded:: 1.9.0'''
        pass
    def on_joy_button_down(self, stickid, buttonid):
        '''Event called when a joystick has a button pressed
        .. versionadded:: 1.9.0'''
        pass
    def on_joy_button_up(self, stickid, buttonid):
        '''Event called when a joystick has a button released
        .. versionadded:: 1.9.0'''
        pass
    def on_keyboard(self, key, scancode=None, codepoint=None,
                    modifier=None, **kwargs):
        '''Event called when keyboard is used.
        .. warning::
            Some providers may omit `scancode`, `codepoint` and/or `modifier`.
        '''
        if 'unicode' in kwargs:
            Logger.warning("The use of the unicode parameter is deprecated, "
                           "and will be removed in future versions. Use "
                           "codepoint instead, which has identical "
                           "semantics.")
        # Quit if user presses ESC or the typical OSX shortcuts CMD+q or CMD+w
        # TODO If just CMD+w is pressed, only the window should be closed.
        is_osx = platform == 'darwin'
        # exit_on_escape is an attribute attached to this function object
        # below (class-body code), kept in sync with the config.
        if WindowBase.on_keyboard.exit_on_escape:
            if key == 27 or all([is_osx, key in [113, 119], modifier == 1024]):
                if not self.dispatch('on_request_close', source='keyboard'):
                    stopTouchApp()
                    self.close()
                    return True
    # class-body code: initialize on_keyboard.exit_on_escape from the config
    # and register a callback to keep it updated when the config changes.
    if Config:
        on_keyboard.exit_on_escape = Config.getboolean('kivy', 'exit_on_escape')
        def __exit(section, name, value):
            WindowBase.__dict__['on_keyboard'].exit_on_escape = \
                Config.getboolean('kivy', 'exit_on_escape')
        Config.add_callback(__exit, 'kivy', 'exit_on_escape')
    def on_key_down(self, key, scancode=None, codepoint=None,
                    modifier=None, **kwargs):
        '''Event called when a key is down (same arguments as on_keyboard)'''
        # accept the legacy `unicode` kwarg but warn about its deprecation
        if 'unicode' in kwargs:
            Logger.warning("The use of the unicode parameter is deprecated, "
                           "and will be removed in future versions. Use "
                           "codepoint instead, which has identical "
                           "semantics.")
    def on_key_up(self, key, scancode=None, codepoint=None,
                  modifier=None, **kwargs):
        '''Event called when a key is released (same arguments as on_keyboard)
        '''
        # accept the legacy `unicode` kwarg but warn about its deprecation
        if 'unicode' in kwargs:
            Logger.warning("The use of the unicode parameter is deprecated, "
                           "and will be removed in future versions. Use "
                           "codepoint instead, which has identical "
                           "semantics.")
    def on_textinput(self, text):
        '''Event called when text: i.e. alpha numeric non control keys or set
        of keys is entered. As it is not guaranteed whether we get one
        character or multiple ones, this event supports handling multiple
        characters.
        .. versionadded:: 1.9.0
        '''
        pass
    def on_dropfile(self, filename):
        '''Event called when a file is dropped on the application.
        .. warning::
            This event currently works with sdl2 window provider, on pygame
            window provider and OS X with a patched version of pygame.
            This event is left in place for further evolution
            (ios, android etc.)
        .. versionadded:: 1.2.0
        '''
        pass
    def on_memorywarning(self):
        '''Event called when the platform have memory issue.
        Your goal is to clear the cache in your app as much as you can,
        release unused widget, etc.
        Currently, this event is fired only from SDL2 provider, for
        iOS and Android.
        .. versionadded:: 1.9.0
        '''
        pass
    @reify
    def dpi(self):
        '''Return the DPI of the screen. If the implementation doesn't support
        any DPI lookup, it will just return 96.
        .. warning::
            This value is not cross-platform. Use
            :attr:`kivy.base.EventLoop.dpi` instead.
        '''
        # computed once and cached on the instance by @reify
        return 96.
def configure_keyboards(self):
# Configure how to provide keyboards (virtual or not)
# register system keyboard to listening keys from window
sk = self._system_keyboard
self.bind(
on_key_down=sk._on_window_key_down,
on_key_up=sk._on_window_key_up,
on_textinput=sk._on_window_textinput)
# use the device's real keyboard
self.use_syskeyboard = True
# use the device's real keyboard
self.allow_vkeyboard = False
# one single vkeyboard shared between all widgets
self.single_vkeyboard = True
# the single vkeyboard is always sitting at the same position
self.docked_vkeyboard = False
# now read the configuration
mode = Config.get('kivy', 'keyboard_mode')
if mode not in ('', 'system', 'dock', 'multi', 'systemanddock',
'systemandmulti'):
Logger.critical('Window: unknown keyboard mode %r' % mode)
# adapt mode according to the configuration
if mode == 'system':
self.use_syskeyboard = True
self.allow_vkeyboard = False
self.single_vkeyboard = True
self.docked_vkeyboard = False
elif mode == 'dock':
self.use_syskeyboard = False
self.allow_vkeyboard = True
self.single_vkeyboard = True
self.docked_vkeyboard = True
elif mode == 'multi':
self.use_syskeyboard = False
self.allow_vkeyboard = True
self.single_vkeyboard = False
self.docked_vkeyboard = False
elif mode == 'systemanddock':
self.use_syskeyboard = True
self.allow_vkeyboard = True
self.single_vkeyboard = True
self.docked_vkeyboard = True
elif mode == 'systemandmulti':
self.use_syskeyboard = True
self.allow_vkeyboard = True
self.single_vkeyboard = False
self.docked_vkeyboard = False
Logger.info(
'Window: virtual keyboard %sallowed, %s, %s' % (
'' if self.allow_vkeyboard else 'not ',
'single mode' if self.single_vkeyboard else 'multiuser mode',
'docked' if self.docked_vkeyboard else 'not docked'))
def set_vkeyboard_class(self, cls):
'''.. versionadded:: 1.0.8
Set the VKeyboard class to use. If set to `None`, it will use the
:class:`kivy.uix.vkeyboard.VKeyboard`.
'''
self._vkeyboard_cls = cls
def release_all_keyboards(self):
'''.. versionadded:: 1.0.8
This will ensure that no virtual keyboard / system keyboard is
requested. All instances will be closed.
'''
for key in list(self._keyboards.keys())[:]:
keyboard = self._keyboards[key]
if keyboard:
keyboard.release()
    def request_keyboard(self, callback, target, input_type='text'):
        '''.. versionadded:: 1.0.4
        Internal widget method to request the keyboard. This method is rarely
        required by the end-user as it is handled automatically by the
        :class:`~kivy.uix.textinput.TextInput`. We expose it in case you want
        to handle the keyboard manually for unique input scenarios.
        A widget can request the keyboard, indicating a callback to call
        when the keyboard is released (or taken by another widget).
        :Parameters:
            `callback`: func
                Callback that will be called when the keyboard is
                closed. This can be because somebody else requested the
                keyboard or the user closed it.
            `target`: Widget
                Attach the keyboard to the specified `target`. This should be
                the widget that requested the keyboard. Ensure you have a
                different target attached to each keyboard if you're working in
                a multi user mode.
                .. versionadded:: 1.0.8
            `input_type`: string
                Choose the type of soft keyboard to request. Can be one of
                'text', 'number', 'url', 'mail', 'datetime', 'tel', 'address'.
                .. note::
                    `input_type` is currently only honored on mobile devices.
                .. versionadded:: 1.8.0
        :Return:
            An instance of :class:`Keyboard` containing the callback, target,
            and if the configuration allows it, a
            :class:`~kivy.uix.vkeyboard.VKeyboard` instance attached as a
            *.widget* property.
        .. note::
            The behavior of this function is heavily influenced by the current
            `keyboard_mode`. Please see the Config's
            :ref:`configuration tokens <configuration-tokens>` section for
            more information.
        '''
        # release any previous keyboard attached.
        self.release_keyboard(target)
        # if we can use virtual vkeyboard, activate it.
        if self.allow_vkeyboard:
            keyboard = None
            # late import
            global VKeyboard
            if VKeyboard is None and self._vkeyboard_cls is None:
                from kivy.uix.vkeyboard import VKeyboard
                self._vkeyboard_cls = VKeyboard
            # if the keyboard doesn't exist, create it.
            # in single mode all targets share the 'single' keyboard entry
            key = 'single' if self.single_vkeyboard else target
            if key not in self._keyboards:
                vkeyboard = self._vkeyboard_cls()
                keyboard = Keyboard(widget=vkeyboard, window=self)
                vkeyboard.bind(
                    on_key_down=keyboard._on_vkeyboard_key_down,
                    on_key_up=keyboard._on_vkeyboard_key_up,
                    on_textinput=keyboard._on_vkeyboard_textinput)
                self._keyboards[key] = keyboard
            else:
                keyboard = self._keyboards[key]
            # configure vkeyboard
            keyboard.target = keyboard.widget.target = target
            keyboard.callback = keyboard.widget.callback = callback
            # add to the window
            self.add_widget(keyboard.widget)
            # only after add, do dock mode
            keyboard.widget.docked = self.docked_vkeyboard
            keyboard.widget.setup_mode()
        else:
            # system keyboard, just register the callback.
            keyboard = self._system_keyboard
            keyboard.callback = callback
            keyboard.target = target
        # use system (hardware) keyboard according to flag
        if self.allow_vkeyboard and self.use_syskeyboard:
            # unbind first so the handlers are never registered twice
            self.unbind(
                on_key_down=keyboard._on_window_key_down,
                on_key_up=keyboard._on_window_key_up,
                on_textinput=keyboard._on_window_textinput)
            self.bind(
                on_key_down=keyboard._on_window_key_down,
                on_key_up=keyboard._on_window_key_up,
                on_textinput=keyboard._on_window_textinput)
        return keyboard
    def release_keyboard(self, target=None):
        '''.. versionadded:: 1.0.4
        Internal method for the widget to release the real-keyboard. Check
        :meth:`request_keyboard` to understand how it works.
        '''
        if self.allow_vkeyboard:
            # 'single' mode shares one keyboard; otherwise look up per target.
            key = 'single' if self.single_vkeyboard else target
            if key not in self._keyboards:
                return
            keyboard = self._keyboards[key]
            callback = keyboard.callback
            if callback:
                # Clear the callback before invoking it so a re-entrant
                # release cannot call it a second time.
                keyboard.callback = None
                callback()
            keyboard.target = None
            self.remove_widget(keyboard.widget)
            # Per-target keyboards are dropped; the shared 'single' one is kept.
            if key != 'single' and key in self._keyboards:
                del self._keyboards[key]
        elif self._system_keyboard.callback:
            # this way will prevent possible recursion.
            callback = self._system_keyboard.callback
            self._system_keyboard.callback = None
            callback()
            return True
#: Instance of a :class:`WindowBase` implementation
# Candidate window providers as (name, module, class) triples handed to
# core_select_lib, which resolves them to the concrete Window class.
window_impl = []
if platform == 'linux':
    window_impl += [('egl_rpi', 'window_egl_rpi', 'WindowEglRpi')]
if USE_SDL2:
    window_impl += [('sdl2', 'window_sdl2', 'WindowSDL')]
else:
    # Without SDL2, fall back to pygame (plus a raw X11 provider on linux).
    window_impl += [
        ('pygame', 'window_pygame', 'WindowPygame')]
    if platform == 'linux':
        window_impl += [('x11', 'window_x11', 'WindowX11')]
Window = core_select_lib('window', window_impl, True)
|
aron-bordin/kivy
|
kivy/core/window/__init__.py
|
Python
|
mit
| 57,469 | 0.000331 |
# encoding: utf-8
# Copyright 2013–2017 California Institute of Technology. ALL RIGHTS
# RESERVED. U.S. Government Sponsorship acknowledged.
from .setuphandlers import publish
from edrn.rdf import DEFAULT_PROFILE
from plone.dexterity.utils import createContentInContainer
from edrn.rdf.labcascollectionrdfgenerator import ILabCASCollectionRDFGenerator
import plone.api
def nullUpgradeStep(setupTool):
    '''No-op step used when a profile upgrade requires no custom activity.'''
    return None
def upgrade3to4(setupTool):
    '''Re-run the ``typeinfo`` import step of the default profile so that
    content-type changes in this release take effect.'''
    setupTool.runImportStepFromProfile(DEFAULT_PROFILE, 'typeinfo')
def upgrade4to5(setupTool):
    '''Add the ``Staff_Status`` and ``SiteID`` predicate handlers to the RDF
    generators (if not already present) and publish them.'''
    # Note that I (kelly) went ahead and added these through the web to the
    # running https://edrn.jpl.nasa.gov/cancerdataexpo/ so we could take
    # immediate advantage of the new data without cutting a new release.
    # This is provided just in case there is a disaster and we need to
    # re-release.
    portal = setupTool.getSite()
    if 'rdf-generators' in list(portal.keys()):
        rdfGenerators = portal['rdf-generators']
        if 'person-generator' in list(rdfGenerators.keys()):
            personGenerator = rdfGenerators['person-generator']
            # Only create the handler when missing, so the step is idempotent.
            if 'staff_status' not in list(personGenerator.keys()):
                predicate = createContentInContainer(
                    personGenerator,
                    'edrn.rdf.literalpredicatehandler',
                    title='Staff_Status',
                    description='''Maps from DMCC's Staff_Status to the EDRN-specific predicate for employmentActive.''',
                    predicateURI='http://edrn.nci.nih.gov/rdf/schema.rdf#employmentActive'
                )
                publish(predicate, plone.api.portal.get_tool('portal_workflow'))
        if 'publications-generator' in list(rdfGenerators.keys()):
            publicationsGenerator = rdfGenerators['publications-generator']
            # Same idempotency guard for the publications generator.
            if 'siteid' not in list(publicationsGenerator.keys()):
                predicate = createContentInContainer(
                    publicationsGenerator,
                    'edrn.rdf.referencepredicatehandler',
                    title='SiteID',
                    description='''Maps from the DMCC's SiteID to the EDRN-specific predicate for site ID.''',
                    predicateURI='http://edrn.nci.nih.gov/rdf/schema.rdf#site',
                    uriPrefix='http://edrn.nci.nih.gov/data/sites/'
                )
                publish(predicate, plone.api.portal.get_tool('portal_workflow'))
def upgrade5to6(setupTool):
    '''Point every LabCAS collection RDF generator at the current
    data-access API endpoint.'''
    catalog = plone.api.portal.get_tool('portal_catalog')
    for brain in catalog(object_provides=ILabCASCollectionRDFGenerator.__identifier__):
        obj = brain.getObject()
        obj.labcasSolrURL = 'https://edrn-labcas.jpl.nasa.gov/data-access-api'
|
EDRN/CancerDataExpo
|
src/edrn.rdf/edrn/rdf/upgrades.py
|
Python
|
apache-2.0
| 2,788 | 0.002154 |
#<pycode(py_choose)>
class Choose:
    """
    Choose - class for choose() with callbacks

    Python 2 code: instances are handed to ``_idaapi.choose_choose`` which
    drives the dialog and invokes the callback methods below.
    """
    def __init__(self, list, title, flags=0, deflt=1, icon=37):
        # NOTE: the parameter `list` shadows the builtin of the same name;
        # kept as-is for backward compatibility with existing callers.
        self.list = list
        self.title = title
        self.flags = flags
        # Geometry fields; -1 presumably means "use default placement"
        # (TODO confirm against the choose_choose API documentation).
        self.x0 = -1
        self.x1 = -1
        self.y0 = -1
        self.y1 = -1
        self.width = -1
        self.deflt = deflt
        self.icon = icon
        # HACK: Add a circular reference for non-modal choosers. This prevents the GC
        # from collecting the class object the callbacks need. Unfortunately this means
        # that the class will never be collected, unless refhack is set to None explicitly.
        if (flags & Choose2.CH_MODAL) == 0:
            self.refhack = self
    def sizer(self):
        """
        Callback: sizer - returns the length of the list
        """
        return len(self.list)
    def getl(self, n):
        """
        Callback: getl - get one item from the list

        Index 0 is reserved for the title; items are addressed 1-based.
        """
        if n == 0:
            return self.title
        if n <= self.sizer():
            return str(self.list[n-1])
        else:
            return "<Empty>"
    def ins(self):
        # Optional callback stub: insert an item (no-op by default).
        pass
    def update(self, n):
        # Optional callback stub: refresh item `n` (no-op by default).
        pass
    def edit(self, n):
        # Optional callback stub: edit item `n` (no-op by default).
        pass
    def enter(self, n):
        # Called when the user activates item `n` (Python 2 print statement).
        print "enter(%d) called" % n
    def destroy(self):
        # Optional callback stub: the chooser is being closed (no-op by default).
        pass
    def get_icon(self, n):
        # Optional callback stub: per-item icon (returns None by default).
        pass
    def choose(self):
        """
        choose - Display the choose dialogue
        """
        # Disable the script timeout while the dialog is up, then restore it.
        old = set_script_timeout(0)
        n = _idaapi.choose_choose(
            self,
            self.flags,
            self.x0,
            self.y0,
            self.x1,
            self.y1,
            self.width,
            self.deflt,
            self.icon)
        set_script_timeout(old)
        return n
#</pycode(py_choose)>
|
nihilus/src
|
pywraps/py_choose.py
|
Python
|
bsd-3-clause
| 1,595 | 0.016928 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings before importing the framework.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Django_study.settings")
    from django.core.management import execute_from_command_line
    # Dispatch whatever management command was given on the command line.
    execute_from_command_line(sys.argv)
|
zhangyage/Python-oldboy
|
day13/Django_study/manage.py
|
Python
|
apache-2.0
| 255 | 0 |
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def hasPathSum(self, root, sum):
        """
        Return True if the tree rooted at `root` contains a root-to-leaf
        path whose node values add up to `sum`.

        :type root: TreeNode
        :type sum: int
        :rtype: bool
        """
        # Fixes: use `is None` instead of `== None`; stop recursing into the
        # None children of leaves; short-circuit instead of eagerly
        # evaluating both subtrees. Results are identical to the original.
        if root is None:
            # An empty tree has no root-to-leaf paths at all.
            return False
        remaining = sum - root.val
        if root.left is None and root.right is None:
            # A leaf terminates a path; it matches iff it consumed `sum`.
            return remaining == 0
        # Otherwise a matching path must continue through one of the children.
        return (self.hasPathSum(root.left, remaining) or
                self.hasPathSum(root.right, remaining))
|
hawkphantomnet/leetcode
|
PathSum/Solution.py
|
Python
|
mit
| 625 | 0.0048 |
#!/usr/bin/env python
"""
a simple script can run and test your html rendering classes.
Uncomment the steps as you add to your rendering.
"""
import codecs
import cStringIO
# importing the html_rendering code with a short name for easy typing.
import html_render as hr
## writing the file out:
def render(page, filename):
    """
    Render the tree of elements rooted at ``page``.

    This uses cStringIO to render to memory, then dumps the markup to the
    console and writes it to ``filename`` (UTF-8) -- very handy!
    """
    f = cStringIO.StringIO()
    page.render(f)
    f.reset()
    # Echo the rendered markup to stdout for a quick visual check (Python 2).
    print f.read()
    f.reset()
    codecs.open(filename, 'w', encoding="utf-8").write( f.read() )
## Step 1
##########
# Build a bare element with two text children and write the rendered markup
# to disk (render() also echoes it to the console).
page = hr.Element()
page.append(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text")
page.append(u"And here is another piece of text -- you should be able to add any number")
render(page, u"test_html_output1.html")
# ## Step 2
# ##########
# page = hr.Html()
# body = hr.Body()
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text"))
# body.append(hr.P(u"And here is another piece of text -- you should be able to add any number"))
# page.append(body)
# render(page, u"test_html_output2.html")
# # Step 3
# ##########
# page = hr.Html()
# head = hr.Head()
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text"))
# body.append(hr.P(u"And here is another piece of text -- you should be able to add any number"))
# page.append(body)
# render(page, u"test_html_output3.html")
# # Step 4
# ##########
# page = hr.Html()
# head = hr.Head()
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text",
# style=u"text-align: center; font-style: oblique;"))
# page.append(body)
# render(page, u"test_html_output4.html")
# # Step 5
# #########
# page = hr.Html()
# head = hr.Head()
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text",
# style=u"text-align: center; font-style: oblique;"))
# body.append(hr.Hr())
# page.append(body)
# render(page, u"test_html_output5.html")
# # Step 6
# #########
# page = hr.Html()
# head = hr.Head()
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text",
# style=u"text-align: center; font-style: oblique;"))
# body.append(hr.Hr())
# body.append(u"And this is a ")
# body.append( hr.A(u"http://google.com", "link") )
# body.append(u"to google")
# page.append(body)
# render(page, u"test_html_output6.html")
# # Step 7
# #########
# page = hr.Html()
# head = hr.Head()
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append( hr.H(2, u"PythonClass - Class 6 example") )
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text",
# style=u"text-align: center; font-style: oblique;"))
# body.append(hr.Hr())
# list = hr.Ul(id=u"TheList", style=u"line-height:200%")
# list.append( hr.Li(u"The first item in a list") )
# list.append( hr.Li(u"This is the second item", style="color: red") )
# item = hr.Li()
# item.append(u"And this is a ")
# item.append( hr.A(u"http://google.com", u"link") )
# item.append(u"to google")
# list.append(item)
# body.append(list)
# page.append(body)
# render(page, u"test_html_output7.html")
# # Step 8
# ########
# page = hr.Html()
# head = hr.Head()
# head.append( hr.Meta(charset=u"UTF-8") )
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append( hr.H(2, u"PythonClass - Class 6 example") )
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text",
# style=u"text-align: center; font-style: oblique;"))
# body.append(hr.Hr())
# list = hr.Ul(id=u"TheList", style=u"line-height:200%")
# list.append( hr.Li(u"The first item in a list") )
# list.append( hr.Li(u"This is the second item", style="color: red") )
# item = hr.Li()
# item.append(u"And this is a ")
# item.append( hr.A(u"http://google.com", "link") )
# item.append(u"to google")
# list.append(item)
# body.append(list)
# page.append(body)
# render(page, u"test_html_output8.html")
|
AmandaMoen/AmandaMoen
|
code/session06/run_html_render.py
|
Python
|
gpl-2.0
| 5,015 | 0.004786 |
# coding: utf-8
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 2.0.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class WidgetHomeRail(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self, id=None, label=None, type=None, type_id=None, visibility=None, position=None):
        """
        WidgetHomeRail - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
        and the value is attribute type.
        :param dict attributeMap: The key is attribute name
        and the value is json key in definition.
        """
        # Attribute name -> declared Swagger type; drives to_dict().
        self.swagger_types = {
            'id': 'int',
            'label': 'list[I18nField]',
            'type': 'str',
            'type_id': 'int',
            'visibility': 'str',
            'position': 'int'
        }
        # Attribute name -> JSON key in the wire format.
        self.attribute_map = {
            'id': 'id',
            'label': 'label',
            'type': 'type',
            'type_id': 'type_id',
            'visibility': 'visibility',
            'position': 'position'
        }
        self._id = id
        self._label = label
        self._type = type
        self._type_id = type_id
        self._visibility = visibility
        self._position = position
    @property
    def id(self):
        """
        Gets the id of this WidgetHomeRail.

        :return: The id of this WidgetHomeRail.
        :rtype: int
        """
        return self._id
    @id.setter
    def id(self, id):
        """
        Sets the id of this WidgetHomeRail.

        :param id: The id of this WidgetHomeRail.
        :type: int
        """
        self._id = id
    @property
    def label(self):
        """
        Gets the label of this WidgetHomeRail.

        :return: The label of this WidgetHomeRail.
        :rtype: list[I18nField]
        """
        return self._label
    @label.setter
    def label(self, label):
        """
        Sets the label of this WidgetHomeRail.

        :param label: The label of this WidgetHomeRail.
        :type: list[I18nField]
        """
        self._label = label
    @property
    def type(self):
        """
        Gets the type of this WidgetHomeRail.

        :return: The type of this WidgetHomeRail.
        :rtype: str
        """
        return self._type
    @type.setter
    def type(self, type):
        """
        Sets the type of this WidgetHomeRail.

        :param type: The type of this WidgetHomeRail.
        :type: str
        """
        self._type = type
    @property
    def type_id(self):
        """
        Gets the type_id of this WidgetHomeRail.

        :return: The type_id of this WidgetHomeRail.
        :rtype: int
        """
        return self._type_id
    @type_id.setter
    def type_id(self, type_id):
        """
        Sets the type_id of this WidgetHomeRail.

        :param type_id: The type_id of this WidgetHomeRail.
        :type: int
        """
        self._type_id = type_id
    @property
    def visibility(self):
        """
        Gets the visibility of this WidgetHomeRail.

        :return: The visibility of this WidgetHomeRail.
        :rtype: str
        """
        return self._visibility
    @visibility.setter
    def visibility(self, visibility):
        """
        Sets the visibility of this WidgetHomeRail.

        :param visibility: The visibility of this WidgetHomeRail.
        :type: str
        """
        self._visibility = visibility
    @property
    def position(self):
        """
        Gets the position of this WidgetHomeRail.

        :return: The position of this WidgetHomeRail.
        :rtype: int
        """
        return self._position
    @position.setter
    def position(self, position):
        """
        Sets the position of this WidgetHomeRail.

        :param position: The position of this WidgetHomeRail.
        :type: int
        """
        self._position = position
    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # Bug fix: use dict.items() (identical on Python 2 and 3) instead of
        # six.iteritems, so this block no longer depends on six.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Bug fix: comparing against an object without __dict__ used to raise
        # AttributeError, and unrelated classes with identical attributes
        # compared equal. Guard on the type first.
        if not isinstance(other, WidgetHomeRail):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
kinow-io/kinow-python-sdk
|
kinow_client/models/widget_home_rail.py
|
Python
|
apache-2.0
| 5,545 | 0.000361 |
# -*- coding: utf-8 -*-
##############################################################################
# 2014 E2OpenPlugins #
# #
# This file is open source software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License version 2 as #
# published by the Free Software Foundation. #
# #
##############################################################################
# Simulate the oe-a boxbranding module (Only functions required by OWIF) #
##############################################################################
from Plugins.Extensions.ModernWebif.__init__ import _
from Components.About import about
from socket import has_ipv6
from Tools.Directories import fileExists, pathExists
import string
import os, hashlib
# `about` is optional here; import failures are tolerated (it was already
# imported unconditionally above).
try:
	from Components.About import about
except:
	pass
# Whether the eTPM interface (used below to verify genuine Dreambox
# hardware) is available and new enough to expose getData().
tpmloaded = 1
try:
	from enigma import eTPM
	if not hasattr(eTPM, 'getData'):
		tpmloaded = 0
except:
	tpmloaded = 0
def validate_certificate(cert, key):
	# Decrypt the certificate payload (skipping the 8-byte header) with the
	# issuer key; on failure propagate None. On success return the byte
	# ranges used by the next level of the chain (key material; exact field
	# layout is defined by the certificate format -- not visible here).
	buf = decrypt_block(cert[8:], key)
	if buf is None:
		return None
	return buf[36:107] + cert[139:196]
def get_random():
	"""Return an 8-byte random challenge (os.urandom XORed with the current
	time), or None if a random source is unavailable.

	Bug fix: the original referenced the unimported names ``urandom`` and
	``time``, so the body always raised NameError and the bare ``except``
	made the function return None unconditionally.
	"""
	try:
		from time import time
		rnd = bytearray(os.urandom(8))
		# Last 8 characters of the float timestamp, as bytes.
		stamp = bytearray(str(time())[-8:].encode('latin-1'))
		# XOR the random bytes with the timestamp digits (original intent).
		return bytes(bytearray(a ^ b for a, b in zip(rnd, stamp)))
	except:
		return None
def bin2long(s):
	"""Interpret the byte string ``s`` as a big-endian unsigned integer.

	Bug fix: the original used the Python-2-only literal ``8L`` (a syntax
	error on Python 3) and the Python-2 builtin ``reduce``; ``8`` and
	``functools.reduce`` behave identically on both versions.
	"""
	from functools import reduce  # builtin on Py2, functools-only on Py3
	return reduce(lambda acc, byte: (acc << 8) + byte, map(ord, s))
def long2bin(l):
	"""Render integer ``l`` as a 128-byte (1024-bit) big-endian string."""
	chunks = []
	for index in range(128):
		shift = 1024 - (index + 1) * 8
		chunks.append(chr((l >> shift) & 0xff))
	return "".join(chunks)
def rsa_pub1024(src, mod):
	# Textbook RSA public-key operation: src ** 65537 mod `mod`, with both
	# operands and the result handled as 1024-bit big-endian byte strings
	# (65537 is the conventional public exponent).
	return long2bin(pow(bin2long(src), 65537, bin2long(mod)))
def decrypt_block(src, mod):
	# Only the two block layouts used by the certificate chain are accepted:
	# a bare 128-byte RSA block, or a 202-byte block carrying extra data
	# that is folded into the digest as well.
	if len(src) != 128 and len(src) != 202:
		return None
	dest = rsa_pub1024(src[:128], mod)
	# NOTE: `hash` shadows the builtin; kept as-is (doc-only change).
	hash = hashlib.sha1(dest[1:107])
	if len(src) == 202:
		hash.update(src[131:192])
	result = hash.digest()
	# The block is accepted only if the embedded digest matches the
	# recomputed one; otherwise signal tampering/corruption with None.
	if result == dest[107:127]:
		return dest
	return None
def tpm_check():
	"""Verify the box's TPM certificate chain (level-2 then level-3) and a
	signed random challenge; return 1 for genuine hardware, 0 otherwise.
	Any exception is treated as verification failure."""
	try:
		tpm = eTPM()
		# Hard-coded root public key the chain must validate against.
		rootkey = ['\x9f', '|', '\xe4', 'G', '\xc9', '\xb4', '\xf4', '#', '&', '\xce', '\xb3', '\xfe', '\xda', '\xc9', 'U', '`', '\xd8', '\x8c', 's', 'o', '\x90', '\x9b', '\\', 'b', '\xc0', '\x89', '\xd1', '\x8c', '\x9e', 'J', 'T', '\xc5', 'X', '\xa1', '\xb8', '\x13', '5', 'E', '\x02', '\xc9', '\xb2', '\xe6', 't', '\x89', '\xde', '\xcd', '\x9d', '\x11', '\xdd', '\xc7', '\xf4', '\xe4', '\xe4', '\xbc', '\xdb', '\x9c', '\xea', '}', '\xad', '\xda', 't', 'r', '\x9b', '\xdc', '\xbc', '\x18', '3', '\xe7', '\xaf', '|', '\xae', '\x0c', '\xe3', '\xb5', '\x84', '\x8d', '\r', '\x8d', '\x9d', '2', '\xd0', '\xce', '\xd5', 'q', '\t', '\x84', 'c', '\xa8', ')', '\x99', '\xdc', '<', '"', 'x', '\xe8', '\x87', '\x8f', '\x02', ';', 'S', 'm', '\xd5', '\xf0', '\xa3', '_', '\xb7', 'T', '\t', '\xde', '\xa7', '\xf1', '\xc9', '\xae', '\x8a', '\xd7', '\xd2', '\xcf', '\xb2', '.', '\x13', '\xfb', '\xac', 'j', '\xdf', '\xb1', '\x1d', ':', '?']
		random = None
		result = None
		l2r = False
		l2k = None
		l3k = None
		# Level-2 certificate must validate against the root key.
		l2c = tpm.getData(eTPM.DT_LEVEL2_CERT)
		if l2c is None:
			return 0
		l2k = validate_certificate(l2c, rootkey)
		if l2k is None:
			return 0
		# Level-3 certificate must validate against the level-2 key.
		l3c = tpm.getData(eTPM.DT_LEVEL3_CERT)
		if l3c is None:
			return 0
		l3k = validate_certificate(l3c, l2k)
		if l3k is None:
			return 0
		# Challenge/response: the TPM signs our random value; the decrypted
		# signature must embed that same value.
		# NOTE(review): get_random() as written always returns None (it
		# references unimported names), so this path currently yields 0.
		random = get_random()
		if random is None:
			return 0
		value = tpm.computeSignature(random)
		result = decrypt_block(value, l3k)
		if result is None:
			return 0
		if result [80:88] != random:
			return 0
		return 1
	except:
		return 0
def getAllInfo():
	"""Detect the receiver's brand/model/type, remote-control folder, image
	distro/version and driver date by probing /proc and /etc, simulating the
	oe-a boxbranding module. Returns a dict with keys: brand, model,
	procmodel, type, remote, oever, distro, imagever, imagebuild, driverdate.
	"""
	info = {}
	# --- hardware brand/model detection, keyed on which /proc file exists ---
	brand = "unknown"
	model = "unknown"
	procmodel = "unknown"
	orgdream = 0
	if tpmloaded:
		orgdream = tpm_check()
	if fileExists("/proc/stb/info/hwmodel"):
		brand = "DAGS"
		f = open("/proc/stb/info/hwmodel",'r')
		procmodel = f.readline().strip()
		f.close()
		if (procmodel.startswith("optimuss") or procmodel.startswith("pingulux")):
			brand = "Edision"
			model = procmodel.replace("optimmuss", "Optimuss ").replace("plus", " Plus").replace(" os", " OS")
		elif (procmodel.startswith("fusion")):
			brand = "Xsarius"
			if procmodel == "fusionhd":
				model = procmodel.replace("fusionhd", "Fusion HD")
			elif procmodel == "fusionhdse":
				model = procmodel.replace("fusionhdse", "Fusion HD SE")
	elif fileExists("/proc/stb/info/azmodel"):
		brand = "AZBox"
		f = open("/proc/stb/info/model",'r') # To-Do: Check if "model" is really correct ...
		procmodel = f.readline().strip()
		f.close()
		model = procmodel.lower()
	elif fileExists("/proc/stb/info/gbmodel"):
		brand = "GigaBlue"
		f = open("/proc/stb/info/gbmodel",'r')
		procmodel = f.readline().strip()
		f.close()
		model = procmodel.upper().replace("GBQUAD", "Quad").replace("PLUS", " Plus")
	elif fileExists("/proc/stb/info/vumodel"):
		brand = "Vu+"
		f = open("/proc/stb/info/vumodel",'r')
		procmodel = f.readline().strip()
		f.close()
		model = procmodel.title().replace("olose", "olo SE").replace("olo2se", "olo2 SE").replace("2", "²")
	elif fileExists("/proc/boxtype"):
		f = open("/proc/boxtype",'r')
		procmodel = f.readline().strip().lower()
		f.close()
		if procmodel in ("adb2850", "adb2849", "bska", "bsla", "bxzb", "bzzb"):
			brand = "Advanced Digital Broadcast"
			if procmodel in ("bska", "bxzb"):
				model = "ADB 5800S"
			elif procmodel in ("bsla", "bzzb"):
				model = "ADB 5800SX"
			elif procmodel == "adb2849":
				model = "ADB 2849ST"
			else:
				model = "ADB 2850ST"
		elif procmodel in ("esi88", "uhd88"):
			brand = "Sagemcom"
			if procmodel == "uhd88":
				model = "UHD 88"
			else:
				model = "ESI 88"
	elif fileExists("/proc/stb/info/boxtype"):
		f = open("/proc/stb/info/boxtype",'r')
		procmodel = f.readline().strip().lower()
		f.close()
		if procmodel.startswith("et"):
			brand = "Xtrend"
			model = procmodel.upper()
		elif procmodel.startswith("xp"):
			brand = "MaxDigital"
			model = procmodel
		elif procmodel.startswith("ixuss"):
			brand = "Medialink"
			model = procmodel.replace(" ", "")
		elif procmodel.startswith("formuler"):
			brand = "Formuler"
			model = procmodel.replace("formuler","")
		elif procmodel.startswith("ini"):
			# INI-based boxes are sold under several brand names; the
			# suffix of the procmodel identifies the OEM variant.
			if procmodel.endswith("9000ru"):
				brand = "Sezam"
				model = "Marvel"
			elif procmodel.endswith("5000ru"):
				brand = "Sezam"
				model = "hdx"
			elif procmodel.endswith("1000ru"):
				brand = "Sezam"
				model = "hde"
			elif procmodel.endswith("5000sv"):
				brand = "Miraclebox"
				model = "mbtwin"
			elif procmodel.endswith("1000sv"):
				brand = "Miraclebox"
				model = "mbmini"
			elif procmodel.endswith("1000de"):
				brand = "Golden Interstar"
				model = "Xpeed LX"
			elif procmodel.endswith("9000de"):
				brand = "Golden Interstar"
				model = "Xpeed LX3"
			elif procmodel.endswith("1000lx"):
				brand = "Golden Interstar"
				model = "Xpeed LX"
			elif procmodel.endswith("de"):
				brand = "Golden Interstar"
			elif procmodel.endswith("1000am"):
				brand = "Atemio"
				model = "5x00"
			else:
				brand = "Venton"
				model = "HDx"
		elif procmodel.startswith("unibox-"):
			brand = "Venton"
			model = "HDe"
		elif procmodel == "hd1100":
			brand = "Mut@nt"
			model = "hd1100"
		elif procmodel == "hd1200":
			brand = "Mut@nt"
			model = "hd1200"
		elif procmodel == "hd2400":
			brand = "Mut@nt"
			model = "hd2400"
		elif procmodel == "arivalink200":
			brand = "Ferguson"
			model = "Ariva @Link 200"
		elif procmodel.startswith("spark"):
			brand = "Fulan"
			if procmodel == "spark7162":
				model = "Spark 7162"
			else:
				model = "Spark"
		elif procmodel == "wetekplay":
			brand = "WeTeK"
			model = procmodel
		elif procmodel == "osmini":
			brand = "Edision"
			model = procmodel
	elif fileExists("/proc/stb/info/model"):
		f = open("/proc/stb/info/model",'r')
		procmodel = f.readline().strip().lower()
		f.close()
		if procmodel == "tf7700hdpvr":
			brand = "Topfield"
			model = "TF7700 HDPVR"
		elif procmodel == "dsi87":
			brand = "Sagemcom"
			model = "DSI 87"
		elif procmodel.startswith("spark"):
			brand = "Fulan"
			if procmodel == "spark7162":
				model = "Spark 7162"
			else:
				model = "Spark"
		elif (procmodel.startswith("dm") and not procmodel == "dm8000"):
			brand = "Dream Multimedia"
			model = procmodel.replace("dm", "DM", 1)
		# A "dm8000" is only a Dreambox if it passes the tpm verification:
		elif procmodel == "dm8000" and orgdream:
			brand = "Dream Multimedia"
			model = "DM8000"
		else:
			model = procmodel
	if fileExists("/etc/.box"):
		distro = "HDMU"
		f = open("/etc/.box",'r')
		tempmodel = f.readline().strip().lower()
		# NOTE(review): `model.startswith("ufc")` looks like it should be
		# `tempmodel.startswith("ufc")`, and str has no `upcase()` method
		# (that branch would raise AttributeError) -- confirm and fix.
		if tempmodel.startswith("ufs") or model.startswith("ufc"):
			brand = "Kathrein"
			model = tempmodel.upcase()
			procmodel = tempmodel
		elif tempmodel.startswith("spark"):
			brand = "Fulan"
			model = tempmodel.title()
			procmodel = tempmodel
		elif tempmodel.startswith("xcombo"):
			brand = "EVO"
			model = "enfinityX combo plus"
			procmodel = "vg2000"
	# --- normalize procmodel into a coarse "type" identifier ---
	type = procmodel
	if type in ("et9000", "et9100", "et9200", "et9500"):
		type = "et9x00"
	elif type in ("et5000", "et6000", "et6x00"):
		type = "et5x00"
	elif type == "et4000":
		type = "et4x00"
	elif type == "xp1000":
		type = "xp1000"
	elif type in ("bska", "bxzb"):
		type = "nbox_white"
	elif type in ("bsla", "bzzb"):
		type = "nbox"
	elif type == "sagemcom88":
		type = "esi88"
	elif type in ("tf7700hdpvr", "topf"):
		type = "topf"
	info['brand'] = brand
	info['model'] = model
	info['procmodel'] = procmodel
	info['type'] = type
	# --- map procmodel to the remote-control layout folder ---
	remote = "dmm"
	if procmodel in ("solo", "duo", "uno", "solo2", "solose", "zero", "solo4k"):
		remote = "vu_normal"
	elif procmodel == "duo2":
		remote = "vu_duo2"
	elif procmodel == "ultimo":
		remote = "vu_ultimo"
	elif procmodel == "e3hd":
		remote = "e3hd"
	elif procmodel in ("et9x00", "et9000", "et9100", "et9200", "et9500"):
		remote = "et9x00"
	elif procmodel in ("et5x00", "et5000", "et6x00", "et6000"):
		remote = "et5x00"
	elif procmodel in ("et4x00", "et4000"):
		remote = "et4x00"
	elif procmodel == "et6500":
		remote = "et6500"
	elif procmodel in ("et8x00", "et8000", "et8500", "et8500s","et1x000", "et10000"):
		remote = "et8000"
	elif procmodel in ("et7x00", "et7000", "et7500"):
		remote = "et7x00"
	elif procmodel == "gbquad":
		remote = "gigablue"
	elif procmodel == "gbquadplus":
		remote = "gbquadplus"
	elif procmodel in ("formuler1", "formuler3"):
		remote = "formuler1"
	elif procmodel in ("azboxme", "azboxminime", "me", "minime"):
		remote = "me"
	elif procmodel in ("optimussos1", "optimussos1plus", "optimussos2", "optimussos2plus"):
		remote = "optimuss"
	elif procmodel in ("premium", "premium+"):
		remote = "premium"
	elif procmodel in ("elite", "ultra"):
		remote = "elite"
	elif procmodel in ("ini-1000", "ini-1000ru"):
		remote = "ini-1000"
	elif procmodel in ("ini-1000sv", "ini-5000sv", "ini-9000de"):
		remote = "miraclebox"
	elif procmodel == "ini-3000":
		remote = "ini-3000"
	elif procmodel in ("ini-7012", "ini-7000", "ini-5000", "ini-5000ru"):
		remote = "ini-7000"
	elif procmodel.startswith("spark"):
		remote = "spark"
	elif procmodel == "xp1000":
		remote = "xp1000"
	elif procmodel.startswith("xpeedlx"):
		remote = "xpeedlx"
	elif procmodel in ("adb2850", "adb2849", "bska", "bsla", "bxzb", "bzzb", "esi88", "uhd88", "dsi87", "arivalink200"):
		remote = "nbox"
	elif procmodel in ("hd1100", "hd1200"):
		remote = "hd1x00"
	elif procmodel == "hd2400":
		remote = "hd2400"
	elif procmodel.startswith("ixuss"):
		remote = procmodel.replace(" ", "")
	elif procmodel == "vg2000":
		remote = "xcombo"
	elif procmodel == "dm8000" and orgdream:
		remote = "dmm1"
	elif procmodel in ("dm7080", "dm7020hd", "dm7020hdv2", "dm800sev2", "dm500hdv2", "dm820"):
		remote = "dmm2"
	elif procmodel == "wetekplay":
		remote = procmodel
	elif procmodel == "osmini":
		remote = procmodel
	info['remote'] = remote
	# --- distro / image version / OE version detection ---
	# NOTE(review): `kernel` is the first *character* of the version string,
	# so `kernel > 2` compares str with int (always True on Python 2) and
	# `kernel.major` below would raise AttributeError -- confirm intent.
	kernel = about.getKernelVersionString()[0]
	distro = "unknown"
	imagever = "unknown"
	imagebuild = ""
	driverdate = "unknown"
	# Assume OE 1.6
	oever = "OE 1.6"
	if kernel>2:
		oever = "OE 2.0"
	if fileExists("/etc/.box"):
		distro = "HDMU"
		oever = "private"
	elif fileExists("/etc/bhversion"):
		distro = "Black Hole"
		f = open("/etc/bhversion",'r')
		imagever = f.readline().strip()
		f.close()
		if kernel>2:
			oever = "OpenVuplus 2.1"
	elif fileExists("/etc/vtiversion.info"):
		distro = "VTi-Team Image"
		f = open("/etc/vtiversion.info",'r')
		imagever = f.readline().strip().replace("VTi-Team Image ", "").replace("Release ", "").replace("v.", "")
		f.close()
		oever = "OE 1.6"
		imagelist = imagever.split('.')
		imagebuild = imagelist.pop()
		imagever = ".".join(imagelist)
		if kernel>2:
			oever = "OpenVuplus 2.1"
		if ((imagever == "5.1") or (imagever[0] > 5)):
			oever = "OpenVuplus 2.1"
	elif fileExists("/var/grun/grcstype"):
		distro = "Graterlia OS"
		try:
			imagever = about.getImageVersionString()
		except:
			pass
	# ToDo: If your distro gets detected as OpenPLi, feel free to add a detection for your distro here ...
	else:
		# OE 2.2 uses apt, not opkg
		if not fileExists("/etc/opkg/all-feed.conf"):
			oever = "OE 2.2"
		else:
			try:
				f = open("/etc/opkg/all-feed.conf",'r')
				oeline = f.readline().strip().lower()
				f.close()
				distro = oeline.split( )[1].replace("-all","")
			except:
				pass
		if distro == "openpli":
			imagever = "2.1"
			# Todo: Detect OpenPLi 3.0
			if has_ipv6:
				# IPv6 support for Python was added in 4.0
				imagever = "4.0"
			oever = "PLi-OE"
			imagelist = imagever.split('.')
			imagebuild = imagelist.pop()
			imagever = ".".join(imagelist)
		elif distro == "openrsi":
			oever = "PLi-OE"
		else:
			try:
				imagever = about.getImageVersionString()
			except:
				pass
		if (distro == "unknown" and brand == "Vu+" and fileExists("/etc/version")):
			# Since OE-A uses boxbranding and bh or vti can be detected, there isn't much else left for Vu+ boxes
			distro = "Vu+ original"
			f = open("/etc/version",'r')
			imagever = f.readline().strip()
			f.close()
			if kernel.major>2:
				oever = "OpenVuplus 2.1"
	# reporting the installed dvb-module version is as close as we get without too much hassle
	driverdate = 'unknown'
	try:
		driverdate = os.popen('/usr/bin/opkg -V0 list_installed *dvb-modules*').readline().split( )[2]
	except:
		try:
			driverdate = os.popen('/usr/bin/opkg -V0 list_installed *dvb-proxy*').readline().split( )[2]
		except:
			try:
				driverdate = os.popen('/usr/bin/opkg -V0 list_installed *kernel-core-default-gos*').readline().split( )[2]
			except:
				pass
	info['oever'] = oever
	info['distro'] = distro
	info['imagever'] = imagever
	info['imagebuild'] = imagebuild
	info['driverdate'] = driverdate
	return info
# Snapshot of the box information, computed once at import time; the
# accessors below mirror the boxbranding module's public API.
STATIC_INFO_DIC = getAllInfo()
def getMachineBuild():
	return STATIC_INFO_DIC['procmodel']
def getMachineBrand():
	return STATIC_INFO_DIC['brand']
def getMachineName():
	return STATIC_INFO_DIC['model']
def getMachineProcModel():
	return STATIC_INFO_DIC['procmodel']
def getBoxType():
	return STATIC_INFO_DIC['type']
def getOEVersion():
	return STATIC_INFO_DIC['oever']
def getDriverDate():
	return STATIC_INFO_DIC['driverdate']
def getImageVersion():
	return STATIC_INFO_DIC['imagever']
def getImageBuild():
	return STATIC_INFO_DIC['imagebuild']
def getImageDistro():
	return STATIC_INFO_DIC['distro']
class rc_model:
	# Thin accessor class kept for API compatibility with boxbranding.
	def getRcFolder(self):
		return STATIC_INFO_DIC['remote']
|
MDXDave/ModernWebif
|
plugin/controllers/models/owibranding.py
|
Python
|
gpl-2.0
| 15,558 | 0.03349 |
"""Unittests that do not require the server to be running an common tests of responses.
The TestCase here just calls the functions that provide the logic to the ws views with DummyRequest
objects to mock a real request.
The functions starting with `check_...` are called with UnitTest.TestCase instance as the first
arg and the response. These functions are used within the unit tests in this file, but also
in the `ws-tests` calls that perform the tests through http.
"""
import os
import unittest
from pyramid import testing
from phylesystem_api.utility import fill_app_settings, umbrella_from_request
from phylesystem_api.views import import_nexson_from_crossref_metadata
def get_app_settings_for_testing(settings):
    """Fill ``settings`` of a DummyRequest from the development.ini file.

    This lets dummy requests mock a real request with respect to
    configuration-dependent settings."""
    from peyotl.utility.imports import SafeConfigParser
    parser = SafeConfigParser()
    ini_path = os.path.abspath(os.path.join('..', 'development.ini'))
    if not os.path.isfile(ini_path):
        raise RuntimeError('Expecting a INI file at "{}" to run tests'.format(ini_path))
    parser.read(ini_path)
    settings['repo_parent'] = parser.get('app:main', 'repo_parent')
    fill_app_settings(settings=settings)
def gen_versioned_dummy_request():
    """Build a DummyRequest carrying api_version 'v3'.

    This mimics the URL-based version matching done by the real app.
    """
    request = testing.DummyRequest()
    get_app_settings_for_testing(request.registry.settings)
    request.matchdict['api_version'] = 'v3'
    return request
def check_index_response(test_case, response):
    """Assert that an index response carries the expected keys.

    The response must contain 'documentation_url', 'description', and
    'source_url'.
    """
    expected_keys = ('documentation_url', 'description', 'source_url')
    for key in expected_keys:
        test_case.assertIn(key, response)
def check_render_markdown_response(test_case, response):
    """Check of `response` to a `render_markdown` call.

    Verifies the rendered body matches the expected HTML, including the
    target="_blank" rewriting of anchors and autolinking of the bare URL.
    """
    expected = '<p>hi from <a href="http://phylo.bio.ku.edu" target="_blank">' \
               'http://phylo.bio.ku.edu</a> and ' \
               '<a href="https://github.com/orgs/OpenTreeOfLife/dashboard" target="_blank">' \
               'https://github.com/orgs/OpenTreeOfLife/dashboard</a></p>'
    # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12).
    test_case.assertEqual(response.body, expected)
def check_study_list_and_config_response(test_case,
                                         sl_response,
                                         config_response,
                                         from_generic_config):
    """Checks of responses from study_list, config, and the generic config calls.

    The number of documents summed across all shards must equal the length of
    the study list, and the generic config must equal the typed config.
    """
    num_studies = sum([shard['number of documents'] for shard in config_response['shards']])
    # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12).
    test_case.assertEqual(num_studies, len(sl_response))
    test_case.assertEqual(from_generic_config, config_response)
def check_unmerged_response(test_case, ub):
    """Check of `ub` response from an `unmerged_branches` call.

    The master branch must never be reported as unmerged.
    """
    test_case.assertNotIn('master', ub)
def check_config_response(test_case, cfg):
    """Check of `cfg` response from a `config` call.

    The response must have exactly the initialization/shards/number_of_shards keys.
    """
    expected_keys = {"initialization", "shards", "number_of_shards"}
    test_case.assertSetEqual(set(cfg.keys()), expected_keys)
def check_external_url_response(test_case, doc_id, resp):
    """Simple check of an `external_url` `resp` response for `doc_id`.

    The `doc_id` field must echo the requested ID, and the `url` field must
    point at the corresponding JSON document.
    """
    # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12).
    test_case.assertEqual(resp.get('doc_id'), doc_id)
    test_case.assertTrue(resp.get('url', '').endswith('{}.json'.format(doc_id)))
def check_push_failure_response(test_case, resp):
    """Check of the `resp` response of a `push_failure` method call to verify
    it has the right keys and that pushes are succeeding."""
    test_case.assertSetEqual({"doc_type", "errors", "pushes_succeeding"}, set(resp.keys()))
    test_case.assertTrue(resp["pushes_succeeding"])
# Markdown/HTML snippet fed to render_markdown in tests; the renderer is
# expected to force target="_blank" on anchors and to autolink the bare URL.
render_test_input = 'hi from <a href="http://phylo.bio.ku.edu" target="new">' \
                    'http://phylo.bio.ku.edu</a> and ' \
                    'https://github.com/orgs/OpenTreeOfLife/dashboard'
class ViewTests(unittest.TestCase):
    """UnitTest of the functions that underlie the ws views.

    Each test builds a DummyRequest (see gen_versioned_dummy_request) and
    delegates the assertions to the module-level `check_...` helpers so that
    the same checks can be reused by the over-HTTP `ws-tests`.
    """

    def setUp(self):
        """Calls pyramid testing.setUp"""
        self.config = testing.setUp()

    def tearDown(self):
        """Calls pyramid testing.tearDown"""
        testing.tearDown()

    def test_index(self):
        """Test of index view"""
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import index
        check_index_response(self, index(request))

    def test_render_markdown(self):
        """Test of render_markdown view"""
        request = testing.DummyRequest(post={'src': render_test_input})
        from phylesystem_api.views import render_markdown
        check_render_markdown_response(self, render_markdown(request))

    def test_study_list_and_config(self):
        """Test of study_list and phylesystem_config views"""
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import study_list
        sl = study_list(request)
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import phylesystem_config
        x = phylesystem_config(request)
        request = gen_versioned_dummy_request()
        request.matchdict['resource_type'] = 'study'
        from phylesystem_api.views import generic_config
        y = generic_config(request)
        check_study_list_and_config_response(self, sl, x, y)
        if not sl:
            # An empty study list leaves nothing to build an external URL for.
            return
        from phylesystem_api.views import external_url
        doc_id = sl[0]
        request.matchdict['doc_id'] = doc_id
        e = external_url(request)
        check_external_url_response(self, doc_id, e)

    def test_unmerged(self):
        """Test of unmerged_branches view"""
        request = gen_versioned_dummy_request()
        request.matchdict['resource_type'] = 'study'
        from phylesystem_api.views import unmerged_branches
        check_unmerged_response(self, unmerged_branches(request))

    def test_config(self):
        """Test of generic_config view"""
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import phylesystem_config, generic_config
        r2 = phylesystem_config(request)
        check_config_response(self, r2)
        request.matchdict['resource_type'] = 'study'
        r = generic_config(request)
        check_config_response(self, r)
        # The study-typed config must agree with the untyped one...
        self.assertDictEqual(r, r2)
        request.matchdict['resource_type'] = 'amendment'
        ra = generic_config(request)
        check_config_response(self, ra)
        # ...but the amendment-typed config must differ.
        self.assertNotEqual(ra, r)

    def test_push_failure_state(self):
        """Test of push_failure view"""
        request = gen_versioned_dummy_request()
        request.matchdict['resource_type'] = 'collection'
        from phylesystem_api.views import push_failure
        pf = push_failure(request)
        check_push_failure_response(self, pf)

    def test_doi_import(self):
        """Make sure that fetching from DOI generates a valid study shell."""
        doi = "10.3732/ajb.0800060"
        document = import_nexson_from_crossref_metadata(doi=doi,
                                                        ref_string=None,
                                                        include_cc0=None)
        request = gen_versioned_dummy_request()
        request.matchdict['resource_type'] = 'study'
        umbrella = umbrella_from_request(request)
        errors = umbrella.validate_and_convert_doc(document, {})[1]
        # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(len(errors), 0)
# Allow running this test module directly: `python tests.py`.
if __name__ == '__main__':
    unittest.main()
|
mtholder/pyraphyletic
|
phylesystem_api/tests.py
|
Python
|
bsd-2-clause
| 7,707 | 0.001946 |
from setuptools import setup, find_packages
import os

# Directory containing this setup.py, used to locate README.rst reliably
# regardless of the current working directory.
ROOT = os.path.dirname(os.path.realpath(__file__))

# Read the long description via a context manager so the file handle is
# closed deterministically (the previous inline open(...).read() left the
# handle to the garbage collector).
with open(os.path.join(ROOT, 'README.rst')) as readme_file:
    LONG_DESCRIPTION = readme_file.read()

setup(
    name='grab',
    version='0.6.22',
    description='Web Scraping Framework',
    long_description=LONG_DESCRIPTION,
    url='http://grablib.org',
    author='Gregory Petukhov',
    author_email='lorien@lorien.name',
    packages=find_packages(exclude=['test', 'test.files']),
    install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six'],
    license="MIT",
    keywords="pycurl multicurl curl network parsing grabbing scraping"
             " lxml xpath data mining",
    classifiers=(
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: Implementation :: CPython',
        'License :: OSI Approved :: MIT License',
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Internet :: WWW/HTTP',
    ),
)
|
liorvh/grab
|
setup.py
|
Python
|
mit
| 1,293 | 0 |
"""List iSCSI Snapshots."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
from SoftLayer import utils
import click
@click.command()
@click.argument('iscsi-identifier')
@environment.pass_env
def cli(env, iscsi_identifier):
    """List iSCSI Snapshots."""
    # Resolve the user-supplied identifier (name or numeric id) to an iSCSI id.
    iscsi_mgr = SoftLayer.ISCSIManager(env.client)
    iscsi_id = helpers.resolve_id(iscsi_mgr.resolve_ids,
                                  iscsi_identifier,
                                  'iSCSI')
    iscsi = env.client['Network_Storage_Iscsi']
    # Snapshot partnerships carry the snapshot volume id, creation date and type.
    snapshots = iscsi.getPartnerships(
        mask='volumeId,partnerVolumeId,createDate,type', id=iscsi_id)
    # NestedDict tolerates missing nested keys when building the table rows.
    snapshots = [utils.NestedDict(n) for n in snapshots]
    table = formatting.Table(['id', 'createDate', 'name', 'description'])
    for snapshot in snapshots:
        table.add_row([
            snapshot['partnerVolumeId'],
            snapshot['createDate'],
            snapshot['type']['name'],
            snapshot['type']['description'],
        ])
    # Returning the table lets the CLI environment render it.
    return table
|
cloudify-cosmo/softlayer-python
|
SoftLayer/CLI/snapshot/list.py
|
Python
|
mit
| 1,122 | 0 |
from .frontend import JSON_Editor, mode, Page
from . import frontend
from .character import Character
from .util import load_json, debug
class CHARACTERS(JSON_Editor):
    """JSON editor/viewer for character files.

    In DM mode the full JSON editor is rendered; otherwise only the current
    campaign character is shown read-only via :meth:`view`.
    """
    def __init__(self):
        self._name = 'character'
        JSON_Editor.__init__(self)
        self._icons = 'avatars'
        self._obj = Character({})

    def render(self, requestdata):
        """Render the editor (DM mode) or a read-only view of the current character."""
        if mode() == 'dm':
            return JSON_Editor.render(self, requestdata)
        else:
            char = frontend.campaign.current_char()
            return self.view(char.name())

    def view(self, item):
        """Render a read-only page for the character named *item*."""
        page = Page()
        if not item:
            page.error('No item specified')
            return page.render()
        try:
            debug('try %s/%s' % (self._name, item))
            json = load_json('%ss' % self._name, item)
        # Bug fix: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; a missing/broken file raises
        # an ordinary Exception.
        except Exception:
            debug('except')
            page.error('No files matching %s found in %s' % (item, self._name))
            return page.render()
        c = Character(json)
        rendered = {}
        rendered['json'] = c.render()
        return page.tplrender('json_viewer.tpl', rendered)
|
ajventer/ezdm
|
ezdm_libs/all_characters.py
|
Python
|
gpl-3.0
| 1,127 | 0.000887 |
# Eve W-Space
# Copyright 2014 Andrew Austin and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A registry module for registration of searches.
This is based on the registry modules from django_autocomplete_light
"""
from django.db import models
from search_base import SearchBase
class SearchRegistry(dict):
    """
    Dict with methods for handling search registration.

    Maps generated search-class names to the classes themselves, and keeps a
    per-model index so a search can also be looked up by its model.
    """
    def __init__(self):
        self._models = {}

    def search_for_model(self, model):
        """Return the search registered for *model*, or None if unregistered."""
        try:
            return self._models[model]
        except KeyError:
            return

    def unregister(self, name):
        """Remove the search registered under *name* (raises KeyError if absent)."""
        # The previous implementation bound the value to an unused local first.
        del self[name]

    def register(self, model, name, search_field, queryset):
        """
        Registers a search on a model.

        This is a simple form of the registry from django_autocomplete_light
        that must be provided with a model, name, and the field on the model
        to search. `queryset` defaults to `model.objects.all()` when falsy.
        """
        if not issubclass(model, models.Model):
            raise AttributeError
        if not search_field:
            raise AttributeError
        if not name:
            name = '%sSearch' % model.__name__
        base = SearchBase
        try:
            search_model_field = model._meta.get_field(search_field)
        # Narrowed from a bare `except:`; get_field raises FieldDoesNotExist
        # (an Exception subclass) for unknown field names.
        except Exception:
            raise Exception('The provided search field is not defined in the model.')
        if not queryset:
            queryset = model.objects.all()
        baseContext = {'choices': queryset,
                       'search_field': search_model_field}
        # Dynamically build a SearchBase subclass named *name*.
        search = type(name, (base,), baseContext)
        self[search.__name__] = search
        self._models[model] = search
def _autodiscover(registry):
    """Import each installed app's ``searches`` module so it can self-register.

    Mirrors django_autocomplete_light's autodiscovery: the registry is
    snapshotted before each import and restored on failure, so a broken app
    cannot leave half-registered searches behind.
    """
    import copy
    from django.conf import settings
    from importlib import import_module
    from django.utils.module_loading import module_has_submodule
    for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        # Attempt to import app's searches module
        try:
            before_import_registry = copy.copy(registry)
            import_module('%s.searches' % app)
        except:
            # Roll back any partial registration; re-raise only when the app
            # actually has a `searches` submodule (i.e. the error is genuine,
            # not simply "no such module").
            registry = before_import_registry
            if module_has_submodule(mod, 'searches'):
                raise
# Module-level singleton used by the proxy functions below.
registry = SearchRegistry()
def autodiscover():
    # Discover and import `searches` modules from all installed apps.
    _autodiscover(registry)
def register(model, name, search_field, queryset=None):
    """Proxy for registry register method."""
    return registry.register(model, name, search_field, queryset)
|
marbindrakon/eve-wspace
|
evewspace/search/registry.py
|
Python
|
apache-2.0
| 3,019 | 0.002981 |
class CheckoutDiscardMixin():
    """git checkout/clean helpers; mixed into a class that provides `self.git()`."""
    def discard_all_unstaged(self):
        """
        Any changes that are not staged or committed will be reverted
        to their state in HEAD. Any new files will be deleted.
        """
        # `clean -df` removes untracked files and directories;
        # `checkout -- .` reverts unstaged modifications to tracked files.
        self.git("clean", "-df")
        self.git("checkout", "--", ".")
    def checkout_file(self, fpath):
        """
        Given an absolute path or path relative to the repo's root, discard
        any changes made to the file and revert it in the working directory
        to the state it is in HEAD.
        """
        self.git("checkout", "--", fpath)
|
ypersyntelykos/GitSavvy
|
core/git_mixins/checkout_discard.py
|
Python
|
mit
| 589 | 0 |
import serial
import serial.tools.list_ports
import copy
import numpy as np
import math
import random
class AsciiSerial:
    """ASCII line protocol over a serial port, with live graph-data decoding.

    Outgoing command lines are queued via addLinesToSend() and flushed by
    communicate(); incoming lines are sorted into main/warning/error buffers,
    and `_data_` lines are decoded into per-graph plot series according to
    _channelsDataStructure.
    """
    def __init__(self):
        # Channel currently assigned to each graph (None = unassigned).
        self._graphsChannels = {'graph1': None, 'graph2': None, 'graph3': None, 'graph4': None}
        # Whether logging is currently enabled for each graph.
        self._enChannels = {'graph1': False, 'graph2': False, 'graph3': False, 'graph4': False}
        # Structure definition:
        # {'ChannelName': channelData('display', {'lineName': [lowLevelID, xFieldID, yFieldID(optional)], ... }), ... }
        self._channelsDataStructure = {
            'POSITION': channelData('line-scatter', {'p': [0, 0, 1]}),
            'TRAJECTORY': channelData('line-scatter', {'t': [1, 0, 1]}),
            'PID_V_G': channelData('line', {'setPoint': [2, 0], 'value': [2, 1], 'output': [2, 2]}),
            'PID_V_D': channelData('line', {'setPoint': [3, 0], 'value': [3, 1], 'output': [3, 2]}),
            'PID_TRANS': channelData('line', {'setPoint': [4, 0], 'value': [4, 1], 'output': [4, 2]}),
            'BLOCKING_M_G': channelData('line', {'aimSpeed': [5, 0], 'realSpeed': [5, 1], 'isBlocked': [5, 2]}),
            'BLOCKING_M_D': channelData('line', {'aimSpeed': [6, 0], 'realSpeed': [6, 1], 'isBlocked': [6, 2]}),
            'STOPPING_MGR': channelData('line', {'speed': [7, 0], 'isStopped': [7, 1]}),
            'DIRECTION': channelData('line', {'aimDirection': [8, 0], 'realDirection': [8, 1]}),
            'SENSORS': channelData('scatter', {'sensorTest': [9, 0, 1]}),
            'PID_TRAJ': channelData('scatter', {'p': [0, 0, 1], 't': [1, 0, 1]}),
            'TRAJ_ERRORS': channelData('line', {'t': [10, 0], 'c': [10, 1], 'ac': [10, 2], 'ang': [10, 3], 'pos': [10, 4]}),
            'AX12_G': channelData('line', {'aim': [8, 4], 'real': [8, 2]}),
            'AX12_D': channelData('line', {'aim': [8, 5], 'real': [8, 3]})
        }
        # Empty data skeleton for each display shape (deep-copied per line).
        self._shapeInitData = {
            'line': [],
            'line-scatter': [[], []],
            'scatter': [[], []]
        }
        self.linesToSend = []
        self.receivedLines_main = []
        self.receivedLines_warning = []
        self.receivedLines_error = []
        self.serial = serial.Serial()
        # Partial incoming line, kept until a '\n' completes it.
        self.incomingLine = ""
        # Data format:
        # {'graphN': {'data': {'lineName': lineData, ...}, 'shape': String}
        #
        # 'shape' can be:
        # "line": continuous line joining each point
        # "scatter": cloud of independent (x, y) points
        # "line-scatter": cloud of (x, y) points joined together
        #
        # For each 'shape', 'lineData' takes a different form:
        # "line": 1-D array holding the y coordinate of each point
        # "scatter": 2-D array t; t[0] holds x for every point, t[1] holds y
        # "line-scatter": same as 'scatter'
        self.graphData = {'graph1': {'data': None, 'shape': None},
                          'graph2': {'data': None, 'shape': None},
                          'graph3': {'data': None, 'shape': None},
                          'graph4': {'data': None, 'shape': None}}
        # Only used by the commented-out demo generator in getAllData().
        self.phase = np.linspace(0, 10 * np.pi, 100)
        self.index = 0
    @staticmethod
    def scanPorts():
        # Return the list of serial ports currently available on this machine.
        return list(serial.tools.list_ports.comports())
    def open(self, port):
        # *port* may be "COM3 (description)"; only the first token is the device.
        self.serial.port = port.split(" ")[0]
        self.serial.open()
    def close(self):
        # Close the serial link.
        self.serial.close()
    def getChannelsList(self):
        # Sorted list of all known channel names.
        channelsList = []
        for key in self._channelsDataStructure:
            channelsList.append(key)
        channelsList.sort()
        return channelsList
    def getChannelsFromID(self, identifier):
        # Set of channel names whose lines use low-level id *identifier*.
        channels = set()
        for channel, cData in self._channelsDataStructure.items():
            lines = cData.lineNames
            for lineName, lineIds in lines.items():
                if lineIds[0] == identifier:
                    channels.add(channel)
        return channels
    def getIDsFromChannel(self, channel):
        # Set of low-level ids used by *channel*'s lines.
        ids = set()
        lines = self._channelsDataStructure[channel].lineNames
        for lineName, lineIds in lines.items():
            ids.add(lineIds[0])
        return ids
    def communicate(self):
        # Flush queued outgoing lines, then read and dispatch complete
        # incoming lines (a line ends at '\n'; partial data is buffered).
        if self.serial.is_open:
            for line in self.linesToSend:
                self.serial.write(line.encode('ascii'))
            self.linesToSend.clear()
            nbB = self.serial.in_waiting
            if nbB > 0:
                self.incomingLine += self.serial.read(nbB).decode(encoding='utf-8', errors='ignore')
                newLineIndex = self.incomingLine.find('\n')
                while newLineIndex != -1:
                    self.addLineToProperList(self.incomingLine[0:newLineIndex+1])
                    self.incomingLine = self.incomingLine[newLineIndex+1:]
                    newLineIndex = self.incomingLine.find('\n')
    def addLineToProperList(self, line):
        # Route one complete line: "_data_..." to the graphs, "_warning_..."
        # and "_error_..." to their buffers, everything else to the console.
        if len(line) > 5 and line[0:6] == "_data_":
            try:
                self.addGraphData(line[6:])
            except ValueError:
                # Malformed data payload: keep it visible in the main console.
                self.receivedLines_main.append(line)
        elif len(line) > 8 and line[0:9] == "_warning_":
            self.receivedLines_warning.append(line[9:])
        elif len(line) > 7 and line[0:7] == "_error_":
            # "_error_<code>_<text...>" becomes "#<code># <text>".
            splittedLine = line.split("_")
            errorLine = "#" + splittedLine[2] + "# "
            for s in splittedLine[3:]:
                errorLine += s
            self.receivedLines_error.append(errorLine)
        else:
            self.receivedLines_main.append(line)
    def addGraphData(self, strData):
        # Decode one "_data_" payload of the form "id_v1_v2_..." into every
        # enabled graph mapped to a channel using that id. Raises ValueError
        # when a value fails to parse or too few values are present.
        data = strData.split("_")
        idChannel = int(data[0])
        channels = self.getChannelsFromID(idChannel)
        values = []
        for strValue in data[1:]:
            values.append(float(strValue))
        for graph in ['graph1', 'graph2', 'graph3', 'graph4']:
            gChannel = self._graphsChannels[graph]
            if gChannel in channels and self._enChannels[graph]:
                lines = self._channelsDataStructure[gChannel].lineNames
                for lineName, ids in lines.items():
                    if ids[0] == idChannel:
                        if len(ids) == 2: # One dimension data
                            if len(values) <= 1:
                                raise ValueError
                            self.graphData[graph]['data'][lineName].append(values[ids[1]])
                        elif len(ids) == 3: # Two dimensions data
                            if len(values) <= 2:
                                raise ValueError
                            self.graphData[graph]['data'][lineName][0].append(values[ids[1]])
                            self.graphData[graph]['data'][lineName][1].append(values[ids[2]])
    def setEnabledChannels(self, competeConfig):
        # Apply a new graph->channel mapping: first send "logoff" for graphs
        # whose channel changed or was disabled, then reset local buffers and
        # send "logon" for newly assigned/enabled ones. Returns the command
        # lines that were queued.
        newGraphsChannels = {'graph1': competeConfig['graph1']['channel'],
                             'graph2': competeConfig['graph2']['channel'],
                             'graph3': competeConfig['graph3']['channel'],
                             'graph4': competeConfig['graph4']['channel']}
        newEnabledList = {'graph1': competeConfig['graph1']['enable'],
                          'graph2': competeConfig['graph2']['enable'],
                          'graph3': competeConfig['graph3']['enable'],
                          'graph4': competeConfig['graph4']['enable']}
        commandLines = []
        graphs = ['graph1', 'graph2', 'graph3', 'graph4']
        for graph in graphs:
            if newGraphsChannels[graph] != self._graphsChannels[graph]:
                if self._enChannels[graph]:
                    commandLines += self.enableChannel(self._graphsChannels[graph], False)
            else:
                if newEnabledList[graph] != self._enChannels[graph]:
                    if not newEnabledList[graph]:
                        commandLines += self.enableChannel(self._graphsChannels[graph], False)
        for graph in graphs:
            if newGraphsChannels[graph] != self._graphsChannels[graph]:
                if newEnabledList[graph]:
                    self.resetGraphData(graph, newGraphsChannels[graph])
                    commandLines += self.enableChannel(newGraphsChannels[graph], True)
            else:
                if newEnabledList[graph] != self._enChannels[graph]:
                    if newEnabledList[graph]:
                        self.resetGraphData(graph, newGraphsChannels[graph])
                        commandLines += self.enableChannel(self._graphsChannels[graph], True)
        self._graphsChannels = newGraphsChannels
        self._enChannels = newEnabledList
        return commandLines
    def enableChannel(self, channel, enable):
        # Queue a "logon <id>" or "logoff <id>" command for every low-level id
        # of *channel*; returns the queued command lines.
        commandLines = []
        ids = self.getIDsFromChannel(channel)
        for i in ids:
            if enable:
                commandLine = "logon "
            else:
                commandLine = "logoff "
            commandLine += str(i)
            commandLine += '\n'
            self.addLinesToSend([commandLine])
            commandLines.append(commandLine)
        return commandLines
    def resetGraphData(self, graph, channel):
        # Reset *graph*'s buffers to the empty skeleton for *channel*'s shape.
        cData = self._channelsDataStructure[channel]
        self.graphData[graph]['shape'] = cData.shape
        initData = self._shapeInitData[cData.shape]
        initDict = {}
        for name in cData.lineNames:
            initDict[name] = copy.deepcopy(initData)
        self.graphData[graph]['data'] = copy.deepcopy(initDict)
    def getLines_main(self):
        # Return and clear the buffered console lines.
        lines = copy.deepcopy(self.receivedLines_main)
        self.receivedLines_main.clear()
        return lines
    def getLines_warning(self):
        # Return and clear the buffered warning lines.
        lines = copy.deepcopy(self.receivedLines_warning)
        self.receivedLines_warning.clear()
        return lines
    def getLines_error(self):
        # Return and clear the buffered error lines.
        lines = copy.deepcopy(self.receivedLines_error)
        self.receivedLines_error.clear()
        return lines
    def addLinesToSend(self, lines):
        # Queue outgoing command lines; they are flushed by communicate().
        self.linesToSend += lines
    def clearLinesToSend(self):
        # Drop any queued outgoing lines.
        self.linesToSend = []
    def getAllData(self):
        # Return the per-graph plot data structure (format documented in __init__).
        # y = np.multiply(np.sin(np.linspace(0, 6 * np.pi, 100) + self.phase[self.index]), self.index/20)
        # y2 = np.multiply(np.sin(np.linspace(0, 6 * np.pi, 100) + (self.phase[self.index] + 0.1)), self.index/30)
        # self.index = int(math.fmod((self.index + 1), len(self.phase)))
        # return {'graph1': {'data': {'pwm': y, 'bite': y2}, 'shape': 'line'},
        #         'graph2': {'data':
        #                        {'traj': [[0,1,5*random.random(),9,12,6,3],[0,2,3,6*random.random(),7,2,-3]],
        #                         'bite': [[0, 2, 4 * random.random(), 9, 12, 7, 3],
        #                                  [3, 2, 3, 5 * random.random(), 3, 2, -1]]},
        #                    'shape': 'scatter'},
        #         'graph3': {'data': {}, 'shape': 'line'},
        #         'graph4': {'data': {}, 'shape': 'line'}
        #         }
        for graph in ['graph1', 'graph2', 'graph3', 'graph4']:
            if self.graphData[graph]['data'] is not None:
                for key, value in self.graphData[graph]['data'].items():
                    if len(value) > 1000:
                        # NOTE(review): this rebinds the loop-local 'value'
                        # only; the stored list is never actually truncated.
                        # Confirm whether trimming to the last 1000 samples
                        # was intended here.
                        value = value[len(value) - 1000:]
                        #print(key, value)
        return self.graphData
class channelData:
    """Display shape ('line'/'scatter'/'line-scatter') plus the mapping of
    line names to [lowLevelID, xFieldID, yFieldID(optional)] for one channel."""
    def __init__(self, shape, lineNames):
        self.shape = shape
        self.lineNames = lineNames
|
INTechSenpai/moon-rover
|
debug_tools/python_debug_console/AsciiSerial.py
|
Python
|
gpl-3.0
| 11,638 | 0.003268 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-11-10 08:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: constrains ReporteProducto.tipo to the four
    # reporting-period choices (daily/weekly/monthly/yearly).
    dependencies = [
        ('reporte', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='reporteproducto',
            name='tipo',
            field=models.IntegerField(choices=[(1, 'Diaria'), (2, 'Semana'), (3, 'Mensual'), (4, 'Anual')]),
        ),
    ]
|
vpadillar/pventa
|
reporte/migrations/0002_auto_20161110_0819.py
|
Python
|
mit
| 504 | 0.001984 |
import os
from pathlib import Path
import gi
import logging
from gi.repository import Gtk
import json_config
from .login_window import LoginWindow
TOP_DIR = os.path.dirname(os.path.abspath(__file__))
config = json_config.connect('config.json')
gi.require_version('Gtk', '3.0')
class WatsonCredentialsDialog(Gtk.Dialog):
    """Modal dialog prompting for IBM Watson username and password.

    Callers read `username_field` / `password_field` after run() returns OK.
    """
    def __init__(self, parent):
        Gtk.Dialog.__init__(self, "Enter Credentials", parent, 0,
                            (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                             Gtk.STOCK_OK, Gtk.ResponseType.OK))
        self.set_default_size(150, 100)
        username_field = Gtk.Entry()
        username_field.set_placeholder_text("Username")
        password_field = Gtk.Entry()
        password_field.set_placeholder_text("Password")
        # Mask the password as it is typed.
        password_field.set_visibility(False)
        password_field.set_invisible_char('*')
        self.username_field = username_field
        self.password_field = password_field
        box = self.get_content_area()
        box.set_margin_top(10)
        box.set_margin_bottom(10)
        box.set_margin_left(10)
        box.set_margin_right(10)
        box.set_spacing(10)
        box.add(username_field)
        box.add(password_field)
        self.show_all()
class BingCredentialDialog(Gtk.Dialog):
    """Modal dialog prompting for a Bing Speech API key.

    Callers read `api_key_field` after run() returns OK.
    """
    def __init__(self, parent):
        Gtk.Dialog.__init__(self, "Enter API Key", parent, 0,
                            (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                             Gtk.STOCK_OK, Gtk.ResponseType.OK))
        self.set_default_size(150, 100)
        api_key_field = Gtk.Entry()
        api_key_field.set_placeholder_text("API Key")
        self.api_key_field = api_key_field
        box = self.get_content_area()
        box.set_margin_top(10)
        box.set_margin_bottom(10)
        box.set_margin_left(10)
        box.set_margin_right(10)
        box.set_spacing(10)
        box.add(api_key_field)
        self.show_all()
class ConfigurationWindow:
    """Settings window: choose STT/TTS engines, authentication mode, hotword
    engine and the hardware wake button, persisting every choice in `config`."""

    def __init__(self) -> None:
        super().__init__()
        builder = Gtk.Builder()
        builder.add_from_file(os.path.join(
            TOP_DIR, "glade_files/configure.glade"))
        self.window = builder.get_object("configuration_window")
        self.stt_combobox = builder.get_object("stt_combobox")
        self.tts_combobox = builder.get_object("tts_combobox")
        self.auth_switch = builder.get_object("auth_switch")
        self.snowboy_switch = builder.get_object("snowboy_switch")
        self.wake_button_switch = builder.get_object("wake_button_switch")
        # Seed every widget from the persisted configuration before wiring
        # the signal handlers, so initialization does not trigger them twice.
        self.init_auth_switch()
        self.init_tts_combobox()
        self.init_stt_combobox()
        self.init_hotword_switch()
        self.init_wake_button_switch()
        builder.connect_signals(ConfigurationWindow.Handler(self))
        self.window.set_resizable(False)

    def show_window(self):
        """Show the window and enter the Gtk main loop (blocks until quit)."""
        self.window.show_all()
        Gtk.main()

    def exit_window(self):
        """Destroy the window and quit the Gtk main loop."""
        self.window.destroy()
        Gtk.main_quit()

    def init_tts_combobox(self):
        """Select the combobox row matching the configured TTS engine."""
        default_tts = config['default_tts']
        if default_tts == 'google':
            self.tts_combobox.set_active(0)
        elif default_tts == 'flite':
            self.tts_combobox.set_active(1)
        elif default_tts == 'watson':
            self.tts_combobox.set_active(2)
        else:
            # Unknown value: fall back to Google and persist the default.
            self.tts_combobox.set_active(0)
            config['default_tts'] = 'google'

    def init_stt_combobox(self):
        """Select the combobox row matching the configured STT engine."""
        default_stt = config['default_stt']
        if default_stt == 'google':
            self.stt_combobox.set_active(0)
        elif default_stt == 'watson':
            self.stt_combobox.set_active(1)
        elif default_stt == 'bing':
            self.stt_combobox.set_active(2)
        else:
            # Bug fix: the fallback previously reset the *TTS* combobox and
            # config['default_tts'] from this STT initializer (copy-paste
            # error); it must reset the STT widget and key instead.
            self.stt_combobox.set_active(0)
            config['default_stt'] = 'google'

    def init_auth_switch(self):
        """Reflect the persisted usage mode on the authentication switch."""
        usage_mode = config['usage_mode']
        if usage_mode == 'authenticated':
            self.auth_switch.set_active(True)
        else:
            self.auth_switch.set_active(False)

    def init_hotword_switch(self):
        """Enable the Snowboy switch only when its native detector is built."""
        try:
            parent_dir = os.path.dirname(TOP_DIR)
            snowboyDetectFile = Path(os.path.join(
                parent_dir, "hotword_engine/snowboy/_snowboydetect.so"))
            print(snowboyDetectFile)
            if not snowboyDetectFile.exists():
                # Without the compiled detector, force PocketSphinx.
                self.snowboy_switch.set_sensitive(False)
                config['hotword_engine'] = 'PocketSphinx'
        except Exception as e:
            logging.error(e)
            config['hotword_engine'] = 'PocketSphinx'
        if config['hotword_engine'] == 'Snowboy':
            self.snowboy_switch.set_active(True)
        else:
            self.snowboy_switch.set_active(False)

    def init_wake_button_switch(self):
        """Enable the wake-button switch only on hardware providing RPi.GPIO."""
        try:
            import RPi.GPIO
            # NOTE(review): this reads config['WakeButton'] while the handler
            # below writes config['wake_button'] — confirm which key is the
            # canonical one; they look inconsistent.
            if config['WakeButton'] == 'enabled':
                self.wake_button_switch.set_active(True)
            else:
                self.wake_button_switch.set_active(False)
        except ImportError:
            self.wake_button_switch.set_sensitive(False)

    class Handler:
        """Signal handlers wired via builder.connect_signals()."""

        def __init__(self, config_window):
            self.config_window = config_window

        def on_delete_window(self, *args):
            print('Exiting')
            self.config_window.exit_window()

        def on_stt_combobox_changed(self, combo: Gtk.ComboBox):
            """Persist the chosen STT engine, prompting for credentials if needed."""
            selection = combo.get_active()
            if selection == 0:
                config['default_stt'] = 'google'
            elif selection == 1:
                credential_dialog = WatsonCredentialsDialog(
                    self.config_window.window)
                response = credential_dialog.run()
                if response == Gtk.ResponseType.OK:
                    username = credential_dialog.username_field.get_text()
                    password = credential_dialog.password_field.get_text()
                    config['default_stt'] = 'watson'
                    config['watson_stt_config']['username'] = username
                    config['watson_stt_config']['password'] = password
                else:
                    # Cancelled: restore the combobox to the configured engine.
                    self.config_window.init_stt_combobox()
                credential_dialog.destroy()
            elif selection == 2:
                credential_dialog = BingCredentialDialog(
                    self.config_window.window)
                response = credential_dialog.run()
                if response == Gtk.ResponseType.OK:
                    api_key = credential_dialog.api_key_field.get_text()
                    config['default_stt'] = 'bing'
                    # NOTE(review): storing the key under ['username'] looks
                    # odd — confirm the expected shape of 'bing_speech_api_key'.
                    config['bing_speech_api_key']['username'] = api_key
                else:
                    self.config_window.init_stt_combobox()
                credential_dialog.destroy()

        def on_tts_combobox_changed(self, combo):
            """Persist the chosen TTS engine, prompting for credentials if needed."""
            selection = combo.get_active()
            if selection == 0:
                config['default_tts'] = 'google'
            elif selection == 1:
                config['default_tts'] = 'flite'
            elif selection == 2:
                credential_dialog = WatsonCredentialsDialog(
                    self.config_window.window)
                response = credential_dialog.run()
                if response == Gtk.ResponseType.OK:
                    username = credential_dialog.username_field.get_text()
                    password = credential_dialog.password_field.get_text()
                    config['default_tts'] = 'watson'
                    config['watson_tts_config']['username'] = username
                    config['watson_tts_config']['password'] = password
                    config['watson_tts_config']['voice'] = 'en-US_AllisonVoice'
                else:
                    # Cancelled: restore the combobox to the configured engine.
                    self.config_window.init_tts_combobox()
                credential_dialog.destroy()

        def on_auth_switch_active_notify(self, switch, gparam):
            """Open the login window when authentication is enabled; then sync
            the switch with the resulting usage mode."""
            if switch.get_active():
                login_window = LoginWindow()
                login_window.show_window()
                if config['usage_mode'] == 'authenticated':
                    switch.set_active(True)
                else:
                    switch.set_active(False)

        def on_snowboy_switch_active_notify(self, switch, gparam):
            """Persist the hotword engine choice."""
            if switch.get_active():
                config['hotword_engine'] = 'Snowboy'
            else:
                config['hotword_engine'] = 'PocketSphinx'

        def on_wake_button_switch_active_notify(self, switch, gparam):
            """Persist the wake-button choice."""
            if switch.get_active():
                config['wake_button'] = 'enabled'
            else:
                config['wake_button'] = 'disabled'
|
betterclever/susi_linux
|
main/renderer/configuration_window.py
|
Python
|
apache-2.0
| 8,711 | 0 |
# -*- coding: utf-8 -*-
from django import forms
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.utils.translation import ugettext as _
from filer.admin.fileadmin import FileAdmin
from filer.models import Image
class ImageAdminForm(forms.ModelForm):
    """Admin form for filer Images adding a free-text subject-location field
    used by the focal-point JavaScript widget."""
    subject_location = forms.CharField(
        max_length=64, required=False,
        label=_('Subject location'),
        help_text=_('Location of the main subject of the scene.'))
    def sidebar_image_ratio(self):
        # Width/height ratio of the sidebar preview, formatted for JS parsing.
        if self.instance:
            # this is very important. It forces the value to be returned as a
            # string and always with a "." as seperator. If the conversion
            # from float to string is done in the template, the locale will
            # be used and in some cases there would be a "," instead of ".".
            # javascript would parse that to an integer.
            return '%.6F' % self.instance.sidebar_image_ratio()
        else:
            return ''
    class Meta:
        model = Image
        exclude = ()
    class Media:
        css = {
            # 'all': (settings.MEDIA_URL + 'filer/css/focal_point.css',)
        }
        # Scripts implementing the draggable focal-point marker.
        js = (
            static('filer/js/raphael.js'),
            static('filer/js/focal_point.js'),
        )
class ImageAdmin(FileAdmin):
    """FileAdmin specialised for images: swaps in the subject-location form."""
    form = ImageAdminForm
# Build fieldsets at import time so FileAdmin's helper can inject the extra
# image-specific fields plus the collapsible "Subject Location" section.
ImageAdmin.fieldsets = ImageAdmin.build_fieldsets(
    extra_main_fields=('author', 'default_alt_text', 'default_caption',),
    extra_fieldsets=(
        ('Subject Location', {
            'fields': ('subject_location',),
            'classes': ('collapse',),
        }),
    )
)
|
mkoistinen/django-filer
|
filer/admin/imageadmin.py
|
Python
|
bsd-3-clause
| 1,629 | 0 |
from django.conf import settings
from django.contrib.auth.views import (
LoginView, LogoutView, redirect_to_login as redirect_to_intercept,
)
from django.core.exceptions import PermissionDenied, ValidationError
from django.template.response import TemplateResponse
from django.urls import Resolver404, resolve, reverse
from django.utils import timezone
from django.utils.deprecation import MiddlewareMixin
from django.utils.text import format_lazy
from django.utils.translation import ugettext_lazy as _
from core.models import Agreement, Policy, SiteConfiguration
from core.views import AgreementRejectView, AgreementView, HomeView
from hosting.models import Preferences, Profile
from hosting.validators import TooNearPastValidator
from pasportaservo.urls import (
url_index_debug, url_index_maps, url_index_postman,
)
class AccountFlagsMiddleware(MiddlewareMixin):
    """
    Updates any flags and settings related to the user's account, whose value
    cannot be pre-determined.
    Checks that pre-conditions for site usage are satisfied.
    """
    def __init__(self, *args, **kwargs):
        # Pre-compute (once, at startup) the URL prefixes for which all
        # account checks are skipped: admin, debug pages, static and media
        # files, the favicon, and map-related resources.
        super().__init__(*args, **kwargs)
        exclude_urls = [
            reverse('admin:index'),
            url_index_debug,
            settings.STATIC_URL,
            settings.MEDIA_URL,
            '/favicon.ico',
            url_index_maps,
        ]
        self.exclude_urls = tuple(str(url) for url in exclude_urls)
    def process_request(self, request):
        # Runs before each view. Returns None to continue normal processing,
        # or a response/redirect to short-circuit the request.
        if not request.user.is_authenticated:
            # Only relevant to logged in users.
            return
        if request.path.startswith(self.exclude_urls):
            # Only relevant when using the website itself (not Django-Admin or debug tools),
            # when the file requested is not a static one,
            # and when the request is not for resources or configurations related to maps.
            request.skip_hosting_checks = True
            return
        # NOTE: 'profile' is a sliced queryset (at most one row), not a Profile
        # instance; it is used below for values_list() and as a subquery when
        # filtering Preferences.
        profile = Profile.all_objects.filter(user=request.user)[0:1]
        if 'flag_analytics_setup' not in request.session:
            # Update user's analytics consent according to the DNT setting in the browser, first time
            # when the user logs in (DNT==True => opt out). Prior to that the consent is undefined.
            # (request.DNT is presumably attached by an earlier middleware -- TODO confirm.)
            pref = Preferences.objects.filter(profile=profile, site_analytics_consent__isnull=True)
            pref.update(site_analytics_consent=not request.DNT)
            # Session flag records that the one-time setup was done.
            request.session['flag_analytics_setup'] = str(timezone.now())
        # Is user's age above the legally required minimum?
        birth_date = profile.values_list('birth_date', flat=True)
        # 'trouble_view' ends up non-None only for views where the age and
        # policy-consent checks must be enforced.
        trouble_view = None
        try:
            trouble_view = resolve(request.path)
            if (hasattr(trouble_view.func, 'view_class') and trouble_view.func.view_class not in
                    [LoginView, LogoutView, HomeView, AgreementRejectView]):
                try:
                    resolve(request.path, 'pages.urls')
                except Resolver404:
                    # The URL accessed is not one of the general pages.
                    pass
                else:
                    # A general page is ok.
                    trouble_view = None
            else:
                trouble_view = None
        except Resolver404:
            # A non-existent page is ok.
            pass
        if trouble_view is not None and len(birth_date) != 0 and birth_date[0]:
            birth_date_value = birth_date[0]  # Returned as a list from the query.
            try:
                TooNearPastValidator(SiteConfiguration.USER_MIN_AGE)(birth_date_value)
            except ValidationError:
                # Under-age user: block the request entirely (HTTP 403).
                raise PermissionDenied(format_lazy(
                    _("Unfortunately, you are still too young to use Pasporta Servo. "
                      "Wait until you are {min_age} years of age!"),
                    min_age=SiteConfiguration.USER_MIN_AGE
                ))
        # Has the user consented to the most up-to-date usage policy?
        policy = (Policy.objects.order_by('-id').values('version', 'content'))[0:1]
        if trouble_view is not None:
            agreement = Agreement.objects.filter(
                user=request.user, policy_version__in=policy.values_list('version'), withdrawn__isnull=True)
            if not agreement.exists():
                # Policy will be needed to display the following page anyway,
                # so it is immediately fetched from the database.
                request.user.consent_required = [policy.first()]
                if request.user.consent_required[0] is None:
                    raise RuntimeError("Service misconfigured: No user agreement was defined.")
                if trouble_view.func.view_class != AgreementView:
                    # Send the user to the agreement page, remembering where
                    # they wanted to go.
                    return redirect_to_intercept(
                        request.get_full_path(),
                        reverse('agreement'),
                        redirect_field_name=settings.REDIRECT_FIELD_NAME
                    )
            else:
                # Policy most probably will not be needed, so it is lazily
                # evaluated to spare a superfluous query on the database.
                request.user.consent_obtained = policy
        if request.path.startswith(url_index_postman) and len(birth_date) == 0 and not request.user.is_superuser:
            # We can reuse the birth date query result to avoid an additional
            # query in the DB. For users with a profile, the result will not
            # be empty and hold some value (either datetime or None).
            t = TemplateResponse(
                request, 'registration/profile_create.html', status=403,
                context={
                    'function_title': _("Inbox"),
                    'function_description': _("To be able to communicate with other members of the PS community, "
                                              "you need to create a profile."),
                })
            t.render()
            return t
|
tejo-esperanto/pasportaservo
|
core/middleware.py
|
Python
|
agpl-3.0
| 6,047 | 0.002481 |
from setuptools import setup, find_packages
import sys, os
# Resolve the directory containing this setup.py and read the long
# description from the README, falling back to an empty string when the
# file is absent (e.g. in certain build environments).
here = os.path.abspath(os.path.dirname(__file__))
try:
    README = open(os.path.join(here, 'README.rst')).read()
except IOError:
    README = ''
version = "0.0.1"
# Extra dependencies needed only by the test suite (see extras_require below).
TEST_REQUIREMENTS = [
    'numpy',
    'pillow',
    'webtest'
]
setup(
    name='tgext.matplotrender',
    version=version,
    description="Renderer to expose matplotlib figures",
    long_description=README,
    classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
    keywords='turbogears2.extension',
    author='Alessandro Molina',
    author_email='amol@turbogears.org',
    url='https://github.com/amol-/tgext.matplotrender',
    license='MIT',
    packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
    namespace_packages = ['tgext'],
    include_package_data=True,
    zip_safe=False,
    install_requires=[
        "TurboGears2 >= 2.3.8",
        'matplotlib'
    ],
    extras_require={
        # Used by Travis and Coverage due to setup.py nosetests
        # causing a coredump when used with coverage
        'testing': TEST_REQUIREMENTS,
    },
    entry_points="""
    # -*- Entry points: -*-
    """
)
|
amol-/tgext.matplotrender
|
setup.py
|
Python
|
mit
| 1,201 | 0.003331 |
#!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
# Fetch the shared utils module from the blueprint resources into this
# script's directory, then import it (ordinary import; the file now exists
# next to this script).
ctx.download_resource(
    join('components', 'utils.py'),
    join(dirname(__file__), 'utils.py'))
import utils # NOQA
# Most images already ship with the following packages:
#
# python-setuptools
# python-backports
# python-backports-ssl_match_hostname
#
# - as they are dependencies of cloud-init, which is extremely popular.
#
# However, cloud-init is irrelevant for certain IaaS (such as vSphere) so
# images used there may not have these packages preinstalled.
#
# We're currently considering whether to include these libraries in the
# manager resources package. Until then, we only validate that they're
# preinstalled, and if not - instruct the user to install them.
missing_packages = set()
for pkg in ['python-setuptools',
            'python-backports',
            'python-backports-ssl_match_hostname']:
    ctx.logger.info('Ensuring {0} is installed'.format(pkg))
    is_installed = utils.RpmPackageHandler.is_package_installed(pkg)
    if not is_installed:
        missing_packages.add(pkg)
# Collect all missing packages first, then abort once with the full list
# rather than failing on the first missing package.
if missing_packages:
    ctx.abort_operation('Prerequisite packages missing: {0}. '
                        'Please ensure these packages are installed and '
                        'try again'.format(', '.join(missing_packages)))
|
cloudify-cosmo/cloudify-manager-blueprints
|
components/python/scripts/bootstrap_validate.py
|
Python
|
apache-2.0
| 1,334 | 0 |
"""
===========
gaussfitter
===========
.. codeauthor:: Adam Ginsburg <adam.g.ginsburg@gmail.com> 3/17/08
Latest version available at <http://code.google.com/p/agpy/source/browse/trunk/agpy/gaussfitter.py>
"""
import numpy
from numpy.ma import median
from numpy import pi
#from scipy import optimize,stats,pi
from mpfit import mpfit
"""
Note about mpfit/leastsq:
I switched everything over to the Markwardt mpfit routine for a few reasons,
but foremost being the ability to set limits on parameters, not just force them
to be fixed. As far as I can tell, leastsq does not have that capability.
The version of mpfit I use can be found here:
http://code.google.com/p/agpy/source/browse/trunk/mpfit
.. todo::
-turn into a class instead of a collection of objects
-implement WCS-based gaussian fitting with correct coordinates
"""
def moments(data,circle,rotate,vheight,estimator=median,**kwargs):
    """Estimate 2D-gaussian parameters from the moments of ``data``.

    Returns a subset of (height, amplitude, x, y, width_x, width_y, rotation)
    depending on ``vheight``, ``circle`` and ``rotate``; the rotation "moment"
    is always 0.  ``estimator`` measures the background level (pass
    ``numpy.ma.median`` for masked arrays).
    """
    abs_data = numpy.abs(data)
    total = abs_data.sum()
    Y, X = numpy.indices(data.shape)  # python convention: reverse x,y numpy.indices
    y = numpy.argmax((X * abs_data).sum(axis=1) / total)
    x = numpy.argmax((Y * abs_data).sum(axis=0) / total)
    # FIRST moment, not second!
    col = data[int(y), :]
    width_x = numpy.sqrt(numpy.abs((numpy.arange(col.size) - y) * col).sum() / numpy.abs(col).sum())
    row = data[:, int(x)]
    width_y = numpy.sqrt(numpy.abs((numpy.arange(row.size) - x) * row).sum() / numpy.abs(row).sum())
    height = estimator(data.ravel())
    amplitude = data.max() - height
    if any(numpy.isnan(v) for v in (width_y, width_x, height, amplitude)):
        raise ValueError("something is nan")
    params = [amplitude, x, y]
    if vheight == 1:
        params.insert(0, height)
    if circle == 0:
        params.extend([width_x, width_y])
        if rotate == 1:
            # rotation "moment" is just zero... also, circles don't rotate.
            params.append(0.)
    else:
        params.append((width_x + width_y) / 2.)
    return params
def twodgaussian(inpars, circle=False, rotate=True, vheight=True, shape=None):
    """Returns a 2d gaussian function of the form:
    x' = numpy.cos(rota) * x - numpy.sin(rota) * y
    y' = numpy.sin(rota) * x + numpy.cos(rota) * y
    (rota should be in degrees)
    g = b + a * numpy.exp ( - ( ((x-center_x)/width_x)**2 +
    ((y-center_y)/width_y)**2 ) / 2 )
    inpars = [b,a,center_x,center_y,width_x,width_y,rota]
    (b is background height, a is peak amplitude)
    where x and y are the input parameters of the returned function,
    and all other parameters are specified by this function
    However, the above values are passed by list.  The list should be:
    inpars = (height,amplitude,center_x,center_y,width_x,width_y,rota)
    You can choose to ignore / neglect some of the above input parameters
    unumpy.sing the following options:
    circle=0 - default is an elliptical gaussian (different x, y
    widths), but can reduce the input by one parameter if it's a
    circular gaussian
    rotate=1 - default allows rotation of the gaussian ellipse.  Can
    remove last parameter by setting rotate=0
    vheight=1 - default allows a variable height-above-zero, i.e. an
    additive constant for the Gaussian function.  Can remove first
    parameter by setting this to 0
    shape=None - if shape is set (to a 2-parameter list) then returns
    an image with the gaussian defined by inpars
    """
    # Keep the original list around for the error message below; work on a copy.
    inpars_old = inpars
    inpars = list(inpars)
    if vheight == 1:
        height = inpars.pop(0)
        height = float(height)
    else:
        height = float(0)
    # NOTE: the second popped value is bound to center_y and the third to
    # center_x -- an x/y axis-convention swap relative to the docstring's
    # parameter order. Preserved as-is; callers depend on it.
    amplitude, center_y, center_x = inpars.pop(0),inpars.pop(0),inpars.pop(0)
    amplitude = float(amplitude)
    center_x = float(center_x)
    center_y = float(center_y)
    if circle == 1:
        # Circular gaussian: one shared width, and rotation is meaningless.
        width = inpars.pop(0)
        width_x = float(width)
        width_y = float(width)
        rotate = 0
    else:
        width_x, width_y = inpars.pop(0),inpars.pop(0)
        width_x = float(width_x)
        width_y = float(width_y)
    if rotate == 1:
        # Convert the angle to radians and rotate the center into the
        # rotated coordinate frame used by rotgauss below.
        rota = inpars.pop(0)
        rota = pi/180. * float(rota)
        rcen_x = center_x * numpy.cos(rota) - center_y * numpy.sin(rota)
        rcen_y = center_x * numpy.sin(rota) + center_y * numpy.cos(rota)
    else:
        rcen_x = center_x
        rcen_y = center_y
    if len(inpars) > 0:
        # Leftover parameters mean the caller's list did not match the
        # circle/rotate/vheight flags.
        raise ValueError("There are still input parameters:" + str(inpars) + \
                " and you've input: " + str(inpars_old) + \
                " circle=%d, rotate=%d, vheight=%d" % (circle,rotate,vheight) )
    def rotgauss(x,y):
        # Evaluate the gaussian at (x, y); closes over the parameters above.
        if rotate==1:
            xp = x * numpy.cos(rota) - y * numpy.sin(rota)
            yp = x * numpy.sin(rota) + y * numpy.cos(rota)
        else:
            xp = x
            yp = y
        g = height+amplitude*numpy.exp(
            -(((rcen_x-xp)/width_x)**2+
            ((rcen_y-yp)/width_y)**2)/2.)
        return g
    if shape is not None:
        # Evaluate on a full grid and return the image instead of the callable.
        return rotgauss(*numpy.indices(shape))
    else:
        return rotgauss
def gaussfit(data,err=None,params=(),autoderiv=True,return_all=False,circle=False,
        fixed=numpy.repeat(False,7),limitedmin=[False,False,False,False,True,True,True],
        limitedmax=[False,False,False,False,False,False,True],
        usemoment=numpy.array([],dtype='bool'),
        minpars=numpy.repeat(0,7),maxpars=[0,0,0,0,0,0,360],
        rotate=1,vheight=1,quiet=True,returnmp=False,
        returnfitimage=False,**kwargs):
    """
    Gaussian fitter with the ability to fit a variety of different forms of
    2-dimensional gaussian.
    Input Parameters:
        data - 2-dimensional data array
        err=None - error array with same size as data array
        params=[] - initial input parameters for Gaussian function.
            (height, amplitude, x, y, width_x, width_y, rota)
            if not input, these will be determined from the moments of the system,
            assuming no rotation
        autoderiv=1 - use the autoderiv provided in the lmder.f function (the
            alternative is to us an analytic derivative with lmdif.f: this method
            is less robust)
        return_all=0 - Default is to return only the Gaussian parameters.
            1 - fit params, fit error
        returnfitimage - returns (best fit params,best fit image)
        returnmp - returns the full mpfit struct
        circle=0 - default is an elliptical gaussian (different x, y widths),
            but can reduce the input by one parameter if it's a circular gaussian
        rotate=1 - default allows rotation of the gaussian ellipse.  Can remove
            last parameter by setting rotate=0.  numpy.expects angle in DEGREES
        vheight=1 - default allows a variable height-above-zero, i.e. an
            additive constant for the Gaussian function.  Can remove first
            parameter by setting this to 0
        usemoment - can choose which parameters to use a moment estimation for.
            Other parameters will be taken from params.  Needs to be a boolean
            array.
    Output:
        Default output is a set of Gaussian parameters with the same shape as
        the input parameters
        Can also output the covariance matrix, 'infodict' that contains a lot
        more detail about the fit (see scipy.optimize.leastsq), and a message
        from leastsq telling what the exit status of the fitting routine was
        Warning: Does NOT necessarily output a rotation angle between 0 and 360 degrees.
    """
    usemoment=numpy.array(usemoment,dtype='bool')
    params=numpy.array(params,dtype='float')
    # BUGFIX: copy 'fixed' before mutating it. The default is a shared
    # module-level array, and the 'fixed[0] = 1' assignment below used to leak
    # into subsequent calls (and into caller-supplied lists).
    fixed = numpy.array(fixed)
    if usemoment.any() and len(params)==len(usemoment):
        moment = numpy.array(moments(data,circle,rotate,vheight,**kwargs),dtype='float')
        params[usemoment] = moment[usemoment]
    elif len(params)==0:
        # BUGFIX: was 'params == [] or len(params)==0'; comparing a numpy
        # array to a list is unreliable -- the length test alone suffices.
        params = (moments(data,circle,rotate,vheight,**kwargs))
    if vheight==0:
        # Fit with a height parameter anyway, but pin it at zero.
        vheight=1
        params = numpy.concatenate([[0],params])
        fixed[0] = 1
    # mpfit will fail if it is given a start parameter outside the allowed range:
    for i in range(len(params)):
        if params[i] > maxpars[i] and limitedmax[i]: params[i] = maxpars[i]
        if params[i] < minpars[i] and limitedmin[i]: params[i] = minpars[i]
    # Residual function (kept for reference / external use; mpfit itself uses
    # mpfitfun below).
    if err is None:
        errorfunction = lambda p: numpy.ravel((twodgaussian(p,circle,rotate,vheight)\
                (*numpy.indices(data.shape)) - data))
    else:
        errorfunction = lambda p: numpy.ravel((twodgaussian(p,circle,rotate,vheight)\
                (*numpy.indices(data.shape)) - data)/err)
    def mpfitfun(data,err):
        # Wrap the (optionally error-weighted) residuals in the
        # [status, residuals] form that mpfit expects.
        if err is None:
            def f(p,fjac=None): return [0,numpy.ravel(data-twodgaussian(p,circle,rotate,vheight)\
                    (*numpy.indices(data.shape)))]
        else:
            def f(p,fjac=None): return [0,numpy.ravel((data-twodgaussian(p,circle,rotate,vheight)\
                    (*numpy.indices(data.shape)))/err)]
        return f
    # Per-parameter constraint structure for mpfit; HEIGHT/YWIDTH/ROTATION
    # entries are added conditionally below to match the parameter list.
    parinfo = [
        {'n':1,'value':params[1],'limits':[minpars[1],maxpars[1]],'limited':[limitedmin[1],limitedmax[1]],'fixed':fixed[1],'parname':"AMPLITUDE",'error':0},
        {'n':2,'value':params[2],'limits':[minpars[2],maxpars[2]],'limited':[limitedmin[2],limitedmax[2]],'fixed':fixed[2],'parname':"XSHIFT",'error':0},
        {'n':3,'value':params[3],'limits':[minpars[3],maxpars[3]],'limited':[limitedmin[3],limitedmax[3]],'fixed':fixed[3],'parname':"YSHIFT",'error':0},
        {'n':4,'value':params[4],'limits':[minpars[4],maxpars[4]],'limited':[limitedmin[4],limitedmax[4]],'fixed':fixed[4],'parname':"XWIDTH",'error':0} ]
    if vheight == 1:
        parinfo.insert(0,{'n':0,'value':params[0],'limits':[minpars[0],maxpars[0]],'limited':[limitedmin[0],limitedmax[0]],'fixed':fixed[0],'parname':"HEIGHT",'error':0})
    if circle == 0:
        parinfo.append({'n':5,'value':params[5],'limits':[minpars[5],maxpars[5]],'limited':[limitedmin[5],limitedmax[5]],'fixed':fixed[5],'parname':"YWIDTH",'error':0})
        if rotate == 1:
            parinfo.append({'n':6,'value':params[6],'limits':[minpars[6],maxpars[6]],'limited':[limitedmin[6],limitedmax[6]],'fixed':fixed[6],'parname':"ROTATION",'error':0})
    if autoderiv == 0:
        # the analytic derivative, while not terribly difficult, is less
        # efficient and useful.  I only bothered putting it here because I was
        # instructed to do so for a class project - please ask if you would
        # like this feature implemented
        raise ValueError("I'm sorry, I haven't implemented this feature yet.")
    else:
#        p, cov, infodict, errmsg, success = optimize.leastsq(errorfunction,\
#                params, full_output=1)
        mp = mpfit(mpfitfun(data,err),parinfo=parinfo,quiet=quiet)
    if returnmp:
        returns = (mp)
    elif return_all == 0:
        returns = mp.params
    elif return_all == 1:
        returns = mp.params,mp.perror
    else:
        # BUGFIX: fall back so 'returns' is always bound (previously any
        # other value of return_all raised a NameError).
        returns = mp.params
    if returnfitimage:
        fitimage = twodgaussian(mp.params,circle,rotate,vheight)(*numpy.indices(data.shape))
        returns = (returns,fitimage)
    return returns
def onedmoments(Xax,data,vheight=True,estimator=median,negamp=None,
        veryverbose=False, **kwargs):
    """Estimate 1D-gaussian parameters (height, amplitude, x, width_x) from moments.

    Depending on ``vheight``, only a subset of the above is returned.
    ``estimator`` measures the background level (use ``numpy.ma.median`` for
    masked arrays).  ``negamp`` forces a negative peak (True), a positive one
    (False), or auto-detects (None).
    """
    dx = numpy.mean(Xax[1:] - Xax[:-1])  # assume a regular grid
    integral = (data * dx).sum()
    height = estimator(data)
    # Compare the widths of the candidate negative (L) and positive (H)
    # peaks to decide the peak sign when negamp is None.
    baseline_area = height * len(Xax) * dx
    Lpeakintegral = integral - baseline_area - (data[data > height] * dx).sum()
    Lamplitude = data.min() - height
    Lwidth_x = 0.5 * (numpy.abs(Lpeakintegral / Lamplitude))
    Hpeakintegral = integral - baseline_area - (data[data < height] * dx).sum()
    Hamplitude = data.max() - height
    Hwidth_x = 0.5 * (numpy.abs(Hpeakintegral / Hamplitude))
    Lstddev = Xax[data < data.mean()].std()
    Hstddev = Xax[data > data.mean()].std()
    if negamp:  # can force the guess to be negative
        xcen, amplitude, width_x = Xax[numpy.argmin(data)], Lamplitude, Lwidth_x
    elif negamp is None:
        if Hstddev < Lstddev:
            xcen, amplitude, width_x = Xax[numpy.argmax(data)], Hamplitude, Hwidth_x
        else:
            xcen, amplitude, width_x = Xax[numpy.argmin(data)], Lamplitude, Lwidth_x
    else:  # if negamp==False, make positive
        xcen, amplitude, width_x = Xax[numpy.argmax(data)], Hamplitude, Hwidth_x
    if veryverbose:
        print("negamp: %s amp,width,cen Lower: %g, %g   Upper: %g, %g  Center: %g" %
              (negamp, Lamplitude, Lwidth_x, Hamplitude, Hwidth_x, xcen))
    if numpy.isnan(width_x) or numpy.isnan(height) or numpy.isnan(amplitude):
        raise ValueError("something is nan")
    params = [amplitude, xcen, width_x]
    if vheight:
        params = [height] + params
    return params
def onedgaussian(x,H,A,dx,w):
    """Evaluate a 1-dimensional gaussian at ``x``:

    H + A*exp(-(x-dx)**2 / (2*w**2))

    where H is the baseline, A the amplitude, dx the center and w the width.
    """
    shifted = x - dx
    return H + A * numpy.exp(-shifted**2 / (2 * w**2))
def onedgaussfit(xax, data, err=None,
params=[0,1,0,1],fixed=[False,False,False,False],
limitedmin=[False,False,False,True],
limitedmax=[False,False,False,False], minpars=[0,0,0,0],
maxpars=[0,0,0,0], quiet=True, shh=True,
veryverbose=False,
vheight=True, negamp=False,
usemoments=False):
"""
Inputs:
xax - x axis
data - y axis
err - error corresponding to data
params - Fit parameters: Height of background, Amplitude, Shift, Width
fixed - Is parameter fixed?
limitedmin/minpars - set lower limits on each parameter (default: width>0)
limitedmax/maxpars - set upper limits on each parameter
quiet - should MPFIT output each iteration?
shh - output final parameters?
usemoments - replace default parameters with moments
Returns:
Fit parameters
Model
Fit errors
chi2
"""
def mpfitfun(x,y,err):
if err is None:
def f(p,fjac=None): return [0,(y-onedgaussian(x,*p))]
else:
def f(p,fjac=None): return [0,(y-onedgaussian(x,*p))/err]
return f
if xax == None:
xax = numpy.arange(len(data))
if vheight is False:
height = params[0]
fixed[0] = True
if usemoments:
params = onedmoments(xax,data,vheight=vheight,negamp=negamp, veryverbose=veryverbose)
if vheight is False: params = [height]+params
if veryverbose: print "OneD moments: h: %g a: %g c: %g w: %g" % tuple(params)
parinfo = [ {'n':0,'value':params[0],'limits':[minpars[0],maxpars[0]],'limited':[limitedmin[0],limitedmax[0]],'fixed':fixed[0],'parname':"HEIGHT",'error':0} ,
{'n':1,'value':params[1],'limits':[minpars[1],maxpars[1]],'limited':[limitedmin[1],limitedmax[1]],'fixed':fixed[1],'parname':"AMPLITUDE",'error':0},
{'n':2,'value':params[2],'limits':[minpars[2],maxpars[2]],'limited':[limitedmin[2],limitedmax[2]],'fixed':fixed[2],'parname':"SHIFT",'error':0},
{'n':3,'value':params[3],'limits':[minpars[3],maxpars[3]],'limited':[limitedmin[3],limitedmax[3]],'fixed':fixed[3],'parname':"WIDTH",'error':0}]
mp = mpfit(mpfitfun(xax,data,err),parinfo=parinfo,quiet=quiet)
mpp = mp.params
mpperr = mp.perror
chi2 = mp.fnorm
if mp.status == 0:
raise Exception(mp.errmsg)
if (not shh) or veryverbose:
print "Fit status: ",mp.status
for i,p in enumerate(mpp):
parinfo[i]['value'] = p
print parinfo[i]['parname'],p," +/- ",mpperr[i]
print "Chi2: ",mp.fnorm," Reduced Chi2: ",mp.fnorm/len(data)," DOF:",len(data)-len(mpp)
return mpp,onedgaussian(xax,*mpp),mpperr,chi2
def n_gaussian(pars=None,a=None,dx=None,sigma=None):
    """
    Returns a function that sums over N gaussians, where N is the length of
    a,dx,sigma *OR* N = len(pars) / 3

    The background "height" is assumed to be zero (you must "baseline" your
    spectrum before fitting)

    pars  - a list with len(pars) = 3n, assuming a,dx,sigma repeated
    dx    - offset (velocity center) values
    sigma - line widths
    a     - amplitudes
    """
    # BUGFIX: guard against pars=None -- calling with a/dx/sigma directly
    # (the documented alternative form) used to raise TypeError on len(None).
    if pars is not None and len(pars) % 3 == 0:
        # Unpack the flat [a0, dx0, sigma0, a1, dx1, sigma1, ...] list.
        a = [pars[ii] for ii in range(0,len(pars),3)]
        dx = [pars[ii] for ii in range(1,len(pars),3)]
        sigma = [pars[ii] for ii in range(2,len(pars),3)]
    elif not(len(dx) == len(sigma) == len(a)):
        raise ValueError("Wrong array lengths! dx: %i  sigma: %i  a: %i" % (len(dx),len(sigma),len(a)))
    def g(x):
        # Sum of the N component gaussians evaluated at x (array-like).
        v = numpy.zeros(len(x))
        for i in range(len(dx)):
            v += a[i] * numpy.exp( - ( x - dx[i] )**2 / (2.0*sigma[i]**2) )
        return v
    return g
def multigaussfit(xax, data, ngauss=1, err=None, params=[1,0,1],
        fixed=[False,False,False], limitedmin=[False,False,True],
        limitedmax=[False,False,False], minpars=[0,0,0], maxpars=[0,0,0],
        quiet=True, shh=True, veryverbose=False):
    """
    An improvement on onedgaussfit.  Lets you fit multiple gaussians.
    Inputs:
       xax - x axis
       data - y axis
       ngauss - How many gaussians to fit?  Default 1 (this could supersede onedgaussfit)
       err - error corresponding to data
     These parameters need to have length = 3*ngauss.  If ngauss > 1 and length = 3, they will
     be replicated ngauss times, otherwise they will be reset to defaults:
       params - Fit parameters: [amplitude, offset, width] * ngauss
              If len(params) % 3 == 0, ngauss will be set to len(params) / 3
       fixed - Is parameter fixed?
       limitedmin/minpars - set lower limits on each parameter (default: width>0)
       limitedmax/maxpars - set upper limits on each parameter
       quiet - should MPFIT output each iteration?
       shh - output final parameters?
    Returns:
       Fit parameters
       Model
       Fit errors
       chi2
    """
    # Infer ngauss from the parameter list when it is longer than requested.
    if len(params) != ngauss and (len(params) / 3) > ngauss:
        ngauss = len(params) / 3
    if isinstance(params,numpy.ndarray): params=params.tolist()
    # make sure all various things are the right length; if they're not, fix them using the defaults
    # NOTE(review): 'parlist *= ngauss' and 'parlist[:] = ...' mutate the
    # DEFAULT argument lists in place, so the defaults grow/change across
    # calls with different ngauss -- a classic mutable-default bug.
    # NOTE(review): the elif dispatch below matches by '==' (value equality),
    # so two lists with equal contents (e.g. fixed and limitedmax) cannot be
    # told apart; the branches happen to assign the same defaults in that case.
    for parlist in (params,fixed,limitedmin,limitedmax,minpars,maxpars):
        if len(parlist) != 3*ngauss:
            # if you leave the defaults, or enter something that can be multiplied by 3 to get to the
            # right number of gaussians, it will just replicate
            if len(parlist) == 3:
                parlist *= ngauss
            elif parlist==params:
                parlist[:] = [1,0,1] * ngauss
            elif parlist==fixed or parlist==limitedmax:
                parlist[:] = [False,False,False] * ngauss
            elif parlist==limitedmin:
                parlist[:] = [False,False,True] * ngauss
            elif parlist==minpars or parlist==maxpars:
                parlist[:] = [0,0,0] * ngauss
    def mpfitfun(x,y,err):
        # Residual function in the [status, residuals] form mpfit expects.
        if err is None:
            def f(p,fjac=None): return [0,(y-n_gaussian(pars=p)(x))]
        else:
            def f(p,fjac=None): return [0,(y-n_gaussian(pars=p)(x))/err]
        return f
    # NOTE(review): 'xax == None' is an elementwise comparison when xax is a
    # numpy array; should be 'xax is None' -- confirm before changing.
    if xax == None:
        xax = numpy.arange(len(data))
    parnames = {0:"AMPLITUDE",1:"SHIFT",2:"WIDTH"}
    # NOTE(review): 'parname' uses str(ii%3), which repeats the parameter
    # index rather than numbering the gaussian components; 'error':ii also
    # looks like it was meant to be 0 as in the other fitters -- confirm.
    parinfo = [ {'n':ii, 'value':params[ii],
        'limits':[minpars[ii],maxpars[ii]],
        'limited':[limitedmin[ii],limitedmax[ii]], 'fixed':fixed[ii],
        'parname':parnames[ii%3]+str(ii%3), 'error':ii}
        for ii in xrange(len(params)) ]
    if veryverbose:
        print "GUESSES: "
        print "\n".join(["%s: %s" % (p['parname'],p['value']) for p in parinfo])
    mp = mpfit(mpfitfun(xax,data,err),parinfo=parinfo,quiet=quiet)
    mpp = mp.params
    mpperr = mp.perror
    chi2 = mp.fnorm
    if mp.status == 0:
        raise Exception(mp.errmsg)
    if not shh:
        print "Final fit values: "
        for i,p in enumerate(mpp):
            parinfo[i]['value'] = p
            print parinfo[i]['parname'],p," +/- ",mpperr[i]
        print "Chi2: ",mp.fnorm," Reduced Chi2: ",mp.fnorm/len(data)," DOF:",len(data)-len(mpp)
    # Return best-fit parameters, the summed-gaussian model on xax, the
    # parameter errors, and chi-squared.
    return mpp,n_gaussian(pars=mpp)(xax),mpperr,chi2
def collapse_gaussfit(cube,xax=None,axis=2,negamp=False,usemoments=True,nsigcut=1.0,mppsigcut=1.0,
        return_errors=False, **kwargs):
    """Fit a 1D gaussian (via onedgaussfit) to every spectrum of a data cube.

    The spectral axis is given by ``axis``; only spectra whose peak exceeds
    nsigcut times the median per-spectrum standard deviation are fit, and
    only fits with amplitude significant at mppsigcut sigma are kept.
    Returns maps of width, offset, amplitude and chi2 (plus the corresponding
    error maps when return_errors is set); unfit pixels remain NaN.
    """
    import time
    std_coll = cube.std(axis=axis)
    std_coll[std_coll==0] = numpy.nan # must eliminate all-zero spectra
    # Median of the finite per-spectrum standard deviations (x==x filters NaN).
    mean_std = median(std_coll[std_coll==std_coll])
    if axis > 0:
        # Move the spectral axis to the front so cube[:,i,j] is a spectrum.
        cube = cube.swapaxes(0,axis)
    # Output maps, initialized to NaN for pixels that are never fit.
    width_arr = numpy.zeros(cube.shape[1:]) + numpy.nan
    amp_arr = numpy.zeros(cube.shape[1:]) + numpy.nan
    chi2_arr = numpy.zeros(cube.shape[1:]) + numpy.nan
    offset_arr = numpy.zeros(cube.shape[1:]) + numpy.nan
    width_err = numpy.zeros(cube.shape[1:]) + numpy.nan
    amp_err = numpy.zeros(cube.shape[1:]) + numpy.nan
    offset_err = numpy.zeros(cube.shape[1:]) + numpy.nan
    if xax is None:
        xax = numpy.arange(cube.shape[0])
    starttime = time.time()
    print "Cube shape: ",cube.shape
    if negamp: extremum=numpy.min
    else: extremum=numpy.max
    print "Fitting a total of %i spectra with peak signal above %f" % ((numpy.abs(extremum(cube,axis=0)) > (mean_std*nsigcut)).sum(),mean_std*nsigcut)
    for i in xrange(cube.shape[1]):
        t0 = time.time()
        nspec = (numpy.abs(extremum(cube[:,i,:],axis=0)) > (mean_std*nsigcut)).sum()
        print "Working on row %d with %d spectra to fit" % (i,nspec) ,
        for j in xrange(cube.shape[2]):
            # Only fit spectra with a significant peak.
            if numpy.abs(extremum(cube[:,i,j])) > (mean_std*nsigcut):
                mpp,gfit,mpperr,chi2 = onedgaussfit(xax,cube[:,i,j],err=numpy.ones(cube.shape[0])*mean_std,negamp=negamp,usemoments=usemoments,**kwargs)
                # Keep the fit only when the amplitude is significant.
                if numpy.abs(mpp[1]) > (mpperr[1]*mppsigcut):
                    width_arr[i,j] = mpp[3]
                    offset_arr[i,j] = mpp[2]
                    chi2_arr[i,j] = chi2
                    amp_arr[i,j] = mpp[1]
                    width_err[i,j] = mpperr[3]
                    offset_err[i,j] = mpperr[2]
                    amp_err[i,j] = mpperr[1]
        dt = time.time()-t0
        if nspec > 0:
            print "in %f seconds (average: %f)" % (dt,dt/float(nspec))
        else:
            print "in %f seconds" % (dt)
    print "Total time %f seconds" % (time.time()-starttime)
    if return_errors:
        return width_arr,offset_arr,amp_arr,width_err,offset_err,amp_err,chi2_arr
    else:
        return width_arr,offset_arr,amp_arr,chi2_arr
|
kirillzhuravlev/atrex
|
Software/gaussfitter.py
|
Python
|
lgpl-3.0
| 23,723 | 0.024702 |
#Copyright ReportLab Europe Ltd. 2000-2016
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/widgets/grids.py
__version__='3.3.0'
from reportlab.lib import colors
from reportlab.lib.validators import isNumber, isColorOrNone, isBoolean, isListOfNumbers, OneOf, isListOfColors, isNumberOrNone
from reportlab.lib.attrmap import AttrMap, AttrMapValue
from reportlab.graphics.shapes import Drawing, Group, Line, Rect, LineShape, definePath, EmptyClipPath
from reportlab.graphics.widgetbase import Widget
def frange(start, end=None, inc=None):
    """A range function, that does accept float increments...

    frange(stop) counts from 0.0 by 1.0; frange(start, stop[, inc]) counts
    from start towards stop (exclusive) by inc, which may be negative.
    Returns a list of floats.
    """
    if end is None:
        # Single-argument form: frange(stop).
        # (BUGFIX: was '== None'; identity test is the correct comparison.)
        end = start + 0.0
        start = 0.0
    if inc is None:
        inc = 1.0
    L = []
    end = end - inc*0.0001  # to avoid numerical problems at the endpoint
    while 1:
        # Compute each value from start to avoid accumulating float error.
        # (Renamed from 'next', which shadowed the builtin.)
        nxt = start + len(L) * inc
        if inc > 0 and nxt >= end:
            break
        elif inc < 0 and nxt <= end:
            break
        L.append(nxt)
    return L
def makeDistancesList(list):
    """Returns a list of distances between adjacent numbers in some input list.

    E.g. [1, 1, 2, 3, 5, 7] -> [0, 1, 1, 2, 2]
    """
    # (Parameter name 'list' shadows the builtin but is kept for
    # backward compatibility with keyword callers.)
    return [right - left for left, right in zip(list[:-1], list[1:])]
class Grid(Widget):
    """This makes a rectangular grid of equidistant stripes.
    The grid contains an outer border rectangle, and stripes
    inside which can be drawn with lines and/or as solid tiles.
    The drawing order is: outer rectangle, then lines and tiles.
    The stripes' width is indicated as 'delta'. The sequence of
    stripes can have an offset named 'delta0'. Both values need
    to be positive!
    """
    _attrMap = AttrMap(
        x = AttrMapValue(isNumber, desc="The grid's lower-left x position."),
        y = AttrMapValue(isNumber, desc="The grid's lower-left y position."),
        width = AttrMapValue(isNumber, desc="The grid's width."),
        height = AttrMapValue(isNumber, desc="The grid's height."),
        orientation = AttrMapValue(OneOf(('vertical', 'horizontal')),
            desc='Determines if stripes are vertical or horizontal.'),
        useLines = AttrMapValue(OneOf((0, 1)),
            desc='Determines if stripes are drawn with lines.'),
        useRects = AttrMapValue(OneOf((0, 1)),
            desc='Determines if stripes are drawn with solid rectangles.'),
        delta = AttrMapValue(isNumber,
            desc='Determines the width/height of the stripes.'),
        delta0 = AttrMapValue(isNumber,
            desc='Determines the stripes initial width/height offset.'),
        deltaSteps = AttrMapValue(isListOfNumbers,
            desc='List of deltas to be used cyclically.'),
        stripeColors = AttrMapValue(isListOfColors,
            desc='Colors applied cyclically in the right or upper direction.'),
        fillColor = AttrMapValue(isColorOrNone,
            desc='Background color for entire rectangle.'),
        strokeColor = AttrMapValue(isColorOrNone,
            desc='Color used for lines.'),
        strokeWidth = AttrMapValue(isNumber,
            desc='Width used for lines.'),
        rectStrokeColor = AttrMapValue(isColorOrNone, desc='Color for outer rect stroke.'),
        rectStrokeWidth = AttrMapValue(isNumberOrNone, desc='Width for outer rect stroke.'),
        )
    def __init__(self):
        # Defaults: a 100x100 grid of solid vertical stripes, 20 units wide.
        self.x = 0
        self.y = 0
        self.width = 100
        self.height = 100
        self.orientation = 'vertical'
        self.useLines = 0
        self.useRects = 1
        self.delta = 20
        self.delta0 = 0
        self.deltaSteps = []
        self.fillColor = colors.white
        self.stripeColors = [colors.red, colors.green, colors.blue]
        self.strokeColor = colors.black
        self.strokeWidth = 2
    def demo(self):
        """Return a small sample Drawing containing a default Grid."""
        D = Drawing(100, 100)
        g = Grid()
        D.add(g)
        return D
    def makeOuterRect(self):
        """Return the border/background Rect, or None when it would be invisible."""
        # rectStrokeColor/rectStrokeWidth override the line style when set.
        strokeColor = getattr(self,'rectStrokeColor',self.strokeColor)
        strokeWidth = getattr(self,'rectStrokeWidth',self.strokeWidth)
        if self.fillColor or (strokeColor and strokeWidth):
            rect = Rect(self.x, self.y, self.width, self.height)
            rect.fillColor = self.fillColor
            rect.strokeColor = strokeColor
            rect.strokeWidth = strokeWidth
            return rect
        else:
            return None
    def makeLinePosList(self, start, isX=0):
        "Returns a list of positions where to place lines."
        w, h = self.width, self.height
        if isX:
            length = w
        else:
            length = h
        if self.deltaSteps:
            # Cycle through deltaSteps until the end of the grid is passed,
            # then drop the overshooting position.
            r = [start + self.delta0]
            i = 0
            while 1:
                if r[-1] > start + length:
                    del r[-1]
                    break
                r.append(r[-1] + self.deltaSteps[i % len(self.deltaSteps)])
                i = i + 1
        else:
            r = frange(start + self.delta0, start + length, self.delta)
        # Always include the far edge, and the near edge when offset.
        r.append(start + length)
        if self.delta0 != 0:
            r.insert(0, start)
        #print 'Grid.makeLinePosList() -> %s' % r
        return r
    def makeInnerLines(self):
        """Return a Group of the stripe boundary lines (empty unless useLines)."""
        # inner grid lines
        group = Group()
        w, h = self.width, self.height
        if self.useLines == 1:
            if self.orientation == 'vertical':
                r = self.makeLinePosList(self.x, isX=1)
                for x in r:
                    line = Line(x, self.y, x, self.y + h)
                    line.strokeColor = self.strokeColor
                    line.strokeWidth = self.strokeWidth
                    group.add(line)
            elif self.orientation == 'horizontal':
                r = self.makeLinePosList(self.y, isX=0)
                for y in r:
                    line = Line(self.x, y, self.x + w, y)
                    line.strokeColor = self.strokeColor
                    line.strokeWidth = self.strokeWidth
                    group.add(line)
        return group
    def makeInnerTiles(self):
        """Return a Group of the filled stripe rectangles (empty unless useRects)."""
        # inner grid lines
        group = Group()
        w, h = self.width, self.height
        # inner grid stripes (solid rectangles)
        if self.useRects == 1:
            cols = self.stripeColors
            if self.orientation == 'vertical':
                r = self.makeLinePosList(self.x, isX=1)
            elif self.orientation == 'horizontal':
                r = self.makeLinePosList(self.y, isX=0)
            dist = makeDistancesList(r)
            i = 0
            for j in range(len(dist)):
                if self.orientation == 'vertical':
                    x = r[j]
                    stripe = Rect(x, self.y, dist[j], h)
                elif self.orientation == 'horizontal':
                    y = r[j]
                    stripe = Rect(self.x, y, w, dist[j])
                # Cycle through the stripe colors.
                stripe.fillColor = cols[i % len(cols)]
                stripe.strokeColor = None
                group.add(stripe)
                i = i + 1
        return group
    def draw(self):
        """Assemble the widget: border rect, then tiles, then lines on top."""
        # general widget bits
        group = Group()
        # NOTE(review): makeOuterRect() may return None; presumably
        # Group.add tolerates that -- confirm against reportlab.
        group.add(self.makeOuterRect())
        group.add(self.makeInnerTiles())
        group.add(self.makeInnerLines(),name='_gridLines')
        return group
class DoubleGrid(Widget):
    """This combines two ordinary Grid objects orthogonal to each other.
    """
    _attrMap = AttrMap(
        x = AttrMapValue(isNumber, desc="The grid's lower-left x position."),
        y = AttrMapValue(isNumber, desc="The grid's lower-left y position."),
        width = AttrMapValue(isNumber, desc="The grid's width."),
        height = AttrMapValue(isNumber, desc="The grid's height."),
        grid0 = AttrMapValue(None, desc="The first grid component."),
        grid1 = AttrMapValue(None, desc="The second grid component."),
        )
    def __init__(self):
        # NOTE: the geometry set here is copied into grid0/grid1 once; later
        # changes to self.x/y/width/height are NOT propagated to the
        # component grids (see the commented-out __setattr__ below).
        self.x = 0
        self.y = 0
        self.width = 100
        self.height = 100
        # grid0: vertical lines only.
        g0 = Grid()
        g0.x = self.x
        g0.y = self.y
        g0.width = self.width
        g0.height = self.height
        g0.orientation = 'vertical'
        g0.useLines = 1
        g0.useRects = 0
        g0.delta = 20
        g0.delta0 = 0
        g0.deltaSteps = []
        g0.fillColor = colors.white
        g0.stripeColors = [colors.red, colors.green, colors.blue]
        g0.strokeColor = colors.black
        g0.strokeWidth = 1
        # grid1: horizontal lines only.
        g1 = Grid()
        g1.x = self.x
        g1.y = self.y
        g1.width = self.width
        g1.height = self.height
        g1.orientation = 'horizontal'
        g1.useLines = 1
        g1.useRects = 0
        g1.delta = 20
        g1.delta0 = 0
        g1.deltaSteps = []
        g1.fillColor = colors.white
        g1.stripeColors = [colors.red, colors.green, colors.blue]
        g1.strokeColor = colors.black
        g1.strokeWidth = 1
        self.grid0 = g0
        self.grid1 = g1
##    # This gives an AttributeError:
##    #   DoubleGrid instance has no attribute 'grid0'
##    def __setattr__(self, name, value):
##        if name in ('x', 'y', 'width', 'height'):
##            setattr(self.grid0, name, value)
##            setattr(self.grid1, name, value)
    def demo(self):
        """Return a small sample Drawing containing a default DoubleGrid."""
        D = Drawing(100, 100)
        g = DoubleGrid()
        D.add(g)
        return D
    def draw(self):
        """Draw both component grids, rect-bearing grid first."""
        group = Group()
        g0, g1 = self.grid0, self.grid1
        # Order groups to make sure both v and h lines
        # are visible (works only when there is only
        # one kind of stripes, v or h).
        # (Pre-2.5 'and/or' ternary idiom: pick (g0,g1) when g0 has the
        # rects and g1 does not, else (g1,g0).)
        G = g0.useRects == 1 and g1.useRects == 0 and (g0,g1) or (g1,g0)
        for g in G:
            group.add(g.makeOuterRect())
        for g in G:
            group.add(g.makeInnerTiles())
            group.add(g.makeInnerLines(),name='_gridLines')
        return group
class ShadedRect(Widget):
    """A rectangle filled with a linear colour gradient.

    Colors are interpolated linearly between 'fillColorStart'
    and 'fillColorEnd', both of which appear at the margins.
    If 'numShades' is set to one, though, only 'fillColorStart'
    is used.  When 'cylinderMode' is true the shade reverses at the
    middle of the rectangle, producing a cylinder-like highlight.
    """
    _attrMap = AttrMap(
        x = AttrMapValue(isNumber, desc="The grid's lower-left x position."),
        y = AttrMapValue(isNumber, desc="The grid's lower-left y position."),
        width = AttrMapValue(isNumber, desc="The grid's width."),
        height = AttrMapValue(isNumber, desc="The grid's height."),
        orientation = AttrMapValue(OneOf(('vertical', 'horizontal')), desc='Determines if stripes are vertical or horizontal.'),
        numShades = AttrMapValue(isNumber, desc='The number of interpolating colors.'),
        fillColorStart = AttrMapValue(isColorOrNone, desc='Start value of the color shade.'),
        fillColorEnd = AttrMapValue(isColorOrNone, desc='End value of the color shade.'),
        strokeColor = AttrMapValue(isColorOrNone, desc='Color used for border line.'),
        strokeWidth = AttrMapValue(isNumber, desc='Width used for lines.'),
        cylinderMode = AttrMapValue(isBoolean, desc='True if shading reverses in middle.'),
        )
    def __init__(self,**kw):
        self.x = 0
        self.y = 0
        self.width = 100
        self.height = 100
        self.orientation = 'vertical'
        self.numShades = 20
        self.fillColorStart = colors.pink
        self.fillColorEnd = colors.black
        self.strokeColor = colors.black
        self.strokeWidth = 2
        self.cylinderMode = 0
        self.setProperties(kw)
    def demo(self):
        """Return a small sample Drawing containing one ShadedRect."""
        D = Drawing(100, 100)
        g = ShadedRect()
        D.add(g)
        return D
    def _flipRectCorners(self):
        """Normalise a negative width and/or height by flipping corners.

        Returns (x, y, width, height, fillColorStart, fillColorEnd) with
        width/height made non-negative; the shade colours are swapped when
        the flip reverses the shading direction.
        """
        x, y, width, height, fillColorStart, fillColorEnd = self.x, self.y, self.width, self.height, self.fillColorStart, self.fillColorEnd
        if width < 0 and height > 0:
            x = x + width
            width = -width
            if self.orientation=='vertical': fillColorStart, fillColorEnd = fillColorEnd, fillColorStart
        elif height<0 and width>0:
            y = y + height
            height = -height
            if self.orientation=='horizontal': fillColorStart, fillColorEnd = fillColorEnd, fillColorStart
        elif width < 0 and height < 0:
            # BUGFIX: this branch previously tested 'height < 0 and height < 0'
            # (a typo) so the width condition was never checked.
            x = x + width
            width = -width
            y = y + height
            height = -height
        return x, y, width, height, fillColorStart, fillColorEnd
    def draw(self):
        """Render the gradient as a series of thin solid stripes plus border."""
        # general widget bits
        group = Group()
        x, y, w, h, c0, c1 = self._flipRectCorners()
        numShades = self.numShades
        if self.cylinderMode:
            # Cylinder shading needs an odd stripe count so a middle exists.
            if not numShades%2: numShades = numShades+1
            halfNumShades = int((numShades-1)/2) + 1
        num = float(numShades) # must make it float!
        vertical = self.orientation == 'vertical'
        if vertical:
            if numShades == 1:
                V = [x]
            else:
                V = frange(x, x + w, w/num)
        else:
            if numShades == 1:
                V = [y]
            else:
                V = frange(y, y + h, h/num)
        for v in V:
            stripe = vertical and Rect(v, y, w/num, h) or Rect(x, v, w, h/num)
            if self.cylinderMode:
                # Interpolate out to the middle, then back again.
                if V.index(v)>=halfNumShades:
                    col = colors.linearlyInterpolatedColor(c1,c0,V[halfNumShades],V[-1], v)
                else:
                    col = colors.linearlyInterpolatedColor(c0,c1,V[0],V[halfNumShades], v)
            else:
                col = colors.linearlyInterpolatedColor(c0,c1,V[0],V[-1], v)
            stripe.fillColor = col
            stripe.strokeColor = col
            stripe.strokeWidth = 1
            group.add(stripe)
        if self.strokeColor and self.strokeWidth>=0:
            # Optional border rectangle drawn on top of the stripes.
            rect = Rect(x, y, w, h)
            rect.strokeColor = self.strokeColor
            rect.strokeWidth = self.strokeWidth
            rect.fillColor = None
            group.add(rect)
        return group
def colorRange(c0, c1, n):
    """Return a list of n colours interpolated linearly from c0 to c1."""
    if n==1: return [c0]
    result = []
    if n > 1:
        span = n - 1
        result = [colors.linearlyInterpolatedColor(c0, c1, 0, span, step)
                  for step in range(n)]
    return result
def centroid(P):
    """Return the arithmetic mean point (x, y) of the point sequence P."""
    total_x = sum(x for x, _ in P)
    total_y = sum(y for _, y in P)
    count = float(len(P))
    return total_x / count, total_y / count
def rotatedEnclosingRect(P, angle, rect):
    '''Return a Group containing *rect* sized and positioned so that, after
    the Group's rotation by *angle* degrees about the centroid of *P*, it
    encloses every point in *P*.

    P      sequence of (x, y) coordinate pairs
    angle  rotation angle in degrees
    rect   a rectangle-like object whose x/y/width/height are overwritten

    The extreme axis-parallel and axis-orthogonal distances of P (relative
    to the axis through the centroid at the given angle) define the least
    enclosing rectangle.
    '''
    # NOTE: previously imported the unused 'tan' as well.
    from math import pi, cos, sin
    x0, y0 = centroid(P)
    theta = (angle/180.)*pi
    s,c=sin(theta),cos(theta)
    def parallelAxisDist(xy,s=s,c=c,x0=x0,y0=y0):
        x,y = xy
        return (s*(y-y0)+c*(x-x0))
    def orthogonalAxisDist(xy,s=s,c=c,x0=x0,y0=y0):
        x,y = xy
        return (c*(y-y0)+s*(x-x0))
    # Extreme distances along each axis bound the rectangle.
    L = sorted(map(parallelAxisDist, P))
    a0, a1 = L[0], L[-1]
    L = sorted(map(orthogonalAxisDist, P))
    b0, b1 = L[0], L[-1]
    rect.x, rect.width = a0, a1-a0
    rect.y, rect.height = b0, b1-b0
    # Affine transform rotating by theta about the centroid.
    g = Group(transform=(c,s,-s,c,x0,y0))
    g.add(rect)
    return g
class ShadedPolygon(Widget,LineShape):
    """A polygon filled with a linear colour shade at a given angle."""
    _attrMap = AttrMap(BASE=LineShape,
        angle = AttrMapValue(isNumber,desc="Shading angle"),
        fillColorStart = AttrMapValue(isColorOrNone),
        fillColorEnd = AttrMapValue(isColorOrNone),
        numShades = AttrMapValue(isNumber, desc='The number of interpolating colors.'),
        cylinderMode = AttrMapValue(isBoolean, desc='True if shading reverses in middle.'),
        points = AttrMapValue(isListOfNumbers),
        )
    def __init__(self, **kw):
        self.angle = 90
        self.fillColorStart = colors.red
        self.fillColorEnd = colors.green
        self.cylinderMode = 0
        self.numShades = 50
        self.points = [-1,-1,2,2,3,-1]
        LineShape.__init__(self, kw)
    def draw(self):
        # Pair the flat coordinate list up into (x, y) tuples.
        flat = self.points
        pts = [(flat[i], flat[i + 1]) for i in range(0, len(flat), 2)]
        # Clip everything drawn afterwards to the polygon outline.
        ops = [('moveTo',) + pts[0]]
        ops.extend(('lineTo',) + p for p in pts[1:])
        ops.append('closePath')
        path = definePath(ops, fillColor=None, strokeColor=None)
        path.isClipPath = 1
        g = Group()
        g.add(path)
        # Reduce the shading angle to a ShadedRect orientation plus rotation.
        angle = self.angle
        orientation = 'vertical'
        if angle == 180:
            angle = 0
        elif angle in (90, 270):
            orientation = 'horizontal'
            angle = 0
        rect = ShadedRect(strokeWidth=0, strokeColor=None, orientation=orientation)
        for attr in ('fillColorStart', 'fillColorEnd', 'numShades', 'cylinderMode'):
            setattr(rect, attr, getattr(self, attr))
        g.add(rotatedEnclosingRect(pts, angle, rect))
        # End the clip and redraw the outline on top of the shading.
        g.add(EmptyClipPath)
        outline = path.copy()
        outline.isClipPath = 0
        outline.strokeColor = self.strokeColor
        outline.strokeWidth = self.strokeWidth
        g.add(outline)
        return g
if __name__=='__main__': #noruntests
    # Ad-hoc visual test: render a shaded polygon to /tmp/shobj.gif.
    # (Removed an unused 'blue' import and an unused 'angle=45' assignment.)
    from reportlab.graphics.shapes import Drawing
    D = Drawing(120, 120)
    D.add(ShadedPolygon(points=(10, 10, 60, 60, 110, 10), strokeColor=None,
                        strokeWidth=1, angle=90, numShades=50, cylinderMode=0))
    D.save(formats=['gif'], fnRoot='shobj', outDir='/tmp')
|
EduPepperPDTesting/pepper2013-testing
|
lms/djangoapps/reportlab/graphics/widgets/grids.py
|
Python
|
agpl-3.0
| 18,133 | 0.013511 |
import os
import time
import traceback
from lib.FileManager.FM import REQUEST_DELAY
from lib.FileManager.workers.baseWorkerCustomer import BaseWorkerCustomer
class CreateCopy(BaseWorkerCustomer):
    """Worker that duplicates FTP paths in place under a '<name> copy(N)' name."""
    def __init__(self, paths, session, *args, **kwargs):
        # paths: FTP paths to duplicate; session: data used to open the FTP connection.
        super(CreateCopy, self).__init__(*args, **kwargs)
        self.paths = paths
        self.session = session
    def run(self):
        """Copy every path in self.paths, reporting progress and final status via callbacks."""
        try:
            self.preload()
            self.logger.info("CreateCopy process run")
            ftp = self.get_ftp_connection(self.session)
            # Temporary hash table of directories whose contents will be listed
            directories = {}
            for path in self.paths:
                dirname = ftp.path.dirname(path)
                if dirname not in directories.keys():
                    directories[dirname] = []
                directories[dirname].append(path)
            # List of {source, target} mappings, one per path
            copy_paths = []
            # This convoluted pass builds the source -> target list so each copy gets
            # a pretty name, taking possible name collisions into account
            for dirname, dir_paths in directories.items():
                dir_listing = ftp.listdir(dirname)
                for dir_path in dir_paths:
                    i = 0
                    exist = False
                    if ftp.isdir(dir_path):
                        filename = os.path.basename(dir_path)
                        ext = ''
                    else:
                        filename, file_extension = ftp.path.splitext(os.path.basename(dir_path))
                        ext = file_extension
                    copy_name = filename + ' copy' + ext if i == 0 else filename + ' copy(' + str(i) + ')' + ext
                    for dir_current_path in dir_listing:
                        if copy_name == dir_current_path:
                            exist = True
                            i += 1
                            break
                    if not exist:
                        copy_paths.append({
                            'source': dir_path,
                            'target': ftp.path.join(dirname, copy_name)
                        })
                    # Collision found: keep bumping the (N) suffix until a free name appears.
                    while exist:
                        exist = False
                        if ftp.isdir(dir_path):
                            filename = ftp.path.basename(dir_path)
                            ext = ''
                        else:
                            # NOTE(review): unlike the first pass above, this splits the full
                            # dir_path rather than its basename (and uses os.path.join below
                            # instead of ftp.path.join) — verify this asymmetry is intended.
                            filename, file_extension = ftp.path.splitext(dir_path)
                            ext = file_extension
                        copy_name = filename + ' copy' + ext if i == 0 else filename + ' copy(' + str(i) + ')' + ext
                        for dir_current_path in dir_listing:
                            if copy_name == dir_current_path:
                                exist = True
                                i += 1
                                break
                        if not exist:
                            dir_listing.append(copy_name)
                            copy_paths.append({
                                'source': dir_path,
                                'target': os.path.join(dirname, copy_name)
                            })
            success_paths = []
            error_paths = []
            created_paths = []
            next_tick = time.time() + REQUEST_DELAY
            for copy_path in copy_paths:
                try:
                    source_path = copy_path.get('source')
                    target_path = copy_path.get('target')
                    if ftp.isfile(source_path):
                        copy_result = ftp.copy_file(source_path, ftp.path.dirname(target_path), overwrite=True,
                                                    rename=target_path)
                        if not copy_result['success'] or len(copy_result['file_list']['failed']) > 0:
                            raise copy_result['error'] if copy_result['error'] is not None else Exception(
                                "Upload error")
                    elif ftp.isdir(source_path):
                        copy_result = ftp.copy_dir(source_path, ftp.path.dirname(target_path), overwrite=True,
                                                   rename=target_path)
                        if not copy_result['success'] or len(copy_result['file_list']['failed']) > 0:
                            raise copy_result['error'] if copy_result['error'] is not None else Exception(
                                "Upload error")
                    else:
                        # Neither file nor directory: record the failure and stop copying
                        error_paths.append(source_path)
                        break
                    success_paths.append(source_path)
                    created_paths.append(ftp.file_info(target_path))
                    if time.time() > next_tick:
                        # Throttled progress notification (at most once per REQUEST_DELAY)
                        progress = {
                            'percent': round(float(len(success_paths)) / float(len(copy_paths)), 2),
                            'text': str(
                                int(round(float(len(success_paths)) / float(len(copy_paths)), 2) * 100)) + '%'
                        }
                        self.on_running(self.status_id, progress=progress, pid=self.pid, pname=self.name)
                        next_tick = time.time() + REQUEST_DELAY
                except Exception as e:
                    self.logger.error("Error copy file %s , error %s" % (str(source_path), str(e)))
                    error_paths.append(source_path)
            result = {
                "success": success_paths,
                "errors": error_paths,
                "items": created_paths
            }
            # Send a final progress update, otherwise users think the copy did not complete
            progress = {
                'percent': round(float(len(success_paths)) / float(len(copy_paths)), 2),
                'text': str(int(round(float(len(success_paths)) / float(len(copy_paths)), 2) * 100)) + '%'
            }
            time.sleep(REQUEST_DELAY)
            self.on_success(self.status_id, data=result, progress=progress, pid=self.pid, pname=self.name)
        except Exception as e:
            result = {
                "error": True,
                "message": str(e),
                "traceback": traceback.format_exc()
            }
            self.on_error(self.status_id, result, pid=self.pid, pname=self.name)
|
LTD-Beget/sprutio-rpc
|
lib/FileManager/workers/ftp/createCopy.py
|
Python
|
gpl-3.0
| 6,690 | 0.003101 |
from __future__ import unicode_literals
from django.db.models import fields
from django.utils.translation import ugettext_lazy as _
from ...models.field import FieldDefinition
class _BooleanMeta:
    # Shared Meta mix-in: groups both boolean field definitions under the
    # translated 'Boolean' category.
    defined_field_category = _('Boolean')
class BooleanFieldDefinition(FieldDefinition):
    """Proxy FieldDefinition backed by django's BooleanField."""
    class Meta(_BooleanMeta):
        app_label = 'boolean'
        proxy = True
    defined_field_class = fields.BooleanField
class NullBooleanFieldDefinition(FieldDefinition):
    """Proxy FieldDefinition backed by django's NullBooleanField."""
    class Meta(_BooleanMeta):
        app_label = 'boolean'
        proxy = True
    defined_field_class = fields.NullBooleanField
|
charettes/django-mutant
|
mutant/contrib/boolean/models.py
|
Python
|
mit
| 610 | 0 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Auto-generated South schema migration: add Hardwarerelease.medium."""
    def forwards(self, orm):
        """Apply: add the 'medium' CharField column to ashop_hardwarerelease."""
        # Adding field 'Hardwarerelease.medium'
        db.add_column('ashop_hardwarerelease', 'medium', self.gf('django.db.models.fields.CharField')(default=0, max_length=10), keep_default=False)
    def backwards(self, orm):
        """Revert: drop the 'medium' column again."""
        # Deleting field 'Hardwarerelease.medium'
        db.delete_column('ashop_hardwarerelease', 'medium')
    # Frozen ORM snapshot used by South at migration time; auto-generated —
    # do not edit by hand.
    models = {
        'alibrary.artist': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Artist'},
            'biography': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
            'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['alibrary.Artist']", 'through': "orm['alibrary.ArtistMembership']", 'symmetrical': 'False'}),
            'multiple': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'professions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['alibrary.Profession']", 'through': "orm['alibrary.ArtistProfessions']", 'symmetrical': 'False'}),
            'real_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'db_index': 'True'}),
            'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
        },
        'alibrary.artistmembership': {
            'Meta': {'object_name': 'ArtistMembership'},
            'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'artist_child'", 'to': "orm['alibrary.Artist']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'artist_parent'", 'to': "orm['alibrary.Artist']"}),
            'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_membership_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"})
        },
        'alibrary.artistprofessions': {
            'Meta': {'object_name': 'ArtistProfessions'},
            'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Artist']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'profession': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Profession']"})
        },
        'alibrary.label': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Label'},
            'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'email_main': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'first_placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'label_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'labelcode': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'label_children'", 'null': 'True', 'to': "orm['alibrary.Label']"}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'default': "'7437b6be-ab03-4a9d-af4e-dbdd430c819e'", 'max_length': '36'})
        },
        'alibrary.profession': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Profession'},
            'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'in_listing': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'alibrary.release': {
            'Meta': {'ordering': "('releasedate',)", 'object_name': 'Release'},
            'catalognumber': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'extra_artists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['alibrary.Profession']", 'null': 'True', 'through': "orm['alibrary.ReleaseExtraartists']", 'blank': 'True'}),
            'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'label': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_label'", 'to': "orm['alibrary.Label']"}),
            'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'placeholder_1': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'pressings': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
            'releasedate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'releasetype': ('django.db.models.fields.CharField', [], {'default': "'other'", 'max_length': '12'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'db_index': 'True'}),
            'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
        },
        'alibrary.releaseextraartists': {
            'Meta': {'object_name': 'ReleaseExtraartists'},
            'artist': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_extraartist_artist'", 'to': "orm['alibrary.Artist']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_extraartist_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_extraartist_release'", 'to': "orm['alibrary.Release']"})
        },
        'ashop.baseproduct': {
            'Meta': {'ordering': "['name']", 'object_name': 'Baseproduct', '_ormbases': ['shop.Product']},
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'needs_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'picture': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'baseproduct_picture'", 'null': 'True', 'to': "orm['filer.Image']"}),
            'picture_listing': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'baseproduct_picture_listing'", 'null': 'True', 'to': "orm['filer.Image']"}),
            'product_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shop.Product']", 'unique': 'True', 'primary_key': 'True'}),
            'subline': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'weight': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
        },
        'ashop.downloadrelease': {
            'Meta': {'ordering': "['name']", 'object_name': 'Downloadrelease', '_ormbases': ['ashop.Releaseproduct']},
            'releaseproduct_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ashop.Releaseproduct']", 'unique': 'True', 'primary_key': 'True'})
        },
        'ashop.hardwarerelease': {
            'Meta': {'ordering': "['name']", 'object_name': 'Hardwarerelease', '_ormbases': ['ashop.Releaseproduct']},
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'medium': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'needs_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'releaseproduct_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ashop.Releaseproduct']", 'unique': 'True', 'primary_key': 'True'}),
            'weight': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
        },
        'ashop.releaseproduct': {
            'Meta': {'ordering': "['name']", 'object_name': 'Releaseproduct', '_ormbases': ['shop.Product']},
            'product_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shop.Product']", 'unique': 'True', 'primary_key': 'True'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'releaseproduct_release'", 'to': "orm['alibrary.Release']"})
        },
        'ashop.singleproduct': {
            'Meta': {'object_name': 'SingleProduct', 'db_table': "'cmsplugin_singleproduct'", '_ormbases': ['cms.CMSPlugin']},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ashop.Baseproduct']"}),
            'style': ('django.db.models.fields.CharField', [], {'default': "'l'", 'max_length': '24'})
        },
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'cms.cmsplugin': {
            'Meta': {'object_name': 'CMSPlugin'},
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
            'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'filer.file': {
            'Meta': {'object_name': 'File'},
            '_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
            'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': "orm['auth.User']"}),
            'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
            'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
            'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
        },
        'filer.folder': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': "orm['auth.User']"}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
        },
        'filer.image': {
            'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
            '_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            '_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
            'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
        },
        'shop.product': {
            'Meta': {'object_name': 'Product'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_shop.product_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'unit_price': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
        },
        'taggit.tag': {
            'Meta': {'object_name': 'Tag'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
        },
        'taggit.taggeditem': {
            'Meta': {'object_name': 'TaggedItem'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
        }
    }
    complete_apps = ['ashop']
|
hzlf/openbroadcast
|
website/apps/ashop/migrations/0015_auto__add_field_hardwarerelease_medium.py
|
Python
|
gpl-3.0
| 23,666 | 0.007817 |
import random
import re
from io import BytesIO
from typing import Awaitable, List
import matplotlib.pyplot as plt
import seaborn as sns
from curio.thread import async_thread
from curious.commands import Context, Plugin
from curious.commands.decorators import autoplugin, ratelimit
from yapf.yapflib.style import CreatePEP8Style
from yapf.yapflib.yapf_api import FormatCode
from jokusoramame.utils import rgbize
# Matches a fenced code block of the form ```lang\ncode``` and captures the
# language tag (group 1) and the code body (group 2).  DOTALL lets the code
# body span multiple lines.
code_regexp = re.compile(r"```([^\n]+)\n?(.+)\n?```", re.DOTALL)

# The four tables below are weighted-choice pools used by
# ``Misc.command_ideology``: each maps a name fragment to its relative weight
# for ``random.choices``.  The empty-string entries give a high chance of
# omitting that fragment entirely.  NOTE: insertion order matters for seeded
# reproducibility, so do not reorder entries.
ADJECTIVES = {
    "Trans-Exclusionary ": 1,
    "Smithian ": 2,
    "Ricardian ": 2,
    "Randian ": 3,
    "Hegelian ": 3,
    "Synthesist ": 3,
    "Woke ": 4,
    "Vegan ": 4,
    "Green ": 6,
    "Insurrectionary ": 6,
    "Anti-Imperialist ": 6,
    "Jewish ": 8,
    "Bolshevik ": 8,
    "Post-left ": 8,
    "Inclusive ": 9,
    "Individualist ": 9,
    "Queer ": 10,
    "Atheist ": 10,
    "Liberal ": 10,
    "Libertarian ": 10,
    "Conservative ": 10,
    "Social ": 12,
    "Islamic ": 12,
    "Radical ": 12,
    "Catholic ": 12,
    "Esoteric ": 12,
    "Christian ": 12,
    "Progressive ": 12,
    "Post-Colonial ": 12,
    "Democratic ": 13,
    "": 30
}
# Prefix fragments attached directly before the ideology name.
PREFIXES = {
    "Alt-": 1,
    "Bio-": 1,
    "Taoist ": 2,
    "Left ": 3,
    "Post-": 3,
    "Anarcha-": 3,
    "Avant Garde ": 3,
    "Eco-": 4,
    "Communal ": 6,
    "Afro-": 8,
    "Ethno-": 8,
    "Ultra-": 8,
    "Neo-": 10,
    "Pan-": 10,
    "Anti-": 10,
    "Paleo-": 10,
    "Techno-": 10,
    "Market ": 10,
    "Revolutionary ": 10,
    "Crypto-": 12,
    "Anarcho-": 12,
    "National ": 12,
    "Orthodox ": 12,
    "": 40
}
# Core ideology names; exactly one is always chosen (no empty entry here).
IDEOLOGIES = {
    "Posadism": 1,
    "Sexualism": 1,
    "Kemalism": 2,
    "Unruheism": 2,
    "Distributism": 2,
    "Titoism": 3,
    "Putinism": 3,
    "Makhnovism": 3,
    "Georgism": 4,
    "Keynesian": 4,
    "Platformism": 4,
    "Municipalism": 5,
    "Confederalism": 5,
    "Egoism": 6,
    "Luddite": 6,
    "Agorism": 6,
    "Unionism": 6,
    "Thatcherite": 6,
    "Minarchism": 7,
    "Ba'athism": 8,
    "Trotskyism": 8,
    "Syndicalism": 8,
    "Luxemburgism": 8,
    "Strasserism": 10,
    "Maoism": 12,
    "Fascism": 12,
    "Marxism": 12,
    "Zionism": 12,
    "Centrism": 12,
    "Pacifism": 12,
    "Leninism": 12,
    "Populism": 12,
    "Futurism": 12,
    "Feminism": 12,
    "Humanism": 12,
    "Mutualism": 12,
    "Communism": 12,
    "Stalinism": 12,
    "Globalism": 12,
    "Socialism": 12,
    "Capitalism": 12,
    "Monarchism": 12,
    "Primitivism": 12,
    "Nationalism": 12,
    "Transhumanism": 12,
    "Traditionalism": 12,
    "Environmentalism": 12,
    "Accelerationism": 12
}
# Rare humorous suffixes; the empty entry makes "no suffix" the common case.
SUFFIXES = {
    " in One Country": 1,
    " with Masonic elements": 1,
    ' with "rational debate"': 1,
    " with Phlegmsky's vanguardism": 1,
    " with Chinese characteristics": 1,
    " with a new mode of production": 1,
    "": 100
}
@autoplugin
class Misc(Plugin):
    """
    Miscellaneous commands.
    """

    async def command_ideology(self, ctx: Context):
        """
        Creates an ideology just for you!

        Concatenates one weighted random pick from each of the four
        fragment tables (adjective, prefix, ideology, suffix).
        """
        message = ''
        for d in (ADJECTIVES, PREFIXES, IDEOLOGIES, SUFFIXES):
            message += random.choices(list(d.keys()), list(d.values()))[0]
        await ctx.channel.messages.send(message)

    @ratelimit(limit=1, time=30)
    async def command_palette(self, ctx: Context, *, colours: List[int]):
        """
        Shows a palette plot.

        Renders at most the first 12 colours twice — once on the default
        style and once on the dark background style — and uploads both
        images.  Plotting happens in a thread because matplotlib is
        blocking, and is serialised via the bot-wide plot lock.
        """
        pal_colours = rgbize(colours[:12])

        @async_thread
        def plot_palette() -> Awaitable[BytesIO]:
            # Render the palette with the default (light) style into a PNG.
            with ctx.bot._plot_lock:
                sns.palplot(pal_colours, size=1)
                plt.tight_layout()  # remove useless padding
                buf = BytesIO()
                plt.savefig(buf, format="png")
                buf.seek(0)
                # Reset global pyplot state so the next plot starts clean.
                plt.clf()
                plt.cla()
                return buf

        @async_thread
        def plot_dark_palette() -> Awaitable[BytesIO]:
            # Render the same palette on a dark background into a PNG.
            with ctx.bot._plot_lock:
                with plt.style.context("dark_background"):
                    sns.palplot(pal_colours, size=1)
                    plt.tight_layout()  # remove useless padding
                    buf = BytesIO()
                    plt.savefig(buf, format="png")
                    buf.seek(0)
                    plt.clf()
                    plt.cla()
                    return buf

        if ctx.bot._plot_lock.locked():
            await ctx.channel.messages.send("Waiting for plot lock...")

        async with ctx.channel.typing:
            buf = await plot_palette()
            buf2 = await plot_dark_palette()

        await ctx.channel.messages.upload(fp=buf.read(), filename="plot.png")
        # Fix: pass the bytes like the light-mode upload above, not the raw
        # BytesIO object — both uploads now use the same argument shape.
        await ctx.channel.messages.upload(fp=buf2.read(), filename="plot_dark.png")

    def _normalize_language(self, lang: str) -> str:
        """
        Normalizes a language name into consistency.
        """
        lang = lang.lower().rstrip("\n")
        # (removed leftover debug print of the raw language tag)
        if lang in ["py", "python", "py3k"]:
            return "python"
        return lang

    async def command_reformat(self, ctx: Context, *, message: str):
        """
        Reformats some code.

        Expects a fenced code block with a language tag; currently only
        Python is supported (reformatted via yapf with a 100-column PEP 8
        style).
        """
        code_match = code_regexp.match(message)
        if code_match is None:
            return await ctx.channel.messages.send(":x: Could not find a valid code block with "
                                                   "language.")

        language, code = code_match.groups()
        code = code.replace("\t", "    ")
        language = self._normalize_language(language)

        if language == "python":
            # yapfify
            style = CreatePEP8Style()
            style['COLUMN_LIMIT'] = 100
            reformatted, _ = FormatCode(code, style_config=style)
            return await ctx.channel.messages.send(f"```py\n{reformatted}```")

        return await ctx.channel.messages.send(":x: Unknown language.")
|
SunDwarf/Jokusoramame
|
jokusoramame/plugins/misc.py
|
Python
|
gpl-3.0
| 5,938 | 0.000168 |
import collections
import datetime
import mock
import pytz
from babel import dates, Locale
from schema import Schema, And, Use, Or
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from nose.tools import * # noqa PEP8 asserts
from framework.auth import Auth
from framework.auth.core import User
from framework.auth.signals import contributor_removed
from framework.auth.signals import node_deleted
from framework.guid.model import Guid
from website.notifications.tasks import get_users_emails, send_users_email, group_by_node, remove_notifications
from website.notifications import constants
from website.notifications.model import NotificationDigest
from website.notifications.model import NotificationSubscription
from website.notifications import emails
from website.notifications import utils
from website.project.model import Node, Comment
from website import mails
from website.util import api_url_for
from website.util import web_url_for
from tests import factories
from tests.base import capture_signals
from tests.base import OsfTestCase
class TestNotificationsModels(OsfTestCase):
    """Tests for ``Node.has_permission_on_children`` across project trees."""

    def setUp(self):
        super(TestNotificationsModels, self).setUp()
        # Create project with component
        self.user = factories.UserFactory()
        self.consolidate_auth = Auth(user=self.user)
        self.parent = factories.ProjectFactory(creator=self.user)
        self.node = factories.NodeFactory(creator=self.user, parent=self.parent)

    def test_has_permission_on_children(self):
        """Read access on any one child is enough for a True result."""
        non_admin_user = factories.UserFactory()
        parent = factories.ProjectFactory()
        parent.add_contributor(contributor=non_admin_user, permissions=['read'])
        parent.save()
        node = factories.NodeFactory(parent=parent, category='project')
        sub_component = factories.NodeFactory(parent=node)
        sub_component.add_contributor(contributor=non_admin_user)
        sub_component.save()
        # Sibling the user is NOT on — its existence must not matter.
        sub_component2 = factories.NodeFactory(parent=node)
        assert_true(
            node.has_permission_on_children(non_admin_user, 'read')
        )

    def test_check_user_has_permission_excludes_deleted_components(self):
        """A deleted child no longer grants permission-on-children."""
        non_admin_user = factories.UserFactory()
        parent = factories.ProjectFactory()
        parent.add_contributor(contributor=non_admin_user, permissions=['read'])
        parent.save()
        node = factories.NodeFactory(parent=parent, category='project')
        sub_component = factories.NodeFactory(parent=node)
        sub_component.add_contributor(contributor=non_admin_user)
        sub_component.is_deleted = True
        sub_component.save()
        sub_component2 = factories.NodeFactory(parent=node)
        assert_false(
            node.has_permission_on_children(non_admin_user,'read')
        )

    def test_check_user_does_not_have_permission_on_private_node_child(self):
        non_admin_user = factories.UserFactory()
        parent = factories.ProjectFactory()
        parent.add_contributor(contributor=non_admin_user, permissions=['read'])
        parent.save()
        node = factories.NodeFactory(parent=parent, category='project')
        sub_component = factories.NodeFactory(parent=node)
        assert_false(
            node.has_permission_on_children(non_admin_user,'read')
        )

    def test_check_user_child_node_permissions_false_if_no_children(self):
        non_admin_user = factories.UserFactory()
        parent = factories.ProjectFactory()
        parent.add_contributor(contributor=non_admin_user, permissions=['read'])
        parent.save()
        node = factories.NodeFactory(parent=parent, category='project')
        assert_false(
            node.has_permission_on_children(non_admin_user,'read')
        )

    def test_check_admin_has_permissions_on_private_component(self):
        parent = factories.ProjectFactory()
        node = factories.NodeFactory(parent=parent, category='project')
        sub_component = factories.NodeFactory(parent=node)
        assert_true(
            node.has_permission_on_children(parent.creator,'read')
        )

    def test_check_user_private_node_child_permissions_excludes_pointers(self):
        """Pointed-to (linked) projects must not count as children."""
        user = factories.UserFactory()
        parent = factories.ProjectFactory()
        pointed = factories.ProjectFactory(contributor=user)
        parent.add_pointer(pointed, Auth(parent.creator))
        parent.save()
        assert_false(
            parent.has_permission_on_children(user,'read')
        )
class TestSubscriptionView(OsfTestCase):
    """Tests for the ``configure_subscription`` API endpoint."""

    def setUp(self):
        super(TestSubscriptionView, self).setUp()
        self.node = factories.NodeFactory()
        self.user = self.node.creator

    def test_create_new_subscription(self):
        """POSTing a notification type creates/updates the subscription."""
        payload = {
            'id': self.node._id,
            'event': 'comments',
            'notification_type': 'email_transactional'
        }
        url = api_url_for('configure_subscription')
        self.app.post_json(url, payload, auth=self.node.creator.auth)

        # check that subscription was created
        event_id = self.node._id + '_' + 'comments'
        s = NotificationSubscription.find_one(Q('_id', 'eq', event_id))

        # check that user was added to notification_type field
        assert_equal(payload['id'], s.owner._id)
        assert_equal(payload['event'], s.event_name)
        assert_in(self.node.creator, getattr(s, payload['notification_type']))

        # change subscription
        new_payload = {
            'id': self.node._id,
            'event': 'comments',
            'notification_type': 'email_digest'
        }
        url = api_url_for('configure_subscription')
        self.app.post_json(url, new_payload, auth=self.node.creator.auth)
        s.reload()
        # The user must be moved, not duplicated, between type lists.
        assert_false(self.node.creator in getattr(s, payload['notification_type']))
        assert_in(self.node.creator, getattr(s, new_payload['notification_type']))

    def test_adopt_parent_subscription_default(self):
        """'adopt_parent' for a fresh event creates no subscription record."""
        payload = {
            'id': self.node._id,
            'event': 'comments',
            'notification_type': 'adopt_parent'
        }
        url = api_url_for('configure_subscription')
        self.app.post_json(url, payload, auth=self.node.creator.auth)
        event_id = self.node._id + '_' + 'comments'
        # confirm subscription was not created
        with assert_raises(NoResultsFound):
            NotificationSubscription.find_one(Q('_id', 'eq', event_id))

    def test_change_subscription_to_adopt_parent_subscription_removes_user(self):
        """Switching to 'adopt_parent' removes the user from every type list."""
        payload = {
            'id': self.node._id,
            'event': 'comments',
            'notification_type': 'email_transactional'
        }
        url = api_url_for('configure_subscription')
        self.app.post_json(url, payload, auth=self.node.creator.auth)

        # check that subscription was created
        event_id = self.node._id + '_' + 'comments'
        s = NotificationSubscription.find_one(Q('_id', 'eq', event_id))

        # change subscription to adopt_parent
        new_payload = {
            'id': self.node._id,
            'event': 'comments',
            'notification_type': 'adopt_parent'
        }
        url = api_url_for('configure_subscription')
        self.app.post_json(url, new_payload, auth=self.node.creator.auth)
        s.reload()

        # assert that user is removed from the subscription entirely
        for n in constants.NOTIFICATION_TYPES:
            assert_false(self.node.creator in getattr(s, n))
class TestRemoveContributor(OsfTestCase):
    """
    Checks that removing a contributor also removes them from the node's
    notification subscriptions — except when they retain read access via an
    admin role on the parent project.
    """

    def setUp(self):
        # Bug fix: was ``super(OsfTestCase, self).setUp()``, which skipped
        # OsfTestCase's own setUp entirely.  Call the immediate parent, as
        # every sibling test class in this module does.
        super(TestRemoveContributor, self).setUp()
        self.project = factories.ProjectFactory()
        self.contributor = factories.UserFactory()
        self.project.add_contributor(contributor=self.contributor, permissions=['read'])
        self.project.save()

        # Project-level 'comments' subscription with both users opted in.
        self.subscription = factories.NotificationSubscriptionFactory(
            _id=self.project._id + '_comments',
            owner=self.project
        )
        self.subscription.save()
        self.subscription.email_transactional.append(self.contributor)
        self.subscription.email_transactional.append(self.project.creator)
        self.subscription.save()

        # Component under the project, with its own 'comments' subscription.
        self.node = factories.NodeFactory(parent=self.project)
        self.node.add_contributor(contributor=self.project.creator, permissions=['read', 'write', 'admin'])
        self.node.save()
        self.node_subscription = factories.NotificationSubscriptionFactory(
            _id=self.node._id + '_comments',
            owner=self.node
        )
        self.node_subscription.save()
        self.node_subscription.email_transactional.append(self.project.creator)
        self.node_subscription.email_transactional.append(self.node.creator)
        self.node_subscription.save()

    def test_removed_non_admin_contributor_is_removed_from_subscriptions(self):
        assert_in(self.contributor, self.subscription.email_transactional)
        self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator))
        assert_not_in(self.contributor, self.project.contributors)
        assert_not_in(self.contributor, self.subscription.email_transactional)

    def test_removed_non_parent_admin_contributor_is_removed_from_subscriptions(self):
        assert_in(self.node.creator, self.node_subscription.email_transactional)
        self.node.remove_contributor(self.node.creator, auth=Auth(self.node.creator))
        assert_not_in(self.node.creator, self.node.contributors)
        assert_not_in(self.node.creator, self.node_subscription.email_transactional)

    def test_removed_contributor_admin_on_parent_not_removed_from_node_subscription(self):
        # Admin on parent project is removed as a contributor on a component. Check
        # that admin is not removed from component subscriptions, as the admin
        # now has read-only access.
        assert_in(self.project.creator, self.node_subscription.email_transactional)
        self.node.remove_contributor(self.project.creator, auth=Auth(self.project.creator))
        assert_not_in(self.project.creator, self.node.contributors)
        assert_in(self.project.creator, self.node_subscription.email_transactional)

    def test_remove_contributor_signal_called_when_contributor_is_removed(self):
        with capture_signals() as mock_signals:
            self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator))
        assert_equal(mock_signals.signals_sent(), set([contributor_removed]))
class TestRemoveNodeSignal(OsfTestCase):
    """Deleting a node must emit ``node_deleted`` and clean up subscriptions."""

    def test_node_subscriptions_and_backrefs_removed_when_node_is_deleted(self):
        project = factories.ProjectFactory()
        subscription = factories.NotificationSubscriptionFactory(
            _id=project._id + '_comments',
            owner=project
        )
        subscription.save()
        subscription.email_transactional.append(project.creator)
        subscription.save()

        # Backref from the user to the subscription exists before deletion.
        s = getattr(project.creator, 'email_transactional', [])
        assert_equal(len(s), 1)

        with capture_signals() as mock_signals:
            project.remove_node(auth=Auth(project.creator))
        assert_true(project.is_deleted)
        assert_equal(mock_signals.signals_sent(), set([node_deleted]))

        # Backref and the subscription document itself are gone afterwards.
        s = getattr(project.creator, 'email_transactional', [])
        assert_equal(len(s), 0)

        with assert_raises(NoResultsFound):
            NotificationSubscription.find_one(Q('owner', 'eq', project))
def list_or_dict(data):
    """Yield every value inside *data* that is itself a list or a dict.

    For a dict, the values are scanned in key-insertion order; for a list,
    the items are scanned in sequence.  Any other input yields nothing.
    """
    if isinstance(data, dict):
        candidates = data.values()
    elif isinstance(data, list):
        candidates = data
    else:
        return
    for value in candidates:
        if isinstance(value, (dict, list)):
            yield value
def has(data, sub_data):
    """Recursively look for *sub_data* anywhere inside *data*.

    WARNING: Don't use on huge structures.

    :param data: Data structure (list or dict; dicts are matched on keys)
    :param sub_data: subset being checked for
    :return: True or False

    Bug fix: the original used the Python-2-only ``generator.next()`` method,
    which raises ``AttributeError`` on Python 3; ``any()`` expresses the same
    "is it at this level?" check portably.
    """
    # Direct hit at this level?  (Iterating a dict compares its keys.)
    if any(item == sub_data for item in data):
        return True
    # Otherwise descend into every nested list/dict.
    for nested in list_or_dict(data):
        if has(nested, sub_data):
            return True
    return False
def subscription_schema(project, structure, level=0):
    """Recursively build a validation schema from a nested node/event layout.

    :param project: object whose attribute types drive the field validators
    :param structure: nested lists describing child nodes, plus event markers
    :param level: recursion depth, embedded in error messages for debugging
    :return: a ``Schema`` at the top level, otherwise a plain schema dict
    """
    # Child schemas: one per nested structure, plus the trailing event entry.
    children = [
        subscription_schema(project, nested, level=level + 1)
        for nested in list_or_dict(structure)
    ]
    children.append(event_schema(level))

    node_schema = {
        'node': {
            'id': Use(type(project._id), error="node_id{}".format(level)),
            'title': Use(type(project.title), error="node_title{}".format(level)),
            'url': Use(type(project.url), error="node_{}".format(level))
        },
        'kind': And(str, Use(lambda s: s in ('node', 'folder'),
                             error="kind didn't match node or folder {}".format(level))),
        'nodeType': Use(lambda s: s in ('project', 'component'), error='nodeType not project or component'),
        'category': Use(lambda s: s in Node.CATEGORY_MAP, error='category not in Node.CATEGORY_MAP'),
        'permissions': {
            'view': Use(lambda s: s in (True, False), error='view permissions is not True/False')
        },
        'children': children
    }
    # Only the outermost call wraps the result in a list Schema.
    return Schema([node_schema]) if level == 0 else node_schema
def event_schema(level=None):
    """Return the schema dict that validates a single serialized event entry.

    :param level: recursion depth, embedded in validator error messages
    """
    title_validator = And(Use(str, error="event_title{} not a string".format(level)),
                          Use(lambda s: s in constants.NOTIFICATION_TYPES,
                              error="event_title{} not in list".format(level)))
    description_validator = And(Use(str, error="event_desc{} not a string".format(level)),
                                Use(lambda s: s in constants.NODE_SUBSCRIPTIONS_AVAILABLE,
                                    error="event_desc{} not in list".format(level)))
    return {
        'event': {
            'title': title_validator,
            'description': description_validator,
            'notificationType': And(str, Or('adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES)),
            'parent_notification_type': Or(None, 'adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES)
        },
        'kind': 'event',
        # Events are always leaves: their child list must be empty.
        'children': And(list, lambda l: len(l) == 0)
    }
class TestNotificationUtils(OsfTestCase):
    """Tests for ``website.notifications.utils`` helpers: key parsing,
    subscription collection, configured-project discovery, and the
    serialization used by the notification-settings UI."""

    def setUp(self):
        super(TestNotificationUtils, self).setUp()
        self.user = factories.UserFactory()
        self.project = factories.ProjectFactory(creator=self.user)
        # Project-level, node-level, and user-level subscriptions, all with
        # self.user opted in to transactional email.
        self.project_subscription = factories.NotificationSubscriptionFactory(
            _id=self.project._id + '_' + 'comments',
            owner=self.project,
            event_name='comments'
        )
        self.project_subscription.save()
        self.project_subscription.email_transactional.append(self.user)
        self.project_subscription.save()

        self.node = factories.NodeFactory(parent=self.project, creator=self.user)
        self.node_subscription = factories.NotificationSubscriptionFactory(
            _id=self.node._id + '_' + 'comments',
            owner=self.node,
            event_name='comments'
        )
        self.node_subscription.save()
        self.node_subscription.email_transactional.append(self.user)
        self.node_subscription.save()

        self.user_subscription = factories.NotificationSubscriptionFactory(
            _id=self.user._id + '_' + 'comment_replies',
            owner=self.user,
            event_name='comment_replies'
        )
        self.user_subscription.save()
        self.user_subscription.email_transactional.append(self.user)
        self.user_subscription.save()

    def test_to_subscription_key(self):
        key = utils.to_subscription_key('xyz', 'comments')
        assert_equal(key, 'xyz_comments')

    def test_from_subscription_key(self):
        parsed_key = utils.from_subscription_key('xyz_comment_replies')
        assert_equal(parsed_key, {
            'uid': 'xyz',
            'event': 'comment_replies'
        })

    def test_get_all_user_subscriptions(self):
        user_subscriptions = [x for x in utils.get_all_user_subscriptions(self.user)]
        assert_in(self.project_subscription, user_subscriptions)
        assert_in(self.node_subscription, user_subscriptions)
        assert_in(self.user_subscription, user_subscriptions)
        assert_equal(len(user_subscriptions), 3)

    def test_get_all_node_subscriptions_given_user_subscriptions(self):
        user_subscriptions = utils.get_all_user_subscriptions(self.user)
        node_subscriptions = [x for x in utils.get_all_node_subscriptions(self.user, self.node,
                                                                          user_subscriptions=user_subscriptions)]
        assert_equal(node_subscriptions, [self.node_subscription])

    def test_get_all_node_subscriptions_given_user_and_node(self):
        node_subscriptions = [x for x in utils.get_all_node_subscriptions(self.user, self.node)]
        assert_equal(node_subscriptions, [self.node_subscription])

    def test_get_configured_project_ids_does_not_return_user_or_node_ids(self):
        configured_ids = utils.get_configured_projects(self.user)

        # No duplicates!
        assert_equal(len(configured_ids), 1)

        assert_in(self.project._id, configured_ids)
        assert_not_in(self.node._id, configured_ids)
        assert_not_in(self.user._id, configured_ids)

    def test_get_configured_project_ids_excludes_deleted_projects(self):
        project = factories.ProjectFactory()
        subscription = factories.NotificationSubscriptionFactory(
            _id=project._id + '_' + 'comments',
            owner=project
        )
        subscription.save()
        subscription.email_transactional.append(self.user)
        subscription.save()
        project.is_deleted = True
        project.save()
        assert_not_in(project._id, utils.get_configured_projects(self.user))

    def test_get_configured_project_ids_excludes_node_with_project_category(self):
        # A child node whose category is 'project' is still a component and
        # must not surface as a top-level configured project.
        node = factories.NodeFactory(parent=self.project, category='project')
        node_subscription = factories.NotificationSubscriptionFactory(
            _id=node._id + '_' + 'comments',
            owner=node,
            event_name='comments'
        )
        node_subscription.save()
        node_subscription.email_transactional.append(self.user)
        node_subscription.save()
        assert_not_in(node._id, utils.get_configured_projects(self.user))

    def test_get_configured_project_ids_includes_top_level_private_projects_if_subscriptions_on_node(self):
        private_project = factories.ProjectFactory()
        node = factories.NodeFactory(parent=private_project)
        node_subscription = factories.NotificationSubscriptionFactory(
            _id=node._id + '_comments',
            owner=node,
            event_name='comments'
        )
        node_subscription.email_transactional.append(node.creator)
        node_subscription.save()
        configured_project_ids = utils.get_configured_projects(node.creator)
        assert_in(private_project._id, configured_project_ids)

    def test_get_configured_project_ids_excludes_private_projects_if_no_subscriptions_on_node(self):
        private_project = factories.ProjectFactory()
        node = factories.NodeFactory(parent=private_project)
        configured_project_ids = utils.get_configured_projects(node.creator)
        assert_not_in(private_project._id, configured_project_ids)

    def test_get_parent_notification_type(self):
        nt = utils.get_parent_notification_type(self.node, 'comments', self.user)
        assert_equal(nt, 'email_transactional')

    def test_get_parent_notification_type_no_parent_subscriptions(self):
        node = factories.NodeFactory()
        nt = utils.get_parent_notification_type(node._id, 'comments', self.user)
        assert_equal(nt, None)

    def test_get_parent_notification_type_no_parent(self):
        project = factories.ProjectFactory()
        nt = utils.get_parent_notification_type(project._id, 'comments', self.user)
        assert_equal(nt, None)

    def test_get_parent_notification_type_handles_user_id(self):
        nt = utils.get_parent_notification_type(self.user._id, 'comments', self.user)
        assert_equal(nt, None)

    def test_format_data_project_settings(self):
        data = utils.format_data(self.user, [self.project._id])
        parent_event = {
            'event': {
                'title': 'comments',
                'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
                'notificationType': 'email_transactional',
                'parent_notification_type': None
            },
            'kind': 'event',
            'children': []
        }
        child_event = {
            'event': {
                'title': 'comments',
                'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
                'notificationType': 'email_transactional',
                'parent_notification_type': 'email_transactional'
            },
            'kind': 'event',
            'children': []
        }
        expected_new = [['event'], 'event']
        schema = subscription_schema(self.project, expected_new)
        assert schema.validate(data)
        assert has(data, parent_event)
        assert has(data, child_event)

    def test_format_data_node_settings(self):
        data = utils.format_data(self.user, [self.node._id])
        event = {
            'event': {
                'title': 'comments',
                'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
                'notificationType': 'email_transactional',
                'parent_notification_type': 'email_transactional'
            },
            'kind': 'event',
            'children': []
        }
        schema = subscription_schema(self.project, ['event'])
        assert schema.validate(data)
        assert has(data, event)

    def test_format_includes_admin_view_only_component_subscriptions(self):
        # Test private components in which parent project admins are not contributors still appear in their
        # notifications settings.
        node = factories.NodeFactory(parent=self.project)
        data = utils.format_data(self.user, [self.project._id])
        event = {
            'event': {
                'title': 'comments',
                'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
                'notificationType': 'adopt_parent',
                'parent_notification_type': 'email_transactional'
            },
            'kind': 'event',
            'children': [],
        }
        schema = subscription_schema(self.project, ['event', ['event'], ['event']])
        assert schema.validate(data)
        assert has(data, event)

    def test_format_data_excludes_pointers(self):
        project = factories.ProjectFactory()
        subscription = factories.NotificationSubscriptionFactory(
            _id=project._id + '_comments',
            owner=project,
            event_name='comments'
        )
        subscription.email_transactional.append(project.creator)
        subscription.save()
        pointed = factories.ProjectFactory()
        project.add_pointer(pointed, Auth(project.creator))
        project.save()
        configured_project_ids = utils.get_configured_projects(project.creator)
        data = utils.format_data(project.creator, configured_project_ids)
        event = {
            'event': {
                'title': 'comments',
                'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
                'notificationType': 'email_transactional',
                'parent_notification_type': None
            },
            'kind': 'event',
            'children': [],
        }
        schema = subscription_schema(self.project, ['event'])
        assert schema.validate(data)
        assert has(data, event)

    def test_format_data_user_subscriptions_includes_private_parent_if_configured_children(self):
        private_project = factories.ProjectFactory()
        node = factories.NodeFactory(parent=private_project)
        node_subscription = factories.NotificationSubscriptionFactory(
            _id=node._id + '_comments',
            owner=node,
            event_name='comments'
        )
        node_subscription.email_transactional.append(node.creator)
        node_subscription.save()
        configured_project_ids = utils.get_configured_projects(node.creator)
        data = utils.format_data(node.creator, configured_project_ids)
        event = {
            'event': {
                'title': 'comments',
                'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
                'notificationType': 'email_transactional',
                'parent_notification_type': None
            },
            'kind': 'event',
            'children': [],
        }
        schema = subscription_schema(self.project, ['event', ['event']])
        assert schema.validate(data)
        assert has(data, event)

    def test_format_user_subscriptions(self):
        data = utils.format_user_subscriptions(self.user)
        expected = [{
            'event': {
                'title': 'comment_replies',
                'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['comment_replies'],
                'notificationType': 'email_transactional',
                'parent_notification_type': None
            },
            'kind': 'event',
            'children': [],
        }]
        assert_equal(data, expected)

    def test_format_data_user_settings(self):
        data = utils.format_user_and_project_subscriptions(self.user)
        expected = [
            {
                'node': {
                    'id': self.user._id,
                    'title': 'User Notifications'
                },
                'kind': 'heading',
                'children': utils.format_user_subscriptions(self.user)
            },
            {
                'node': {
                    'id': '',
                    'title': 'Project Notifications'
                },
                'kind': 'heading',
                'children': utils.format_data(self.user, utils.get_configured_projects(self.user))
            }]
        assert_equal(data, expected)

    def test_serialize_user_level_event(self):
        user_subscriptions = [x for x in utils.get_all_user_subscriptions(self.user)]
        user_subscription = None
        for subscription in user_subscriptions:
            if 'comment_replies' in getattr(subscription, 'event_name'):
                user_subscription = subscription
        data = utils.serialize_event(self.user, event_description='comment_replies',
                                     subscription=user_subscription)
        expected = {
            'event': {
                'title': 'comment_replies',
                'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['comment_replies'],
                'notificationType': 'email_transactional',
                'parent_notification_type': None
            },
            'kind': 'event',
            'children': []
        }
        assert_equal(data, expected)

    def test_serialize_node_level_event(self):
        node_subscriptions = [x for x in utils.get_all_node_subscriptions(self.user, self.node)]
        data = utils.serialize_event(user=self.user, event_description='comments',
                                     subscription=node_subscriptions[0], node=self.node)
        expected = {
            'event': {
                'title': 'comments',
                'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
                'notificationType': 'email_transactional',
                'parent_notification_type': 'email_transactional'
            },
            'kind': 'event',
            'children': [],
        }
        assert_equal(data, expected)

    def test_serialize_node_level_event_that_adopts_parent_settings(self):
        user = factories.UserFactory()
        self.project.add_contributor(contributor=user, permissions=['read'])
        self.project.save()
        self.project_subscription.email_transactional.append(user)
        self.project_subscription.save()
        self.node.add_contributor(contributor=user, permissions=['read'])
        self.node.save()
        # The user has no node-level preference, so the serialized event should
        # report 'adopt_parent' with the parent's transactional setting.
        node_subscriptions = [x for x in utils.get_all_node_subscriptions(user, self.node)]
        data = utils.serialize_event(user=user, event_description='comments',
                                     subscription=node_subscriptions, node=self.node)
        expected = {
            'event': {
                'title': 'comments',
                'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
                'notificationType': 'adopt_parent',
                'parent_notification_type': 'email_transactional'
            },
            'kind': 'event',
            'children': [],
        }
        assert_equal(data, expected)
class TestNotificationsDict(OsfTestCase):
    """Tests for ``utils.NotificationsDict``'s nested message accumulation."""

    def test_notifications_dict_add_message_returns_proper_format(self):
        d = utils.NotificationsDict()
        message = {
            'message': 'Freddie commented on your project',
            'timestamp': datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
        }
        message2 = {
            'message': 'Mercury commented on your component',
            'timestamp': datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
        }

        # Messages are filed under a key path; deeper paths create nested dicts.
        d.add_message(['project'], message)
        d.add_message(['project', 'node'], message2)

        expected = {
            'messages': [],
            'children': collections.defaultdict(
                utils.NotificationsDict, {
                    'project': {
                        'messages': [message],
                        'children': collections.defaultdict(utils.NotificationsDict, {
                            'node': {
                                'messages': [message2],
                                'children': collections.defaultdict(utils.NotificationsDict, {})
                            }
                        })
                    }
                }
            )}
        assert_equal(d, expected)
class TestCompileSubscriptions(OsfTestCase):
def setUp(self):
super(TestCompileSubscriptions, self).setUp()
self.user_1 = factories.UserFactory()
self.user_2 = factories.UserFactory()
self.user_3 = factories.UserFactory()
self.user_4 = factories.UserFactory()
# Base project + 1 project shared with 3 + 1 project shared with 2
self.base_project = factories.ProjectFactory(is_public=False, creator=self.user_1)
self.shared_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1)
self.private_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1)
# Adding contributors
for node in [self.base_project, self.shared_node, self.private_node]:
node.add_contributor(self.user_2, permissions='admin')
self.base_project.add_contributor(self.user_3, permissions='write')
self.shared_node.add_contributor(self.user_3, permissions='write')
# Setting basic subscriptions
self.base_sub = factories.NotificationSubscriptionFactory(
_id=self.base_project._id + '_file_updated',
owner=self.base_project,
event_name='file_updated'
)
self.base_sub.save()
self.shared_sub = factories.NotificationSubscriptionFactory(
_id=self.shared_node._id + '_file_updated',
owner=self.shared_node,
event_name='file_updated'
)
self.shared_sub.save()
self.private_sub = factories.NotificationSubscriptionFactory(
_id=self.private_node._id + '_file_updated',
owner=self.private_node,
event_name='file_updated'
)
self.private_sub.save()
def test_no_subscription(self):
node = factories.NodeFactory()
result = emails.compile_subscriptions(node, 'file_updated')
assert_equal({'email_transactional': [], 'none': [], 'email_digest': []}, result)
def test_no_subscribers(self):
node = factories.NodeFactory()
node_sub = factories.NotificationSubscriptionFactory(
_id=node._id + '_file_updated',
owner=node,
event_name='file_updated'
)
node_sub.save()
result = emails.compile_subscriptions(node, 'file_updated')
assert_equal({'email_transactional': [], 'none': [], 'email_digest': []}, result)
def test_creator_subbed_parent(self):
# Basic sub check
self.base_sub.email_transactional.append(self.user_1)
self.base_sub.save()
result = emails.compile_subscriptions(self.base_project, 'file_updated')
assert_equal({'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []}, result)
def test_creator_subbed_to_parent_from_child(self):
# checks the parent sub is the one to appear without a child sub
self.base_sub.email_transactional.append(self.user_1)
self.base_sub.save()
result = emails.compile_subscriptions(self.shared_node, 'file_updated')
assert_equal({'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []}, result)
    def test_creator_subbed_to_both_from_child(self):
        """A user subscribed on both parent and child is returned only once."""
        # checks that only one sub is in the list.
        self.base_sub.email_transactional.append(self.user_1)
        self.base_sub.save()
        self.shared_sub.email_transactional.append(self.user_1)
        self.shared_sub.save()
        result = emails.compile_subscriptions(self.shared_node, 'file_updated')
        assert_equal({'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []}, result)
    def test_creator_diff_subs_to_both_from_child(self):
        """The child node's setting ('none') wins over the parent's ('email_transactional')."""
        # Check that the child node sub overrides the parent node sub
        self.base_sub.email_transactional.append(self.user_1)
        self.base_sub.save()
        self.shared_sub.none.append(self.user_1)
        self.shared_sub.save()
        result = emails.compile_subscriptions(self.shared_node, 'file_updated')
        assert_equal({'email_transactional': [], 'none': [self.user_1._id], 'email_digest': []}, result)
    def test_user_wo_permission_on_child_node_not_listed(self):
        """A parent-level subscriber without access to the private child is excluded."""
        # Tests to see if a user without permission gets an Email about a node they cannot see.
        self.base_sub.email_transactional.append(self.user_3)
        self.base_sub.save()
        result = emails.compile_subscriptions(self.private_node, 'file_updated')
        assert_equal({'email_transactional': [], 'none': [], 'email_digest': []}, result)
    def test_several_nodes_deep(self):
        """A root-level subscription propagates down a chain of four descendant nodes."""
        self.base_sub.email_transactional.append(self.user_1)
        self.base_sub.save()
        node2 = factories.NodeFactory(parent=self.shared_node)
        node3 = factories.NodeFactory(parent=node2)
        node4 = factories.NodeFactory(parent=node3)
        node5 = factories.NodeFactory(parent=node4)
        subs = emails.compile_subscriptions(node5, 'file_updated')
        assert_equal(subs, {'email_transactional': [self.user_1._id], 'email_digest': [], 'none': []})
    def test_several_nodes_deep_precedence(self):
        """The closest ancestor subscription (node4's digest) overrides the root's transactional one."""
        self.base_sub.email_transactional.append(self.user_1)
        self.base_sub.save()
        node2 = factories.NodeFactory(parent=self.shared_node)
        node3 = factories.NodeFactory(parent=node2)
        node4 = factories.NodeFactory(parent=node3)
        node4_subscription = factories.NotificationSubscriptionFactory(
            _id=node4._id + '_file_updated',
            owner=node4,
            event_name='file_updated'
        )
        node4_subscription.save()
        node4_subscription.email_digest.append(self.user_1)
        node4_subscription.save()
        node5 = factories.NodeFactory(parent=node4)
        subs = emails.compile_subscriptions(node5, 'file_updated')
        assert_equal(subs, {'email_transactional': [], 'email_digest': [self.user_1._id], 'none': []})
class TestMoveSubscription(OsfTestCase):
    """Tests for moving a file subscription between nodes.

    Covers utils.separate_users (splitting users by permission on the target
    node), utils.users_to_remove (who loses/downgrades notifications on move),
    and utils.move_subscription (re-keying the subscription onto the new node).
    """
    def setUp(self):
        super(TestMoveSubscription, self).setUp()
        self.blank = {key: [] for key in constants.NOTIFICATION_TYPES} # For use where it is blank.
        # user_1 owns the project; users 2-4 are granted permissions per test.
        self.user_1 = factories.AuthUserFactory()
        self.auth = Auth(user=self.user_1)
        self.user_2 = factories.AuthUserFactory()
        self.user_3 = factories.AuthUserFactory()
        self.user_4 = factories.AuthUserFactory()
        self.project = factories.ProjectFactory(creator=self.user_1)
        self.private_node = factories.NodeFactory(parent=self.project, is_public=False, creator=self.user_1)
        # Node-level 'file_updated' subscription on the project.
        self.sub = factories.NotificationSubscriptionFactory(
            _id=self.project._id + '_file_updated',
            owner=self.project,
            event_name='file_updated'
        )
        self.sub.email_transactional.extend([self.user_1])
        self.sub.save()
        # File-specific subscription ('xyz42' stands in for a file id).
        self.file_sub = factories.NotificationSubscriptionFactory(
            _id=self.project._id + '_xyz42_file_updated',
            owner=self.project,
            event_name='xyz42_file_updated'
        )
        self.file_sub.save()
    def test_separate_users(self):
        """Users with permission on the node are 'subbed'; the rest are 'removed'."""
        self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
        self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
        self.private_node.save()
        subbed, removed = utils.separate_users(
            self.private_node, [self.user_2._id, self.user_3._id, self.user_4._id]
        )
        assert_equal([self.user_2._id, self.user_3._id], subbed)
        assert_equal([self.user_4._id], removed)
    def test_event_subs_same(self):
        """Only the subscriber lacking permission on the destination is flagged for removal."""
        self.file_sub.email_transactional.extend([self.user_2, self.user_3, self.user_4])
        self.file_sub.save()
        self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
        self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
        self.private_node.save()
        results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
        assert_equal({'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []}, results)
    def test_event_nodes_same(self):
        """Moving a subscription onto the same node removes nobody."""
        self.file_sub.email_transactional.extend([self.user_2, self.user_3, self.user_4])
        self.file_sub.save()
        self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
        self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
        self.private_node.save()
        results = utils.users_to_remove('xyz42_file_updated', self.project, self.project)
        assert_equal({'email_transactional': [], 'email_digest': [], 'none': []}, results)
    def test_move_sub(self):
        """move_subscription re-keys the subscription: new event name, owner, and _id."""
        # Tests old sub is replaced with new sub.
        utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
        assert_equal('abc42_file_updated', self.file_sub.event_name)
        assert_equal(self.private_node, self.file_sub.owner)
        assert_equal(self.private_node._id + '_abc42_file_updated', self.file_sub._id)
    def test_move_sub_with_none(self):
        """A user subscribed as 'none' still appears in the removal results."""
        # Attempt to reproduce an error that is seen when moving files
        self.project.add_contributor(self.user_2, permissions=['write', 'read'], auth=self.auth)
        self.project.save()
        self.file_sub.none.append(self.user_2)
        self.file_sub.save()
        results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
        assert_equal({'email_transactional': [], 'email_digest': [], 'none': [self.user_2._id]}, results)
    def test_remove_one_user(self):
        # One user doesn't have permissions on the node the sub is moved to. Should be listed.
        self.file_sub.email_transactional.extend([self.user_2, self.user_3, self.user_4])
        self.file_sub.save()
        self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
        self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
        self.private_node.save()
        results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
        assert_equal({'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []}, results)
    def test_remove_one_user_warn_another(self):
        # Two users do not have permissions on new node, but one has a project sub. Both should be listed.
        self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
        self.private_node.save()
        self.project.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
        self.project.save()
        self.sub.email_digest.append(self.user_3)
        self.sub.save()
        self.file_sub.email_transactional.extend([self.user_2, self.user_4])
        results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
        utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
        assert_equal({'email_transactional': [self.user_4._id], 'email_digest': [self.user_3._id], 'none': []}, results)
        assert_in(self.user_3, self.sub.email_digest)  # Is not removed from the project subscription.
    def test_warn_user(self):
        # One user with a project sub does not have permission on new node. User should be listed.
        self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
        self.private_node.save()
        self.project.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
        self.project.save()
        self.sub.email_digest.append(self.user_3)
        self.sub.save()
        self.file_sub.email_transactional.extend([self.user_2])
        results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
        utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
        assert_equal({'email_transactional': [], 'email_digest': [self.user_3._id], 'none': []}, results)
        assert_in(self.user_3, self.sub.email_digest)  # Is not removed from the project subscription.
    def test_user_node_subbed_and_not_removed(self):
        """A user with access to the destination node is not added to the moved file sub."""
        self.project.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
        self.project.save()
        self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
        self.private_node.save()
        self.sub.email_digest.append(self.user_3)
        self.sub.save()
        utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
        assert_equal([], self.file_sub.email_digest)
class TestSendEmails(OsfTestCase):
    """Tests for emails.notify and its helpers.

    emails.store_emails is mocked so assertions inspect *what* would be sent
    (recipients, notification type, event name) without sending anything.
    """
    def setUp(self):
        super(TestSendEmails, self).setUp()
        self.user = factories.AuthUserFactory()
        self.project = factories.ProjectFactory()
        # Project creator gets transactional 'comments' notifications.
        self.project_subscription = factories.NotificationSubscriptionFactory(
            _id=self.project._id + '_' + 'comments',
            owner=self.project,
            event_name='comments'
        )
        self.project_subscription.save()
        self.project_subscription.email_transactional.append(self.project.creator)
        self.project_subscription.save()
        self.node = factories.NodeFactory(parent=self.project)
        # Component-level subscription with no subscribers.
        self.node_subscription = factories.NotificationSubscriptionFactory(
            _id=self.node._id + '_comments',
            owner=self.node,
            event_name='comments'
        )
        self.node_subscription.save()
        # User-level subscription to comment replies.
        # NOTE(review): unlike the others this subscription is never .save()d —
        # presumably the factory persists it; confirm.
        self.user_subscription = factories.NotificationSubscriptionFactory(
            _id=self.user._id + '_' + 'comment_replies',
            owner=self.user,
            event_name='comment_replies',
            email_transactional=[self.user._id]
        )
    @mock.patch('website.notifications.emails.store_emails')
    def test_notify_no_subscription(self, mock_store):
        """notify stores nothing for a node without any subscription."""
        node = factories.NodeFactory()
        emails.notify('comments', user=self.user, node=node, timestamp=datetime.datetime.utcnow())
        assert_false(mock_store.called)
    @mock.patch('website.notifications.emails.store_emails')
    def test_notify_no_subscribers(self, mock_store):
        """notify stores nothing when the subscription has no subscribers."""
        node = factories.NodeFactory()
        node_subscription = factories.NotificationSubscriptionFactory(
            _id=node._id + '_comments',
            owner=node,
            event_name='comments'
        )
        node_subscription.save()
        emails.notify('comments', user=self.user, node=node, timestamp=datetime.datetime.utcnow())
        assert_false(mock_store.called)
    @mock.patch('website.notifications.emails.store_emails')
    def test_notify_sends_with_correct_args(self, mock_store):
        """store_emails receives the subscribed recipient, type, event, sender, node, and time."""
        time_now = datetime.datetime.utcnow()
        emails.notify('comments', user=self.user, node=self.node, timestamp=time_now)
        assert_true(mock_store.called)
        mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', self.user,
                                      self.node, time_now)
    @mock.patch('website.notifications.emails.store_emails')
    def test_notify_does_not_send_to_users_subscribed_to_none(self, mock_store):
        """Users subscribed as 'none' receive nothing and notify returns an empty list."""
        node = factories.NodeFactory()
        user = factories.UserFactory()
        node_subscription = factories.NotificationSubscriptionFactory(
            _id=node._id + '_comments',
            owner=node,
            event_name='comments'
        )
        node_subscription.save()
        node_subscription.none.append(user)
        node_subscription.save()
        sent = emails.notify('comments', user=user, node=node, timestamp=datetime.datetime.utcnow())
        assert_false(mock_store.called)
        assert_equal(sent, [])
    @mock.patch('website.notifications.emails.store_emails')
    def test_notify_sends_comment_reply_event_if_comment_is_direct_reply(self, mock_store):
        """A direct reply to the subscriber is stored under 'comment_replies', not 'comments'."""
        time_now = datetime.datetime.utcnow()
        emails.notify('comments', user=self.user, node=self.node, timestamp=time_now, target_user=self.project.creator)
        mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comment_replies',
                                      self.user, self.node, time_now, target_user=self.project.creator)
    @mock.patch('website.notifications.emails.store_emails')
    def test_notify_sends_comment_reply_when_target_user_is_subscribed_via_user_settings(self, mock_store):
        """A user-level 'comment_replies' subscription is honored for the reply target."""
        time_now = datetime.datetime.utcnow()
        emails.notify('comment_replies', user=self.project.creator, node=self.node, timestamp=time_now, target_user=self.user)
        mock_store.assert_called_with([self.user._id], 'email_transactional', 'comment_replies',
                                      self.project.creator, self.node, time_now, target_user=self.user)
    @mock.patch('website.notifications.emails.store_emails')
    def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply(self, mock_store):
        """Replying to one's own comment still notifies other subscribers under 'comments'."""
        user = factories.UserFactory()
        time_now = datetime.datetime.utcnow()
        emails.notify('comments', user=user, node=self.node, timestamp=time_now, target_user=user)
        mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user,
                                      self.node, time_now, target_user=user)
    @mock.patch('website.mails.send_mail')
    @mock.patch('website.notifications.emails.store_emails')
    def test_notify_does_not_send_comment_if_they_reply_to_their_own_comment(self, mock_store, mock_send_mail):
        """Self-replies never generate notifications for the replier."""
        time_now = datetime.datetime.utcnow()
        emails.notify('comments', user=self.project.creator, node=self.project, timestamp=time_now,
                      target_user=self.project.creator)
        assert_false(mock_store.called)
        assert_false(mock_send_mail.called)
    @mock.patch('website.notifications.emails.store_emails')
    def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply_on_component(self, mock_store):
        # Test that comment replies on components that are not direct replies to the subscriber use the
        # "comments" email template.
        user = factories.UserFactory()
        time_now = datetime.datetime.utcnow()
        emails.notify('comments', user, self.node, time_now, target_user=user)
        mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user,
                                      self.node, time_now, target_user=user)
    def test_check_node_node_none(self):
        """check_node(None, ...) returns empty recipient lists instead of raising."""
        subs = emails.check_node(None, 'comments')
        assert_equal(subs, {'email_transactional': [], 'email_digest': [], 'none': []})
    def test_check_node_one(self):
        """check_node returns the single transactional subscriber on the project."""
        subs = emails.check_node(self.project, 'comments')
        assert_equal(subs, {'email_transactional': [self.project.creator._id], 'email_digest': [], 'none': []})
    @mock.patch('website.project.views.comment.notify')
    def test_check_user_comment_reply_subscription_if_email_not_sent_to_target_user(self, mock_notify):
        """Replying to a commenter triggers notify for their user-level reply subscription."""
        # user subscribed to comment replies
        user = factories.UserFactory()
        # NOTE(review): _id suffix '_comments' does not match
        # event_name 'comment_replies' — confirm this is intentional.
        user_subscription = factories.NotificationSubscriptionFactory(
            _id=user._id + '_comments',
            owner=user,
            event_name='comment_replies'
        )
        user_subscription.email_transactional.append(user)
        user_subscription.save()
        # user is not subscribed to project comment notifications
        project = factories.ProjectFactory()
        # user comments on project
        target = factories.CommentFactory(node=project, user=user)
        content = 'hammer to fall'
        # reply to user (note: notify is called from Comment.create)
        reply = Comment.create(
            auth=Auth(project.creator),
            user=project.creator,
            node=project,
            content=content,
            target=Guid.load(target._id),
            is_public=True,
        )
        assert_true(mock_notify.called)
        assert_equal(mock_notify.call_count, 2)
    def test_get_settings_url_for_node(self):
        """For a node id the settings URL points at the node's settings page."""
        url = emails.get_settings_url(self.project._id, self.user)
        assert_equal(url, self.project.absolute_url + 'settings/')
    def test_get_settings_url_for_user(self):
        """For a user id the settings URL points at the user notifications page."""
        url = emails.get_settings_url(self.user._id, self.user)
        assert_equal(url, web_url_for('user_notifications', _absolute=True))
    def test_get_node_lineage(self):
        """Lineage is root-first: [project, node]."""
        node_lineage = emails.get_node_lineage(self.node)
        assert_equal(node_lineage, [self.project._id, self.node._id])
    def test_localize_timestamp(self):
        """Timestamp is rendered in the user's timezone and locale."""
        timestamp = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
        self.user.timezone = 'America/New_York'
        self.user.locale = 'en_US'
        self.user.save()
        tz = dates.get_timezone(self.user.timezone)
        locale = Locale(self.user.locale)
        formatted_date = dates.format_date(timestamp, format='full', locale=locale)
        formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale)
        formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date)
        assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime)
    def test_localize_timestamp_empty_timezone(self):
        """An empty user timezone falls back to UTC."""
        timestamp = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
        self.user.timezone = ''
        self.user.locale = 'en_US'
        self.user.save()
        tz = dates.get_timezone('Etc/UTC')
        locale = Locale(self.user.locale)
        formatted_date = dates.format_date(timestamp, format='full', locale=locale)
        formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale)
        formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date)
        assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime)
    def test_localize_timestamp_empty_locale(self):
        """An empty user locale falls back to 'en'."""
        timestamp = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
        self.user.timezone = 'America/New_York'
        self.user.locale = ''
        self.user.save()
        tz = dates.get_timezone(self.user.timezone)
        locale = Locale('en')
        formatted_date = dates.format_date(timestamp, format='full', locale=locale)
        formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale)
        formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date)
        assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime)
    def test_localize_timestamp_handles_unicode(self):
        """Non-ASCII locale output (ru_RU) formats without encoding errors."""
        timestamp = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
        self.user.timezone = 'Europe/Moscow'
        self.user.locale = 'ru_RU'
        self.user.save()
        tz = dates.get_timezone(self.user.timezone)
        locale = Locale(self.user.locale)
        formatted_date = dates.format_date(timestamp, format='full', locale=locale)
        formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale)
        formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date)
        assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime)
class TestSendDigest(OsfTestCase):
    """Tests for digest grouping (get_users_emails), delivery (send_users_email),
    and cleanup (remove_notifications)."""
    def setUp(self):
        super(TestSendDigest, self).setUp()
        self.user_1 = factories.UserFactory()
        self.user_2 = factories.UserFactory()
        self.project = factories.ProjectFactory()
        self.timestamp = datetime.datetime.utcnow()
    def test_group_notifications_by_user_transactional(self):
        """get_users_emails('email_transactional') groups by user and excludes digest rows."""
        send_type = 'email_transactional'
        d = factories.NotificationDigestFactory(
            user_id=self.user_1._id,
            send_type=send_type,
            timestamp=self.timestamp,
            message='Hello',
            node_lineage=[self.project._id]
        )
        d.save()
        d2 = factories.NotificationDigestFactory(
            user_id=self.user_2._id,
            send_type=send_type,
            timestamp=self.timestamp,
            message='Hello',
            node_lineage=[self.project._id]
        )
        d2.save()
        d3 = factories.NotificationDigestFactory(
            user_id=self.user_2._id,
            send_type='email_digest',
            timestamp=self.timestamp,
            message='Hello, but this should not appear (this is a digest)',
            node_lineage=[self.project._id]
        )
        d3.save()
        user_groups = get_users_emails(send_type)
        expected = [
            {
                u'user_id': self.user_1._id,
                u'info': [{
                    u'message': u'Hello',
                    u'node_lineage': [unicode(self.project._id)],
                    u'_id': d._id
                }]
            },
            {
                u'user_id': self.user_2._id,
                u'info': [{
                    u'message': u'Hello',
                    u'node_lineage': [unicode(self.project._id)],
                    u'_id': d2._id
                }]
            }
        ]
        assert_equal(len(user_groups), 2)
        assert_equal(user_groups, expected)
        # Clean up so these rows don't leak into other tests.
        digest_ids = [d._id, d2._id, d3._id]
        remove_notifications(email_notification_ids=digest_ids)
    def test_group_notifications_by_user_digest(self):
        """get_users_emails('email_digest') groups by user and excludes transactional rows."""
        send_type = 'email_digest'
        d = factories.NotificationDigestFactory(
            user_id=self.user_1._id,
            send_type=send_type,
            timestamp=self.timestamp,
            message='Hello',
            node_lineage=[self.project._id]
        )
        d.save()
        d2 = factories.NotificationDigestFactory(
            user_id=self.user_2._id,
            send_type=send_type,
            timestamp=self.timestamp,
            message='Hello',
            node_lineage=[self.project._id]
        )
        d2.save()
        d3 = factories.NotificationDigestFactory(
            user_id=self.user_2._id,
            send_type='email_transactional',
            timestamp=self.timestamp,
            message='Hello, but this should not appear (this is transactional)',
            node_lineage=[self.project._id]
        )
        d3.save()
        user_groups = get_users_emails(send_type)
        expected = [
            {
                u'user_id': self.user_1._id,
                u'info': [{
                    u'message': u'Hello',
                    u'node_lineage': [unicode(self.project._id)],
                    u'_id': d._id
                }]
            },
            {
                u'user_id': self.user_2._id,
                u'info': [{
                    u'message': u'Hello',
                    u'node_lineage': [unicode(self.project._id)],
                    u'_id': d2._id
                }]
            }
        ]
        assert_equal(len(user_groups), 2)
        assert_equal(user_groups, expected)
        # Clean up so these rows don't leak into other tests.
        digest_ids = [d._id, d2._id, d3._id]
        remove_notifications(email_notification_ids=digest_ids)
    @mock.patch('website.mails.send_mail')
    def test_send_users_email_called_with_correct_args(self, mock_send_mail):
        """send_users_email passes the expected kwargs to mails.send_mail."""
        send_type = 'email_transactional'
        d = factories.NotificationDigestFactory(
            user_id=factories.UserFactory()._id,
            send_type=send_type,
            timestamp=datetime.datetime.utcnow(),
            message='Hello',
            node_lineage=[factories.ProjectFactory()._id]
        )
        d.save()
        user_groups = get_users_emails(send_type)
        send_users_email(send_type)
        assert_true(mock_send_mail.called)
        assert_equals(mock_send_mail.call_count, len(user_groups))
        last_user_index = len(user_groups) - 1
        user = User.load(user_groups[last_user_index]['user_id'])
        email_notification_ids = [message['_id'] for message in user_groups[last_user_index]['info']]
        args, kwargs = mock_send_mail.call_args
        assert_equal(kwargs['to_addr'], user.username)
        assert_equal(kwargs['mimetype'], 'html')
        assert_equal(kwargs['mail'], mails.DIGEST)
        assert_equal(kwargs['name'], user.fullname)
        message = group_by_node(user_groups[last_user_index]['info'])
        assert_equal(kwargs['message'], message)
        # NOTE(review): this invokes remove_notifications immediately (doubling
        # as cleanup) and compares kwargs['callback'] to its return value —
        # presumably the intent was to assert on the callback itself; confirm.
        assert_equal(kwargs['callback'], remove_notifications(email_notification_ids=email_notification_ids))
    def test_remove_sent_digest_notifications(self):
        """remove_notifications deletes the digest row so a lookup raises NoResultsFound."""
        d = factories.NotificationDigestFactory(
            user_id=factories.UserFactory()._id,
            timestamp=datetime.datetime.utcnow(),
            message='Hello',
            node_lineage=[factories.ProjectFactory()._id]
        )
        digest_id = d._id
        remove_notifications(email_notification_ids=[digest_id])
        with assert_raises(NoResultsFound):
            NotificationDigest.find_one(Q('_id', 'eq', digest_id))
|
brandonPurvis/osf.io
|
tests/test_notifications.py
|
Python
|
apache-2.0
| 59,970 | 0.003035 |
import pyxel
from pyxel.ui import Widget
from .constants import OCTAVE_BAR_BACKGROUND_COLOR, OCTAVE_BAR_COLOR
class OctaveBar(Widget):
    """Thin vertical widget that shows and selects the editor's octave.

    The bar is 4 pixels wide and 123 pixels tall. Clicking or dragging
    inside it sets ``parent.octave`` to a value clamped to 0..3, and the
    current octave is drawn as a small highlighted marker.
    """

    def __init__(self, parent, x, y):
        super().__init__(parent, x, y, 4, 123)
        # Wire up all widget events in one pass.
        for event, handler in (
            ("mouse_down", self.__on_mouse_down),
            ("mouse_drag", self.__on_mouse_drag),
            ("mouse_hover", self.__on_mouse_hover),
            ("draw", self.__on_draw),
        ):
            self.add_event_handler(event, handler)

    def __on_mouse_down(self, key, x, y):
        # Only the left mouse button selects an octave.
        if key != pyxel.MOUSE_LEFT_BUTTON:
            return
        local_y = y - self.y
        # Each octave occupies a 24-pixel band, top band = octave 3;
        # clamp the computed value into the valid 0..3 range.
        band = 3 - ((local_y - 12) // 24)
        self.parent.octave = min(max(band, 0), 3)

    def __on_mouse_drag(self, key, x, y, dx, dy):
        # Dragging selects exactly like a fresh press at the new position.
        self.__on_mouse_down(key, x, y)

    def __on_mouse_hover(self, x, y):
        self.parent.help_message = "OCTAVE:PAGEUP/PAGEDOWN"

    def __on_draw(self):
        # Background fill, then the 2x47 marker for the active octave.
        pyxel.rect(self.x, self.y, self.width, self.height, OCTAVE_BAR_BACKGROUND_COLOR)
        marker_x = self.x + 1
        marker_y = self.y + 1 + (3 - self.parent.octave) * 24
        pyxel.rect(marker_x, marker_y, 2, 47, OCTAVE_BAR_COLOR)
|
ferriman/SSandSP
|
pyxel-test/venv/lib/python3.8/site-packages/pyxel/editor/octave_bar.py
|
Python
|
gpl-3.0
| 1,129 | 0.000886 |
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import os
from functools import partial
from collections import namedtuple
from time import sleep
from platform import python_implementation
from powerline.segments import shell, tmux, pdb, i3wm
from powerline.lib.vcs import get_fallback_create_watcher
from powerline.lib.unicode import out_u
import tests.vim as vim_module
from tests.lib import Args, urllib_read, replace_attr, new_module, replace_module_module, replace_env, Pl
from tests import TestCase, SkipTest
def get_dummy_guess(**kwargs):
	'''Build a dummy VCS ``guess`` function for the segment tests.

	The returned callable mimics the real repository-guess function: it
	produces an Args object whose ``branch()`` is the basename of the
	given path. Unless the caller supplied a ``directory`` keyword, the
	path itself is used as the directory attribute.
	'''
	needs_directory = 'directory' not in kwargs
	def guess(path, create_watcher):
		attrs = dict(kwargs)
		if needs_directory:
			attrs['directory'] = path
		return Args(branch=lambda: out_u(os.path.basename(path)), **attrs)
	return guess
class TestShell(TestCase):
	def test_last_status(self):
		'''last_status: non-zero/signal codes render with exit_fail; 0 and None render nothing.'''
		pl = Pl()
		segment_info = {'args': Args(last_exit_code=10)}
		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
			{'contents': '10', 'highlight_groups': ['exit_fail']}
		])
		segment_info['args'].last_exit_code = 0
		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None)
		segment_info['args'].last_exit_code = None
		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None)
		segment_info['args'].last_exit_code = 'sigsegv'
		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
			{'contents': 'sigsegv', 'highlight_groups': ['exit_fail']}
		])
		segment_info['args'].last_exit_code = 'sigsegv+core'
		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
			{'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail']}
		])
	def test_last_pipe_status(self):
		'''last_pipe_status: one segment per pipeline stage; all-success pipelines render nothing.'''
		pl = Pl()
		segment_info = {'args': Args(last_pipe_status=[])}
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), None)
		segment_info['args'].last_pipe_status = [0, 0, 0]
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), None)
		segment_info['args'].last_pipe_status = [0, 2, 0]
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
			{'contents': '2', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
		])
		segment_info['args'].last_pipe_status = [0, 'sigsegv', 'sigsegv+core']
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
			{'contents': 'sigsegv', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
			{'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True}
		])
		segment_info['args'].last_pipe_status = [0, 'sigsegv', 0]
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
			{'contents': 'sigsegv', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
		])
		segment_info['args'].last_pipe_status = [0, 'sigsegv+core', 0]
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
			{'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
		])
	def test_jobnum(self):
		'''jobnum: zero jobs renders nothing unless show_zero=True; non-zero always renders.'''
		pl = Pl()
		segment_info = {'args': Args(jobnum=0)}
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info), None)
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=False), None)
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=True), '0')
		segment_info = {'args': Args(jobnum=1)}
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info), '1')
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=False), '1')
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=True), '1')
	def test_continuation(self):
		'''continuation: renders parser_state tokens; the last token is the
		"current" auto-width segment; omit_cmdsubst, right_align and renames
		options reshape or relabel the output.'''
		pl = Pl()
		# No parser state at all: a single empty auto-width segment.
		self.assertEqual(shell.continuation(pl=pl, segment_info={}), [{
			'contents': '',
			'width': 'auto',
			'highlight_groups': ['continuation:current', 'continuation'],
		}])
		segment_info = {'parser_state': 'if cmdsubst'}
		# 'cmdsubst' is omitted by default, leaving 'if' as the current token.
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info), [
			{
				'contents': 'if',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'l',
			},
		])
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, right_align=True), [
			{
				'contents': 'if',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'r',
			},
		])
		# With omit_cmdsubst=False, 'cmdsubst' becomes the current token.
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=False), [
			{
				'contents': 'if',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation'],
			},
			{
				'contents': 'cmdsubst',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'l',
			},
		])
		# Right-aligned: the auto-width filler moves to the first segment.
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=False, right_align=True), [
			{
				'contents': 'if',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation'],
				'width': 'auto',
				'align': 'r',
			},
			{
				'contents': 'cmdsubst',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
			},
		])
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True), [
			{
				'contents': 'if',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'r',
			},
		])
		# renames maps token text; mapping to None drops the token's text.
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True, renames={'if': 'IF'}), [
			{
				'contents': 'IF',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'r',
			},
		])
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True, renames={'if': None}), [
			{
				'contents': '',
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'r',
			},
		])
		# Repeated tokens: only the last one carries the "current" styling.
		segment_info = {'parser_state': 'then then then cmdsubst'}
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info), [
			{
				'contents': 'then',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation'],
			},
			{
				'contents': 'then',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation'],
			},
			{
				'contents': 'then',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'l',
			},
		])
def test_cwd(self):
new_os = new_module('os', path=os.path, sep='/')
pl = Pl()
cwd = [None]
def getcwd():
wd = cwd[0]
if isinstance(wd, Exception):
raise wd
else:
return wd
segment_info = {'getcwd': getcwd, 'home': None}
with replace_attr(shell, 'os', new_os):
cwd[0] = '/abc/def/ghi/foo/bar'
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'abc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'def', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
segment_info['home'] = '/abc/def/ghi'
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
segment_info.update(shortened_path='~foo/ghi')
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info), [
{'contents': '~foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_shortened_path=False), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
segment_info.pop('shortened_path')
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3, shorten_home=False), [
{'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1), [
{'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis='---'), [
{'contents': '---', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis=None), [
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True), [
{'contents': '.../', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis='---'), [
{'contents': '---/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis=None), [
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'fo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2, use_path_separator=True), [
{'contents': '~/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'fo/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
cwd[0] = '/etc'
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
cwd[0] = '/'
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
ose = OSError()
ose.errno = 2
cwd[0] = ose
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
{'contents': '[not found]', 'divider_highlight_group': 'cwd:divider', 'highlight_groups': ['cwd:current_folder', 'cwd'], 'draw_inner_divider': True}
])
cwd[0] = OSError()
self.assertRaises(OSError, shell.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
cwd[0] = ValueError()
self.assertRaises(ValueError, shell.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
class TestTmux(TestCase):
	'''Tests for powerline.segments.common.tmux segments.'''
	def test_attached_clients(self):
		'''attached_clients counts list-clients lines; minimum= suppresses
		the segment when fewer clients are attached.'''
		# Canned per-command tmux output; unknown commands yield None,
		# matching the original fall-through behavior.
		canned_output = {
			'list-panes': 'session_name\n',
			'list-clients': '/dev/pts/2: 0 [191x51 xterm-256color] (utf8)\n/dev/pts/3: 0 [191x51 xterm-256color] (utf8)',
		}
		def fake_tmux_output(pl, cmd, *args):
			return canned_output.get(cmd)
		pl = Pl()
		with replace_attr(tmux, 'get_tmux_output', fake_tmux_output):
			self.assertEqual(tmux.attached_clients(pl=pl), '2')
			self.assertEqual(tmux.attached_clients(pl=pl, minimum=3), None)
class TestCommon(TestCase):
	'''Base class for tests of ``powerline.segments.common.*`` submodules.

	Subclasses set the ``module_name`` class attribute; the corresponding
	submodule is imported once per class and exposed as ``cls.module``.
	'''
	@classmethod
	def setUpClass(cls):
		# str() keeps the module name a byte string on Python 2 (where
		# __import__ rejects unicode names when unicode_literals is in
		# effect); it is a no-op on Python 3.
		# __import__ returns the top-level ``powerline`` package, so the
		# actual submodule is fetched via attribute access below.
		module = __import__(str('powerline.segments.common.{0}'.format(cls.module_name)))
		cls.module = getattr(module.segments.common, str(cls.module_name))
class TestNet(TestCommon):
	'''Tests for powerline.segments.common.net segments.'''
	module_name = 'net'
	def test_hostname(self):
		'''hostname() must honor only_if_ssh (requires SSH_CLIENT in the
		environment) and exclude_domain (strip everything past the first dot).'''
		pl = Pl()
		with replace_env('SSH_CLIENT', '192.168.0.12 40921 22') as segment_info:
			with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc'):
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc')
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True), 'abc')
			with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc.mydomain'):
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc.mydomain')
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, exclude_domain=True), 'abc')
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True), 'abc.mydomain')
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True, exclude_domain=True), 'abc')
			# Without SSH_CLIENT only_if_ssh=True must suppress the segment.
			segment_info['environ'].pop('SSH_CLIENT')
			with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc'):
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc')
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True), None)
			with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc.mydomain'):
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc.mydomain')
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, exclude_domain=True), 'abc')
				self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True, exclude_domain=True), None)
	def test_external_ip(self):
		pl = Pl()
		# urllib_read is the shared test replacement that always answers
		# with 127.0.0.1 — presumably defined earlier in this file; verify.
		with replace_attr(self.module, 'urllib_read', urllib_read):
			self.assertEqual(self.module.external_ip(pl=pl), [{'contents': '127.0.0.1', 'divider_highlight_group': 'background:divider'}])
	def test_internal_ip(self):
		try:
			import netifaces
		except ImportError:
			raise SkipTest('netifaces module is not available')
		pl = Pl()
		# Fake interface -> address-family -> address-list mapping used by the
		# netifaces replacement below.
		addr = {
			'enp2s0': {
				netifaces.AF_INET: [{'addr': '192.168.100.200'}],
				netifaces.AF_INET6: [{'addr': 'feff::5446:5eff:fe5a:7777%enp2s0'}]
			},
			'lo': {
				netifaces.AF_INET: [{'addr': '127.0.0.1'}],
				netifaces.AF_INET6: [{'addr': '::1'}]
			},
			'teredo': {
				netifaces.AF_INET6: [{'addr': 'feff::5446:5eff:fe5a:7777'}]
			},
		}
		interfaces = ['lo', 'enp2s0', 'teredo']
		with replace_module_module(
			self.module, 'netifaces',
			interfaces=(lambda: interfaces),
			ifaddresses=(lambda interface: addr[interface]),
			AF_INET=netifaces.AF_INET,
			AF_INET6=netifaces.AF_INET6,
		):
			self.assertEqual(self.module.internal_ip(pl=pl), '192.168.100.200')
			self.assertEqual(self.module.internal_ip(pl=pl, interface='auto'), '192.168.100.200')
			self.assertEqual(self.module.internal_ip(pl=pl, interface='lo'), '127.0.0.1')
			# 'teredo' has no IPv4 address, so ipv=4 (the default) finds nothing.
			self.assertEqual(self.module.internal_ip(pl=pl, interface='teredo'), None)
			self.assertEqual(self.module.internal_ip(pl=pl, ipv=4), '192.168.100.200')
			self.assertEqual(self.module.internal_ip(pl=pl, interface='auto', ipv=4), '192.168.100.200')
			self.assertEqual(self.module.internal_ip(pl=pl, interface='lo', ipv=4), '127.0.0.1')
			self.assertEqual(self.module.internal_ip(pl=pl, interface='teredo', ipv=4), None)
			self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), 'feff::5446:5eff:fe5a:7777%enp2s0')
			self.assertEqual(self.module.internal_ip(pl=pl, interface='auto', ipv=6), 'feff::5446:5eff:fe5a:7777%enp2s0')
			self.assertEqual(self.module.internal_ip(pl=pl, interface='lo', ipv=6), '::1')
			self.assertEqual(self.module.internal_ip(pl=pl, interface='teredo', ipv=6), 'feff::5446:5eff:fe5a:7777')
			# Drop 'enp2s0' (the list is shared with the lambda above).
			interfaces[1:2] = ()
			self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), 'feff::5446:5eff:fe5a:7777')
			# Drop 'teredo' as well; only 'lo' remains.
			interfaces[1:2] = ()
			self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), '::1')
			# No interfaces at all.
			interfaces[:] = ()
			self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), None)
			gateways = {
				'default': {
					netifaces.AF_INET: ('192.168.100.1', 'enp2s0'),
					netifaces.AF_INET6: ('feff::5446:5eff:fe5a:0001', 'enp2s0')
				}
			}
			with replace_module_module(
				self.module, 'netifaces',
				interfaces=(lambda: interfaces),
				ifaddresses=(lambda interface: addr[interface]),
				gateways=(lambda: gateways),
				AF_INET=netifaces.AF_INET,
				AF_INET6=netifaces.AF_INET6,
			):
				# default gateway has specified address family
				self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=4), '192.168.100.200')
				self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=6), 'feff::5446:5eff:fe5a:7777%enp2s0')
				# default gateway doesn't have specified address family
				gateways['default'] = {}
				self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=4), None)
				self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=6), None)
	def test_network_load(self):
		# _get_bytes is indirected through the one-element list ``f`` so the
		# test can swap implementations while the segment's background
		# monitoring (started by network_load.startup) keeps polling.
		def gb(interface):
			return None
		f = [gb]
		def _get_bytes(interface):
			return f[0](interface)
		pl = Pl()
		with replace_attr(self.module, '_get_bytes', _get_bytes):
			self.module.network_load.startup(pl=pl)
			try:
				# While _get_bytes returns None no rate can be computed.
				self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), None)
				sleep(self.module.network_load.interval)
				self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), None)
				# Busy-wait until the poller has recorded a previous sample.
				while 'prev' not in self.module.network_load.interfaces.get('eth0', {}):
					sleep(0.1)
				self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), None)
				# gb2 grows the counters by 1200/2400 bytes per poll, which
				# divided by the poll interval yields 1 KiB/s down, 2 KiB/s up.
				l = [0, 0]
				def gb2(interface):
					l[0] += 1200
					l[1] += 2400
					return tuple(l)
				f[0] = gb2
				while not self.module.network_load.interfaces.get('eth0', {}).get('prev', (None, None))[1]:
					sleep(0.1)
				self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), [
					{'divider_highlight_group': 'network_load:divider', 'contents': 'DL 1 KiB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
					{'divider_highlight_group': 'network_load:divider', 'contents': 'UL 2 KiB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
				])
				self.assertEqual(self.module.network_load(pl=pl, interface='eth0', recv_format='r {value}', sent_format='s {value}'), [
					{'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 KiB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
					{'divider_highlight_group': 'network_load:divider', 'contents': 's 2 KiB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
				])
				self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', suffix='bps', interface='eth0'), [
					{'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 Kibps', 'highlight_groups': ['network_load_recv', 'network_load']},
					{'divider_highlight_group': 'network_load:divider', 'contents': 's 2 Kibps', 'highlight_groups': ['network_load_sent', 'network_load']},
				])
				# si_prefix switches from binary (KiB) to decimal (kB) units.
				self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', si_prefix=True, interface='eth0'), [
					{'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 kB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
					{'divider_highlight_group': 'network_load:divider', 'contents': 's 2 kB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
				])
				# recv_max=0 saturates the receive gradient at 100.
				self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', recv_max=0, interface='eth0'), [
					{'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 KiB/s', 'highlight_groups': ['network_load_recv_gradient', 'network_load_gradient', 'network_load_recv', 'network_load'], 'gradient_level': 100},
					{'divider_highlight_group': 'network_load:divider', 'contents': 's 2 KiB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
				])
				# The exact gradient level depends on timing, so compare with
				# a tolerance of +/-1 around 50.
				class ApproxEqual(object):
					def __eq__(self, i):
						return abs(i - 50.0) < 1
				self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', sent_max=4800, interface='eth0'), [
					{'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 KiB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
					{'divider_highlight_group': 'network_load:divider', 'contents': 's 2 KiB/s', 'highlight_groups': ['network_load_sent_gradient', 'network_load_gradient', 'network_load_sent', 'network_load'], 'gradient_level': ApproxEqual()},
				])
			finally:
				self.module.network_load.shutdown()
class TestEnv(TestCommon):
	'''Tests for powerline.segments.common.env segments.'''
	module_name = 'env'
	def test_user(self):
		new_os = new_module('os', getpid=lambda: 1)
		class Process(object):
			def __init__(self, pid):
				pass
			def username(self):
				return 'def@DOMAIN.COM'
			# Newer psutil exposes Process.username as a property rather than
			# a method; mirror whichever flavor the real module under test
			# provides.  (``self`` here is the test instance, captured from
			# the enclosing method scope by the class body.)
			if hasattr(self.module, 'psutil') and not callable(self.module.psutil.Process.username):
				username = property(username)
		struct_passwd = namedtuple('struct_passwd', ('pw_name',))
		new_psutil = new_module('psutil', Process=Process)
		new_pwd = new_module('pwd', getpwuid=lambda uid: struct_passwd(pw_name='def@DOMAIN.COM'))
		new_getpass = new_module('getpass', getuser=lambda: 'def@DOMAIN.COM')
		pl = Pl()
		with replace_attr(self.module, 'pwd', new_pwd):
			with replace_attr(self.module, 'getpass', new_getpass):
				with replace_attr(self.module, 'os', new_os):
					with replace_attr(self.module, 'psutil', new_psutil):
						# Non-root user (euid 5).
						with replace_attr(self.module, '_geteuid', lambda: 5):
							self.assertEqual(self.module.user(pl=pl), [
								{'contents': 'def@DOMAIN.COM', 'highlight_groups': ['user']}
							])
							# hide_user only suppresses an exact match.
							self.assertEqual(self.module.user(pl=pl, hide_user='abc'), [
								{'contents': 'def@DOMAIN.COM', 'highlight_groups': ['user']}
							])
							self.assertEqual(self.module.user(pl=pl, hide_domain=False), [
								{'contents': 'def@DOMAIN.COM', 'highlight_groups': ['user']}
							])
							self.assertEqual(self.module.user(pl=pl, hide_user='def@DOMAIN.COM'), None)
							self.assertEqual(self.module.user(pl=pl, hide_domain=True), [
								{'contents': 'def', 'highlight_groups': ['user']}
							])
						# Root (euid 0) gets the superuser highlight group.
						with replace_attr(self.module, '_geteuid', lambda: 0):
							self.assertEqual(self.module.user(pl=pl), [
								{'contents': 'def', 'highlight_groups': ['superuser', 'user']}
							])
	def test_cwd(self):
		'''Same coverage as the shell cwd test, but for the generic env
		segment (which has no shell-provided shortened_path handling).'''
		new_os = new_module('os', path=os.path, sep='/')
		pl = Pl()
		# Mutable holder; storing an exception makes getcwd raise it.
		cwd = [None]
		def getcwd():
			wd = cwd[0]
			if isinstance(wd, Exception):
				raise wd
			else:
				return wd
		segment_info = {'getcwd': getcwd, 'home': None}
		with replace_attr(self.module, 'os', new_os):
			cwd[0] = '/abc/def/ghi/foo/bar'
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info), [
				{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'abc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'def', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			# Home prefix collapses to '~'.
			segment_info['home'] = '/abc/def/ghi'
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info), [
				{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3), [
				{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3, shorten_home=False), [
				{'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1), [
				{'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis='---'), [
				{'contents': '---', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis=None), [
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			# use_path_separator appends '/' and disables inner dividers.
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True), [
				{'contents': '.../', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis='---'), [
				{'contents': '---/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis=None), [
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
				{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'fo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2, use_path_separator=True), [
				{'contents': '~/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
				{'contents': 'fo/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			cwd[0] = '/etc'
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
				{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
				{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
				{'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			cwd[0] = '/'
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
				{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
				{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			# ENOENT is rendered as a placeholder; other errors propagate.
			ose = OSError()
			ose.errno = 2
			cwd[0] = ose
			self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
				{'contents': '[not found]', 'divider_highlight_group': 'cwd:divider', 'highlight_groups': ['cwd:current_folder', 'cwd'], 'draw_inner_divider': True}
			])
			cwd[0] = OSError()
			self.assertRaises(OSError, self.module.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
			cwd[0] = ValueError()
			self.assertRaises(ValueError, self.module.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
	def test_virtualenv(self):
		'''virtualenv() prefers VIRTUAL_ENV (last path component) over
		CONDA_DEFAULT_ENV; ignore_venv/ignore_conda disable each source.'''
		pl = Pl()
		with replace_env('VIRTUAL_ENV', '/abc/def/ghi') as segment_info:
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), 'ghi')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), 'ghi')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), None)
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
			segment_info['environ'].pop('VIRTUAL_ENV')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), None)
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), None)
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), None)
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
		with replace_env('CONDA_DEFAULT_ENV', 'foo') as segment_info:
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), 'foo')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), None)
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), 'foo')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
			segment_info['environ'].pop('CONDA_DEFAULT_ENV')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), None)
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), None)
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), None)
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
		# Both variables set: VIRTUAL_ENV wins unless ignored.
		with replace_env('CONDA_DEFAULT_ENV', 'foo', environ={'VIRTUAL_ENV': '/sbc/def/ghi'}) as segment_info:
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), 'ghi')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), 'ghi')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), 'foo')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
			segment_info['environ'].pop('CONDA_DEFAULT_ENV')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), 'ghi')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), 'ghi')
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), None)
			self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
	def test_environment(self):
		'''environment() returns a variable's value, or None when unset.'''
		pl = Pl()
		variable = 'FOO'
		value = 'bar'
		with replace_env(variable, value) as segment_info:
			self.assertEqual(self.module.environment(pl=pl, segment_info=segment_info, variable=variable), value)
			segment_info['environ'].pop(variable)
			self.assertEqual(self.module.environment(pl=pl, segment_info=segment_info, variable=variable), None)
class TestVcs(TestCommon):
	'''Tests for powerline.segments.common.vcs segments.'''
	module_name = 'vcs'
	def test_branch(self):
		'''branch() highlight groups must reflect status_colors and
		ignore_statuses, given mocked repository guess/tree_status helpers.'''
		pl = Pl()
		create_watcher = get_fallback_create_watcher()
		segment_info = {'getcwd': os.getcwd}
		branch = partial(self.module.branch, pl=pl, create_watcher=create_watcher)
		# Clean repository: tree_status returns None.
		with replace_attr(self.module, 'guess', get_dummy_guess(status=lambda: None, directory='/tmp/tests')):
			with replace_attr(self.module, 'tree_status', lambda repo, pl: None):
				self.assertEqual(branch(segment_info=segment_info, status_colors=False), [{
					'highlight_groups': ['branch'],
					'contents': 'tests',
					'divider_highlight_group': None
				}])
				self.assertEqual(branch(segment_info=segment_info, status_colors=True), [{
					'contents': 'tests',
					'highlight_groups': ['branch_clean', 'branch'],
					'divider_highlight_group': None
				}])
		# Dirty repository: status/tree_status report 'D '.
		with replace_attr(self.module, 'guess', get_dummy_guess(status=lambda: 'D ', directory='/tmp/tests')):
			with replace_attr(self.module, 'tree_status', lambda repo, pl: 'D '):
				self.assertEqual(branch(segment_info=segment_info, status_colors=False), [{
					'highlight_groups': ['branch'],
					'contents': 'tests',
					'divider_highlight_group': None
				}])
				self.assertEqual(branch(segment_info=segment_info, status_colors=True), [{
					'contents': 'tests',
					'highlight_groups': ['branch_dirty', 'branch'],
					'divider_highlight_group': None
				}])
			# Outside the tree_status replacement: with status_colors=False
			# the status is not consulted, so the plain group is used.
			self.assertEqual(branch(segment_info=segment_info, status_colors=False), [{
				'highlight_groups': ['branch'],
				'contents': 'tests',
				'divider_highlight_group': None
			}])
		# No repository detected at all.
		with replace_attr(self.module, 'guess', lambda path, create_watcher: None):
			self.assertEqual(branch(segment_info=segment_info, status_colors=False), None)
		# ignore_statuses: statuses listed there do not mark the branch dirty.
		with replace_attr(self.module, 'guess', get_dummy_guess(status=lambda: 'U')):
			with replace_attr(self.module, 'tree_status', lambda repo, pl: 'U'):
				self.assertEqual(branch(segment_info=segment_info, status_colors=False, ignore_statuses=['U']), [{
					'highlight_groups': ['branch'],
					'contents': 'tests',
					'divider_highlight_group': None
				}])
				self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['DU']), [{
					'highlight_groups': ['branch_dirty', 'branch'],
					'contents': 'tests',
					'divider_highlight_group': None
				}])
				self.assertEqual(branch(segment_info=segment_info, status_colors=True), [{
					'highlight_groups': ['branch_dirty', 'branch'],
					'contents': 'tests',
					'divider_highlight_group': None
				}])
				self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['U']), [{
					'highlight_groups': ['branch_clean', 'branch'],
					'contents': 'tests',
					'divider_highlight_group': None
				}])
class TestTime(TestCommon):
	'''Tests for powerline.segments.common.time segments.'''
	module_name = 'time'
	def test_date(self):
		pl = Pl()
		# Fake datetime whose strftime echoes the format string back, so the
		# assertions check which format the segment passes through.
		fake_datetime = Args(now=lambda: Args(strftime=lambda fmt: fmt))
		with replace_attr(self.module, 'datetime', fake_datetime):
			self.assertEqual(self.module.date(pl=pl), [{'contents': '%Y-%m-%d', 'highlight_groups': ['date'], 'divider_highlight_group': None}])
			self.assertEqual(self.module.date(pl=pl, format='%H:%M', istime=True), [{'contents': '%H:%M', 'highlight_groups': ['time', 'date'], 'divider_highlight_group': 'time:divider'}])
			actual = self.module.date(pl=pl, format='\u231a', istime=True)
			expected = [{'contents': '\u231a', 'highlight_groups': ['time', 'date'], 'divider_highlight_group': 'time:divider'}]
			# PyPy3 strftime may not handle non-ASCII formats.
			if python_implementation() == 'PyPy' and sys.version_info >= (3,):
				if actual != expected:
					raise SkipTest('Dates do not match, see https://bitbucket.org/pypy/pypy/issues/2161/pypy3-strftime-does-not-accept-unicode')
			self.assertEqual(actual, expected)
	def test_fuzzy_time(self):
		time = Args(hour=0, minute=45)
		pl = Pl()
		with replace_attr(self.module, 'datetime', Args(now=lambda: time)):
			self.assertEqual(self.module.fuzzy_time(pl=pl), 'quarter to one')
			time.hour = 23
			time.minute = 59
			self.assertEqual(self.module.fuzzy_time(pl=pl), 'round about midnight')
			# Default and unicode_text=False produce plain ASCII output.
			for extra_kwargs in ({}, {'unicode_text': False}):
				time.minute = 33
				self.assertEqual(self.module.fuzzy_time(pl=pl, **extra_kwargs), 'twenty-five to twelve')
				time.minute = 60
				self.assertEqual(self.module.fuzzy_time(pl=pl, **extra_kwargs), 'twelve o\'clock')
			# unicode_text=True uses a real hyphen (U+2010) and right single
			# quotation mark (U+2019).
			time.minute = 33
			self.assertEqual(self.module.fuzzy_time(pl=pl, unicode_text=True), 'twenty‐five to twelve')
			time.minute = 60
			self.assertEqual(self.module.fuzzy_time(pl=pl, unicode_text=True), 'twelve o’clock')
class TestSys(TestCommon):
    """Tests for the ``sys`` segment module (uptime, load average, CPU load)."""

    # Name of the segment module loaded by TestCommon.
    module_name = 'sys'

    def test_uptime(self):
        """uptime() formats seconds into d/h/m/s units, honoring shorten_len."""
        pl = Pl()
        with replace_attr(self.module, '_get_uptime', lambda: 259200):
            self.assertEqual(self.module.uptime(pl=pl), [{'contents': '3d', 'divider_highlight_group': 'background:divider'}])
        with replace_attr(self.module, '_get_uptime', lambda: 93784):
            self.assertEqual(self.module.uptime(pl=pl), [{'contents': '1d 2h 3m', 'divider_highlight_group': 'background:divider'}])
            self.assertEqual(self.module.uptime(pl=pl, shorten_len=4), [{'contents': '1d 2h 3m 4s', 'divider_highlight_group': 'background:divider'}])
        with replace_attr(self.module, '_get_uptime', lambda: 65536):
            self.assertEqual(self.module.uptime(pl=pl), [{'contents': '18h 12m 16s', 'divider_highlight_group': 'background:divider'}])
            self.assertEqual(self.module.uptime(pl=pl, shorten_len=2), [{'contents': '18h 12m', 'divider_highlight_group': 'background:divider'}])
            self.assertEqual(self.module.uptime(pl=pl, shorten_len=1), [{'contents': '18h', 'divider_highlight_group': 'background:divider'}])

        def _get_uptime():
            # Simulate a platform where uptime cannot be determined.
            raise NotImplementedError

        # When the backend raises, the segment yields nothing.
        with replace_attr(self.module, '_get_uptime', _get_uptime):
            self.assertEqual(self.module.uptime(pl=pl), None)

    def test_system_load(self):
        """system_load() maps the three load averages onto gradient levels scaled by CPU count."""
        pl = Pl()
        with replace_module_module(self.module, 'os', getloadavg=lambda: (7.5, 3.5, 1.5)):
            with replace_attr(self.module, '_cpu_count', lambda: 2):
                self.assertEqual(self.module.system_load(pl=pl), [
                    {'contents': '7.5 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
                    {'contents': '3.5 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0},
                    {'contents': '1.5', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 0}
                ])
                # Custom format plus tightened thresholds shifts the gradient levels.
                self.assertEqual(self.module.system_load(pl=pl, format='{avg:.0f}', threshold_good=0, threshold_bad=1), [
                    {'contents': '8 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
                    {'contents': '4 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
                    {'contents': '2', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0}
                ])

    def test_cpu_load_percent(self):
        """cpu_load_percent() reports psutil.cpu_percent with a configurable format."""
        try:
            __import__('psutil')
        except ImportError as e:
            raise SkipTest('Failed to import psutil: {0}'.format(e))
        pl = Pl()
        with replace_module_module(self.module, 'psutil', cpu_percent=lambda **kwargs: 52.3):
            self.assertEqual(self.module.cpu_load_percent(pl=pl), [{
                'contents': '52%',
                'gradient_level': 52.3,
                'highlight_groups': ['cpu_load_percent_gradient', 'cpu_load_percent'],
            }])
            self.assertEqual(self.module.cpu_load_percent(pl=pl, format='{0:.1f}%'), [{
                'contents': '52.3%',
                'gradient_level': 52.3,
                'highlight_groups': ['cpu_load_percent_gradient', 'cpu_load_percent'],
            }])
class TestWthr(TestCommon):
    """Tests for the ``wthr`` (weather) segment module."""

    # Name of the segment module loaded by TestCommon.
    module_name = 'wthr'

    def test_weather(self):
        """weather() renders condition + temperature; options tune units, icons and gradient range.

        `urllib_read` is a test fixture returning canned HTTP responses, so the
        expected condition is always cloudy at -9°C (19°C for the Moscow query).
        """
        pl = Pl()
        with replace_attr(self.module, 'urllib_read', urllib_read):
            self.assertEqual(self.module.weather(pl=pl), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
            ])
            # Widening/narrowing the [temp_coldest, temp_hottest] range clamps the gradient.
            self.assertEqual(self.module.weather(pl=pl, temp_coldest=0, temp_hottest=100), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 0}
            ])
            self.assertEqual(self.module.weather(pl=pl, temp_coldest=-100, temp_hottest=-50), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 100}
            ])
            # Icon overrides: keys may target either the generic or the specific condition.
            self.assertEqual(self.module.weather(pl=pl, icons={'cloudy': 'o'}), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'o '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
            ])
            self.assertEqual(self.module.weather(pl=pl, icons={'partly_cloudy_day': 'x'}), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'x '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
            ])
            # Unit conversion: -9°C == 16°F == 264K (rounded).
            self.assertEqual(self.module.weather(pl=pl, unit='F'), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '16°F', 'gradient_level': 30.0}
            ])
            self.assertEqual(self.module.weather(pl=pl, unit='K'), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '264K', 'gradient_level': 30.0}
            ])
            self.assertEqual(self.module.weather(pl=pl, temp_format='{temp:.1e}C'), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9.0e+00C', 'gradient_level': 30.0}
            ])
        with replace_attr(self.module, 'urllib_read', urllib_read):
            # Exercise the startup/shutdown threaded-segment lifecycle with an
            # explicit location query, including a per-call override.
            self.module.weather.startup(pl=pl, location_query='Meppen,06,DE')
            self.assertEqual(self.module.weather(pl=pl), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
            ])
            self.assertEqual(self.module.weather(pl=pl, location_query='Moscow,RU'), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '19°C', 'gradient_level': 70.0}
            ])
            self.module.weather.shutdown()
class TestI3WM(TestCase):
    """Tests for the i3 window-manager segments (workspace list and binding mode)."""

    def test_workspaces(self):
        """workspaces() filters by state/output and strips workspace-name prefixes."""
        pl = Pl()
        # Fake i3 IPC connection: four workspaces across three outputs with
        # varying focused/urgent/visible flags.
        with replace_attr(i3wm, 'conn', Args(get_workspaces=lambda: iter([
            {'name': '1: w1', 'output': 'LVDS1', 'focused': False, 'urgent': False, 'visible': False},
            {'name': '2: w2', 'output': 'LVDS1', 'focused': False, 'urgent': False, 'visible': True},
            {'name': '3: w3', 'output': 'HDMI1', 'focused': False, 'urgent': True, 'visible': True},
            {'name': '4: w4', 'output': 'DVI01', 'focused': True, 'urgent': True, 'visible': True},
        ]))):
            segment_info = {}
            self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info), [
                {'contents': '1: w1', 'highlight_groups': ['workspace']},
                {'contents': '2: w2', 'highlight_groups': ['w_visible', 'workspace']},
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
                {'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            # only_show=None is equivalent to showing everything.
            self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=None), [
                {'contents': '1: w1', 'highlight_groups': ['workspace']},
                {'contents': '2: w2', 'highlight_groups': ['w_visible', 'workspace']},
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
                {'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['focused', 'urgent']), [
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
                {'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible']), [
                {'contents': '2: w2', 'highlight_groups': ['w_visible', 'workspace']},
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
                {'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            # strip=3 removes the first three characters ('N: ') of each name.
            self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible'], strip=3), [
                {'contents': 'w2', 'highlight_groups': ['w_visible', 'workspace']},
                {'contents': 'w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
                {'contents': 'w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['focused', 'urgent'], output='DVI01'), [
                {'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible'], output='HDMI1'), [
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
            ])
            self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible'], strip=3, output='LVDS1'), [
                {'contents': 'w2', 'highlight_groups': ['w_visible', 'workspace']},
            ])
            # An explicit output argument takes precedence over segment_info['output'];
            # without it, segment_info['output'] is used.
            segment_info['output'] = 'LVDS1'
            self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible'], output='HDMI1'), [
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
            ])
            self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible'], strip=3), [
                {'contents': 'w2', 'highlight_groups': ['w_visible', 'workspace']},
            ])

    def test_mode(self):
        """mode() hides the default binding mode unless remapped via `names`."""
        pl = Pl()
        self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'default'}), None)
        self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'test'}), 'test')
        self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'default'}, names={'default': 'test'}), 'test')
        self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'test'}, names={'default': 'test', 'test': 't'}), 't')
class TestMail(TestCommon):
    """Placeholder tests for the ``mail`` segment module."""

    # Name of the segment module loaded by TestCommon.
    module_name = 'mail'

    def test_email_imap_alert(self):
        """Not yet implemented; would require an IMAP server fixture."""
        # TODO
        pass
class TestPlayers(TestCommon):
    """Placeholder tests for the ``players`` (media player) segment module."""

    # Name of the segment module loaded by TestCommon.
    module_name = 'players'

    def test_now_playing(self):
        """Not yet implemented; would require a media-player fixture."""
        # TODO
        pass
class TestBat(TestCommon):
    """Tests for the ``bat`` (battery) segment module."""

    # Name of the segment module loaded by TestCommon.
    module_name = 'bat'

    def test_battery(self):
        """battery() renders capacity as percent, custom format, or gamified hearts."""
        pl = Pl()

        def _get_battery_status(pl):
            # 86% capacity, AC offline.
            return 86, False

        with replace_attr(self.module, '_get_battery_status', _get_battery_status):
            # gradient_level is 100 - capacity (fuller battery => lower alert level).
            self.assertEqual(self.module.battery(pl=pl), [{
                'contents': ' 86%',
                'highlight_groups': ['battery_gradient', 'battery'],
                'gradient_level': 14,
            }])
            self.assertEqual(self.module.battery(pl=pl, format='{capacity:.2f}'), [{
                'contents': '0.86',
                'highlight_groups': ['battery_gradient', 'battery'],
                'gradient_level': 14,
            }])
            self.assertEqual(self.module.battery(pl=pl, steps=7), [{
                'contents': ' 86%',
                'highlight_groups': ['battery_gradient', 'battery'],
                'gradient_level': 14,
            }])
            # Gamified mode yields three sub-segments: AC state, full hearts, empty hearts.
            self.assertEqual(self.module.battery(pl=pl, gamify=True), [
                {
                    'contents': ' ',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_offline', 'battery_ac_state', 'battery_gradient', 'battery'],
                    'gradient_level': 0
                },
                {
                    'contents': 'OOOO',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_full', 'battery_gradient', 'battery'],
                    'gradient_level': 0
                },
                {
                    'contents': 'O',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_empty', 'battery_gradient', 'battery'],
                    'gradient_level': 100
                }
            ])
            # NOTE: steps is deliberately passed as the string '10' here —
            # presumably exercising coercion in the segment; confirm before changing.
            self.assertEqual(self.module.battery(pl=pl, gamify=True, full_heart='+', empty_heart='-', steps='10'), [
                {
                    'contents': ' ',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_offline', 'battery_ac_state', 'battery_gradient', 'battery'],
                    'gradient_level': 0
                },
                {
                    'contents': '++++++++',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_full', 'battery_gradient', 'battery'],
                    'gradient_level': 0
                },
                {
                    'contents': '--',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_empty', 'battery_gradient', 'battery'],
                    'gradient_level': 100
                }
            ])

    def test_battery_with_ac_online(self):
        """With AC online the `online` marker prefixes the percentage."""
        pl = Pl()

        def _get_battery_status(pl):
            return 86, True

        with replace_attr(self.module, '_get_battery_status', _get_battery_status):
            self.assertEqual(self.module.battery(pl=pl, online='C', offline=' '), [
                {
                    'contents': 'C 86%',
                    'highlight_groups': ['battery_gradient', 'battery'],
                    'gradient_level': 14,
                }])

    def test_battery_with_ac_offline(self):
        """With AC offline the `offline` marker prefixes the percentage."""
        pl = Pl()

        def _get_battery_status(pl):
            return 86, False

        with replace_attr(self.module, '_get_battery_status', _get_battery_status):
            self.assertEqual(self.module.battery(pl=pl, online='C', offline=' '), [
                {
                    'contents': ' 86%',
                    'highlight_groups': ['battery_gradient', 'battery'],
                    'gradient_level': 14,
                }])
class TestVim(TestCase):
    def test_mode(self):
        """mode() names the current vim mode, with per-mode overrides (incl. ^V)."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info), 'NORMAL')
        # Overrides for modes other than the current one have no effect.
        self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info, override={'i': 'INS'}), 'NORMAL')
        self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info, override={'n': 'NORM'}), 'NORM')
        with vim_module._with('mode', 'i') as segment_info:
            self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info), 'INSERT')
        # chr(ord('V') - 0x40) is CTRL-V: visual-block mode, keyed as '^V' in overrides.
        with vim_module._with('mode', chr(ord('V') - 0x40)) as segment_info:
            self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info), 'V-BLCK')
            self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info, override={'^V': 'VBLK'}), 'VBLK')
    def test_visual_range(self):
        """visual_range() formats the selection: 'W x H' for block, 'L:n'/'C:n' for line/char modes."""
        pl = Pl()
        vr = partial(self.vim.visual_range, pl=pl)
        vim_module.current.window.cursor = [0, 0]
        try:
            # Non-visual mode produces an empty string.
            with vim_module._with('mode', 'i') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), '')
            # Visual-block ('^V') and select-block ('^S'): columns x lines.
            with vim_module._with('mode', '^V') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), '1 x 1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), '5 x 5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), '5 x 4')
            with vim_module._with('mode', '^S') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), '1 x 1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), '5 x 5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), '5 x 4')
            # Linewise visual/select ('V'/'S'): line count only.
            with vim_module._with('mode', 'V') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), 'L:1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
            with vim_module._with('mode', 'S') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), 'L:1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
            # Charwise visual/select ('v'/'s'): 'C:n' within one line, 'L:n' across lines.
            with vim_module._with('mode', 'v') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), 'C:1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
            with vim_module._with('mode', 's') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), 'C:1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
        finally:
            vim_module._close(1)
    def test_modified_indicator(self):
        """modified_indicator() shows '+' (or custom text) only when the buffer is dirty."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info), None)
        # Mutating the buffer marks it modified.
        segment_info['buffer'][0] = 'abc'
        try:
            self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info), '+')
            self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info, text='-'), '-')
        finally:
            # Wipe the modified buffer so later tests start clean.
            vim_module._bw(segment_info['bufnr'])
    def test_paste_indicator(self):
        """paste_indicator() shows 'PASTE' (or custom text) only when 'paste' is set."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.paste_indicator(pl=pl, segment_info=segment_info), None)
        with vim_module._with('options', paste=1):
            self.assertEqual(self.vim.paste_indicator(pl=pl, segment_info=segment_info), 'PASTE')
            self.assertEqual(self.vim.paste_indicator(pl=pl, segment_info=segment_info, text='P'), 'P')
    def test_readonly_indicator(self):
        """readonly_indicator() shows 'RO' (or custom text) only for read-only buffers."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.readonly_indicator(pl=pl, segment_info=segment_info), None)
        with vim_module._with('bufoptions', readonly=1):
            self.assertEqual(self.vim.readonly_indicator(pl=pl, segment_info=segment_info), 'RO')
            self.assertEqual(self.vim.readonly_indicator(pl=pl, segment_info=segment_info, text='L'), 'L')
    def test_file_scheme(self):
        """file_scheme() extracts the 'scheme:' prefix of special buffer names, else None."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.file_scheme(pl=pl, segment_info=segment_info), None)
        # A plain path (even with unusual unicode characters) has no scheme.
        with vim_module._with('buffer', '/tmp/’’/abc') as segment_info:
            self.assertEqual(self.vim.file_scheme(pl=pl, segment_info=segment_info), None)
        with vim_module._with('buffer', 'zipfile:/tmp/abc.zip::abc/abc.vim') as segment_info:
            self.assertEqual(self.vim.file_scheme(pl=pl, segment_info=segment_info), 'zipfile')
    def test_file_directory(self):
        """file_directory() yields the buffer's directory: unicode kept, bytes escaped,
        $HOME collapsed to '~', and URL-style schemes optionally stripped."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), None)
        # Pin HOME so '~' substitution is deterministic; restored on exit.
        with replace_env('HOME', '/home/foo', os.environ):
            with vim_module._with('buffer', '/tmp/’’/abc') as segment_info:
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/’’/')
            # Undecodable bytes are rendered as <xx> escapes.
            with vim_module._with('buffer', b'/tmp/\xFF\xFF/abc') as segment_info:
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/<ff><ff>/')
            with vim_module._with('buffer', '/tmp/abc') as segment_info:
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/')
                os.environ['HOME'] = '/tmp'
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '~/')
            with vim_module._with('buffer', 'zipfile:/tmp/abc.zip::abc/abc.vim') as segment_info:
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=False), 'zipfile:/tmp/abc.zip::abc/')
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=True), '/tmp/abc.zip::abc/')
                # remove_scheme defaults to True.
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/abc.zip::abc/')
                # With HOME=/tmp, scheme'd paths must NOT be '~'-collapsed.
                os.environ['HOME'] = '/tmp'
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=False), 'zipfile:/tmp/abc.zip::abc/')
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=True), '/tmp/abc.zip::abc/')
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/abc.zip::abc/')
    def test_file_name(self):
        """file_name() yields the basename; display_no_file controls the unnamed-buffer case."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), None)
        self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True), [
            {'contents': '[No file]', 'highlight_groups': ['file_name_no_file', 'file_name']}
        ])
        self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True, no_file_text='X'), [
            {'contents': 'X', 'highlight_groups': ['file_name_no_file', 'file_name']}
        ])
        with vim_module._with('buffer', '/tmp/abc') as segment_info:
            self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), 'abc')
        with vim_module._with('buffer', '/tmp/’’') as segment_info:
            self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), '’’')
        # Undecodable bytes are rendered as <xx> escapes.
        with vim_module._with('buffer', b'/tmp/\xFF\xFF') as segment_info:
            self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), '<ff><ff>')
    def test_file_size(self):
        """file_size() reports '0 B' for both unnamed buffers and an empty file on disk."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.file_size(pl=pl, segment_info=segment_info), '0 B')
        # 'empty' is a zero-byte fixture file shipped next to this test module.
        with vim_module._with('buffer', os.path.join(os.path.dirname(__file__), 'empty')) as segment_info:
            self.assertEqual(self.vim.file_size(pl=pl, segment_info=segment_info), '0 B')
    def test_file_opts(self):
        """file_format/file_encoding/file_type mirror the corresponding buffer options."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.file_format(pl=pl, segment_info=segment_info), [
            {'divider_highlight_group': 'background:divider', 'contents': 'unix'}
        ])
        self.assertEqual(self.vim.file_encoding(pl=pl, segment_info=segment_info), [
            {'divider_highlight_group': 'background:divider', 'contents': 'utf-8'}
        ])
        # No filetype set => segment yields nothing.
        self.assertEqual(self.vim.file_type(pl=pl, segment_info=segment_info), None)
        with vim_module._with('bufoptions', filetype='python'):
            self.assertEqual(self.vim.file_type(pl=pl, segment_info=segment_info), [
                {'divider_highlight_group': 'background:divider', 'contents': 'python'}
            ])
    def test_window_title(self):
        """window_title() surfaces w:quickfix_title when present, else None."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.window_title(pl=pl, segment_info=segment_info), None)
        with vim_module._with('wvars', quickfix_title='Abc'):
            self.assertEqual(self.vim.window_title(pl=pl, segment_info=segment_info), 'Abc')
    def test_line_percent(self):
        """line_percent() reports cursor position as a percentage, optionally with gradient."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        # 100 generated lines + the original trailing line => 101 lines total.
        segment_info['buffer'][0:-1] = [str(i) for i in range(100)]
        try:
            self.assertEqual(self.vim.line_percent(pl=pl, segment_info=segment_info), '1')
            vim_module._set_cursor(50, 0)
            self.assertEqual(self.vim.line_percent(pl=pl, segment_info=segment_info), '50')
            self.assertEqual(self.vim.line_percent(pl=pl, segment_info=segment_info, gradient=True), [
                {'contents': '50', 'highlight_groups': ['line_percent_gradient', 'line_percent'], 'gradient_level': 50 * 100.0 / 101}
            ])
        finally:
            vim_module._bw(segment_info['bufnr'])
    def test_line_count(self):
        """line_count() is the buffer length and is independent of the cursor line."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        # 99 generated lines + the original trailing line => 100 lines total.
        segment_info['buffer'][0:-1] = [str(i) for i in range(99)]
        try:
            self.assertEqual(self.vim.line_count(pl=pl, segment_info=segment_info), '100')
            vim_module._set_cursor(50, 0)
            self.assertEqual(self.vim.line_count(pl=pl, segment_info=segment_info), '100')
        finally:
            vim_module._bw(segment_info['bufnr'])
    def test_position(self):
        """position() renders window position as a percent or Top/Bot/All labels
        (customizable via position_strings), with optional gradient output."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        try:
            segment_info['buffer'][0:-1] = [str(i) for i in range(99)]
            vim_module._set_cursor(49, 0)
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info), '50%')
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, gradient=True), [
                {'contents': '50%', 'highlight_groups': ['position_gradient', 'position'], 'gradient_level': 50.0}
            ])
            vim_module._set_cursor(0, 0)
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info), 'Top')
            vim_module._set_cursor(97, 0)
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, position_strings={'top': 'Comienzo', 'bottom': 'Final', 'all': 'Todo'}), 'Final')
            # Shrink the buffer so the whole file fits in the window => 'All'/'Todo'.
            segment_info['buffer'][0:-1] = [str(i) for i in range(2)]
            vim_module._set_cursor(0, 0)
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, position_strings={'top': 'Comienzo', 'bottom': 'Final', 'all': 'Todo'}), 'Todo')
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, gradient=True), [
                {'contents': 'All', 'highlight_groups': ['position_gradient', 'position'], 'gradient_level': 0.0}
            ])
        finally:
            vim_module._bw(segment_info['bufnr'])
    def test_cursor_current(self):
        """line/col/virtcol segments report 1-based cursor coordinates; virtcol
        additionally grades against an 80-column width unless gradient=False."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.line_current(pl=pl, segment_info=segment_info), '1')
        self.assertEqual(self.vim.col_current(pl=pl, segment_info=segment_info), '1')
        self.assertEqual(self.vim.virtcol_current(pl=pl, segment_info=segment_info), [{
            'highlight_groups': ['virtcol_current_gradient', 'virtcol_current', 'col_current'], 'contents': '1', 'gradient_level': 100.0 / 80,
        }])
        self.assertEqual(self.vim.virtcol_current(pl=pl, segment_info=segment_info, gradient=False), [{
            'highlight_groups': ['virtcol_current', 'col_current'], 'contents': '1',
        }])
    def test_modified_buffers(self):
        """modified_buffers() yields nothing when no buffer is modified."""
        pl = Pl()
        self.assertEqual(self.vim.modified_buffers(pl=pl), None)
    def test_branch(self):
        """branch() shows the VCS branch; status_colors picks clean/dirty groups and
        ignore_statuses filters which statuses count as dirty."""
        pl = Pl()
        create_watcher = get_fallback_create_watcher()
        branch = partial(self.vim.branch, pl=pl, create_watcher=create_watcher)
        with vim_module._with('buffer', '/foo') as segment_info:
            # Clean tree: no status => 'branch_clean' when status_colors is on.
            with replace_attr(self.vcs, 'guess', get_dummy_guess(status=lambda: None)):
                with replace_attr(self.vcs, 'tree_status', lambda repo, pl: None):
                    self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch'], 'contents': 'foo'}
                    ])
                    self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_clean', 'branch'], 'contents': 'foo'}
                    ])
            # Dirty tree ('DU' status) => 'branch_dirty'.
            with replace_attr(self.vcs, 'guess', get_dummy_guess(status=lambda: 'DU')):
                with replace_attr(self.vcs, 'tree_status', lambda repo, pl: 'DU'):
                    self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch'], 'contents': 'foo'}
                    ])
                    self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_dirty', 'branch'], 'contents': 'foo'}
                    ])
            # 'U' (untracked-only) status counts as dirty unless listed in ignore_statuses.
            with replace_attr(self.vcs, 'guess', get_dummy_guess(status=lambda: 'U')):
                with replace_attr(self.vcs, 'tree_status', lambda repo, pl: 'U'):
                    self.assertEqual(branch(segment_info=segment_info, status_colors=False, ignore_statuses=['U']), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch'], 'contents': 'foo'}
                    ])
                    self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['DU']), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_dirty', 'branch'], 'contents': 'foo'}
                    ])
                    self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_dirty', 'branch'], 'contents': 'foo'}
                    ])
                    self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['U']), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_clean', 'branch'], 'contents': 'foo'}
                    ])
    def test_file_vcs_status(self):
        """file_vcs_status() shows the per-file VCS status, skipping special buffers."""
        pl = Pl()
        create_watcher = get_fallback_create_watcher()
        file_vcs_status = partial(self.vim.file_vcs_status, pl=pl, create_watcher=create_watcher)
        with vim_module._with('buffer', '/foo') as segment_info:
            with replace_attr(self.vim, 'guess', get_dummy_guess(status=lambda file: 'M')):
                self.assertEqual(file_vcs_status(segment_info=segment_info), [
                    {'highlight_groups': ['file_vcs_status_M', 'file_vcs_status'], 'contents': 'M'}
                ])
            with replace_attr(self.vim, 'guess', get_dummy_guess(status=lambda file: None)):
                self.assertEqual(file_vcs_status(segment_info=segment_info), None)
        with vim_module._with('buffer', '/bar') as segment_info:
            # buftype=nofile buffers are excluded even when the VCS reports a status.
            with vim_module._with('bufoptions', buftype='nofile'):
                with replace_attr(self.vim, 'guess', get_dummy_guess(status=lambda file: 'M')):
                    self.assertEqual(file_vcs_status(segment_info=segment_info), None)
    def test_trailing_whitespace(self):
        """trailing_whitespace() reports the first offending line number and tracks edits.

        Each check is performed twice — presumably to exercise the segment's
        caching of the scan result; confirm against the segment implementation.
        """
        pl = Pl()
        with vim_module._with('buffer', 'tws') as segment_info:
            trailing_whitespace = partial(self.vim.trailing_whitespace, pl=pl, segment_info=segment_info)
            self.assertEqual(trailing_whitespace(), None)
            self.assertEqual(trailing_whitespace(), None)
            vim_module.current.buffer[0] = ' '
            self.assertEqual(trailing_whitespace(), [{
                'highlight_groups': ['trailing_whitespace', 'warning'],
                'contents': '1',
            }])
            self.assertEqual(trailing_whitespace(), [{
                'highlight_groups': ['trailing_whitespace', 'warning'],
                'contents': '1',
            }])
            vim_module.current.buffer[0] = ''
            self.assertEqual(trailing_whitespace(), None)
            self.assertEqual(trailing_whitespace(), None)
    def test_tabnr(self):
        """tabnr() shows the tab number; show_current=False hides the current tab."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.tabnr(pl=pl, segment_info=segment_info, show_current=True), '1')
        self.assertEqual(self.vim.tabnr(pl=pl, segment_info=segment_info, show_current=False), None)
    def test_tab(self):
        """tab() emits literal tabline click-target codes: '%NT' to open, '%T' to close."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.tab(pl=pl, segment_info=segment_info), [{
            'contents': None,
            'literal_contents': (0, '%1T'),
        }])
        self.assertEqual(self.vim.tab(pl=pl, segment_info=segment_info, end=True), [{
            'contents': None,
            'literal_contents': (0, '%T'),
        }])
    def test_bufnr(self):
        """bufnr() shows the buffer number; show_current=False hides the current buffer."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.bufnr(pl=pl, segment_info=segment_info, show_current=True), str(segment_info['bufnr']))
        self.assertEqual(self.vim.bufnr(pl=pl, segment_info=segment_info, show_current=False), None)
    def test_winnr(self):
        """winnr() shows the window number; show_current=False hides the current window."""
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.winnr(pl=pl, segment_info=segment_info, show_current=True), str(segment_info['winnr']))
        self.assertEqual(self.vim.winnr(pl=pl, segment_info=segment_info, show_current=False), None)
    def test_segment_info(self):
        """tab_modified_indicator() reflects ANY modified buffer in the tab, unlike
        modified_indicator() which only checks the current buffer."""
        pl = Pl()
        with vim_module._with('tabpage'):
            with vim_module._with('buffer', '1') as segment_info:
                self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), None)
                vim_module.current.buffer[0] = ' '
                self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), [{
                    'contents': '+',
                    'highlight_groups': ['tab_modified_indicator', 'modified_indicator'],
                }])
                # Undo clears the modified flag again.
                vim_module._undo()
                self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), None)
                # Switch to a new buffer, then dirty the OLD one: the tab-level
                # indicator fires while the buffer-level one stays silent.
                old_buffer = vim_module.current.buffer
                vim_module._new('2')
                segment_info = vim_module._get_segment_info()
                self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), None)
                old_buffer[0] = ' '
                self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info), None)
                self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), [{
                    'contents': '+',
                    'highlight_groups': ['tab_modified_indicator', 'modified_indicator'],
                }])
def test_csv_col_current(self):
    """csv_col_current reports the CSV column under the cursor and its name."""
    pl = Pl()
    segment_info = vim_module._get_segment_info()

    def csv_col_current(**kwargs):
        # Drop cached CSV dialect/header data so every call re-parses.
        self.vim.csv_cache and self.vim.csv_cache.clear()
        return self.vim.csv_col_current(pl=pl, segment_info=segment_info, **kwargs)

    buffer = segment_info['buffer']
    try:
        # Not a csv buffer yet: the segment stays inactive.
        self.assertEqual(csv_col_current(), None)
        buffer.options['filetype'] = 'csv'
        # csv filetype set, but the buffer is still empty.
        self.assertEqual(csv_col_current(), None)
        buffer[:] = ['1;2;3', '4;5;6']
        vim_module._set_cursor(1, 1)
        self.assertEqual(csv_col_current(), [{
            'contents': '1', 'highlight_groups': ['csv:column_number', 'csv']
        }])
        vim_module._set_cursor(2, 3)
        self.assertEqual(csv_col_current(), [{
            'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
        }])
        vim_module._set_cursor(2, 3)
        # Without a header row the column "name" is just its number.
        self.assertEqual(csv_col_current(display_name=True), [{
            'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
        }, {
            'contents': ' (2)', 'highlight_groups': ['csv:column_name', 'csv']
        }])
        buffer[:0] = ['Foo;Bar;Baz']
        vim_module._set_cursor(2, 3)
        self.assertEqual(csv_col_current(), [{
            'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
        }, {
            'contents': ' (Bar)', 'highlight_groups': ['csv:column_name', 'csv']
        }])
        if sys.version_info < (2, 7):
            raise SkipTest('csv module in Python-2.6 does not handle multiline csv files well')
        # Multiline quoted field: a cursor inside the quoted cell still maps
        # to the correct column.
        buffer[len(buffer):] = ['1;"bc', 'def', 'ghi', 'jkl";3']
        vim_module._set_cursor(5, 1)
        self.assertEqual(csv_col_current(), [{
            'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
        }, {
            'contents': ' (Bar)', 'highlight_groups': ['csv:column_name', 'csv']
        }])
        vim_module._set_cursor(7, 6)
        self.assertEqual(csv_col_current(), [{
            'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
        }, {
            'contents': ' (Baz)', 'highlight_groups': ['csv:column_name', 'csv']
        }])
        # name_format controls how the column name is rendered.
        self.assertEqual(csv_col_current(name_format=' ({column_name:.1})'), [{
            'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
        }, {
            'contents': ' (B)', 'highlight_groups': ['csv:column_name', 'csv']
        }])
        self.assertEqual(csv_col_current(display_name=True, name_format=' ({column_name:.1})'), [{
            'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
        }, {
            'contents': ' (B)', 'highlight_groups': ['csv:column_name', 'csv']
        }])
        self.assertEqual(csv_col_current(display_name=False, name_format=' ({column_name:.1})'), [{
            'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
        }])
        self.assertEqual(csv_col_current(display_name=False), [{
            'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
        }])
    finally:
        vim_module._bw(segment_info['bufnr'])
@classmethod
def setUpClass(cls):
    """Import the vim and vcs segment modules with the stub vim on sys.path."""
    # The 'path' directory contains the fake vim module used by these tests.
    sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'path')))
    from powerline.segments import vim
    cls.vim = vim
    from powerline.segments.common import vcs
    cls.vcs = vcs
@classmethod
def tearDownClass(cls):
    """Remove the stub-vim path entry added by setUpClass."""
    sys.path.pop(0)
class TestPDB(TestCase):
    """Tests for the powerline PDB (debugger prompt) segments."""

    def test_current_line(self):
        pl = Pl()
        info = {'curframe': Args(f_lineno=10)}
        self.assertEqual(pdb.current_line(pl=pl, segment_info=info), '10')

    def test_current_file(self):
        pl = Pl()

        def cf(**kwargs):
            info = {'curframe': Args(f_code=Args(co_filename='/tmp/abc.py'))}
            return pdb.current_file(pl=pl, segment_info=info, **kwargs)

        self.assertEqual(cf(), 'abc.py')
        self.assertEqual(cf(basename=True), 'abc.py')
        self.assertEqual(cf(basename=False), '/tmp/abc.py')

    def test_current_code_name(self):
        pl = Pl()

        def ccn(**kwargs):
            info = {'curframe': Args(f_code=Args(co_name='<module>'))}
            return pdb.current_code_name(pl=pl, segment_info=info, **kwargs)

        self.assertEqual(ccn(), '<module>')

    def test_current_context(self):
        pl = Pl()

        def cc(**kwargs):
            info = {'curframe': Args(f_code=Args(
                co_name='<module>', co_filename='/tmp/abc.py'))}
            return pdb.current_context(pl=pl, segment_info=info, **kwargs)

        self.assertEqual(cc(), 'abc.py')

    def test_stack_depth(self):
        pl = Pl()

        def sd(**kwargs):
            info = {'pdb': Args(stack=[1, 2, 3]), 'initial_stack_length': 1}
            return pdb.stack_depth(pl=pl, segment_info=info, **kwargs)

        self.assertEqual(sd(), '2')
        self.assertEqual(sd(full_stack=False), '2')
        self.assertEqual(sd(full_stack=True), '3')
# Saved working directory, restored in tearDownModule.
old_cwd = None


def setUpModule():
    """chdir into this test file's directory so relative fixtures resolve."""
    global old_cwd
    global __file__
    old_cwd = os.getcwd()
    # Absolutize __file__ first: it may be relative to the original cwd.
    __file__ = os.path.abspath(__file__)
    os.chdir(os.path.dirname(__file__))


def tearDownModule():
    """Restore the working directory changed by setUpModule."""
    global old_cwd
    os.chdir(old_cwd)


if __name__ == '__main__':
    from tests import main
    main()
|
bezhermoso/powerline
|
tests/test_segments.py
|
Python
|
mit
| 79,449 | 0.022501 |
# -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2017-2020)
#
# This file is part of GWpy.
#
# GWpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWpy. If not, see <http://www.gnu.org/licenses/>.
"""Read/write WAV files using `scipy.signal.wavfile`
"""
import struct
import wave
import numpy
from scipy.io import wavfile
from .. import TimeSeries
from ...io import registry as io_registry
WAV_SIGNATURE = ('RIFF', 'WAVE')
def read(fobj, **kwargs):
    """Read a WAV file into a `TimeSeries`

    Parameters
    ----------
    fobj : `file`, `str`
        open file-like object or filename to read from

    **kwargs
        all keyword arguments are passed onto :func:`scipy.io.wavfile.read`

    See also
    --------
    scipy.io.wavfile.read
        for details on how the WAV file is actually read

    Examples
    --------
    >>> from gwpy.timeseries import TimeSeries
    >>> t = TimeSeries.read('test.wav')
    """
    rate, data = wavfile.read(fobj, **kwargs)
    series = TimeSeries(data, sample_rate=rate)
    return series
def write(series, output, scale=None):
    """Write a `TimeSeries` to a WAV file

    Parameters
    ----------
    series : `TimeSeries`
        the series to write

    output : `file`, `str`
        the file object or filename to write to

    scale : `float`, optional
        the factor to apply to scale the data to (-1.0, 1.0),
        pass `scale=1` to not apply any scale, otherwise
        the data will be auto-scaled

    See also
    --------
    scipy.io.wavfile.write
        for details on how the WAV file is actually written

    Examples
    --------
    >>> from gwpy.timeseries import TimeSeries
    >>> t = TimeSeries([1, 2, 3, 4, 5])
    >>> t = TimeSeries.write('test.wav')
    """
    fsamp = int(series.sample_rate.decompose().value)
    if scale is None:
        # auto-scale to unit peak amplitude; an all-zero series previously
        # divided by zero here and produced inf/nan samples, so fall back
        # to unity scale in that case
        peak = numpy.abs(series.value).max()
        scale = 1. / peak if peak else 1.
    data = (series.value * scale).astype('float32')
    return wavfile.write(output, fsamp, data)
def is_wav(origin, filepath, fileobj, *args, **kwargs):
    """Identify a file as WAV

    Returns `True` when the target looks like a RIFF/WAVE file, otherwise
    `False`.  See `astropy.io.registry` for details on how this function
    is used.
    """
    # pylint: disable=unused-argument
    if origin == 'read' and fileobj is not None:
        # sniff the 12-byte RIFF header, restoring the stream position after
        loc = fileobj.tell()
        fileobj.seek(0)
        try:
            riff, _, fmt = struct.unpack('<4sI4s', fileobj.read(12))
            if isinstance(riff, bytes):
                riff = riff.decode('utf-8')
                fmt = fmt.decode('utf-8')
            # ('RIFF', 'WAVE') is the module-level WAV_SIGNATURE pair
            return riff == 'RIFF' and fmt == 'WAVE'
        except (UnicodeDecodeError, struct.error):
            return False
        finally:
            fileobj.seek(loc)
    elif filepath is not None:
        return filepath.endswith(('.wav', '.wave'))
    else:
        # fall back to letting the wave module parse the target;
        # IndexError covers the case where no target was given at all
        try:
            wavf = wave.open(args[0])
        except (wave.Error, AttributeError, IndexError):
            return False
        else:
            wavf.close()  # previously leaked the open file handle
            return True
# Hook the WAV reader/writer/identifier into the unified I/O registry so
# that TimeSeries.read/.write dispatch here for the 'wav' format.
io_registry.register_reader('wav', TimeSeries, read)
io_registry.register_writer('wav', TimeSeries, write)
io_registry.register_identifier('wav', TimeSeries, is_wav)
|
gwpy/gwpy
|
gwpy/timeseries/io/wav.py
|
Python
|
gpl-3.0
| 3,595 | 0 |
# -*- encoding: utf-8 -*-
"""Test for Roles CLI
@Requirement: Filter
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: CLI
@TestType: Functional
@CaseImportance: High
@Upstream: No
"""
from robottelo.cli.base import CLIReturnCodeError
from robottelo.cli.factory import (
make_filter,
make_location,
make_org,
make_role,
)
from robottelo.cli.filter import Filter
from robottelo.cli.role import Role
from robottelo.decorators import tier1
from robottelo.test import APITestCase
class FilterTestCase(APITestCase):

    """Tests covering creation, update and deletion of role filters.

    NOTE(review): the module docstring advertises CLI tests, yet this class
    extends ``APITestCase`` -- confirm the intended base class.
    """

    @classmethod
    def setUpClass(cls):
        """Search for 'User' resource-type permissions. Set ``cls.perms``."""
        super(FilterTestCase, cls).setUpClass()
        cls.perms = [
            permission['name']
            for permission in Filter.available_permissions(
                {'resource-type': 'User'})
        ]

    def setUp(self):
        """Create a role that a filter would be assigned """
        super(FilterTestCase, self).setUp()
        self.role = make_role()

    @tier1
    def test_positive_create_with_permission(self):
        """Create a filter and assign it some permissions.

        @id: 6da6c5d3-2727-4eb7-aa15-9f7b6f91d3b2

        @Assert: The created filter has the assigned permissions.
        """
        # Assign filter to created role
        filter_ = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
        })
        # CLI output joins permission names with ", "
        self.assertEqual(
            set(filter_['permissions'].split(", ")),
            set(self.perms)
        )

    @tier1
    def test_positive_create_with_org(self):
        """Create a filter and assign it some permissions.

        @id: f6308192-0e1f-427b-a296-b285f6684691

        @Assert: The created filter has the assigned permissions.
        """
        org = make_org()
        # Assign filter to created role
        filter_ = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
            'organization-ids': org['id'],
        })
        # we expect here only one organization, i.e. first element
        self.assertEqual(filter_['organizations'][0], org['name'])

    @tier1
    def test_positive_create_with_loc(self):
        """Create a filter and assign it some permissions.

        @id: d7d1969a-cb30-4e97-a9a3-3a4aaf608795

        @Assert: The created filter has the assigned permissions.
        """
        loc = make_location()
        # Assign filter to created role
        filter_ = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
            'location-ids': loc['id'],
        })
        # we expect here only one location, i.e. first element
        self.assertEqual(filter_['locations'][0], loc['name'])

    @tier1
    def test_positive_delete(self):
        """Create a filter and delete it afterwards.

        @id: 97d1093c-0d49-454b-86f6-f5be87b32775

        @Assert: The deleted filter cannot be fetched.
        """
        filter_ = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
        })
        Filter.delete({'id': filter_['id']})
        with self.assertRaises(CLIReturnCodeError):
            Filter.info({'id': filter_['id']})

    @tier1
    def test_positive_delete_role(self):
        """Create a filter and delete the role it points at.

        @id: e2adb6a4-e408-4912-a32d-2bf2c43187d9

        @Assert: The filter cannot be fetched.
        """
        filter_ = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
        })
        # A filter depends on a role. Deleting a role implicitly deletes the
        # filter pointing at it.
        Role.delete({'id': self.role['id']})
        with self.assertRaises(CLIReturnCodeError):
            Role.info({'id': self.role['id']})
        with self.assertRaises(CLIReturnCodeError):
            Filter.info({'id': filter_['id']})

    @tier1
    def test_positive_update_permissions(self):
        """Create a filter and update its permissions.

        @id: 3d6a52d8-2f8f-4f97-a155-9b52888af16e

        @Assert: Permissions updated.
        """
        filter_ = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
        })
        # NOTE(review): new_perms is built with the exact same query as
        # cls.perms, so this "update" may be a no-op -- confirm whether a
        # distinct permission set was intended.
        new_perms = [
            permission['name']
            for permission in Filter.available_permissions(
                {'resource-type': 'User'})
        ]
        Filter.update({
            'id': filter_['id'],
            'permissions': new_perms
        })
        filter_ = Filter.info({'id': filter_['id']})
        self.assertEqual(
            set(filter_['permissions'].split(", ")),
            set(new_perms)
        )

    @tier1
    def test_positive_update_role(self):
        """Create a filter and assign it to another role.

        @id: 2950b3a1-2bce-447f-9df2-869b1d10eaf5

        @Assert: Filter is created and assigned to new role.
        """
        filter_ = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
        })
        # Update with another role
        new_role = make_role()
        Filter.update({
            'id': filter_['id'],
            'role-id': new_role['id'],
        })
        filter_ = Filter.info({'id': filter_['id']})
        self.assertEqual(filter_['role'], new_role['name'])

    @tier1
    def test_positive_update_org_loc(self):
        """Create a filter and assign it to another organization and location.

        @id: 9bb59109-9701-4ef3-95c6-81f387d372da

        @Assert: Filter is created and assigned to new org and loc.
        """
        org = make_org()
        loc = make_location()
        filter_ = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
            'organization-ids': org['id'],
            'location-ids': loc['id']
        })
        # Update org and loc
        new_org = make_org()
        new_loc = make_location()
        Filter.update({
            'id': filter_['id'],
            'permissions': self.perms,
            'organization-ids': new_org['id'],
            'location-ids': new_loc['id']
        })
        filter_ = Filter.info({'id': filter_['id']})
        # We expect here only one organization and location
        self.assertEqual(filter_['organizations'][0], new_org['name'])
        self.assertEqual(filter_['locations'][0], new_loc['name'])
|
Ichimonji10/robottelo
|
tests/foreman/cli/test_filter.py
|
Python
|
gpl-3.0
| 6,479 | 0 |
if __name__ == '__main__':
s = input()
is_list = list(zip(*[[c.isalnum(), c.isalpha(), c.isdigit(), c.islower(), c.isupper()] for c in s]))
print_list = [True if True in is_result else False for is_result in is_list]
for result in print_list:
print(result)
|
nifannn/HackerRank
|
Practice/Python/Strings/string_validators.py
|
Python
|
mit
| 281 | 0.007117 |
#!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARAMETERS = None
ADB_CMD = "adb"
def doCMD(cmd):
    """Run *cmd* in a shell, echoing its output line by line.

    Returns a ``(return_code, output_lines)`` tuple; return_code is the
    process exit status (or 1 if it never ran to completion).
    """
    # Do not need handle timeout in this short script, let tool do it
    print "-->> \"%s\"" % cmd
    output = []
    cmd_return_code = 1
    cmd_proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
    while True:
        output_line = cmd_proc.stdout.readline().strip("\r\n")
        cmd_return_code = cmd_proc.poll()
        # Stop once the process has exited and its output is drained.
        if output_line == '' and cmd_return_code is not None:
            break
        sys.stdout.write("%s\n" % output_line)
        sys.stdout.flush()
        output.append(output_line)
    return (cmd_return_code, output)
def uninstResources():
    """Remove the pushed test resources from the device's sdcard."""
    action_status = True
    cmd = "%s -s %s shell rm -r /sdcard/device_files" % (
        ADB_CMD, PARAMETERS.device)
    # NOTE(review): the command's outcome is ignored, so this always reports
    # success -- confirm that best-effort cleanup is intentional.
    (return_code, output) = doCMD(cmd)
    return action_status
def instResources():
    """Push the local device_files directory to the device's sdcard."""
    action_status = True
    source_path = os.path.join(SCRIPT_DIR, "device_files")
    # Nothing to push (and no failure) when the directory is absent.
    if os.path.exists(source_path):
        cmd = "%s -s %s push %s /sdcard/device_files" % (
            ADB_CMD, PARAMETERS.device,source_path)
        (return_code, output) = doCMD(cmd)
    return action_status
def uninstPKGs():
    """adb-uninstall every embedding test package, then remove resources.

    Package names are derived from apk file names:
    "<prefix>_<suffix>.apk" -> org.xwalk.embedding.test.<suffix>.
    Returns False if any uninstall reports Failure.
    """
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        for file in files:
            if file.endswith(".apk"):
                # slice out the part between the first "_" and the first "."
                index_start = str(file).index("_")
                index_end = str(file).index(".")
                cmd = "%s -s %s uninstall org.xwalk.embedding.test.%s" % (
                    ADB_CMD, PARAMETERS.device, str(file)[index_start + 1: index_end])
                (return_code, output) = doCMD(cmd)
                for line in output:
                    # adb prints "Failure [...]" on a failed uninstall
                    if "Failure" in line:
                        action_status = False
                        break
    # Only clean up pushed resources when every uninstall succeeded.
    if action_status:
        uninstResources()
    return action_status
def instPKGs():
    """adb-install every .apk under the script directory, then push resources.

    Returns False if any install reports Failure.
    """
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        for file in files:
            if file.endswith(".apk"):
                cmd = "%s -s %s install %s" % (ADB_CMD,
                    PARAMETERS.device, os.path.join(root, file))
                (return_code, output) = doCMD(cmd)
                for line in output:
                    # adb prints "Failure [...]" on a failed install
                    if "Failure" in line:
                        action_status = False
                        break
    # Only push resources when every install succeeded.
    if action_status:
        instResources()
    return action_status
def main():
    """Parse options, auto-detect a device if needed, then (un)install."""
    try:
        usage = "usage: inst.py -i"
        opts_parser = OptionParser(usage=usage)
        opts_parser.add_option(
            "-s", dest="device", action="store", help="Specify device")
        opts_parser.add_option(
            "-i", dest="binstpkg", action="store_true", help="Install package")
        opts_parser.add_option(
            "-u", dest="buninstpkg", action="store_true", help="Uninstall package")
        global PARAMETERS
        (PARAMETERS, args) = opts_parser.parse_args()
    except Exception as e:
        print "Got wrong option: %s, exit ..." % e
        sys.exit(1)

    if not PARAMETERS.device:
        # No -s given: pick the first device listed by "adb devices".
        (return_code, output) = doCMD("adb devices")
        for line in output:
            if str.find(line, "\tdevice") != -1:
                PARAMETERS.device = line.split("\t")[0]
                break

    if not PARAMETERS.device:
        print "No device found"
        sys.exit(1)

    if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
        print "-i and -u are conflict"
        sys.exit(1)

    if PARAMETERS.buninstpkg:
        if not uninstPKGs():
            sys.exit(1)
    else:
        # Install is the default action (covers -i and no flag at all).
        if not instPKGs():
            sys.exit(1)


if __name__ == "__main__":
    main()
    sys.exit(0)
|
crosswalk-project/crosswalk-test-suite
|
embeddingapi/embedding-api-android-tests/inst.apk.py
|
Python
|
bsd-3-clause
| 3,916 | 0.001277 |
def run():
    """Launch the fmd application on the URI given on the command line,
    falling back to the current working directory when none is given."""
    import sys, os
    if len(sys.argv) > 1:
        target = sys.argv[1]
    else:
        target = os.getcwd()

    import gtk
    from .app import App
    from uxie.utils import idle

    application = App()
    idle(application.open, target)
    gtk.main()
|
baverman/fmd
|
fmd/run.py
|
Python
|
mit
| 262 | 0.007634 |
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2014-2016 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilentBaseESGD import *
class agilentE4431B(agilentBaseESGD):
    "Agilent E4431B ESG-D IVI RF signal generator driver"

    def __init__(self, *args, **kwargs):
        # Only set the instrument ID if a subclass has not already chosen one.
        self.__dict__.setdefault('_instrument_id', 'ESG-D4000B')

        super(agilentE4431B, self).__init__(*args, **kwargs)

        # Output frequency range of this model: 250 kHz to 2 GHz.
        self._frequency_low = 250e3
        self._frequency_high = 2e9
|
Diti24/python-ivi
|
ivi/agilent/agilentE4431B.py
|
Python
|
mit
| 1,495 | 0.001338 |
from django.contrib import admin
from .models import User
class UserAdmin(admin.ModelAdmin):
    """Admin configuration for the custom ``User`` model."""

    # Columns shown in the admin change-list view.
    list_display = ('username', 'email', 'is_active', 'is_staff', 'validated')


admin.site.register(User, UserAdmin)
gitaarik/jazzchords
|
apps/users/admin.py
|
Python
|
gpl-3.0
| 213 | 0 |
# test slices; only 2 argument version supported by Micro Python at the moment
x = list(range(10))

# Assignment

# replace a two-element slice with more, fewer, or zero elements
l = list(x)
l[1:3] = [10, 20]
print(l)

l = list(x)
l[1:3] = [10]
print(l)

l = list(x)
l[1:3] = []
print(l)

l = list(x)
del l[1:3]
print(l)

# open-start slices
l = list(x)
l[:3] = [10, 20]
print(l)

l = list(x)
l[:3] = []
print(l)

l = list(x)
del l[:3]
print(l)

# negative end index
l = list(x)
l[:-3] = [10, 20]
print(l)

l = list(x)
l[:-3] = []
print(l)

l = list(x)
del l[:-3]
print(l)

# assign a tuple
l = [1, 2, 3]
l[0:1] = (10, 11, 12)
print(l)

# RHS of slice must be an iterable
try:
    [][0:1] = 123
except TypeError:
    print('TypeError')
|
hiway/micropython
|
tests/basics/list_slice_assign.py
|
Python
|
mit
| 622 | 0.017685 |
import test.support
# Skip tests if _multiprocessing wasn't built.
test.support.import_module('_multiprocessing')
# Skip tests if sem_open implementation is broken.
test.support.import_module('multiprocessing.synchronize')
# import threading after _multiprocessing to raise a more revelant error
# message: "No module named _multiprocessing". _multiprocessing is not compiled
# without thread support.
test.support.import_module('threading')
import threading
import time
import unittest
from concurrent import futures
from concurrent.futures._base import (
PENDING, RUNNING, CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED, Future)
import concurrent.futures.process
def create_future(state=PENDING, exception=None, result=None):
    """Return a Future whose private state is forced to the given values."""
    future = Future()
    future._state = state
    future._exception = exception
    future._result = result
    return future
# Shared fixture futures, one per interesting state, reused across the tests.
PENDING_FUTURE = create_future(state=PENDING)
RUNNING_FUTURE = create_future(state=RUNNING)
CANCELLED_FUTURE = create_future(state=CANCELLED)
CANCELLED_AND_NOTIFIED_FUTURE = create_future(state=CANCELLED_AND_NOTIFIED)
EXCEPTION_FUTURE = create_future(state=FINISHED, exception=IOError())
SUCCESSFUL_FUTURE = create_future(state=FINISHED, result=42)
def mul(x, y):
    """Return the product of *x* and *y* (picklable helper for pool tests)."""
    product = x * y
    return product
def sleep_and_raise(t):
    """Sleep for *t* seconds, then raise a generic Exception."""
    time.sleep(t)
    raise Exception('this is an exception')
class ExecutorMixin:
    """Shared warm-up behaviour for the executor test mixins below."""

    # Number of warm-up tasks submitted by _prime_executor.
    worker_count = 5

    def _prime_executor(self):
        # Make sure that the executor is ready to do work before running the
        # tests. This should reduce the probability of timeouts in the tests.
        # NOTE(review): the local name shadows the module-level `futures`
        # import inside this method.
        futures = [self.executor.submit(time.sleep, 0.1)
                   for _ in range(self.worker_count)]

        for f in futures:
            f.result()
class ThreadPoolMixin(ExecutorMixin):
    """Provides a primed ThreadPoolExecutor as ``self.executor``."""

    def setUp(self):
        self.executor = futures.ThreadPoolExecutor(max_workers=5)
        self._prime_executor()

    def tearDown(self):
        self.executor.shutdown(wait=True)
class ProcessPoolMixin(ExecutorMixin):
    """Provides a primed ProcessPoolExecutor as ``self.executor``."""

    def setUp(self):
        try:
            self.executor = futures.ProcessPoolExecutor(max_workers=5)
        except NotImplementedError as e:
            # Platform has no working multiprocessing support.
            self.skipTest(str(e))
        self._prime_executor()

    def tearDown(self):
        self.executor.shutdown(wait=True)
class ExecutorShutdownTest(unittest.TestCase):
    """Shared shutdown behaviour: submit() must fail after shutdown()."""

    def test_run_after_shutdown(self):
        self.executor.shutdown()
        self.assertRaises(RuntimeError,
                          self.executor.submit,
                          pow, 2, 5)
class ThreadPoolShutdownTest(ThreadPoolMixin, ExecutorShutdownTest):
    def _prime_executor(self):
        # Deliberately skip priming: these tests count the worker threads
        # that priming would otherwise spawn.
        pass

    def test_threads_terminate(self):
        self.executor.submit(mul, 21, 2)
        self.executor.submit(mul, 6, 7)
        self.executor.submit(mul, 3, 14)
        # One worker thread was spawned lazily per submitted item.
        self.assertEqual(len(self.executor._threads), 3)
        self.executor.shutdown()
        for t in self.executor._threads:
            t.join()

    def test_context_manager_shutdown(self):
        # Leaving the with-block must shut the pool down and join workers.
        with futures.ThreadPoolExecutor(max_workers=5) as e:
            executor = e
            self.assertEqual(list(e.map(abs, range(-5, 5))),
                             [5, 4, 3, 2, 1, 0, 1, 2, 3, 4])

        for t in executor._threads:
            t.join()

    def test_del_shutdown(self):
        # Dropping the last reference must shut the pool down implicitly.
        executor = futures.ThreadPoolExecutor(max_workers=5)
        executor.map(abs, range(-5, 5))
        threads = executor._threads
        del executor

        for t in threads:
            t.join()
class ProcessPoolShutdownTest(ProcessPoolMixin, ExecutorShutdownTest):
    def _prime_executor(self):
        # Deliberately skip priming: these tests inspect the worker
        # processes directly.
        pass

    def test_processes_terminate(self):
        self.executor.submit(mul, 21, 2)
        self.executor.submit(mul, 6, 7)
        self.executor.submit(mul, 3, 14)
        # The process pool spawns all max_workers processes up front.
        self.assertEqual(len(self.executor._processes), 5)
        processes = self.executor._processes
        self.executor.shutdown()

        for p in processes:
            p.join()

    def test_context_manager_shutdown(self):
        # Leaving the with-block must shut the pool down and join workers.
        with futures.ProcessPoolExecutor(max_workers=5) as e:
            processes = e._processes
            self.assertEqual(list(e.map(abs, range(-5, 5))),
                             [5, 4, 3, 2, 1, 0, 1, 2, 3, 4])

        for p in processes:
            p.join()

    def test_del_shutdown(self):
        # Dropping the last reference must shut the pool down implicitly.
        executor = futures.ProcessPoolExecutor(max_workers=5)
        list(executor.map(abs, range(-5, 5)))
        queue_management_thread = executor._queue_management_thread
        processes = executor._processes
        del executor

        queue_management_thread.join()
        for p in processes:
            p.join()
class WaitTests(unittest.TestCase):
    """Tests for futures.wait() under each return_when policy.

    NOTE: timing-based -- the sleeps assume fast tasks finish well before
    the slow (5-10 s) ones.
    """

    def test_first_completed(self):
        future1 = self.executor.submit(mul, 21, 2)
        future2 = self.executor.submit(time.sleep, 5)

        done, not_done = futures.wait(
                [CANCELLED_FUTURE, future1, future2],
                return_when=futures.FIRST_COMPLETED)

        self.assertEqual(set([future1]), done)
        self.assertEqual(set([CANCELLED_FUTURE, future2]), not_done)

    def test_first_completed_some_already_completed(self):
        future1 = self.executor.submit(time.sleep, 2)

        # Already-finished futures satisfy FIRST_COMPLETED immediately.
        finished, pending = futures.wait(
                 [CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE, future1],
                 return_when=futures.FIRST_COMPLETED)

        self.assertEqual(
                set([CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE]),
                finished)
        self.assertEqual(set([future1]), pending)

    def test_first_exception(self):
        future1 = self.executor.submit(mul, 2, 21)
        future2 = self.executor.submit(sleep_and_raise, 5)
        future3 = self.executor.submit(time.sleep, 10)

        finished, pending = futures.wait(
                [future1, future2, future3],
                return_when=futures.FIRST_EXCEPTION)

        self.assertEqual(set([future1, future2]), finished)
        self.assertEqual(set([future3]), pending)

    def test_first_exception_some_already_complete(self):
        future1 = self.executor.submit(divmod, 21, 0)
        future2 = self.executor.submit(time.sleep, 5)

        finished, pending = futures.wait(
                [SUCCESSFUL_FUTURE,
                 CANCELLED_FUTURE,
                 CANCELLED_AND_NOTIFIED_FUTURE,
                 future1, future2],
                return_when=futures.FIRST_EXCEPTION)

        self.assertEqual(set([SUCCESSFUL_FUTURE,
                              CANCELLED_AND_NOTIFIED_FUTURE,
                              future1]), finished)
        self.assertEqual(set([CANCELLED_FUTURE, future2]), pending)

    def test_first_exception_one_already_failed(self):
        future1 = self.executor.submit(time.sleep, 2)

        # A pre-failed future satisfies FIRST_EXCEPTION immediately.
        finished, pending = futures.wait(
                 [EXCEPTION_FUTURE, future1],
                 return_when=futures.FIRST_EXCEPTION)

        self.assertEqual(set([EXCEPTION_FUTURE]), finished)
        self.assertEqual(set([future1]), pending)

    def test_all_completed(self):
        future1 = self.executor.submit(divmod, 2, 0)
        future2 = self.executor.submit(mul, 2, 21)

        finished, pending = futures.wait(
                [SUCCESSFUL_FUTURE,
                 CANCELLED_AND_NOTIFIED_FUTURE,
                 EXCEPTION_FUTURE,
                 future1,
                 future2],
                return_when=futures.ALL_COMPLETED)

        self.assertEqual(set([SUCCESSFUL_FUTURE,
                              CANCELLED_AND_NOTIFIED_FUTURE,
                              EXCEPTION_FUTURE,
                              future1,
                              future2]), finished)
        self.assertEqual(set(), pending)

    def test_timeout(self):
        future1 = self.executor.submit(mul, 6, 7)
        future2 = self.executor.submit(time.sleep, 10)

        # The 10-second sleep outlives the 5-second timeout and stays pending.
        finished, pending = futures.wait(
                [CANCELLED_AND_NOTIFIED_FUTURE,
                 EXCEPTION_FUTURE,
                 SUCCESSFUL_FUTURE,
                 future1, future2],
                timeout=5,
                return_when=futures.ALL_COMPLETED)

        self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
                              EXCEPTION_FUTURE,
                              SUCCESSFUL_FUTURE,
                              future1]), finished)
        self.assertEqual(set([future2]), pending)
# Concrete parameterisations of WaitTests for each executor type.
class ThreadPoolWaitTests(ThreadPoolMixin, WaitTests):
    pass


class ProcessPoolWaitTests(ProcessPoolMixin, WaitTests):
    pass
class AsCompletedTests(unittest.TestCase):
    """Tests for futures.as_completed()."""

    # TODO(brian@sweetapp.com): Should have a test with a non-zero timeout.
    def test_no_timeout(self):
        future1 = self.executor.submit(mul, 2, 21)
        future2 = self.executor.submit(mul, 7, 6)

        completed = set(futures.as_completed(
                [CANCELLED_AND_NOTIFIED_FUTURE,
                 EXCEPTION_FUTURE,
                 SUCCESSFUL_FUTURE,
                 future1, future2]))
        self.assertEqual(set(
                [CANCELLED_AND_NOTIFIED_FUTURE,
                 EXCEPTION_FUTURE,
                 SUCCESSFUL_FUTURE,
                 future1, future2]),
                completed)

    def test_zero_timeout(self):
        future1 = self.executor.submit(time.sleep, 2)
        completed_futures = set()
        # With timeout=0 only the already-done fixtures are yielded before
        # TimeoutError is raised for the still-running future.
        try:
            for future in futures.as_completed(
                    [CANCELLED_AND_NOTIFIED_FUTURE,
                     EXCEPTION_FUTURE,
                     SUCCESSFUL_FUTURE,
                     future1],
                    timeout=0):
                completed_futures.add(future)
        except futures.TimeoutError:
            pass

        self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
                              EXCEPTION_FUTURE,
                              SUCCESSFUL_FUTURE]),
                         completed_futures)
# Concrete parameterisations of AsCompletedTests for each executor type.
class ThreadPoolAsCompletedTests(ThreadPoolMixin, AsCompletedTests):
    pass


class ProcessPoolAsCompletedTests(ProcessPoolMixin, AsCompletedTests):
    pass
class ExecutorTest(unittest.TestCase):
    """Tests for Executor.submit() and Executor.map()."""

    # Executor.shutdown() and context manager usage is tested by
    # ExecutorShutdownTest.
    def test_submit(self):
        future = self.executor.submit(pow, 2, 8)
        self.assertEqual(256, future.result())

    def test_submit_keyword(self):
        future = self.executor.submit(mul, 2, y=8)
        self.assertEqual(16, future.result())

    def test_map(self):
        self.assertEqual(
                list(self.executor.map(pow, range(10), range(10))),
                list(map(pow, range(10), range(10))))

    def test_map_exception(self):
        # The worker's exception is re-raised when its result is consumed.
        i = self.executor.map(divmod, [1, 1, 1, 1], [2, 3, 0, 5])
        self.assertEqual(i.__next__(), (0, 1))
        self.assertEqual(i.__next__(), (0, 1))
        self.assertRaises(ZeroDivisionError, i.__next__)

    def test_map_timeout(self):
        results = []
        # The third (10-second) task outlives the 5-second timeout.
        try:
            for i in self.executor.map(time.sleep,
                                       [0, 0, 10],
                                       timeout=5):
                results.append(i)
        except futures.TimeoutError:
            pass
        else:
            self.fail('expected TimeoutError')

        self.assertEqual([None, None], results)
# Concrete parameterisations of ExecutorTest for each executor type.
class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest):
    pass


class ProcessPoolExecutorTest(ProcessPoolMixin, ExecutorTest):
    pass
class FutureTests(unittest.TestCase):
def test_done_callback_with_result(self):
    """A callback added before completion sees the result once set."""
    callback_result = None
    def fn(callback_future):
        nonlocal callback_result
        callback_result = callback_future.result()

    f = Future()
    f.add_done_callback(fn)
    f.set_result(5)
    self.assertEqual(5, callback_result)

def test_done_callback_with_exception(self):
    """A callback added before failure sees the exception once set."""
    callback_exception = None
    def fn(callback_future):
        nonlocal callback_exception
        callback_exception = callback_future.exception()

    f = Future()
    f.add_done_callback(fn)
    f.set_exception(Exception('test'))
    self.assertEqual(('test',), callback_exception.args)

def test_done_callback_with_cancel(self):
    """Cancelling a pending future also fires its done callbacks."""
    was_cancelled = None
    def fn(callback_future):
        nonlocal was_cancelled
        was_cancelled = callback_future.cancelled()

    f = Future()
    f.add_done_callback(fn)
    self.assertTrue(f.cancel())
    self.assertTrue(was_cancelled)

def test_done_callback_raises(self):
    """A raising callback is logged to stderr and later callbacks still run."""
    with test.support.captured_stderr() as stderr:
        raising_was_called = False
        fn_was_called = False

        def raising_fn(callback_future):
            nonlocal raising_was_called
            raising_was_called = True
            raise Exception('doh!')

        def fn(callback_future):
            nonlocal fn_was_called
            fn_was_called = True

        f = Future()
        f.add_done_callback(raising_fn)
        f.add_done_callback(fn)
        f.set_result(5)
        self.assertTrue(raising_was_called)
        self.assertTrue(fn_was_called)
        self.assertIn('Exception: doh!', stderr.getvalue())

def test_done_callback_already_successful(self):
    """Adding a callback to an already-finished future runs it immediately."""
    callback_result = None
    def fn(callback_future):
        nonlocal callback_result
        callback_result = callback_future.result()

    f = Future()
    f.set_result(5)
    f.add_done_callback(fn)
    self.assertEqual(5, callback_result)

def test_done_callback_already_failed(self):
    """Adding a callback to an already-failed future runs it immediately."""
    callback_exception = None
    def fn(callback_future):
        nonlocal callback_exception
        callback_exception = callback_future.exception()

    f = Future()
    f.set_exception(Exception('test'))
    f.add_done_callback(fn)
    self.assertEqual(('test',), callback_exception.args)

def test_done_callback_already_cancelled(self):
    """Adding a callback to an already-cancelled future runs it immediately."""
    was_cancelled = None
    def fn(callback_future):
        nonlocal was_cancelled
        was_cancelled = callback_future.cancelled()

    f = Future()
    self.assertTrue(f.cancel())
    f.add_done_callback(fn)
    self.assertTrue(was_cancelled)
def test_repr(self):
    """repr() shows the future's state (and raised/returned type)."""
    self.assertRegex(repr(PENDING_FUTURE),
                     '<Future at 0x[0-9a-f]+ state=pending>')
    self.assertRegex(repr(RUNNING_FUTURE),
                     '<Future at 0x[0-9a-f]+ state=running>')
    self.assertRegex(repr(CANCELLED_FUTURE),
                     '<Future at 0x[0-9a-f]+ state=cancelled>')
    self.assertRegex(repr(CANCELLED_AND_NOTIFIED_FUTURE),
                     '<Future at 0x[0-9a-f]+ state=cancelled>')
    self.assertRegex(
            repr(EXCEPTION_FUTURE),
            '<Future at 0x[0-9a-f]+ state=finished raised IOError>')
    self.assertRegex(
            repr(SUCCESSFUL_FUTURE),
            '<Future at 0x[0-9a-f]+ state=finished returned int>')

def test_cancel(self):
    """cancel() succeeds only for pending/cancelled futures."""
    f1 = create_future(state=PENDING)
    f2 = create_future(state=RUNNING)
    f3 = create_future(state=CANCELLED)
    f4 = create_future(state=CANCELLED_AND_NOTIFIED)
    f5 = create_future(state=FINISHED, exception=IOError())
    f6 = create_future(state=FINISHED, result=5)

    self.assertTrue(f1.cancel())
    self.assertEqual(f1._state, CANCELLED)

    # A running future cannot be cancelled.
    self.assertFalse(f2.cancel())
    self.assertEqual(f2._state, RUNNING)

    # Cancelling an already-cancelled future is a no-op success.
    self.assertTrue(f3.cancel())
    self.assertEqual(f3._state, CANCELLED)

    self.assertTrue(f4.cancel())
    self.assertEqual(f4._state, CANCELLED_AND_NOTIFIED)

    # Finished futures (failed or successful) cannot be cancelled.
    self.assertFalse(f5.cancel())
    self.assertEqual(f5._state, FINISHED)

    self.assertFalse(f6.cancel())
    self.assertEqual(f6._state, FINISHED)

def test_cancelled(self):
    self.assertFalse(PENDING_FUTURE.cancelled())
    self.assertFalse(RUNNING_FUTURE.cancelled())
    self.assertTrue(CANCELLED_FUTURE.cancelled())
    self.assertTrue(CANCELLED_AND_NOTIFIED_FUTURE.cancelled())
    self.assertFalse(EXCEPTION_FUTURE.cancelled())
    self.assertFalse(SUCCESSFUL_FUTURE.cancelled())

def test_done(self):
    # "done" covers cancelled as well as finished futures.
    self.assertFalse(PENDING_FUTURE.done())
    self.assertFalse(RUNNING_FUTURE.done())
    self.assertTrue(CANCELLED_FUTURE.done())
    self.assertTrue(CANCELLED_AND_NOTIFIED_FUTURE.done())
    self.assertTrue(EXCEPTION_FUTURE.done())
    self.assertTrue(SUCCESSFUL_FUTURE.done())

def test_running(self):
    self.assertFalse(PENDING_FUTURE.running())
    self.assertTrue(RUNNING_FUTURE.running())
    self.assertFalse(CANCELLED_FUTURE.running())
    self.assertFalse(CANCELLED_AND_NOTIFIED_FUTURE.running())
    self.assertFalse(EXCEPTION_FUTURE.running())
    self.assertFalse(SUCCESSFUL_FUTURE.running())
def test_result_with_timeout(self):
self.assertRaises(futures.TimeoutError,
PENDING_FUTURE.result, timeout=0)
self.assertRaises(futures.TimeoutError,
RUNNING_FUTURE.result, timeout=0)
self.assertRaises(futures.CancelledError,
CANCELLED_FUTURE.result, timeout=0)
self.assertRaises(futures.CancelledError,
CANCELLED_AND_NOTIFIED_FUTURE.result, timeout=0)
self.assertRaises(IOError, EXCEPTION_FUTURE.result, timeout=0)
self.assertEqual(SUCCESSFUL_FUTURE.result(timeout=0), 42)
def test_result_with_success(self):
# TODO(brian@sweetapp.com): This test is timing dependant.
def notification():
# Wait until the main thread is waiting for the result.
time.sleep(1)
f1.set_result(42)
f1 = create_future(state=PENDING)
t = threading.Thread(target=notification)
t.start()
self.assertEqual(f1.result(timeout=5), 42)
def test_result_with_cancel(self):
# TODO(brian@sweetapp.com): This test is timing dependant.
def notification():
# Wait until the main thread is waiting for the result.
time.sleep(1)
f1.cancel()
f1 = create_future(state=PENDING)
t = threading.Thread(target=notification)
t.start()
self.assertRaises(futures.CancelledError, f1.result, timeout=5)
def test_exception_with_timeout(self):
self.assertRaises(futures.TimeoutError,
PENDING_FUTURE.exception, timeout=0)
self.assertRaises(futures.TimeoutError,
RUNNING_FUTURE.exception, timeout=0)
self.assertRaises(futures.CancelledError,
CANCELLED_FUTURE.exception, timeout=0)
self.assertRaises(futures.CancelledError,
CANCELLED_AND_NOTIFIED_FUTURE.exception, timeout=0)
self.assertTrue(isinstance(EXCEPTION_FUTURE.exception(timeout=0),
IOError))
self.assertEqual(SUCCESSFUL_FUTURE.exception(timeout=0), None)
def test_exception_with_success(self):
def notification():
# Wait until the main thread is waiting for the exception.
time.sleep(1)
with f1._condition:
f1._state = FINISHED
f1._exception = IOError()
f1._condition.notify_all()
f1 = create_future(state=PENDING)
t = threading.Thread(target=notification)
t.start()
self.assertTrue(isinstance(f1.exception(timeout=5), IOError))
def test_main():
    # Run every executor/wait/as_completed/future test class in one pass
    # using the CPython regression-test helper.
    test.support.run_unittest(ProcessPoolExecutorTest,
                              ThreadPoolExecutorTest,
                              ProcessPoolWaitTests,
                              ThreadPoolWaitTests,
                              ProcessPoolAsCompletedTests,
                              ThreadPoolAsCompletedTests,
                              FutureTests,
                              ProcessPoolShutdownTest,
                              ThreadPoolShutdownTest)
if __name__ == "__main__":
    test_main()  # allow running this test module directly
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-3.2/Lib/test/test_concurrent_futures.py
|
Python
|
mit
| 20,163 | 0.000893 |
from re import compile, match
# Four dot-separated octets, each 0-255 with no leading zeros; anchored at
# the end with "$" (re.match anchors the start implicitly).
REGEX = compile(r'((25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)\.){3}'
                r'(25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)$')


def is_valid_IP(strng):
    """Return True when *strng* is a valid dotted-quad IPv4 address.

    The mixedCase name is imposed by the CodeWars kata signature.
    """
    return match(REGEX, strng) is not None
|
the-zebulan/CodeWars
|
katas/kyu_4/ip_validation.py
|
Python
|
mit
| 276 | 0 |
# -*- coding: utf-8 -*-
r"""
Graph-directed iterated function system (GIFS)
See [JK14]_ or [BV20]_ or
- http://larryriddle.agnesscott.org/ifs/ifs.htm
- https://encyclopediaofmath.org/wiki/Iterated_function_system
We allow the functions to be contracting or not. When the functions are
inflations, it allows to represent inflation rules and stone inflations as
in Definition 5.17 of [BG13]_.
EXAMPLES:
The Cantor set::
sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
sage: F = AffineGroup(1, QQ)
sage: f1 = F.linear(1/3); f1
x |-> [1/3] x + [0]
sage: f2 = F(1/3, vector([2/3])); f2
x |-> [1/3] x + [2/3]
sage: cantor_IFS = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
sage: cantor_IFS
GIFS defined by 2 maps on
Vector space of dimension 1 over Rational Field
Fibonacci substitution::
sage: m = WordMorphism('a->ab,b->a')
sage: fibo_ifs = GIFS.from_one_dimensional_substitution(m)
sage: fibo_ifs
GIFS defined by 3 maps on Vector space of dimension 1 over
Number Field in root with defining polynomial x^2 - x - 1 with
root = 1.618033988749895?
Its element-wise Galois conjugate is a contracting IFS::
sage: fibo_ifs.galois_conjugate().pp()
GIFS defined by 3 maps on Vector space of dimension 1 over Number Field in root with defining polynomial x^2 - x - 1 with root = 1.618033988749895?
edge (0,0):
x |-> [-root + 1] x + [0]
edge (1,0):
x |-> [-root + 1] x + [1]
edge (0,1):
x |-> [-root + 1] x + [0]
Direct Product of 2 Fibonacci::
sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
sage: from slabbe import Substitution2d
sage: d = {0:[[3]], 1:[[3],[2]], 2:[[3,1]], 3:[[3,1],[2,0]]}
sage: s = Substitution2d(d)
sage: fibo2_ifs = GIFS.from_two_dimensional_substitution(s)
sage: fibo2_ifs
GIFS defined by 9 maps on Vector space of dimension 2 over
Number Field in rootX with defining polynomial x^2 - x - 1 with
rootX = 1.618033988749895?
REFERENCES:
.. [JK14] Jolivet, Timo, et Jarkko Kari. « Undecidable Properties of Self-Affine
Sets and Multi-Tape Automata ». In Mathematical Foundations of Computer
Science 2014, édité par Erzsébet Csuhaj-Varjú, Martin Dietzfelbinger,
et Zoltán Ésik, 8634:352‑64. Berlin, Heidelberg: Springer Berlin
Heidelberg, 2014. https://doi.org/10.1007/978-3-662-44522-8_30.
.. [BV20] Michael Barnsley, Andrew Vince. Tilings from Graph Directed
Iterated Function Systems. Geometriae Dedicata, 9 août 2020.
https://doi.org/10.1007/s10711-020-00560-4
.. [BG13] Michael Baake, Uwe Grimm. Aperiodic order. Vol. 1. Vol. 149.
Encyclopedia of Mathematics and its Applications. Cambridge University
Press, Cambridge, 2013. http://www.ams.org/mathscinet-getitem?mr=3136260.
.. [BFG19] Michael Baake, Natalie Priebe Frank, Uwe Grimm. Three variations on a
theme by Fibonacci. http://arxiv.org/abs/1910.00988
"""
#*****************************************************************************
# Copyright (C) 2020 Sebastien Labbe <slabqc@gmail.com>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from __future__ import absolute_import, print_function
import itertools
from sage.modules.free_module_element import vector
class GraphDirectedIteratedFunctionSystem(object):
    r"""
    Graph-directed iterated function system (GIFS).

    INPUT:

    - ``module`` -- the module on which the functions are defined
    - ``edges`` -- list, list of triples (u,v,f) where f is a function
      associated to the directed edge (u,v).

    EXAMPLES:

    The Cantor set::

        sage: F = AffineGroup(1, QQ)
        sage: f1 = F.linear(1/3)
        sage: f2 = F(1/3, vector([2/3]))
        sage: f1
        x |-> [1/3] x + [0]
        sage: f2
        x |-> [1/3] x + [2/3]
        sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
        sage: GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
        GIFS defined by 2 maps on
        Vector space of dimension 1 over Rational Field
    """
    def __init__(self, module, edges):
        r"""
        See class documentation.

        EXAMPLES::

            sage: F = AffineGroup(1, QQ)
            sage: f1 = F.linear(1/3)
            sage: f2 = F(1/3, vector([2/3]))
            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
        """
        # Module (or vector space) the maps act on.
        self._module = module
        # List of (tail_vertex, head_vertex, map) triples.
        self._edges = edges
    def __repr__(self):
        r"""
        Return a short string representation of this GIFS.

        EXAMPLES::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: F = AffineGroup(1, QQ)
            sage: f1 = F.linear(1/3)
            sage: f2 = F(1/3, vector([2/3]))
            sage: GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
            GIFS defined by 2 maps on
            Vector space of dimension 1 over Rational Field
        """
        return ("GIFS defined by {} maps on {}".format(len(self._edges),
            self._module))
    def pp(self):
        r"""
        Prints a nicer and complete string representation
        (one entry per edge, including the map itself).

        EXAMPLES::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: F = AffineGroup(1, QQ)
            sage: ifs = f1 = F.linear(1/3)
            sage: f2 = F(1/3, vector([2/3]))
            sage: ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
            sage: ifs.pp()
            GIFS defined by 2 maps on Vector space of dimension 1 over Rational Field
            edge (0,0):
            x |-> [1/3] x + [0]
            edge (0,0):
            x |-> [1/3] x + [2/3]
        """
        print("GIFS defined by {} maps on {}".format(len(self._edges),
            self._module))
        for (a,b,f) in self._edges:
            print("edge ({},{}):".format(a,b))
            print(f)
    @classmethod
    def from_one_dimensional_substitution(cls, m):
        r"""
        Return the GIFS defined by a unidimensional primitive
        substitution.

        INPUT:

        - ``m`` -- WordMorphism, primitive substitution

        EXAMPLES::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: m = WordMorphism('a->ab,b->a')
            sage: g = GIFS.from_one_dimensional_substitution(m)
            sage: g
            GIFS defined by 3 maps on
            Vector space of dimension 1 over
            Number Field in root with defining polynomial x^2 - x - 1 with
            root = 1.618033988749895?
        """
        from slabbe.matrices import perron_left_eigenvector_in_number_field
        M = m.incidence_matrix()
        # The Perron eigenvalue is the inflation multiplier; the left
        # eigenvector gives the natural tile lengths.
        root, perron_left = perron_left_eigenvector_in_number_field(M, 'root')
        K = root.parent()
        alphabet = m.domain().alphabet()
        size = alphabet.cardinality()
        module = K**1
        # Displacement matrix: (i,j) -> list of translations from letter i
        # to each occurrence of letter j in m(a_i).
        d = {(i,j):[] for i,j in itertools.product(range(size),repeat=2)}
        for i,a in enumerate(alphabet):
            m_a = m(a)
            pos = module.zero()
            for b in m_a:
                j = alphabet.index(b)
                d[(i,j)].append(pos)
                pos += module([perron_left[j]])
        return cls.from_inflation_rule(module, root, d)
    @classmethod
    def from_two_dimensional_substitution(cls, s):
        r"""
        Return the GIFS defined by a 2-dimensional primitive
        substitution.

        The marker point associated to each rectangular tile is assumed to
        be in the lower left corner.

        INPUT:

        - ``s`` -- Substitution2d, primitive substitution

        EXAMPLES::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: from slabbe import Substitution2d
            sage: d = {0:[[3]], 1:[[3],[2]], 2:[[3,1]], 3:[[3,1],[2,0]]}
            sage: s = Substitution2d(d)
            sage: ifs = GIFS.from_two_dimensional_substitution(s)
            sage: ifs.pp()
            GIFS defined by 9 maps on Vector space of dimension 2 over
            Number Field in rootX with defining polynomial x^2 - x - 1 with
            rootX = 1.618033988749895?
            edge (0,3):
                  [rootX     0]     [0]
            x |-> [    0 rootX] x + [0]
            edge (1,3):
                  [rootX     0]     [0]
            x |-> [    0 rootX] x + [0]
            edge (1,2):
                  [rootX     0]     [rootX]
            x |-> [    0 rootX] x + [    0]
            edge (2,3):
                  [rootX     0]     [0]
            x |-> [    0 rootX] x + [0]
            edge (2,1):
                  [rootX     0]     [    0]
            x |-> [    0 rootX] x + [rootX]
            edge (3,3):
                  [rootX     0]     [0]
            x |-> [    0 rootX] x + [0]
            edge (3,1):
                  [rootX     0]     [    0]
            x |-> [    0 rootX] x + [rootX]
            edge (3,2):
                  [rootX     0]     [rootX]
            x |-> [    0 rootX] x + [    0]
            edge (3,0):
                  [rootX     0]     [rootX]
            x |-> [    0 rootX] x + [rootX]
        """
        from sage.matrix.constructor import matrix
        from sage.groups.affine_gps.affine_group import AffineGroup
        rootX, rootY, shapes = s.stone_inflation_shapes()
        KX = rootX.parent()
        KY = rootY.parent()
        inflation_matrix = matrix.diagonal([rootX, rootY])
        base_ring = inflation_matrix.base_ring()
        F = AffineGroup(2, base_ring)
        vector_space = F.vector_space()
        alphabet = s.domain_alphabet()
        edges = []
        for a in alphabet:
            s_a = s([[a]])
            # compute the X positions of marker points
            lower_word = [col[0] for col in s_a]
            X_pos = []
            pos = base_ring.zero()
            for b in lower_word:
                X_pos.append(pos)
                pos += shapes[b][0]
            # compute the Y positions of marker points
            left_word = s_a[0]
            Y_pos = []
            pos = base_ring.zero()
            for b in left_word:
                Y_pos.append(pos)
                pos += shapes[b][1]
            # compute the translations
            for i,col in enumerate(s_a):
                for j,b in enumerate(col):
                    translation = (X_pos[i], Y_pos[j])
                    f = F(inflation_matrix, translation)
                    edge = (a, b, f)
                    edges.append(edge)
        return GraphDirectedIteratedFunctionSystem(vector_space, edges)
    @classmethod
    def from_inflation_rule(cls, module, multiplier, displacement_matrix):
        r"""
        Return the GIFS defined by an inflation rule.

        We follow the convention used in [BFG19]_ for the displacement
        matrix.

        INPUT:

        - ``module`` -- module or vector space
        - ``multiplier`` -- real number, inflation multiplier
        - ``displacement_matrix`` -- dict, the displacement matrix, where
          each key (i,j) is mapped to a list of translations

        EXAMPLES:

        This examples is taken from [BFG19]_::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: z = polygen(QQ, 'z')
            sage: K = NumberField(z**2-z-1, 'tau', embedding=RR(1.6))
            sage: tau = K.gen()
            sage: import itertools
            sage: d = {(i,j):[] for i,j in itertools.product(range(4),repeat=2)}
            sage: d[(0,3)] = [vector(K, (tau,tau))]
            sage: d[(1,2)] = d[(1,3)] = [vector(K, (0,tau))]
            sage: d[(2,1)] = d[(2,3)] = [vector(K, (tau,0))]
            sage: d[(3,0)] = d[(3,1)] = d[(3,2)] = d[(3,3)] = [vector(K, (0,0))]
            sage: GIFS.from_inflation_rule(K^2, tau, d)
            GIFS defined by 9 maps on Vector space of dimension 2 over
            Number Field in tau with defining polynomial z^2 - z - 1
            with tau = 1.618033988749895?
        """
        from sage.groups.affine_gps.affine_group import AffineGroup
        from sage.matrix.special import identity_matrix
        dimension = module.dimension()
        ring = module.base_ring()
        F = AffineGroup(dimension, ring)
        M = multiplier * identity_matrix(dimension)
        # NOTE(review): the displacement-matrix key (i,j) becomes the edge
        # (j,i): the edge orientation is the transpose of the matrix
        # indexing, following the [BFG19] convention — confirm before
        # changing.
        edges = [(j,i,F(M, translation))
                for (i,j),L in displacement_matrix.items()
                for translation in L]
        return GraphDirectedIteratedFunctionSystem(module, edges)
    def to_digraph(self):
        r"""
        Return the underlying directed graph (maps are dropped).

        EXAMPLES::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: F = AffineGroup(1, QQ)
            sage: f1 = F.linear(1/3)
            sage: f2 = F(1/3, vector([2/3]))
            sage: cantor_ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
            sage: cantor_ifs.to_digraph()
            Looped multi-digraph on 1 vertex
        """
        from sage.graphs.digraph import DiGraph
        edges = [(u,v) for (u,v,f) in self._edges]
        return DiGraph(edges, format='list_of_edges', loops=True,
                multiedges=True)
    def vertices(self):
        r"""
        Return the sorted list of vertices appearing in some edge.

        EXAMPLES::

            sage: F = AffineGroup(1, QQ)
            sage: f1 = F.linear(1/3)
            sage: f2 = F(1/3, vector([2/3]))
            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: cantor_ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
            sage: cantor_ifs.vertices()
            [0]
        """
        U = [u for (u,v,f) in self._edges]
        V = [v for (u,v,f) in self._edges]
        return sorted(set(U)|set(V))
    def num_vertices(self):
        r"""
        Return the number of distinct vertices.

        EXAMPLES::

            sage: F = AffineGroup(1, QQ)
            sage: f1 = F.linear(1/3)
            sage: f2 = F(1/3, vector([2/3]))
            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: cantor_ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
            sage: cantor_ifs.num_vertices()
            1
        """
        return len(self.vertices())
    def galois_conjugate(self):
        r"""
        Return the element-wise Galois conjugate of this GIFS.

        INPUT:

        - ``self`` -- an Affine GIFS, defined on a ring where elements have
          a method ``.galois_conjugate`` (e.g., quadratic number field elements)

        EXAMPLES:

        Fibonacci substitution::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: m = WordMorphism('a->ab,b->a')
            sage: s = GIFS.from_one_dimensional_substitution(m)
            sage: s.galois_conjugate()
            GIFS defined by 3 maps on Vector space of dimension 1 over
            Number Field in root with defining polynomial x^2 - x - 1 with
            root = 1.618033988749895?

        Direct Product of 2 Fibonacci::

            sage: from slabbe import Substitution2d
            sage: d = {0:[[3]], 1:[[3],[2]], 2:[[3,1]], 3:[[3,1],[2,0]]}
            sage: s = Substitution2d(d)
            sage: ifs = GIFS.from_two_dimensional_substitution(s)
            sage: ifs.galois_conjugate()
            GIFS defined by 9 maps on Vector space of dimension 2 over
            Number Field in rootX with defining polynomial x^2 - x - 1 with
            rootX = 1.618033988749895?
        """
        # Delegates to the module-level helper, conjugating each edge map.
        edges = [(u,v,galois_conjugate(f)) for (u,v,f) in self._edges]
        return GraphDirectedIteratedFunctionSystem(self._module, edges)
    def __call__(self, S=None, n_iterations=1):
        r"""
        Return the image of the list of list of points.

        INPUT:

        - ``S`` -- list or dict, list of list of points or dictionary
          associating a list of points to each vertex. If a list is used,
          we assume the vertices are integers 0,1,...,n-1.
        - ``n_iterations`` -- integer (default: ``1``)

        EXAMPLES::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: F = AffineGroup(1, QQ)
            sage: f1 = F.linear(1/3)
            sage: f2 = F(1/3, vector([2/3]))
            sage: cantor_ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
            sage: cantor_ifs({0:[vector([0])]})
            {0: [(0), (2/3)]}
            sage: cantor_ifs(_)
            {0: [(0), (2/9), (2/3), (8/9)]}
            sage: cantor_ifs(_)
            {0: [(0), (2/27), (2/9), (8/27), (2/3), (20/27), (8/9), (26/27)]}
            sage: cantor_ifs(_)
            {0: [(0),
            (2/81),
            (2/27),
            (8/81),
            (2/9),
            (20/81),
            (8/27),
            (26/81),
            (2/3),
            (56/81),
            (20/27),
            (62/81),
            (8/9),
            (74/81),
            (26/27),
            (80/81)]}

        ::

            sage: cantor_ifs([[vector([0])]], 2)
            {0: [(0), (2/9), (2/3), (8/9)]}
            sage: cantor_ifs([[vector([0])]], 3)
            {0: [(0), (2/27), (2/9), (8/27), (2/3), (20/27), (8/9), (26/27)]}

        ::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: z = polygen(QQ, 'z')
            sage: K = NumberField(z**2-z-1, 'tau', embedding=RR(1.6))
            sage: tau = K.gen()
            sage: import itertools
            sage: d = {(i,j):[] for i,j in itertools.product(range(4),repeat=2)}
            sage: d[(0,3)] = [vector(K, (tau,tau))]
            sage: d[(1,2)] = d[(1,3)] = [vector(K, (0,tau))]
            sage: d[(2,1)] = d[(2,3)] = [vector(K, (tau,0))]
            sage: d[(3,0)] = d[(3,1)] = d[(3,2)] = d[(3,3)] = [vector(K, (0,0))]
            sage: ifs = GIFS.from_inflation_rule(K^2, tau, d)
            sage: ifs(n_iterations=1)
            {0: [], 1: [], 2: [], 3: [(0, 0)]}
            sage: ifs(n_iterations=2)
            {0: [(tau, tau)], 1: [(0, tau)], 2: [(tau, 0)], 3: [(0, 0)]}
            sage: ifs(n_iterations=3)
            {0: [(tau, tau)],
            1: [(tau + 1, tau), (0, tau)],
            2: [(tau, tau + 1), (tau, 0)],
            3: [(tau + 1, tau + 1), (0, tau + 1), (tau + 1, 0), (0, 0)]}
            sage: ifs(n_iterations=4)
            {0: [(3*tau + 1, 3*tau + 1), (tau, 3*tau + 1), (3*tau + 1, tau), (tau, tau)],
            1: [(tau + 1, 3*tau + 1),
            (tau + 1, tau),
            (2*tau + 1, 3*tau + 1),
            (0, 3*tau + 1),
            (2*tau + 1, tau),
            (0, tau)],
            2: [(3*tau + 1, tau + 1),
            (tau, tau + 1),
            (3*tau + 1, 2*tau + 1),
            (tau, 2*tau + 1),
            (3*tau + 1, 0),
            (tau, 0)],
            3: [(tau + 1, tau + 1),
            (2*tau + 1, tau + 1),
            (0, tau + 1),
            (tau + 1, 2*tau + 1),
            (tau + 1, 0),
            (2*tau + 1, 2*tau + 1),
            (0, 2*tau + 1),
            (2*tau + 1, 0),
            (0, 0)]}

        TESTS::

            sage: cantor_ifs([[vector([0])],[vector([0])]])
            Traceback (most recent call last):
            ...
            ValueError: size of input (=2) must match the number of vertices of this GIFS (=1)
        """
        # input S: default is the zero vector at vertex 0; a list is
        # converted to a dict keyed by vertex index.
        if S is None:
            zero = self._module.zero()
            S = {0:[zero]}
        elif isinstance(S, list):
            if not len(S) == self.num_vertices():
                raise ValueError("size of input (={}) must match the number of"
                        " vertices of this GIFS (={})".format(len(S),
                            self.num_vertices()))
            S = dict(enumerate(S))
        # one iteration: each vertex v collects the images, under the maps
        # of the edges (u,v,f), of the points currently at u.
        if n_iterations == 1:
            S_image = {}
            for v in self.vertices():
                Ev = [(u,v_,f) for (u,v_,f) in self._edges if v_ == v]
                S_image[v] = [f(p) for (u,_,f) in Ev for p in S.get(u,[])]
            return S_image
        # many iterations: apply the single-iteration case repeatedly.
        if n_iterations > 1:
            for _ in range(n_iterations):
                S = self(S, n_iterations=1)
            return S
        # invalid number of iterations
        if n_iterations < 1:
            raise ValueError('n_iterations(={}) must be larger or equal to'
                    ' 1'.format(n_iterations))
    def __mul__(self, other):
        r"""
        Return the multiplication of two GIFS (composition of maps along
        composable pairs of edges).

        INPUT:

        - ``other`` -- a GraphDirectedIteratedFunctionSystem

        EXAMPLES::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: F = AffineGroup(1, QQ)
            sage: f1 = F.linear(1/3)
            sage: f2 = F(1/3, vector([2/3]))
            sage: cantor_ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
            sage: cantor_ifs * cantor_ifs
            GIFS defined by 4 maps on Vector space of dimension 1 over
            Rational Field
        """
        if not isinstance(other, GraphDirectedIteratedFunctionSystem):
            raise TypeError('other (={}) is not a GIFS'.format(other))
        # Compose f*g over each pair of edges (u,v,f), (v,z,g) sharing the
        # middle vertex.
        edges = [(u,z,f*g) for (u,v,f) in self._edges
                           for (w,z,g) in other._edges
                           if v == w]
        return GraphDirectedIteratedFunctionSystem(self._module, edges)
    def __neg__(self):
        r"""
        Return GIFS defined by the negative `-` of each function.

        EXAMPLES::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: F = AffineGroup(1, QQ)
            sage: f1 = F.linear(1/3)
            sage: f2 = F(1/3, vector([2/3]))
            sage: cantor_ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
            sage: - cantor_ifs
            GIFS defined by 2 maps on Vector space of dimension 1 over
            Rational Field
        """
        try:
            edges = [(u,v,-f) for (u,v,f) in self._edges]
        except TypeError:
            # The maps may not support unary minus (e.g. affine group
            # elements): negate the linear part and translation explicitly.
            _,_,f = self._edges[0]
            F = f.parent()
            edges = [(u,v,F(-f.A(), -f.b())) for (u,v,f) in self._edges]
        return GraphDirectedIteratedFunctionSystem(self._module, edges)
    def plot(self, S=None, n_iterations=1, projection=None):
        r"""
        Return a graphic image of the IFS after few iterations.

        INPUT:

        - ``S`` -- list or dict, list of list of points or dictionary
          associating a list of points to each vertex. If a list is used,
          we assume the vertices are integers 0,1,...,n-1.
        - ``n_iterations`` -- integer (default: ``1``)
        - ``projection`` -- matrix (default: ``None``), projection matrix
          to 2-dimensional space

        OUTPUT:

        Graphics object

        EXAMPLES:

        The Cantor set::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: F = AffineGroup(1, QQ)
            sage: f1 = F.linear(1/3)
            sage: f2 = F(1/3, vector([2/3]))
            sage: cantor_ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
            sage: G = cantor_ifs.plot(n_iterations=7)

        Projection on the vertical y-axis instead::

            sage: G = cantor_ifs.plot(n_iterations=7, projection=matrix(2,[0,1]))

        The usual Fibonacci chain::

            sage: m = WordMorphism('a->ab,b->a')
            sage: ifs = GIFS.from_one_dimensional_substitution(m)
            sage: G = ifs.plot(n_iterations=10)

        and its contracting IFS::

            sage: G = ifs.galois_conjugate().plot(n_iterations=10)

        The direct product of two Fibonacci chains::

            sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
            sage: from slabbe import Substitution2d
            sage: d = {0:[[3]], 1:[[3],[2]], 2:[[3,1]], 3:[[3,1],[2,0]]}
            sage: s = Substitution2d(d)
            sage: ifs = GIFS.from_two_dimensional_substitution(s)
            sage: G = ifs.plot(n_iterations=7)

        This inflation rule is related to a contracting IFS whose unique
        solution is given in formula (4.5) of [BFG19]_::

            sage: G = ifs.galois_conjugate().plot(n_iterations=7)
        """
        from sage.matrix.constructor import matrix
        from sage.plot.colors import rainbow
        from sage.plot.graphics import Graphics
        from sage.plot.point import points
        from sage.misc.prandom import shuffle
        if self._module.dimension() == 1 and projection is None:
            # default projection on the x-axis
            projection = matrix([[1],[0]])
        elif self._module.dimension() != 2 and projection is None:
            raise ValueError('a projection matrix must be provided'
                    ' when the dimension of the GIFS (={}) is not'
                    ' 2'.format(self._module.dimension()))
        G = Graphics()
        # One random color per vertex (shuffled for visual separation).
        bow = rainbow(self.num_vertices())
        shuffle(bow)
        vertex_to_color = dict(zip(self.vertices(), bow))
        ifs = self(S=S, n_iterations=n_iterations)
        for v,P in ifs.items():
            # NOTE(review): a user-supplied projection is ignored when the
            # module is 2-dimensional — confirm this is intended.
            if not self._module.dimension() == 2:
                P = [projection*p for p in P]
            G += points(P, color=vertex_to_color[v], legend_label=str(v))
        return G
def galois_conjugate(f):
    r"""
    Return the element-wise Galois conjugate of an element of an affine
    group.

    INPUT:

    - ``f`` -- affine group element

    EXAMPLES::

        sage: from slabbe.graph_directed_IFS import galois_conjugate
        sage: z = polygen(QQ, 'z')
        sage: K = NumberField(z**2-z-1, 'phi', embedding=RR(1.6))
        sage: phi = K.gen()
        sage: F = AffineGroup(2, K)
        sage: f = F(phi*identity_matrix(2), (phi,0))
        sage: galois_conjugate(f)
              [-phi + 1        0]     [-phi + 1]
        x |-> [       0 -phi + 1] x + [       0]
    """
    from sage.matrix.constructor import matrix
    F = f.parent()
    # f.matrix() is the (dim+1)x(dim+1) augmented matrix holding both the
    # linear part and the translation; conjugate every entry.
    dim = F.degree() + 1
    M = matrix(dim,[a.galois_conjugate() for a in f.matrix().list()])
    return F(M)
|
seblabbe/slabbe
|
slabbe/graph_directed_IFS.py
|
Python
|
gpl-2.0
| 26,146 | 0.004171 |
import ddt
from analyticsclient.tests import (
APIListTestCase,
APIWithPostableIDsTestCase,
ClientTestCase
)
@ddt.ddt
class CourseSummariesTests(APIListTestCase, APIWithPostableIDsTestCase, ClientTestCase):
    """Exercise the course_summaries client endpoint with every supported
    query parameter kind (list-, string-, and integer-valued)."""
    endpoint = 'course_summaries'
    id_field = 'course_ids'
    # Parameters that accept a list of values.
    _LIST_PARAMS = frozenset([
        'course_ids',
        'availability',
        'pacing_type',
        'program_ids',
        'fields',
        'exclude',
    ])
    # Parameters that accept a single string value.
    _STRING_PARAMS = frozenset([
        'text_search',
        'order_by',
        'sort_order',
    ])
    # Parameters that accept an integer value.
    _INT_PARAMS = frozenset([
        'page',
        'page_size',
    ])
    _ALL_PARAMS = _LIST_PARAMS | _STRING_PARAMS | _INT_PARAMS
    other_params = _ALL_PARAMS
    # Test URL encoding (note: '+' is not handled right by httpretty, but it works in practice)
    _TEST_STRING = 'Aa1_-:/* '
    @ddt.data(
        (_LIST_PARAMS, ['a', 'b', 'c']),
        (_LIST_PARAMS, [_TEST_STRING]),
        (_LIST_PARAMS, []),
        (_STRING_PARAMS, _TEST_STRING),
        (_STRING_PARAMS, ''),
        (_INT_PARAMS, 1),
        (_INT_PARAMS, 0),
        (frozenset(), None),
    )
    @ddt.unpack
    def test_all_parameters(self, param_names, param_value):
        """Course summaries can be called with all parameters."""
        # Start from every parameter set to None, then override the ones
        # under test with the ddt-supplied value.
        params = {param_name: None for param_name in self._ALL_PARAMS}
        params.update({param_name: param_value for param_name in param_names})
        self.verify_query_params(**params)
|
Stanford-Online/edx-analytics-data-api-client
|
analyticsclient/tests/test_course_summaries.py
|
Python
|
apache-2.0
| 1,471 | 0.00136 |
from django.core.management.base import BaseCommand, CommandError
from django.core.management import call_command
from django.conf import settings
from django.db import connection
from django.db.models import Q, F
from contactnetwork.distances import *
from protein.models import ProteinFamily
import time
import scipy
class Command(BaseCommand):
    help = "Build distance representatives"

    def handle(self, *args, **options):
        self.receptor_representatives()

    def receptor_representatives(self):
        """Flag, for every receptor/state combination, the structure whose
        distances to the other structures of that combination are smallest
        (rank-by-vote fusion; ties broken by smallest summed distance)."""
        # "import scipy" alone does not load the stats submodule; import it
        # explicitly so scipy.stats.rankdata below cannot raise
        # AttributeError depending on what other modules happened to import.
        import scipy.stats
        print('Script to decide distance representative for a state/receptor combination. Lowest average distance to all other structures for the same receptor/state')
        structures = Structure.objects.all().prefetch_related(
            "pdb_code",
            "state",
            "protein_conformation__protein__parent__family")
        # Group PDB codes by "<receptor family name>_<state slug>".
        distinct_proteins = {}
        resolution_lookup = {}
        for s in structures:
            pdb = s.pdb_code.index
            resolution_lookup[pdb] = s.resolution
            state = s.state.slug
            name = s.protein_conformation.protein.parent.family.name
            key = '{}_{}'.format(name, state)
            distinct_proteins.setdefault(key, []).append(pdb)
        for conformation, pdbs in distinct_proteins.items():
            print(conformation, "PDBS:", pdbs)
            if len(pdbs) == 1:
                # Only one structure for this conformation: it is trivially
                # the representative.
                print("REPRESENTATIVE:", pdbs[0])
                s = Structure.objects.get(pdb_code__index=pdbs[0])
                s.distance_representative = True
                s.save()
                continue
            # Pairwise distance matrix between all structures of this group.
            dis = Distances()
            dis.load_pdbs(pdbs)
            distance_matrix = dis.get_distance_matrix()
            # Rank-by-vote fusion: for every reference structure i, rank all
            # structures by their distance to i and accumulate the ranks;
            # also accumulate plain distance sums for tie-breaking.
            n = len(distance_matrix)
            ranking = np.zeros(n)
            average = np.zeros(n)
            for i in range(n):
                ranking = ranking + scipy.stats.rankdata(distance_matrix[i, :], method='min')
                average = average + distance_matrix[i, :]
            # Candidates with the lowest summed rank.
            lowest = np.where(ranking == min(ranking))[0]
            if len(lowest) > 1:
                # Tie on summed rank: keep the candidate with the smallest
                # summed distance.  Compare within the tied candidates only
                # (comparing against the global minimum could match nothing
                # and raise an IndexError).
                lowest = lowest[np.argmin(average[lowest])]
            else:
                lowest = lowest[0]
            for i in range(n):
                if i == lowest:
                    print("REPRESENTATIVE:", pdbs[i])
                s = Structure.objects.get(pdb_code__index=pdbs[i])
                # Cast to a plain bool so a numpy bool_ is never persisted.
                s.distance_representative = bool(i == lowest)
                s.save()
|
protwis/protwis
|
contactnetwork/management/commands/build_distance_representative.py
|
Python
|
apache-2.0
| 2,958 | 0.006423 |
# Stack implementation
class Stack(object):
    """Simple LIFO stack backed by a Python list.

    ``peek`` and ``pop`` return ``None`` instead of raising when the stack
    is empty.
    """

    def __init__(self):
        self.stack = []

    def push(self, data):
        """Place *data* on top of the stack."""
        self.stack.append(data)

    def peek(self):
        """Return the top item without removing it, or None if empty."""
        return self.stack[-1] if self.stack else None

    def pop(self):
        """Remove and return the top item, or None if empty."""
        return self.stack.pop() if self.stack else None

    def isEmpty(self):
        """Return True when the stack holds no items."""
        return not self.stack

    def __str__(self):
        # Bottom-to-top, space separated.
        return ' '.join(str(item) for item in self.stack)
|
mag6367/Cracking_the_Coding_Interview_Python_Solutions
|
chapter3/stack.py
|
Python
|
mit
| 418 | 0.057416 |
from django.apps import AppConfig
class ActivityConfig(AppConfig):
name = 'cyactivities'
verbose_name = 'Cyborg Activities'
def ready(self):
import cyactivities.signals
|
shawnhermans/cyborgcrm
|
cyactivities/apps.py
|
Python
|
bsd-2-clause
| 192 | 0.010417 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-14 17:20
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Article table with a
    # unique slug and newest-first default ordering by pub_date.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Article',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('headline', models.CharField(max_length=255, verbose_name='Headline')),
                ('slug', models.SlugField(max_length=255, unique=True, verbose_name='Slug')),
                ('body', models.TextField(verbose_name='Body')),
                ('pub_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Pub date')),
            ],
            options={
                'verbose_name': 'article',
                'ordering': ['-pub_date'],
                'verbose_name_plural': 'articles',
            },
        ),
    ]
|
richardcornish/django-paywall
|
regwall/tests/articles/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 1,053 | 0.003799 |
# Register a brain volume (Control258) to the Allen reference atlas
# (ara_ccf2) via affine + LDDMM, then pull the atlas annotations back into
# the raw image space.  Each stage saves a PNG for visual inspection.
import matplotlib
matplotlib.use('Agg')  # headless backend: render to files, no display
import matplotlib.pyplot as plt
from ndreg import *
import ndio.remote.neurodata as neurodata
import nibabel as nb
# --- download the reference atlas image and its annotation channel ---
refToken = "ara_ccf2"
refImg = imgDownload(refToken)
imgShow(refImg)
plt.savefig("refImg_initial.png", bbox_inches='tight')
imgShow(refImg, vmax=500)
plt.savefig("refImg_initial_vmax500.png", bbox_inches='tight')
refAnnoImg = imgDownload(refToken, channel="annotation")
imgShow(refAnnoImg, vmax=1000)
plt.savefig("refAnnoImg_initial_vmax1000.png", bbox_inches='tight')
# Random colormap (index 0 forced to black) so adjacent annotation labels
# get visually distinct colors.
randValues = np.random.rand(1000,3)
randValues = np.concatenate(([[0,0,0]],randValues))
randCmap = matplotlib.colors.ListedColormap (randValues)
imgShow(refAnnoImg, vmax=1000, cmap=randCmap)
plt.savefig("ColorefAnnoImg_initial_vmax1000.png", bbox_inches='tight')
imgShow(refImg, vmax=500, newFig=False)
imgShow(refAnnoImg, vmax=1000, cmap=randCmap, alpha=0.2, newFig=False)
plt.show()
plt.savefig("OverlaidImg.png", bbox_inches='tight')
# --- download the input brain and bring it into the atlas frame ---
inToken = "Control258"
nd = neurodata()
inImg = imgDownload(inToken, resolution=5)
imgShow(inImg, vmax=500)
plt.savefig("rawImgvmax500.png", bbox_inches='tight')
# Physical voxel spacing of the raw acquisition (mm) — set manually,
# presumably because the downloaded metadata is unreliable; verify.
inImg.SetSpacing([0.01872, 0.01872, 0.005])
inImg_download = inImg
inImg = imgResample(inImg, spacing=refImg.GetSpacing())
imgShow(inImg, vmax=500)
plt.savefig("resample_inImg.png", bbox_inches='tight')
# Reorient from acquisition orientation (LAI) to the atlas frame (RSA).
inImg = imgReorient(inImg, "LAI", "RSA")
imgShow(inImg, vmax=500)
plt.savefig("resample_inImg_rotated.png", bbox_inches='tight')
inImg_reorient = inImg
# Downsample both images for the (expensive) registration steps.
spacing=[0.25,0.25,0.25]
refImg_ds = imgResample(refImg, spacing=spacing)
imgShow(refImg_ds, vmax=500)
plt.savefig("resample_refImg.png", bbox_inches='tight')
inImg_ds = imgResample(inImg, spacing=spacing)
imgShow(inImg_ds, vmax=500)
plt.savefig("inImg_ds.png", bbox_inches='tight')
# Affine registration (mutual information), applied at full size.
affine = imgAffineComposite(inImg_ds, refImg_ds, iterations=100, useMI=True, verbose=True)
inImg_affine = imgApplyAffine(inImg, affine, size=refImg.GetSize())
imgShow(inImg_affine, vmax=500)
plt.savefig("inImg_affine.png", bbox_inches='tight')
# Deformable (LDDMM metamorphosis) refinement on the affinely-aligned image.
inImg_ds = imgResample(inImg_affine, spacing=spacing)
(field, invField) = imgMetamorphosisComposite(inImg_ds, refImg_ds, alphaList=[0.05, 0.02, 0.01], useMI=True, iterations=100, verbose=True)
inImg_lddmm = imgApplyField(inImg_affine, field, size=refImg.GetSize())
imgShow(inImg_lddmm, vmax = 500)
imgShow(inImg_lddmm, vmax=500, newFig=False, numSlices=1)
imgShow(refAnnoImg, vmax=1000, cmap=randCmap, alpha=0.2, newFig=False, numSlices=1)
plt.savefig("overlay.png", bbox_inches='tight')
##################
# Reverse orientation
########
# Compose the inverse affine with the inverse deformation field so the
# atlas annotations can be warped back into the raw image space.
invAffine = affineInverse(affine)
invAffineField = affineToField(invAffine, refImg.GetSize(), refImg.GetSpacing())
invField = fieldApplyField(invAffineField, invField)
# Nearest-neighbour interpolation preserves integer annotation labels.
inAnnoImg = imgApplyField(refAnnoImg, invField,useNearest=True, size=inImg_reorient.GetSize())
imgShow(inAnnoImg, vmax=1000, cmap=randCmap)
plt.savefig("reverse_affine_annotations.png", bbox_inches='tight')
inAnnoImg = imgReorient(inAnnoImg, "RSA", "LAI")
imgShow(inAnnoImg, vmax=1000, cmap=randCmap)
plt.savefig("reoriented_reverse_affine_annotation.png", bbox_inches='tight')
inAnnoImg = imgResample(inAnnoImg, spacing=inImg_download.GetSpacing(), size=inImg_download.GetSize(), useNearest=True)
imgShow(inImg_download, vmax=500, numSlices=1, newFig=False)
imgShow(inAnnoImg, vmax=1000, cmap=randCmap, alpha=0.2, numSlices=1, newFig=False)
plt.savefig("final_atlas.png", bbox_inches='tight')
imgWrite(inAnnoImg, "final_resized_atlas.nii")
|
NeuroDataDesign/seelviz
|
seelviz/brainalign.py
|
Python
|
apache-2.0
| 3,476 | 0.006617 |
import importlib
import lektor.i18n
def test_loading_i18n_triggers_no_warnings(recwarn):
    """Reloading ``lektor.i18n`` must not emit any warnings."""
    importlib.reload(lektor.i18n)
    # Echo any captured warnings to stdout so a failure is easy to diagnose.
    for caught in recwarn.list:
        print(caught)
    assert not recwarn.list
|
lektor/lektor
|
tests/test_i18n.py
|
Python
|
bsd-3-clause
| 252 | 0 |
from django import forms
from seednetwork.forms import SeedNetworkBaseForm
from seedlibrary.models import Event
# (stored value, display label) pairs for the GrainForm "Grain" dropdown.
# The leading '-' entry is the unselected placeholder; commented-out entries
# are grains not currently offered.
GRAIN_CHOICES = (
	('-','-'),
	('amaranth','Amaranth'),
	('barley', 'Barley'),
	('buckwheat', 'Buckwheat'),
	('corn', 'Corn'),
#	('kaniwa', 'Kaniwa'),
	('millet', 'Millet'),
	('oats', 'Oats'),
	('quinoa', 'Quinoa'),
	('rice', 'Rice'),
	('rye', 'Rye'),
	('sorghum', 'Sorghum'),
	('teff', 'Teff'),
#	('triticale', 'Triticale'),
	('wheat', 'Wheat'),
	)
# (stored value, display label) pairs for the "Grain Subcategory" dropdown,
# grouped by grain in the labels.
# NOTE(review): several entries share the same stored value (e.g. 'common' is
# used for Barley, Buckwheat, Oats and Wheat; 'grain' for Amaranth and
# Sorghum; 'sweet' for Corn and Sorghum), so the saved value alone cannot
# identify the grain -- presumably it is always interpreted together with
# crop_type. Confirm before changing any stored values, since existing rows
# depend on them.
GRAIN_SUBCATEGORIES = (
	('-','-'),
	('grain', 'Amaranth: Grain'),
	('leaf', 'Amaranth: Leaf'),
	('grain and leaf', 'Amaranth: Grain and Leaf'),
	('common', 'Barley: Common'),
	('hulless','Barley: Hulless'),
	('common', 'Buckwheat: Common'),
	('tartary', 'Buckwheat: Tartary'),
	('dent', 'Corn: Dent'),
	('flint', 'Corn: Flint'),
	('flour', 'Corn: Flour'),
	('popcorn', 'Corn: Popcorn'),
	('sweet', 'Corn: Sweet'),
	('finger', 'Millet: Finger'),
	('foxtail', 'Millet: Foxtail'),
	('pearl', 'Millet: Pearl'),
	('proso', 'Millet: Proso'),
	('common', 'Oats: Common'),
	('hulless', 'Oats: Hulless'),
	('dryland', 'Rice: Dryland'),
	('paddy', 'Rice: Paddy'),
	('broom', 'Sorghum: Broom'),
	('grain', 'Sorghum: Grain'),
	('sweet', 'Sorghum: Sweet'),
	('multiuse', 'Sorghum: Multi-use'),
	('unknown', 'Wheat: Not Sure'),
	('club', 'Wheat: Club (Hexaploid)'),
	('common', 'Wheat: Common (Hexaploid)'),
	('durum', 'Wheat: Durum (Tetraploid)'),
	('einkorn', 'Wheat: Einkorn (Diploid)'),
	('emmer', 'Wheat: Emmer (Tetraploid)'),
	('khorasan', 'Wheat: Khorasan (Tetraploid)'),
	('macha', 'Wheat: Macha (Hexaploid)'),
	('persian', 'Wheat: Persian (Tetraploid)'),
	('polish', 'Wheat: Polish (Tetraploid)'),
	('rivetpoulardcone', 'Wheat: Rivet/Poulard/Cone (Tetraploid)'),
	('shot', 'Wheat: Shot (Hexaploid)'),
	('spelt', 'Wheat: Spelt (Hexaploid)'),
	('zanduri', 'Wheat: Zanduri (Tetraploid)'),
	)
class GrainForm(SeedNetworkBaseForm):
    """Form for listing a grain seed in the seed library.

    Only the grain type, its subcategory, and the variety name are required;
    the remaining fields describe availability and provenance.
    """
    required_css_class = 'required'
    # seed_type = forms.CharField(label="Seed Type", max_length=150, required=False, help_text="i.e. grain, vegetable, herb, perennial, fruit bush, fruit tree, etc.")
    # Grain + subcategory together identify the crop (subcategory values alone
    # are reused across grains -- see GRAIN_SUBCATEGORIES).
    crop_type = forms.ChoiceField(label="Grain", choices=GRAIN_CHOICES, required=True)
    grain_subcategory=forms.ChoiceField(label="Grain Subcategory", choices=GRAIN_SUBCATEGORIES,required=True)
    seed_variety = forms.CharField(label="Variety Name", max_length=150, required=True, help_text="e.g. Ukrainka, PI 356457 etc.")
    seed_description = forms.CharField(label="Short Description", widget=forms.Textarea(attrs={'rows':'2', 'cols':'60'}), required=False, help_text="Briefly highlight defining characteristics. This text will appear in the Short Description column on the Browse Seeds page. Longer descriptions available in \'More Information\'. ")
    enough_to_share = forms.BooleanField(label="Availability", required=False, help_text="Is your seed available for sharing or purchase? Please indicate terms on member profile page.")
    year = forms.CharField(label="Year", max_length=150, required=False, help_text="What year was your seed grown?")
    origin = forms.CharField(label="Source", max_length=150, required=False, help_text="The year and from whom you first obtained the seed.")
    # events = forms.ModelMultipleChoiceField(Event.objects.filter(show_on_seed_edit=True), required=False, widget=forms.CheckboxSelectMultiple, help_text="What events will you bring the seed to?")
    # When checked, the UI exposes the ExtendedGrainForm fields.
    more_info = forms.BooleanField(label="More Information", required=False, help_text="Check the box to provide more detailed information that will be available on the seed's profile page.")
class ExtendedGrainForm(SeedNetworkBaseForm):
    """Optional detailed information about a grain listing.

    Every field is optional; the data backs the "More Information" section on
    a seed's profile page (see ``GrainForm.more_info``).
    """
    latin_name = forms.CharField(label="Latin Name", max_length=100, required=False, help_text="e.g. Triticum monococcum")
    improvement_status =forms.ChoiceField(label="Improvement Status", choices=(('-','-'),('landrace','Landrace'),('cultivar','Cultivar'),('unknown','Unknown')),required=False)
    growth_habit=forms.ChoiceField(label="Growth Habit", choices=(('-','-'),('spring','Spring'),('winter','Winter'),('facultative','Facultative'), ('perennial','Perennial')),required=False)
    days_to_maturity=forms.IntegerField(label="Days to Maturity", required=False, help_text="Enter only a single number, even if that is an estimate.")
    # 1-9 lodging scale (stored as the selected string value).
    lodging=forms.ChoiceField(choices=(('-','-'),(1,'1'),(2,'2'),(3,'3'),(4,'4'),(5,'5'),(6,'6'),(7,'7'),(8,'8'),(9,'9')), required=False, help_text="1 = no lodging, 9 = all plants flat.")
    cultivation=forms.CharField(label="Cultivation",widget=forms.Textarea( attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Bed preparation, spacing, interplanting, fertility needs, pest protection, grown organically?")
    disease=forms.CharField(label="Disease",widget=forms.Textarea(attrs={'rows':'3', 'cols':'60'}), required=False, help_text="Describe disease resistance or susceptibility of variety.")
    threshing=forms.CharField(label="Threshing",widget=forms.Textarea( attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Describe ease or difficulty of threshing, shelling, dehulling.")
    # Fix: ``cold_hardiness`` was previously declared twice with identical
    # definitions; the earlier declaration (between ``cultivation`` and
    # ``disease``) was silently shadowed by this one. Keeping only the later
    # declaration preserves the form's rendered field order exactly.
    cold_hardiness=forms.CharField(label="Cold Hardiness", widget=forms.Textarea(attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Susceptibility to frost/freeze damage in spring/fall/winter? For example, \'A freak mid-June frost did not seem to slow down growth at all in USDA zone 5a.\'")
    culinary_qualities=forms.CharField(label="Culinary Qualities", widget=forms.Textarea(attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Baking, cooking, or brewing qualities and uses.")
    other_uses=forms.CharField(label="Other Uses", widget=forms.Textarea(attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Livestock feed, bedding, broom-making, straw weaving, thatching, etc.")
    additional_info=forms.CharField(label="Additional Information", widget=forms.Textarea(attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Interesting history, cultural information, etc.")
    external_url=forms.URLField(label="External URL", required=False, help_text="Include a link to a website with related information, or to your own website.")
class SeedExportForm(SeedNetworkBaseForm):
    """Options controlling a member's seed-listing export."""
    # Whether archived listings should be included in the export.
    archive = forms.BooleanField(required=False, help_text="Do you want to export your archived seed listings?")
|
RockinRobin/seednetwork
|
seedlibrary/forms.py
|
Python
|
mit
| 6,875 | 0.0224 |
import os
import sqlite3
from time import time, strftime, gmtime
from waskr.config import options
import log
# Fixes Database Absolute Location
# Resolve the sqlite file next to this module so it is found regardless of the
# process's current working directory.
FILE_CWD = os.path.abspath(__file__)
FILE_DIR = os.path.dirname(FILE_CWD)
DB_FILE = FILE_DIR+'/waskr.db'
# Engines Supported
# Storage backends that Stats._check_module accepts; any other configured
# engine falls back to sqlite (see Stats._load_engine).
engines_supported = ['sqlite', 'mongodb']
class conf_db(object):
    """Tiny sqlite3-backed store for the path of the MASTER waskr config.

    The backing database holds a single one-column ``config`` table; at most
    one row is expected at any time.
    """

    def __init__(self,
            db = DB_FILE):
        self.db = db
        fresh = not os.path.isfile(self.db)
        # sqlite3.connect creates the file when it does not exist yet.
        self.conn = sqlite3.connect(self.db)
        self.c = self.conn.cursor()
        if fresh:
            # First run: create the single-column table holding the path.
            self.c.execute("""CREATE TABLE config(path TEXT)""")
            self.conn.commit()

    def closedb(self):
        """Make sure the db is closed"""
        self.conn.close()

    def add_config(self, path):
        """Adds a MASTER config for waskr, replacing any previous entry."""
        # Clear out old entries first, then insert the new path (the
        # NOT EXISTS guard additionally prevents a duplicate of ``path``).
        self.c.execute('DELETE FROM config')
        self.c.execute(
            'INSERT INTO config(path) select ? WHERE NOT EXISTS(SELECT 1 FROM config WHERE path=?)',
            (path, path))
        self.conn.commit()

    def get_config_path(self):
        """Returns a cursor positioned at the first config-path row."""
        return self.c.execute("SELECT * FROM config limit 1")
class Stats(object):
    """Facade over a pluggable waskr storage engine.

    The concrete engine module is selected by the ``db_engine`` configuration
    key; unsupported names silently fall back to the sqlite engine. All query
    methods simply delegate to the engine's own ``Stats`` object.
    """

    def __init__(self,config=None, test=False):
        self.config = options(config)
        self.engine = self._load_engine()
        self.stats = self.engine.Stats(config, test)

    def _load_engine(self):
        """Import and return ``waskr.engines.<db_engine>`` (sqlite fallback)."""
        engine_name = self.config['db_engine']
        if not self._check_module(engine_name):
            engine_name = 'sqlite'  # falls back to sqlite3
        return __import__('waskr.engines.%s' % engine_name, fromlist=['None'])

    def _check_module(self, module):
        """Return True only for engines listed in ``engines_supported``."""
        return module in engines_supported

    def insert(self, stats):
        self.stats.insert(stats)

    def last_insert(self):
        return self.stats.last_insert()

    def apps_nodes(self):
        return self.stats.apps_nodes()

    def response_time(self, minutes):
        return self.stats.response_time(minutes)

    def response_bundle(self, minutes):
        # NOTE(review): delegates to request_bundle (not response_bundle),
        # mirroring the original code -- confirm this is intentional.
        return self.stats.request_bundle(minutes)

    def request_bundle(self, minutes):
        return self.stats.request_bundle(minutes)

    def request_time(self, minutes):
        return self.stats.request_time(minutes)
|
AloneRoad/waskr
|
waskr/database.py
|
Python
|
mit
| 2,723 | 0.011017 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.