text
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
|
---|---|---|---|---|---|---|
from django.contrib.auth.decorators import user_passes_test
from django.utils.decorators import method_decorator
from django.views.generic.base import TemplateView
def in_students_group(user):
    """Return True only when the given user belongs to the 'Alumnos' group."""
    return bool(user) and user.groups.filter(name='Alumnos').exists()
def in_teachers_group(user):
    """Return True only when the given user belongs to the 'Profesores' group."""
    return bool(user) and user.groups.filter(name='Profesores').exists()
class NewControlView(TemplateView):
    """Render the new-control page; access is restricted to teachers."""
    template_name = 'new_control.html'

    def get_context_data(self, **kwargs):
        # TODO: populate the context (homework form, the teacher's courses for
        # the current year, and their homeworks) once those features are wired
        # up — previously sketched here as commented-out code.
        return super(NewControlView, self).get_context_data(**kwargs)

    @method_decorator(user_passes_test(in_teachers_group, login_url='/'))
    def dispatch(self, *args, **kwargs):
        # Non-teachers are redirected to the site root before dispatch.
        return super(NewControlView, self).dispatch(*args, **kwargs)
|
Videoclases/videoclases
|
quality_control/views/control_views.py
|
Python
|
gpl-3.0
| 1,076 | 0.001859 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sys import version_info
from os import name as OS_NAME

__all__ = ['unittest', 'skipIfNtMove']

# unittest2 provides the 2.7 unittest API on older interpreters.
if version_info >= (2, 7):
    import unittest
else:
    import unittest2 as unittest

# Windows reports file moves as delete+create, so move tests are skipped there.
skipIfNtMove = unittest.skipIf(OS_NAME == 'nt', "windows can not detect moves")
|
javrasya/watchdog
|
tests/__init__.py
|
Python
|
apache-2.0
| 951 | 0.001052 |
"""
Really could have implemented this all in javascript on the client side...
"""
from __future__ import print_function
import requests
from flask import Flask, redirect, url_for, request, session, abort, jsonify
import os
import sys
import logging
import json
# Strava OAuth application id for this app (public; the secret comes from env).
STRAVA_CLIENT_ID = 1367
# Convenience patches so routes can be declared with @app.get/put/post/delete.
Flask.get = lambda self, path: self.route(path, methods=['get'])
Flask.put = lambda self, path: self.route(path, methods=['put'])
Flask.post = lambda self, path: self.route(path, methods=['post'])
Flask.delete = lambda self, path: self.route(path, methods=['delete'])
app = Flask(__name__)
# Refuse to start without the OAuth client secret.
if not os.environ.get("CLIENT_SECRET"):
    print("ERROR: CLIENT_SECRET is not defined", file=sys.stderr)
    exit(1)
client_secret = os.environ.get("CLIENT_SECRET")
# NOTE(review): assigning secret_key on the Flask *class* looks redundant with
# the instance assignment just below — presumably only app.secret_key is used.
Flask.secret_key = client_secret
app.secret_key = client_secret
# OAuth redirect target; local development address.
redirect_url = "http://127.0.0.1:5000"
@app.get('/')
def index():
    """Serve the landing page, or finish the OAuth flow on Strava's callback."""
    code = request.args.get('code')
    if code:
        # Strava redirected back with an authorization code: exchange it.
        session.permanent = True
        session['CODE'] = code
        app.logger.debug("Code = %s " % code)
        get_token(code)
        return redirect(url_for('static', filename='loggedin.html'))
    return redirect(url_for('static', filename='index.html'))
def get_token(code):
    """Exchange an OAuth authorization code for an access token.

    Stores the token plus the athlete's id and display name in the session.
    """
    data = {"client_id": STRAVA_CLIENT_ID,
            "client_secret": client_secret,
            "code": code}
    url = 'https://www.strava.com/oauth/token'
    app.logger.debug("Post URL = %s" % url)
    response = requests.post(url, data=data)
    app.logger.info("Login post returned %d" % response.status_code)
    # Parse the body once; the original re-parsed it on every access.
    payload = response.json()
    app.logger.debug(payload)
    session['token'] = payload['access_token']
    athlete = payload['athlete']
    session['athlete_id'] = athlete['id']
    session['athlete_name'] = athlete['firstname'] + " " + athlete['lastname']
@app.get('/athlete')
def get_current_user():
    """Return the logged-in athlete's id and name, or 404 when not logged in."""
    try:
        athlete = {"id": session['athlete_id'],
                   "name": session['athlete_name']}
    except KeyError:
        abort(404)
    return jsonify(athlete)
@app.get('/login')
def login():
    """Return the Strava OAuth authorization URL for the client to follow."""
    auth_url = ("https://www.strava.com/oauth/authorize"
                "?client_id=%s&response_type=code&redirect_uri=%s"
                "&scope=view_private,write")
    return auth_url % (STRAVA_CLIENT_ID, redirect_url)
@app.get('/rides/<page>')
@app.get('/rides')
def get_rides(page=1):
    """Fetch one page (50 items) of the user's activities from Strava,
    so private ones can be filtered client-side."""
    url = "https://www.strava.com/api/v3/athlete/activities"
    payload = {"per_page": 50, "page": page, "access_token": session['token']}
    # BUG FIX: these values must travel as query-string parameters.  Passing
    # them with data= put them in the request body, which GET endpoints ignore.
    response = requests.get(url, params=payload)
    app.logger.debug("Strava return code = %d" % response.status_code)
    activities = response.json()
    app.logger.debug(activities)
    return json.dumps(activities)  # there has to be a better way.
@app.put('/ride/<ride_id>')
def update_ride(ride_id):
    """Update the private/trainer flags of a single activity on Strava."""
    ride = request.json
    app.logger.debug(ride)
    # The id in the JSON body must match the id in the URL.
    if int(ride['id']) != int(ride_id):
        abort(400)
    app.logger.debug("Updating ride " + ride_id)
    response = requests.put(
        "https://www.strava.com/api/v3/activities/" + ride_id,
        data={"access_token": session['token']},
        params={"private": int(ride['private']),
                "trainer": int(ride['trainer'])})
    app.logger.debug(response.status_code)
    return json.dumps(response.json())
if __name__ == '__main__':
    # Log INFO and above to a local file, then start the Flask dev server.
    app.logger.setLevel(logging.INFO)
    file_handler = logging.FileHandler('strava.log')
    app.logger.addHandler(file_handler)
    app.run()
|
krujos/strava-private-to-public
|
private-to-public.py
|
Python
|
apache-2.0
| 3,601 | 0.002499 |
#!/usr/bin/env python
# NOTE: the shebang above was previously "#/usr/bin/env python" (missing the
# '!'), which made it an inert comment instead of an interpreter directive.
import os

from setuptools import setup, find_packages

# Kept for reference; setup() below only uses static metadata.
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)

setup(
    name="django_haikus",
    description="Some classes for finding haikus in text",
    author="Grant Thomas",
    author_email="grant.thomas@wk.com",
    url="https://github.com/wieden-kennedy/django_haikus",
    version="0.0.1",
    dependency_links=['http://github.com/wieden-kennedy/haikus/tarball/master#egg=haikus'],
    install_requires=["nltk", "django>=1.3.1", "redis", "requests", "elementtree",
                      "django-tagging", "django-picklefield"],
    packages=['django_haikus'],
    zip_safe=False,
    include_package_data=True,
    classifiers=[
        "Programming Language :: Python",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Framework :: Django",
        "Intended Audience :: Developers",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
|
wieden-kennedy/django-haikus
|
setup.py
|
Python
|
bsd-3-clause
| 1,122 | 0.008021 |
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from oslo_config import cfg
from oslo_log import log
from paste import deploy
import pecan
from werkzeug import serving
from ceilometer.api import hooks
from ceilometer.api import middleware
from ceilometer.i18n import _LI, _LW
LOG = log.getLogger(__name__)
CONF = cfg.CONF

# Options registered at the top level of the configuration.
OPTS = [
    cfg.StrOpt('api_paste_config',
               default="api_paste.ini",
               help="Configuration file for WSGI definition of API."
               ),
]

# Options registered under the [api] group.
API_OPTS = [
    cfg.BoolOpt('pecan_debug',
                default=False,
                help='Toggle Pecan Debug Middleware.'),
    cfg.IntOpt('default_api_return_limit',
               min=1,
               default=100,
               help='Default maximum number of items returned by API request.'
               ),
]

CONF.register_opts(OPTS)
CONF.register_opts(API_OPTS, group='api')
def setup_app(pecan_config=None):
    """Build and return the Pecan WSGI application with the standard hooks."""
    # FIXME: Replace DBHook with a hooks.TransactionHook
    app_hooks = [hooks.ConfigHook(),
                 hooks.DBHook(),
                 hooks.NotifierHook(),
                 hooks.TranslationHook()]
    pecan_config = pecan_config or {
        "app": {
            'root': 'ceilometer.api.controllers.root.RootController',
            'modules': ['ceilometer.api'],
        }
    }
    pecan.configuration.set_config(dict(pecan_config), overwrite=True)
    # NOTE(sileht): pecan debug won't work in multi-process environment
    pecan_debug = CONF.api.pecan_debug
    if CONF.api.workers and CONF.api.workers != 1 and pecan_debug:
        pecan_debug = False
        LOG.warning(_LW('pecan_debug cannot be enabled, if workers is > 1, '
                        'the value is overrided with False'))
    return pecan.make_app(
        pecan_config['app']['root'],
        debug=pecan_debug,
        hooks=app_hooks,
        wrap_app=middleware.ParsableErrorMiddleware,
        guess_content_type_from_ext=False
    )
def load_app():
    """Build the WSGI app from the paste config named by api_paste_config.

    Relative paths are resolved via CONF.find_file(); raises
    cfg.ConfigFilesNotFoundError when no config file can be located.
    """
    cfg_file = None
    cfg_path = cfg.CONF.api_paste_config
    if not os.path.isabs(cfg_path):
        cfg_file = CONF.find_file(cfg_path)
    elif os.path.exists(cfg_path):
        cfg_file = cfg_path
    if not cfg_file:
        raise cfg.ConfigFilesNotFoundError([cfg.CONF.api_paste_config])
    # Lazy %-args: logging formats only when the record is actually emitted.
    LOG.info("Full WSGI config used: %s", cfg_file)
    return deploy.loadapp("config:" + cfg_file)
def build_server():
    """Load the WSGI app and serve it with werkzeug's simple server."""
    app = load_app()
    # Create the WSGI server and start it
    host, port = cfg.CONF.api.host, cfg.CONF.api.port
    LOG.info(_LI('Starting server in PID %s') % os.getpid())
    LOG.info(_LI("Configuration:"))
    cfg.CONF.log_opt_values(LOG, logging.INFO)
    if host == '0.0.0.0':
        # Bound to all interfaces: advertise the loopback address for viewing.
        LOG.info(_LI(
            'serving on 0.0.0.0:%(sport)s, view at http://127.0.0.1:%(vport)s')
            % ({'sport': port, 'vport': port}))
    else:
        LOG.info(_LI("serving on http://%(host)s:%(port)s") % (
            {'host': host, 'port': port}))
    serving.run_simple(cfg.CONF.api.host, cfg.CONF.api.port,
                       app, processes=CONF.api.workers)
def app_factory(global_config, **local_conf):
    # Paste app_factory entry point; configuration args are ignored and the
    # app is built from the defaults in setup_app().
    return setup_app()
|
eayunstack/ceilometer
|
ceilometer/api/app.py
|
Python
|
apache-2.0
| 3,764 | 0 |
#!/usr/bin/python3
"""Package entry point: launch the GUI."""
import gui

if __name__ == '__main__':
    # Guard so that importing this module does not start the GUI as a side
    # effect; `python -m <package>` still runs with __name__ == '__main__'.
    gui.main()
|
Koala-Kaolin/pyweb
|
src/__main__.py
|
Python
|
gpl-3.0
| 42 | 0 |
#!/usr/bin/python
import math
def trapezint(f, a, b, n):
    """
    Trapezoidal approximation of the integral of f over [a, b] using n
    trapezoids (test helper).
    """
    def panel_area(i):
        # Area of the i-th trapezoid: average of the endpoint values
        # times the panel width.
        lower = f(a + i * (b - a) / n)
        upper = f(a + (i + 1) * (b - a) / n)
        return (lower + upper) * ((b - a) / n) / 2
    return sum((panel_area(i) for i in range(int(n))), 0.0)
def second_derivative_approximation(f, x, h=.001):
    """
    Central-difference estimate of the second derivative of f at x,
    using step size h (default .001).
    """
    neighbors = f(x + h) + f(x - h)
    return (neighbors - 2 * f(x)) / h ** 2
def adaptive_trapezint(f, a, b, eps=1E-5):
    """
    Trapezoidal approximation of the integral of f over [a, b].

    The number of trapezoids is derived from the trapezoid rule's error
    bound using a sampled estimate of max|f''| on [a, b], so that the
    theoretical error stays below eps.
    """
    # Sample |f''| on a fine grid to estimate its maximum on [a, b].
    max_second_derivative = 0
    for i in range(10000):
        i_second_d = abs(second_derivative_approximation(f, a + i * (b - a) / 10000))
        if i_second_d > max_second_derivative:
            max_second_derivative = i_second_d
    if max_second_derivative == 0:
        # f is numerically linear, so the trapezoid rule is exact with a
        # single panel.  (Previously this case divided by zero below.)
        return trapezint(f, a, b, 1)
    # Composite trapezoid rule error bound:
    #   |E| <= (b - a) * h**2 * max|f''| / 12   =>   solve for h.
    h = math.sqrt(12 * eps / ((b - a) * max_second_derivative))
    # Use at least one trapezoid even when the bound allows a huge step.
    n = max(1, (b - a) / h)
    return trapezint(f, a, b, n)
|
chapman-phys227-2016s/hw-1-seama107
|
adaptive_trapezint.py
|
Python
|
mit
| 1,279 | 0.013292 |
import difflib
import os
import pytest
from fusesoc.core import Core
def compare_fileset(fileset, name, files):
    """Assert a fileset has the expected name and exactly the expected files.

    Also checks the number of files: the original index loop silently
    ignored any extra files present in the fileset beyond len(files).
    """
    assert name == fileset.name
    assert len(files) == len(fileset.file)
    for expected, actual in zip(files, fileset.file):
        assert expected == actual.name
def test_core_info():
    """info() output matches the stored .info fixture for each core."""
    tests_dir = os.path.dirname(__file__)
    cores_root = os.path.join(tests_dir, 'cores')
    for core_name in ['sockit', 'mor1kx-generic']:
        core = Core(os.path.join(cores_root, core_name, core_name + '.core'))
        # Drop the 'Core root' line: it contains a machine-specific path.
        info_lines = [line for line in core.info().split('\n')
                      if 'Core root' not in line]
        gen_info = '\n'.join(info_lines)
        with open(os.path.join(tests_dir, __name__, core_name + ".info")) as f:
            assert f.read() == gen_info, core_name
def test_core_parsing():
    """A core without a main section parses; duplicate options are rejected."""
    import sys
    from fusesoc.vlnv import Vlnv

    cores_root = os.path.join(os.path.dirname(__file__), 'cores', 'misc')
    core = Core(os.path.join(cores_root, 'nomain.core'))
    assert core.name == Vlnv("::nomain:0")
    # Duplicate-option rejection is only checked on Python 3.
    if sys.version_info[0] > 2:
        with pytest.raises(SyntaxError) as excinfo:
            Core(os.path.join(cores_root, "duplicateoptions.core"))
        assert "option 'file_type' in section 'fileset dummy' already exists" in str(excinfo.value)
def test_capi1_get_parameters():
    """Malformed parameter sections raise SyntaxError with a clear message."""
    tests_dir = os.path.join(os.path.dirname(__file__),
                             __name__)
    # Each bad core file should be rejected with the matching message.
    bad_cores = [
        ('parameters_nodatatype.core', "Invalid datatype '' for parameter"),
        ('parameters_invaliddatatype.core', "Invalid datatype 'badtype' for parameter"),
        ('parameters_noparamtype.core', "Invalid paramtype '' for parameter"),
        ('parameters_invalidparamtype.core', "Invalid paramtype 'badtype' for parameter"),
    ]
    for corefile, message in bad_cores:
        with pytest.raises(SyntaxError) as excinfo:
            Core(os.path.join(tests_dir, corefile))
        assert message in str(excinfo.value)
def test_get_scripts():
    # Exercise get_scripts() for every (target, is_toplevel) combination and
    # compare against a hand-built expected structure.
    flag_combos = [{'target' : 'sim'  , 'is_toplevel' : False},
                   {'target' : 'sim'  , 'is_toplevel' : True},
                   {'target' : 'synth', 'is_toplevel' : False},
                   {'target' : 'synth', 'is_toplevel' : True},
                   ]
    filename = os.path.join(os.path.dirname(__file__), 'cores', 'misc', 'scriptscore.core')
    core = Core(filename, '', 'dummy_build_root')
    for flags in flag_combos:
        # Base environment every script is expected to receive.
        env = {
            'BUILD_ROOT' : 'dummy_build_root',
            'FILES_ROOT' : 'dummyroot'
        }
        result = core.get_scripts("dummyroot", flags)
        expected = {}
        # Which script sections apply depends on the target, and (for synth)
        # on whether this core is the toplevel.
        if flags['target'] == 'sim':
            sections = ['post_run', 'pre_build', 'pre_run']
        else:
            if flags['is_toplevel']:
                # Toplevel synthesis additionally gets SYSTEM_ROOT in its env.
                env['SYSTEM_ROOT'] = core.files_root
                sections = ['pre_build', 'post_build']
            else:
                sections = []
        for section in sections:
            # The fixture core defines two numbered scripts per section,
            # named '<target><section>_scripts<i>'.
            _name = flags['target']+section+'_scripts{}'
            expected[section] = [{'cmd' : ['sh', os.path.join('dummyroot', _name.format(i))],
                                  'name' : _name.format(i),
                                  'env' : env} for i in range(2)]
        assert expected == result
def test_get_tool():
    """Tool selection honours explicit requests and falls back per target."""
    cores_root = os.path.join(os.path.dirname(__file__), 'cores')

    core = Core(os.path.join(cores_root, 'atlys', 'atlys.core'))
    assert core.get_tool({'target': 'sim', 'tool': None}) is None
    assert core.get_tool({'target': 'sim', 'tool': 'icarus'}) == 'icarus'
    assert core.get_tool({'target': 'synth', 'tool': None}) == 'ise'
    assert core.get_tool({'target': 'synth', 'tool': 'vivado'}) == 'vivado'

    core = Core(os.path.join(cores_root, 'sockit', 'sockit.core'))
    assert core.get_tool({'target': 'sim', 'tool': None}) == 'icarus'
    assert core.get_tool({'target': 'sim', 'tool': 'icarus'}) == 'icarus'
    # With no backend configured there is no default synthesis tool.
    del core.main.backend
    assert core.get_tool({'target': 'synth', 'tool': None}) is None
    assert core.get_tool({'target': 'synth', 'tool': 'vivado'}) == 'vivado'
    core.main.backend = 'quartus'
def test_get_tool_options():
    """Per-tool options are returned for the toplevel core only."""
    cores_root = os.path.join(os.path.dirname(__file__), 'cores')

    core = Core(os.path.join(cores_root, 'mor1kx-generic', 'mor1kx-generic.core'))
    top = {'is_toplevel': True}
    assert core.get_tool_options(dict(top, tool='icarus')) == {'iverilog_options': ['-DSIM']}
    assert core.get_tool_options(dict(top, tool='modelsim')) == {}
    assert core.get_tool_options(dict(top, tool='isim')) == {'fuse_options': ['some', 'isim', 'options']}
    xsim_expected = {'xelab_options': ['--timescale 1ps/1ps', '--debug typical',
                                       'dummy', 'options', 'for', 'xelab']}
    assert core.get_tool_options(dict(top, tool='xsim')) == xsim_expected
    # A non-toplevel core contributes no tool options.
    assert core.get_tool_options({'is_toplevel': False, 'tool': 'icarus'}) == {}

    core = Core(os.path.join(cores_root, 'elf-loader', 'elf-loader.core'))
    assert core.get_tool_options({'is_toplevel': False, 'tool': 'verilator'}) == {'libs': ['-lelf']}
    assert core.get_tool_options(dict(top, tool='invalid')) == {}
def test_get_toplevel():
    """Toplevel selection per tool, with an optional testbench override."""
    def load(corefile):
        return Core(os.path.join(os.path.dirname(__file__), __name__, corefile))

    core = load("atlys.core")
    assert core.get_toplevel({'tool': 'icarus'}) == 'orpsoc_tb'
    assert core.get_toplevel({'tool': 'icarus', 'testbench': None}) == 'orpsoc_tb'
    assert core.get_toplevel({'tool': 'icarus', 'testbench': 'tb'}) == 'tb'
    assert core.get_toplevel({'tool': 'vivado'}) == 'orpsoc_top'

    core = load("sockit.core")
    assert core.get_toplevel({'tool': 'icarus'}) == 'dummy_tb'
    assert core.get_toplevel({'tool': 'icarus', 'testbench': None}) == 'dummy_tb'
    assert core.get_toplevel({'tool': 'icarus', 'testbench': 'tb'}) == 'tb'
    assert core.get_toplevel({'tool': 'vivado'}) == 'orpsoc_top'
def test_icestorm():
    """Filesets and icestorm backend section of the c3demo core."""
    filename = os.path.join(os.path.dirname(__file__),
                            __name__,
                            "c3demo.core")
    core = Core(filename)
    assert len(core.file_sets) == 3
    compare_fileset(core.file_sets[0], 'rtl_files',
                    ['c3demo.v', 'ledpanel.v', 'picorv32.v'])
    compare_fileset(core.file_sets[1], 'tb_files',
                    ['firmware.hex', '$YOSYS_DAT_DIR/ice40/cells_sim.v', 'testbench.v'])
    # Backend files must be converted to a fileset of their own.
    backend = core.file_sets[2]
    compare_fileset(backend, 'backend_files', ['c3demo.pcf'])
    assert backend.file[0].file_type == 'PCF'
    # Backend section values.
    icestorm = core.icestorm
    assert icestorm.export_files == []
    assert icestorm.arachne_pnr_options == ['-s', '1', '-d', '8k']
    assert icestorm.top_module == 'c3demo'
    assert icestorm.warnings == []
def test_ise():
    """Filesets and ise backend section of the atlys core."""
    filename = os.path.join(os.path.dirname(__file__),
                            __name__,
                            "atlys.core")
    core = Core(filename)
    # Filesets.
    assert len(core.file_sets) == 4
    expected_names = ['verilog_src_files',
                      'verilog_tb_src_files',
                      'verilog_tb_private_src_files']
    for fileset, expected in zip(core.file_sets, expected_names):
        assert fileset.name == expected
    # Backend files must be converted to a fileset of their own.
    compare_fileset(core.file_sets[3], 'backend_files', ['data/atlys.ucf'])
    assert core.file_sets[3].file[0].file_type == 'UCF'
    # Backend section values.
    ise = core.ise
    assert ise.export_files == []
    assert ise.family == 'spartan6'
    assert ise.device == 'xc6slx45'
    assert ise.package == 'csg324'
    assert ise.speed == '-2'
    assert ise.top_module == 'orpsoc_top'
    assert ise.warnings == []
def test_quartus():
    """Filesets and quartus backend section of the sockit core."""
    filename = os.path.join(os.path.dirname(__file__),
                            __name__,
                            "sockit.core")
    core = Core(filename)
    # Filesets.
    assert len(core.file_sets) == 4
    expected_names = ['verilog_src_files',
                      'verilog_tb_src_files',
                      'verilog_tb_private_src_files']
    for fileset, expected in zip(core.file_sets, expected_names):
        assert fileset.name == expected
    # Backend files must be converted to a fileset of their own.
    backend = core.file_sets[3]
    assert len(backend.file) == 3
    compare_fileset(backend, 'backend_files',
                    ['data/sockit.qsys', 'data/sockit.sdc', 'data/pinmap.tcl'])
    assert [f.file_type for f in backend.file] == ['QSYS', 'SDC', 'tclSource']
    # Backend section values.
    quartus = core.quartus
    assert quartus.quartus_options == '--64bit'
    assert quartus.family == '"Cyclone V"'
    assert quartus.device == '5CSXFC6D6F31C8ES'
    assert quartus.top_module == 'orpsoc_top'
    assert quartus.warnings == []
def test_simulator():
    """Simulator toplevel resolution, explicit and implicit."""
    def load(corefile):
        return Core(os.path.join(os.path.dirname(__file__), __name__, corefile))

    # Explicit toplevel in the core file.
    assert load("c3demo.core").simulator['toplevel'] == 'testbench'
    # Implicit toplevel.
    assert load("atlys.core").simulator['toplevel'] == 'orpsoc_tb'
def test_verilator():
    """Verilator tool options and filesets for a managed SystemC core."""
    cores_root = os.path.join(os.path.dirname(__file__), __name__)
    core = Core(os.path.join(cores_root, "verilator_managed_systemc.core"))
    opts = core.get_tool_options({'is_toplevel': True, 'tool': 'verilator'})
    assert opts == {'cli_parser': 'managed', 'libs': [], 'mode': 'sc'}
    assert len(core.file_sets) == 2
    src = core.file_sets[0]
    compare_fileset(src, 'verilator_src_files', ['file1.sc', 'file2.sc'])
    assert src.file[0].file_type == 'systemCSource'
    assert src.file[1].file_type == 'systemCSource'
    compare_fileset(core.file_sets[1], 'verilator_tb_toplevel', [])
|
imphil/fusesoc
|
tests/test_capi1.py
|
Python
|
gpl-3.0
| 10,310 | 0.012512 |
#
# distutils/version.py
#
# Implements multiple version numbering conventions for the
# Python Module Distribution Utilities.
#
# $Id$
#
"""Provides classes to represent module version numbers (one class for
each style of version numbering). There are currently two such classes
implemented: StrictVersion and LooseVersion.
Every version number class implements the following interface:
* the 'parse' method takes a string and parses it to some internal
representation; if the string is an invalid version number,
'parse' raises a ValueError exception
* the class constructor takes an optional string argument which,
if supplied, is passed to 'parse'
* __str__ reconstructs the string that was passed to 'parse' (or
an equivalent string -- ie. one that will generate an equivalent
version number instance)
* __repr__ generates Python code to recreate the version number instance
* _cmp compares the current instance with either another instance
of the same class or a string (which will be parsed to an instance
of the same class, thus must follow the same rules)
"""
import re
class Version:
    """Abstract base class for version numbering classes.

    Concrete subclasses must provide:

      * parse(string)  -- convert a string into the internal representation,
        raising ValueError for an invalid version number
      * __str__        -- reconstruct an (equivalent) version string
      * _cmp(other)    -- compare against another instance of the same class,
        or an unparsed version string, returning -1, 0 or 1

    This base class supplies the constructor (which feeds an optional string
    to parse), the repr, and the rich-comparison operators, all routed
    through _cmp.
    """

    def __init__(self, vstring=None):
        if vstring:
            self.parse(vstring)

    def __repr__(self):
        return "%s ('%s')" % (self.__class__.__name__, str(self))

    # Each comparison interprets _cmp's -1/0/+1 result, propagating
    # NotImplemented untouched so Python can try the reflected operation.
    def __eq__(self, other):
        c = self._cmp(other)
        return c if c is NotImplemented else c == 0

    def __lt__(self, other):
        c = self._cmp(other)
        return c if c is NotImplemented else c < 0

    def __le__(self, other):
        c = self._cmp(other)
        return c if c is NotImplemented else c <= 0

    def __gt__(self, other):
        c = self._cmp(other)
        return c if c is NotImplemented else c > 0

    def __ge__(self, other):
        c = self._cmp(other)
        return c if c is NotImplemented else c >= 0
class StrictVersion(Version):
    """Version numbering for anal retentives and software idealists.

    A version number consists of two or three dot-separated numeric
    components, plus an optional pre-release tag: the letter 'a' or 'b'
    followed by a number.  When the numeric components are equal, a version
    with a pre-release tag sorts before one without.

    Valid examples, in ascending order:

        0.4       0.4.0    (these two are equivalent)
        0.4.1
        0.5a1
        0.5b3
        0.5
        0.9.6
        1.0
        1.0.4a3
        1.0.4b1
        1.0.4

    Invalid examples: 1, 2.7.2.2, 1.3.a4, 1.3pl1, 1.3c4.
    """

    version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$',
                            re.VERBOSE | re.ASCII)

    def parse(self, vstring):
        """Parse vstring into self.version / self.prerelease; raise
        ValueError when it does not match the strict format."""
        match = self.version_re.match(vstring)
        if not match:
            raise ValueError("invalid version number '%s'" % vstring)
        major, minor, patch, prerelease, prerelease_num = \
            match.group(1, 2, 4, 5, 6)
        # Always store a three-component tuple; a missing patch level is 0.
        self.version = (int(major), int(minor), int(patch) if patch else 0)
        if prerelease:
            self.prerelease = (prerelease[0], int(prerelease_num))
        else:
            self.prerelease = None

    def __str__(self):
        # A zero patch level is omitted from the printed form.
        shown = self.version[0:2] if self.version[2] == 0 else self.version
        vstring = '.'.join(str(part) for part in shown)
        if self.prerelease:
            vstring += self.prerelease[0] + str(self.prerelease[1])
        return vstring

    def _cmp(self, other):
        if isinstance(other, str):
            other = StrictVersion(other)
        if self.version != other.version:
            # Numeric parts differ; the pre-release tag is irrelevant.
            return -1 if self.version < other.version else 1
        # Numeric parts are equal; compare pre-release tags.  A version
        # carrying a pre-release tag sorts before the plain release.
        if self.prerelease == other.prerelease:
            return 0
        if self.prerelease and not other.prerelease:
            return -1
        if not self.prerelease and other.prerelease:
            return 1
        return -1 if self.prerelease < other.prerelease else 1
# end class StrictVersion
# The rules according to Greg Stein:
# 1) a version number has 1 or more numbers separated by a period or by
# sequences of letters. If only periods, then these are compared
# left-to-right to determine an ordering.
# 2) sequences of letters are part of the tuple for comparison and are
# compared lexicographically
# 3) recognize the numeric components may have leading zeroes
#
# The LooseVersion class below implements these rules: a version number
# string is split up into a tuple of integer and string components, and
# comparison is a simple tuple comparison. This means that version
# numbers behave in a predictable and obvious way, but a way that might
# not necessarily be how people *want* version numbers to behave. There
# wouldn't be a problem if people could stick to purely numeric version
# numbers: just split on period and compare the numbers as tuples.
# However, people insist on putting letters into their version numbers;
# the most common purpose seems to be:
# - indicating a "pre-release" version
# ('alpha', 'beta', 'a', 'b', 'pre', 'p')
# - indicating a post-release patch ('p', 'pl', 'patch')
# but of course this can't cover all version number schemes, and there's
# no way to know what a programmer means without asking him.
#
# The problem is what to do with letters (and other non-numeric
# characters) in a version number. The current implementation does the
# obvious and predictable thing: keep them as strings and compare
# lexically within a tuple comparison. This has the desired effect if
# an appended letter sequence implies something "post-release":
# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002".
#
# However, if letters in a version number imply a pre-release version,
# the "obvious" thing isn't correct. Eg. you would expect that
# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison
# implemented here, this just isn't so.
#
# Two possible solutions come to mind. The first is to tie the
# comparison algorithm to a particular set of semantic rules, as has
# been done in the StrictVersion class above. This works great as long
# as everyone can go along with bondage and discipline. Hopefully a
# (large) subset of Python module programmers will agree that the
# particular flavour of bondage and discipline provided by StrictVersion
# provides enough benefit to be worth using, and will submit their
# version numbering scheme to its domination. The free-thinking
# anarchists in the lot will never give in, though, and something needs
# to be done to accommodate them.
#
# Perhaps a "moderately strict" version class could be implemented that
# lets almost anything slide (syntactically), and makes some heuristic
# assumptions about non-digits in version number strings. This could
# sink into special-case-hell, though; if I was as talented and
# idiosyncratic as Larry Wall, I'd go ahead and implement a class that
# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is
# just as happy dealing with things like "2g6" and "1.13++". I don't
# think I'm smart enough to do it right though.
#
# In any case, I've coded the test suite for this module (see
# ../test/test_version.py) specifically to fail on things like comparing
# "1.2a2" and "1.2". That's not because the *code* is doing anything
# wrong, it's because the simple, obvious design doesn't match my
# complicated, hairy expectations for real-world version numbers. It
# would be a snap to fix the test suite to say, "Yep, LooseVersion does
# the Right Thing" (ie. the code matches the conception). But I'd rather
# have a conception that matches common notions about version numbers.
class LooseVersion(Version):
    """Version numbering for anarchists and software realists.

    A version number is a series of numbers separated by periods or strings
    of letters.  Numeric components compare numerically, alphabetic ones
    lexically.  All of the following are valid, in no particular order:

        1.5.1  1.5.2b2  161  3.10a  8.02  3.4j  1996.07.12  3.2.pl0
        3.1.1.6  2g6  11g  0.960923  2.2beta29  1.13++  5.5.kw  2.0b1pl0

    There is no such thing as an invalid version number under this scheme;
    the comparison rules are simple and predictable, but may not always give
    the results you want (for some definition of "want").
    """

    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

    def __init__(self, vstring=None):
        if vstring:
            self.parse(vstring)

    def parse(self, vstring):
        # Keep the raw string: the parsed list cannot reproduce it, so
        # __str__ simply returns it.
        self.vstring = vstring
        pieces = [p for p in self.component_re.split(vstring)
                  if p and p != '.']

        def as_number(piece):
            # Numeric components become ints so they compare numerically.
            try:
                return int(piece)
            except ValueError:
                return piece

        self.version = [as_number(p) for p in pieces]

    def __str__(self):
        return self.vstring

    def __repr__(self):
        return "LooseVersion ('%s')" % str(self)

    def _cmp(self, other):
        if isinstance(other, str):
            other = LooseVersion(other)
        if self.version == other.version:
            return 0
        return -1 if self.version < other.version else 1
|
prefetchnta/questlab
|
bin/x64bin/python/37/Lib/distutils/version.py
|
Python
|
lgpl-2.1
| 12,688 | 0.001497 |
from __future__ import unicode_literals
import os
import sys
from subprocess import PIPE, Popen
from django.apps import apps as installed_apps
from django.utils import six
from django.utils.crypto import get_random_string
from django.utils.encoding import DEFAULT_LOCALE_ENCODING, force_text
from .base import CommandError
def popen_wrapper(args, os_err_exc_type=CommandError, stdout_encoding='utf-8'):
    """
    Friendly wrapper around Popen.

    Returns stdout output, stderr output and OS status code.
    """
    try:
        # close_fds only on POSIX (os.name != 'nt').
        p = Popen(args, shell=False, stdout=PIPE, stderr=PIPE, close_fds=os.name != 'nt')
    except OSError as e:
        strerror = force_text(e.strerror, DEFAULT_LOCALE_ENCODING, strings_only=True)
        # Re-raise as the caller-specified exception type while preserving
        # the original traceback (six.reraise handles Python 2/3 differences).
        six.reraise(os_err_exc_type, os_err_exc_type('Error executing %s: %s' %
                    (args[0], strerror)), sys.exc_info()[2])
    output, errors = p.communicate()
    return (
        # stdout is decoded strictly with the requested encoding; stderr is
        # decoded with the locale encoding, replacing undecodable bytes.
        force_text(output, stdout_encoding, strings_only=True, errors='strict'),
        force_text(errors, DEFAULT_LOCALE_ENCODING, strings_only=True, errors='replace'),
        p.returncode
    )
def handle_extensions(extensions):
    """
    Organizes multiple extensions that are separated with commas or passed by
    using --extension/-e multiple times.

    For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
    would result in an extension list: ['.js', '.txt', '.xhtml']

    >>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py'])
    {'.html', '.js', '.py'}
    >>> handle_extensions(['.html, txt,.tpl'])
    {'.html', '.tpl', '.txt'}
    """
    # Flatten the comma-separated chunks, stripping stray spaces first.
    collected = []
    for chunk in extensions:
        collected.extend(chunk.replace(' ', '').split(','))
    # Normalize each entry to a leading-dot form and de-duplicate.
    return {ext if ext.startswith('.') else '.%s' % ext for ext in collected}
def find_command(cmd, path=None, pathext=None):
    """Search *path* for *cmd*, honoring Windows-style PATHEXT suffixes.

    :param cmd: command name to locate.
    :param path: directory list (or single directory string); defaults to
        the PATH environment variable.
    :param pathext: candidate executable suffixes; defaults to PATHEXT
        (harmless no-ops on non-Windows platforms).
    :return: full path of the first match, or None when not found.
    """
    if path is None:
        path = os.environ.get('PATH', '').split(os.pathsep)
    if isinstance(path, six.string_types):
        path = [path]
    if pathext is None:
        pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD').split(os.pathsep)
    # If cmd already ends with one of the suffixes, don't append another.
    if any(cmd.endswith(ext) for ext in pathext):
        pathext = ['']
    for directory in path:
        candidate = os.path.join(directory, cmd)
        if os.path.isfile(candidate):
            return candidate
        for ext in pathext:
            extended = candidate + ext
            if os.path.isfile(extended):
                return extended
    return None
def get_random_secret_key():
    """
    Return a 50 character random string usable as a SECRET_KEY setting value.
    """
    # Alphabet deliberately excludes characters that commonly need quoting
    # in settings files.
    allowed_chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
    return get_random_string(50, allowed_chars)
def parse_apps_and_model_labels(labels):
    """
    Parse a list of "app_label.ModelName" or "app_label" strings into actual
    objects and return a two-element tuple:
        (set of model classes, set of app_configs).

    Raise a CommandError if some specified models or apps don't exist.
    """
    app_configs = set()
    model_classes = set()
    for label in labels:
        if '.' not in label:
            # Bare app label: resolve the whole application.
            try:
                app_configs.add(installed_apps.get_app_config(label))
            except LookupError as e:
                raise CommandError(str(e))
        else:
            # Dotted "app_label.ModelName": resolve a single model.
            try:
                model_classes.add(installed_apps.get_model(label))
            except LookupError:
                raise CommandError('Unknown model: %s' % label)
    return model_classes, app_configs
|
mbayon/TFG-MachineLearning
|
venv/lib/python3.6/site-packages/django/core/management/utils.py
|
Python
|
mit
| 3,739 | 0.001337 |
import logging
from django.conf import settings
from kombu import (Exchange,
Queue)
from kombu.mixins import ConsumerMixin
from treeherder.etl.common import fetch_json
from treeherder.etl.tasks.pulse_tasks import (store_pulse_jobs,
store_pulse_resultsets)
logger = logging.getLogger(__name__)
class PulseConsumer(ConsumerMixin):
    """
    Consume jobs from Pulse exchanges

    Maintains one durable queue on the Pulse broker, binds it to the
    configured exchanges, and can prune bindings that are no longer in
    the configuration.  Subclasses must provide an ``on_message`` method.
    """
    def __init__(self, connection, queue_suffix):
        # Kombu connection to the Pulse broker.  Consumer specs are
        # registered lazily via bind_to() before the mixin's run loop starts.
        self.connection = connection
        self.consumers = []
        self.queue = None
        config = settings.PULSE_DATA_INGESTION_CONFIG
        if not config:
            raise ValueError("PULSE_DATA_INGESTION_CONFIG is required for the "
                             "JobConsumer class.")
        # Pulse convention: user-owned durable queues live under
        # "queue/<username>/...".
        self.queue_name = "queue/{}/{}".format(config.username, queue_suffix)
    def get_consumers(self, Consumer, channel):
        # Hook required by ConsumerMixin: materialize the registered
        # consumer specs into kombu Consumer objects.
        return [
            Consumer(**c) for c in self.consumers
        ]
    def bind_to(self, exchange, routing_key):
        if not self.queue:
            # First binding: create the queue and register its consumer.
            # on_message is supplied by the subclass.
            self.queue = Queue(
                name=self.queue_name,
                channel=self.connection.channel(),
                exchange=exchange,
                routing_key=routing_key,
                durable=settings.PULSE_DATA_INGESTION_QUEUES_DURABLE,
                auto_delete=settings.PULSE_DATA_INGESTION_QUEUES_AUTO_DELETE
            )
            self.consumers.append(dict(queues=self.queue,
                                       callbacks=[self.on_message]))
            # just in case the queue does not already exist on Pulse
            self.queue.declare()
        else:
            # Subsequent bindings reuse the already-declared queue.
            self.queue.bind_to(exchange=exchange, routing_key=routing_key)
    def unbind_from(self, exchange, routing_key):
        self.queue.unbind_from(exchange, routing_key)
    def close(self):
        self.connection.release()
    def prune_bindings(self, new_bindings):
        # get the existing bindings for the queue
        bindings = []
        try:
            bindings = self.get_bindings(self.queue_name)["bindings"]
        except Exception:
            # Best-effort: a failed fetch only disables pruning, it must
            # not stop ingestion.
            logger.error("Unable to fetch existing bindings for {}".format(
                self.queue_name))
            logger.error("Data ingestion may proceed, "
                         "but no bindings will be pruned")
        # Now prune any bindings from the queue that were not
        # established above.
        # This indicates that they are no longer in the config, and should
        # therefore be removed from the durable queue bindings list.
        for binding in bindings:
            if binding["source"]:
                binding_str = self.get_binding_str(binding["source"],
                                                   binding["routing_key"])
                if binding_str not in new_bindings:
                    self.unbind_from(Exchange(binding["source"]),
                                     binding["routing_key"])
                    logger.info("Unbound from: {}".format(binding_str))
    def get_binding_str(self, exchange, routing_key):
        """Use consistent string format for binding comparisons"""
        return "{} {}".format(exchange, routing_key)
    def get_bindings(self, queue_name):
        """Get list of bindings from the pulse API"""
        return fetch_json("{}queue/{}/bindings".format(
            settings.PULSE_GUARDIAN_URL, queue_name))
class JobConsumer(PulseConsumer):
    """Pulse consumer that hands job payloads to the store_pulse_jobs task."""

    def on_message(self, body, message):
        # Queue the ingestion task first, then acknowledge the Pulse message.
        delivery = message.delivery_info
        task_args = [body, delivery["exchange"], delivery["routing_key"]]
        store_pulse_jobs.apply_async(args=task_args,
                                     routing_key='store_pulse_jobs')
        message.ack()
class ResultsetConsumer(PulseConsumer):
    """Pulse consumer that hands resultset payloads to store_pulse_resultsets."""

    def on_message(self, body, message):
        # Queue the ingestion task first, then acknowledge the Pulse message.
        delivery = message.delivery_info
        task_args = [body, delivery["exchange"], delivery["routing_key"]]
        store_pulse_resultsets.apply_async(args=task_args,
                                           routing_key='store_pulse_resultsets')
        message.ack()
|
akhileshpillai/treeherder
|
treeherder/etl/pulse_consumer.py
|
Python
|
mpl-2.0
| 4,116 | 0 |
import os
import json
from ...resources.base import SurvoxAPIBase
from ...resources.exception import SurvoxAPIRuntime, SurvoxAPINotFound
from ...resources.valid import valid_url_field
class SurvoxAPIDncList(SurvoxAPIBase):
    """
    Manage the collection of DNC (Do-Not-Contact) lists.
    """

    def __init__(self, base_url=None, headers=None, verbose=True):
        super(SurvoxAPIDncList, self).__init__(base_url, headers, verbose)
        self.url = '/sample/dnc/'

    def list(self):
        """
        Fetch a list of available DNC lists
        :return: list of DNC lists
        """
        return self.api_get(endpoint=self.url)

    def create(self, name, description, dnc_type, account, filename=None, exists_okay=False):
        """
        Create a new DNC list
        :param name: new DNC list name
        :param description: DNC description
        :param dnc_type: DNC list type, one of 'phone', 'prefix' or 'email'
        :param account: Survox runtime account to put the DNC list into
        :param filename: optional csv file containing dnc information
        :param exists_okay: return existing list if True, else raise exception
        :return: dnc list information (with an 'upload_result' key when a
                 file was uploaded)
        :raises SurvoxAPIRuntime: on an invalid name or type, when the list
                 already exists and exists_okay is False, or when filename
                 does not exist
        """
        valid, msg = valid_url_field('Do-Not-Contact', name, 1, 256)
        if not valid:
            raise SurvoxAPIRuntime(msg)
        valid_dnc_types = ['phone', 'prefix', 'email']
        if dnc_type not in valid_dnc_types:
            # BUG FIX: was json.loads(valid_dnc_types), which raises TypeError
            # when given a list; json.dumps renders the options for display.
            raise SurvoxAPIRuntime('Unknown DNC type "{type}". Must be one of {opts}'.format(
                type=dnc_type, opts=json.dumps(valid_dnc_types)))
        try:
            s = self.api_get(endpoint='{base}{name}/'.format(base=self.url, name=name))
            if not exists_okay:
                # BUG FIX: the placeholder was '{name)' (mismatched brace),
                # which makes .format() raise ValueError instead of producing
                # the intended error message.
                raise SurvoxAPIRuntime('Do-Not-Contact already exists: {name}'.format(name=name))
        except SurvoxAPINotFound:
            # The list does not exist yet: create it.
            s = self.api_post(endpoint=self.url, data={
                'name': name,
                'dnc_type': dnc_type,
                'description': description,
                'account': account
            })
        if s and filename:
            if not os.path.isfile(filename):
                # BUG FIX: same '{name)' mismatched-brace problem as above.
                raise SurvoxAPIRuntime('No such filename for Do-Not-Contact: {name}'.format(name=filename))
            x = SurvoxAPIDnc(name, base_url=self.base_url, headers=self.auth_headers, verbose=self.verbose)
            upload = x.upload(filename)
            s['upload_result'] = upload
        return s

    def delete(self):
        """
        delete all DNC lists
        :return: {}
        """
        return self.api_delete(endpoint=self.url)
class SurvoxAPIDnc(SurvoxAPIBase):
    """
    Work with a specific DNC (Do-Not-Contact) list.
    """

    def __init__(self, name, base_url=None, headers=None, verbose=True):
        super(SurvoxAPIDnc, self).__init__(base_url, headers, verbose)
        self.name = name
        self.url = '/sample/dnc/{name}/'.format(name=name)
        self.upload_url = "{base}upload/".format(base=self.url)
        self.download_url = "{base}download/".format(base=self.url)

    def get(self):
        """
        Fetch this DNC list's properties.
        :return: dict of properties, or None when the list does not exist
        """
        try:
            return self.api_get(endpoint=self.url)
        except SurvoxAPINotFound:
            return None

    def set(self, description=None, realtime=None):
        """
        update a DNC entry
        :param description: new description for DNC list
        :param realtime: if True, set DNC list as realtime, unset as realtime if False
        :return: return the DNC list properties
        :raises SurvoxAPIRuntime: when the list is missing or no properties
                 were passed
        """
        dnc = self.get()
        if not dnc:
            raise SurvoxAPIRuntime('No DNC available named: {name}'.format(name=self.name))
        # BUG FIX: the original used truthiness tests ("if not realtime"),
        # so realtime=False was treated as "not passed" and the realtime
        # flag could never be unset, contradicting the documented behavior.
        # Compare against None instead.
        if description is None and realtime is None:
            raise SurvoxAPIRuntime('No properties passed to set for DNC named: {name}'.format(name=self.name))
        changes = {}
        if description is not None and description != dnc['description']:
            changes['description'] = description
        if realtime is not None and realtime != dnc['realtime']:
            changes['realtime'] = realtime
        if changes:
            return self.api_put(endpoint=self.url, data=changes)
        else:
            # Nothing actually changed; return the current properties.
            return dnc

    def delete(self):
        """
        Delete the specified DNC list
        :return:
        """
        return self.api_delete(endpoint=self.url)

    def upload(self, filename, block_size=None):
        """
        Upload records into DNC list
        :param filename: file to upload
        :param block_size: block size of upload
        :return:
        """
        return self.api_upload(self.upload_url, filename, block_size=block_size)

    def download(self, filename):
        """
        Download a dnc file in csv format
        :param filename: file to save as
        :return:
        :raises SurvoxAPIRuntime: when no download location is available
        """
        download_location = self.api_get(self.download_url)
        if not download_location:
            raise SurvoxAPIRuntime('No DNC available for download: {name}'.format(name=self.name))
        return self.api_download(download_location, filename)
|
cbeauvais/zAWygzxkeSjUBGGVsgMGTF56xvR
|
survox_api/resources/library/sample_dnc.py
|
Python
|
mit
| 5,130 | 0.002534 |
# Copyright 2018 Dgraph Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pydgraph.proto.api_pb2 import Operation, Payload, Request, Response, Mutation, TxnContext,\
Check, Version, NQuad, Value, Facet, Latency
from pydgraph.client_stub import *
from pydgraph.client import *
from pydgraph.txn import *
from pydgraph.errors import *
|
dgraph-io/pydgraph
|
pydgraph/__init__.py
|
Python
|
apache-2.0
| 849 | 0.001178 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-06-09 22:16
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated merge migration: reconciles two divergent 0092 branches
    # of the 'crowdsourcing' migration history; no schema operations needed.
    dependencies = [
        ('crowdsourcing', '0092_merge'),
        ('crowdsourcing', '0092_auto_20160608_0236'),
    ]
    operations = [
    ]
|
shirishgoyal/crowdsource-platform
|
crowdsourcing/migrations/0093_merge.py
|
Python
|
mit
| 334 | 0 |
"""waybacktrack.py
Use this to extract Way Back Machine's
url-archives of any given domain!
TODO: reiterate entire design!
"""
import time
import os
import urllib2
import random
from math import ceil
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
from lxml import html
from lxml.html import clean
ARCHIVE_DOMAIN = "http://web.archive.org"
CURR_DIR = os.path.dirname(__file__)
DATASET_DIR = os.path.join(CURR_DIR, '../../dataset/')
def archive_domain(domain, year, dir_path=DATASET_DIR,
percent=0, debug=False, throttle=1):
"""
domain
@type domain: string
@param domain: the domain of the website ie. www.nytimes.com
@type year: int
@param year: the year to extract archives from
@type dir_path: string
@param dir_path: the directory path to store archive, if
empty, directory will automatically be created
TODO: Think of better solution to storing
downloaded archives
@type percent: int
@param percent: the percentage of Way Back archives to crawl
@rtype:
@return: Returns a list of archived sites
"""
# TODO: Improve this for module portability
# WARNING: Module will likely break if used outside of
# crawl-to-the-future project
# automatically find or eventually create directory
# based off domain name
# Found way to check if file is being ran in crawl-to-the-future
# super "hacky" though
# TODO: Find better way to check if module is getting ran in
# in crawl-to-the-future project
if os.path.split(
os.path.abspath(os.path.join(__file__, os.pardir)))[1] != "Way-Back":
raise Exception("Please manually specify 'dir_name' value")
if dir_path is DATASET_DIR:
dir_path = os.path.join(dir_path, domain + '/')
if not os.path.exists(dir_path):
#raise IOError("[Errno 2] No such file or directory: '" + dir_path + "'")
# this part is shady
os.makedirs(dir_path)
if not isinstance(dir_path, basestring):
raise Exception("Directory - third arg. - path must be a string.")
ia_year_url = ARCHIVE_DOMAIN + "/web/" + str(year) + \
"*/http://" + domain + "/"
ia_parsed = html.parse(ia_year_url)
domain_snapshots = list(set(ia_parsed.xpath('//*[starts-with(@id,"' +
str(year) + '-")]//a/@href')))
#snapshot_age_span is a percentage of total snapshots to process from
#the given year
#ie. if percent is 100, and there are a total of 50 snapshots for
#www.cnn.com, we will crawl (to a depth of 1 atm) all 50 snapshots
snapshot_age_span = 1 if percent <= 0 \
else len(domain_snapshots) - 1 \
if percent >= 100 \
else int(percent*len(domain_snapshots)/100)
if debug:
print "Extracting links from: ", domain
# http://margerytech.blogspot.com/2011/06/python-get-last-directory-name-in-path.html
print "Current directory: ", os.path.split(
os.path.abspath(os.path.join(__file__, os.pardir)))[1]
print "Storing files in: ", os.path.abspath(dir_path)
print "Number of domain snapshots: ", len(domain_snapshots)
print "Number of domain snapshots to process: ", snapshot_age_span + 1
random.shuffle(domain_snapshots)
forward_links = []
#for snapshot in domain_snapshots[:snapshot_age_span]:
for snapshot in domain_snapshots[:3]:
curr_snapshot_flinks = get_forwardlink_snapshots(snapshot)
forward_links.extend(curr_snapshot_flinks)
if debug:
print "snapshot url: ", snapshot
print "forward link count: ", len(curr_snapshot_flinks)
random.shuffle(forward_links)
if debug:
print "total number of foward links to download: ", len(forward_links)
random.shuffle(forward_links)
# archive forward links
archived_links = []
duds = []
for forwardlink in forward_links:
if archive(forwardlink, year, dir_path, debug, throttle):
archived_links.append(forwardlink)
else:
duds.append(forwardlink)
if debug:
print "Number of archived forward links: ", len(archived_links)
print "Number of duds: ", len(duds)
return archived_links, duds
# I know I'm breaking so many rules by not seperating concerns
def archive(page, year, dir_path, debug=False, throttle=1):
    """
    Check to see if downloaded forward link
    satisfies the archival year specification
    ie. (2000, 2005, 2010)

    Returns True when the page was downloaded and saved; False when it was
    already saved, the request failed, a 302 was detected, or the final URL
    is not from the requested archival year.
    """
    #files = [f for f in os.listdir(dir_path) if os.path.isfile(f)]
    if debug:
        print "requesting ", page
    # Derive a filesystem-safe filename from the archived URL by replacing
    # every path/URL metacharacter with '_'.
    page_file = page.rsplit('/web/')[1].replace('http://', '').replace('-','_')
    page_file = page_file.replace('/', '_').replace(':', '_').replace('&','_')
    page_file = page_file.replace('?', '_').replace('*','_').replace('=','_')
    file_path = dir_path + page_file
    # Skip pages already downloaded in a previous run.
    if os.path.isfile(file_path):
        if debug:
            print "Already saved: ", page_file
            print
        return False
    try:
        html_file = urllib2.urlopen(ARCHIVE_DOMAIN + page)
    except IOError:
        if debug:
            print "Failed to open request for ", ARCHIVE_DOMAIN + page
            print
        return False
    # NOTE(review): urlopen follows redirects, so getcode() rarely reports
    # 302 directly; the body sniff below appears to be the fallback for
    # Wayback's "HTTP 302 response" interstitial pages — confirm.
    if html_file.getcode() == 302:
        if debug:
            print "Got HTTP 302 response for ", ARCHIVE_DOMAIN + page
            print
        return False
    html_string = str(html_file.read())
    if html_string.find("HTTP 302 response") != -1:
        if debug:
            print "Got HTTP 302 response for ", ARCHIVE_DOMAIN + page
            print
        return False
    # Only keep the page if the final (post-redirect) URL still points at a
    # snapshot from the requested year.
    archival_year_spec = ARCHIVE_DOMAIN + '/web/' + str(year)
    page_url = html_file.geturl()
    if page_url.startswith(archival_year_spec):
        if debug:
            print "saving ", page_url
            print
        try:
            # BytesIO round-trip is effectively an identity copy of the
            # bytes before writing them out.
            with open(file_path, 'wb') as f:
                f.write(BytesIO(html_string).read())
            # Be polite to the Wayback Machine between downloads.
            time.sleep(throttle)
        except IOError as e:
            if debug:
                print "Got error: ", e
            return False
        return True
    else:
        return False
def get_forwardlink_snapshots(parent_site):
"""
@type index: string
@param index: the index.html page from which to extract forward links
@type year: int
@param year: the year to extract archives from
"""
try:
parsed_parent_site = html.parse(ARCHIVE_DOMAIN+parent_site)
except IOError:
print "Did not get extract links in ", ARCHIVE_DOMAIN+parent_site
return []
#cleaner = html.clean.Cleaner(scripts=True, javascript=True,style=True, kill_tags = ["img"])
cleaner = clean.Cleaner(scripts=True, javascript=True, comments=True,
style=True, meta=True, processing_instructions=True, embedded=True,
frames=True, forms=True, kill_tags=["noscript", "iframe", "img"])
parsed_parent_site = cleaner.clean_html(parsed_parent_site)
# spec archival year
# check to see if the archival year of a forwark link
# is that of the parent (ie. 2000|2005|2010)
all_forwardlinks = parsed_parent_site.xpath('//a[starts-with(@href,"' +
parent_site[:9] +'")]/@href')
return all_forwardlinks
|
rodricios/crawl-to-the-future
|
crawlers/Way-Back/waybacktrack.py
|
Python
|
gpl-2.0
| 7,577 | 0.006071 |
import json
from apiserver.model import Route
import utils
def test_post(app, apiusers, db, default_headers, post_geojson):
    """Posting a GeoJSON file should create one Route row per feature."""
    with app.test_client() as client:
        res = client.get('/routes?api_key=' + apiusers['valid'].api_key, default_headers['get'])
        assert res.status_code == 200

    posted = post_geojson('ying_yang_routes.geojson')
    stored = db.session.query(Route).all()
    assert len(stored) > 0
    assert len(stored) == len(posted['expected'])
def test_query_by_radius(app, db, good_key, default_headers, post_geojson):
    """Querying /routes by latlng + radius should return the posted route."""
    posted = post_geojson('caustic_cock.geojson')
    assert posted['response'].status_code == 200

    with app.test_client() as client:
        latlng = utils.geojson_to_lat_lng(posted['expected'])
        radius = 50  # caustic cock should be about 1km from yin yang
        query_str = '&latlng={}&r={}'.format(latlng, radius)
        res = client.get('/routes?api_key=' + good_key + query_str, default_headers['get'])
        assert res.status_code == 200

        payload = json.loads(res.data)
        assert len(payload['features']) > 0, "Expect 1 route"
        returned = Route(payload['features'][0])
        expected = Route(posted['expected']['features'][0])
        assert returned == expected, "Expect route to be equal"
|
OpenBeta/beta
|
tests/test_api_routes.py
|
Python
|
gpl-3.0
| 1,343 | 0.001489 |
# -*- coding: utf-8 -*-
# © 2016 Antiun Ingenieria S.L. - Javier Iniesta
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import models
|
Endika/manufacture
|
mrp_sale_info/__init__.py
|
Python
|
agpl-3.0
| 165 | 0 |
import json
from flask import url_for
from flask_restplus import schemas
from udata.tests.helpers import assert200
class SwaggerBlueprintTest:
    """Checks on the generated Swagger/OpenAPI specification."""
    modules = []

    def test_swagger_resource_type(self, api):
        # The PUT /datasets/{dataset}/resources/ 200 response must be an array.
        response = api.get(url_for('api.specs'))
        assert200(response)
        spec = json.loads(response.data)
        put_responses = spec['paths']['/datasets/{dataset}/resources/']['put']['responses']
        assert put_responses['200']['schema']['type'] == 'array'

    def test_swagger_specs_validate(self, api):
        # The whole spec document must validate against the Swagger schema.
        response = api.get(url_for('api.specs'))
        try:
            schemas.validate(response.json)
        except schemas.SchemaValidationError as e:
            # Surface the individual errors before failing the test.
            print(e.errors)
            raise
|
opendatateam/udata
|
udata/tests/api/test_swagger.py
|
Python
|
agpl-3.0
| 763 | 0 |
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Downgrade wrapper-generation failures to warnings so generation continues."""

    def handle_error(self, wrapper, exception, traceback_):
        message = "exception %r in wrapper %s" % (exception, wrapper)
        warnings.warn(message)
        # Returning True tells pybindgen the error was handled.
        return True
# Install the lenient handler globally so pybindgen warns instead of aborting.
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create the top-level bindings module for ns.topology_read (C++ ns3)."""
    return Module('ns.topology_read', cpp_namespace='::ns3')
def register_types(module):
    """Register every C++ type exposed by the ns-3 'topology-read' bindings.

    Auto-generated by the ns-3 API scanner: each ``add_class``/``add_enum``
    call mirrors one declaration from the C++ header named in the ``##``
    comment above it, and each ``add_type_alias`` mirrors a C++ typedef
    together with its pointer (``*``) and reference (``&``) variants.
    Registration order matters: a type must be registered before it is
    referenced via ``parent=``, ``outer_class=`` or ``root_module[...]``.

    module: the pybindgen Module returned by module_init(); nested namespace
    sub-modules (FatalImpl, Hash) are created and registered at the end.
    """
    root_module = module.get_root()
    ## address.h (module 'network'): ns3::Address [class]
    module.add_class('Address', import_from_module='ns.network')
    ## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
    module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
    module.add_class('AttributeConstructionList', import_from_module='ns.core')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
    module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
    typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
    typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*')
    typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
    ## buffer.h (module 'network'): ns3::Buffer [class]
    module.add_class('Buffer', import_from_module='ns.network')
    ## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
    ## packet.h (module 'network'): ns3::ByteTagIterator [class]
    module.add_class('ByteTagIterator', import_from_module='ns.network')
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
    module.add_class('ByteTagList', import_from_module='ns.network')
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
    ## callback.h (module 'core'): ns3::CallbackBase [class]
    module.add_class('CallbackBase', import_from_module='ns.core')
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor> [struct]
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker> [struct]
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue> [struct]
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase> [struct]
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation> [struct]
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::NixVector> [struct]
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector'])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Packet> [struct]
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet'])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor> [struct]
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
    ## hash.h (module 'core'): ns3::Hasher [class]
    module.add_class('Hasher', import_from_module='ns.core')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    module.add_class('Ipv4Address', import_from_module='ns.network')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
    module.add_class('Ipv4Mask', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    module.add_class('Ipv6Address', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
    module.add_class('Ipv6Prefix', import_from_module='ns.network')
    ## mac48-address.h (module 'network'): ns3::Mac48Address [class]
    module.add_class('Mac48Address', import_from_module='ns.network')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&')
    ## mac48-address.h (module 'network'): ns3::Mac48Address [class]
    root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## mac8-address.h (module 'network'): ns3::Mac8Address [class]
    module.add_class('Mac8Address', import_from_module='ns.network')
    ## mac8-address.h (module 'network'): ns3::Mac8Address [class]
    root_module['ns3::Mac8Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## object-base.h (module 'core'): ns3::ObjectBase [class]
    module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
    ## object.h (module 'core'): ns3::ObjectDeleter [struct]
    module.add_class('ObjectDeleter', import_from_module='ns.core')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
    module.add_class('PacketMetadata', import_from_module='ns.network')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::ItemType [enumeration]
    module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
    module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
    ## packet.h (module 'network'): ns3::PacketTagIterator [class]
    module.add_class('PacketTagIterator', import_from_module='ns.network')
    ## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
    module.add_class('PacketTagList', import_from_module='ns.network')
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
    module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## tag.h (module 'network'): ns3::Tag [class]
    module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
    ## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
    module.add_class('TagBuffer', import_from_module='ns.network')
    ## topology-reader-helper.h (module 'topology-read'): ns3::TopologyReaderHelper [class]
    module.add_class('TopologyReaderHelper')
    ## type-id.h (module 'core'): ns3::TypeId [class]
    module.add_class('TypeId', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration]
    module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
    module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
    module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t')
    typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*')
    typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&')
    ## empty.h (module 'core'): ns3::empty [class]
    module.add_class('empty', import_from_module='ns.core')
    ## chunk.h (module 'network'): ns3::Chunk [class]
    module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
    ## header.h (module 'network'): ns3::Header [class]
    module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
    ## object.h (module 'core'): ns3::Object [class]
    module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    ## object.h (module 'core'): ns3::Object::AggregateIterator [class]
    module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader [class]
    module.add_class('TopologyReader', parent=root_module['ns3::Object'])
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link [class]
    module.add_class('Link', outer_class=root_module['ns3::TopologyReader'])
    typehandlers.add_type_alias(u'std::map< std::string, std::string > const_iterator', u'ns3::TopologyReader::Link::ConstAttributesIterator')
    typehandlers.add_type_alias(u'std::map< std::string, std::string > const_iterator*', u'ns3::TopologyReader::Link::ConstAttributesIterator*')
    typehandlers.add_type_alias(u'std::map< std::string, std::string > const_iterator&', u'ns3::TopologyReader::Link::ConstAttributesIterator&')
    typehandlers.add_type_alias(u'std::list< ns3::TopologyReader::Link > const_iterator', u'ns3::TopologyReader::ConstLinksIterator')
    typehandlers.add_type_alias(u'std::list< ns3::TopologyReader::Link > const_iterator*', u'ns3::TopologyReader::ConstLinksIterator*')
    typehandlers.add_type_alias(u'std::list< ns3::TopologyReader::Link > const_iterator&', u'ns3::TopologyReader::ConstLinksIterator&')
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
    module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    ## trailer.h (module 'network'): ns3::Trailer [class]
    module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
    ## attribute.h (module 'core'): ns3::AttributeAccessor [class]
    module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeChecker [class]
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    ## attribute.h (module 'core'): ns3::AttributeValue [class]
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    ## callback.h (module 'core'): ns3::CallbackChecker [class]
    module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## callback.h (module 'core'): ns3::CallbackImplBase [class]
    module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    ## callback.h (module 'core'): ns3::CallbackValue [class]
    module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor [class]
    module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker [class]
    module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
    module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## inet-topology-reader.h (module 'topology-read'): ns3::InetTopologyReader [class]
    module.add_class('InetTopologyReader', parent=root_module['ns3::TopologyReader'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
    module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
    module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
    module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
    module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
    module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
    module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
    module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
    module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
    module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
    module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## net-device.h (module 'network'): ns3::NetDevice [class]
    module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
    module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
    typehandlers.add_type_alias(u'void ( * ) (  )', u'ns3::NetDevice::LinkChangeTracedCallback')
    typehandlers.add_type_alias(u'void ( * ) (  )*', u'ns3::NetDevice::LinkChangeTracedCallback*')
    typehandlers.add_type_alias(u'void ( * ) (  )&', u'ns3::NetDevice::LinkChangeTracedCallback&')
    typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::ReceiveCallback')
    typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::ReceiveCallback*')
    typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::ReceiveCallback&')
    typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::PromiscReceiveCallback')
    typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::PromiscReceiveCallback*')
    typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::PromiscReceiveCallback&')
    ## nix-vector.h (module 'network'): ns3::NixVector [class]
    module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    ## node.h (module 'network'): ns3::Node [class]
    module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
    typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::ProtocolHandler')
    typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::ProtocolHandler*')
    typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::ProtocolHandler&')
    typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::DeviceAdditionListener')
    typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::DeviceAdditionListener*')
    typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::DeviceAdditionListener&')
    ## orbis-topology-reader.h (module 'topology-read'): ns3::OrbisTopologyReader [class]
    module.add_class('OrbisTopologyReader', parent=root_module['ns3::TopologyReader'])
    ## packet.h (module 'network'): ns3::Packet [class]
    module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )', u'ns3::Packet::TracedCallback')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )*', u'ns3::Packet::TracedCallback*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )&', u'ns3::Packet::TracedCallback&')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )', u'ns3::Packet::AddressTracedCallback')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )*', u'ns3::Packet::AddressTracedCallback*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )&', u'ns3::Packet::AddressTracedCallback&')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )', u'ns3::Packet::TwoAddressTracedCallback')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )*', u'ns3::Packet::TwoAddressTracedCallback*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )&', u'ns3::Packet::TwoAddressTracedCallback&')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )', u'ns3::Packet::Mac48AddressTracedCallback')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )*', u'ns3::Packet::Mac48AddressTracedCallback*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )&', u'ns3::Packet::Mac48AddressTracedCallback&')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )', u'ns3::Packet::SizeTracedCallback')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )*', u'ns3::Packet::SizeTracedCallback*')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )&', u'ns3::Packet::SizeTracedCallback&')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::Packet::SinrTracedCallback')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::Packet::SinrTracedCallback*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::Packet::SinrTracedCallback&')
    ## rocketfuel-topology-reader.h (module 'topology-read'): ns3::RocketfuelTopologyReader [class]
    module.add_class('RocketfuelTopologyReader', parent=root_module['ns3::TopologyReader'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## address.h (module 'network'): ns3::AddressChecker [class]
    module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## address.h (module 'network'): ns3::AddressValue [class]
    module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
    module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> [class]
    module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
    module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
    module.add_container('std::map< std::string, std::string >', ('std::string', 'std::string'), container_type=u'map')
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
    ## Register a nested module for the namespace Hash
    nested_module = module.add_cpp_namespace('Hash')
    register_types_ns3_Hash(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register types for the ns3::FatalImpl namespace.

    Nothing from this namespace is currently exposed to Python; the helper
    exists so every nested namespace has a uniform registration hook.
    """
    module.get_root()
def register_types_ns3_Hash(module):
    """Register the types living in the ns3::Hash namespace."""
    root_module = module.get_root()
    # hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    ref_count_base = root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >']
    module.add_class('Implementation', import_from_module='ns.core', parent=ref_count_base)
    # Register the 32-bit and 64-bit hash-function pointer typedefs together
    # with their pointer (*) and reference (&) variants, in that order.
    for c_signature, alias in (
        (u'uint32_t ( * ) ( char const *, std::size_t const )', u'ns3::Hash::Hash32Function_ptr'),
        (u'uint64_t ( * ) ( char const *, std::size_t const )', u'ns3::Hash::Hash64Function_ptr'),
    ):
        for suffix in (u'', u'*', u'&'):
            typehandlers.add_type_alias(c_signature + suffix, alias + suffix)
    # Nested module for the ns3::Hash::Function namespace.
    register_types_ns3_Hash_Function(module.add_cpp_namespace('Function'))
def register_types_ns3_Hash_Function(module):
    """Register the concrete hasher classes under ns3::Hash::Function.

    All four wrappers (Fnv1a from hash-fnv.h, Hash32/Hash64 from
    hash-function.h, Murmur3 from hash-murmur3.h, all in module 'core')
    derive from the previously registered ns3::Hash::Implementation.
    """
    root_module = module.get_root()
    implementation = root_module['ns3::Hash::Implementation']
    for class_name in ('Fnv1a', 'Hash32', 'Hash64', 'Murmur3'):
        module.add_class(class_name, import_from_module='ns.core', parent=implementation)
def register_methods(root_module):
    """Register the methods of every wrapped class on the root module.

    Each helper below receives the root module plus the pybindgen class
    wrapper previously registered under the given fully-qualified C++
    type name.  NOTE(review): this file is auto-generated (pybindgen
    apidefs); the call list and the C++ key strings must stay exactly in
    sync with the type registrations above.
    """
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
    register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
    register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
    register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
    register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
    register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
    register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    # DefaultDeleter<T> template instantiations.
    register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeAccessor >'])
    register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeChecker >'])
    register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeValue >'])
    register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, root_module['ns3::DefaultDeleter< ns3::CallbackImplBase >'])
    register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Hash::Implementation >'])
    register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, root_module['ns3::DefaultDeleter< ns3::NixVector >'])
    register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Packet >'])
    register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::TraceSourceAccessor >'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    # Address and address-mask value types.
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
    register_Ns3Mac8Address_methods(root_module, root_module['ns3::Mac8Address'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
    register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
    register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
    register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
    register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
    register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
    register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TopologyReaderHelper_methods(root_module, root_module['ns3::TopologyReaderHelper'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
    register_Ns3Header_methods(root_module, root_module['ns3::Header'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    # SimpleRefCount<T, PARENT, DELETER> template instantiations.
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3TopologyReader_methods(root_module, root_module['ns3::TopologyReader'])
    register_Ns3TopologyReaderLink_methods(root_module, root_module['ns3::TopologyReader::Link'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor'])
    register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3InetTopologyReader_methods(root_module, root_module['ns3::InetTopologyReader'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
    register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3OrbisTopologyReader_methods(root_module, root_module['ns3::OrbisTopologyReader'])
    register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
    register_Ns3RocketfuelTopologyReader_methods(root_module, root_module['ns3::RocketfuelTopologyReader'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    # CallbackImpl<...> template instantiations used by NetDevice callbacks.
    register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    # Hash framework classes registered last.
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Register ns3::Address (address.h, module 'network')."""
    # Comparison and stream-output operators.
    for op in ('==', '!=', '<'):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    # Constructors: default, raw (type, buffer, len), and copy.
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    cls.add_constructor([param('ns3::Address const &', 'address')])
    # Member functions (same order as the generated declarations).
    cls.add_method('CheckCompatible', 'bool', [param('uint8_t', 'type'), param('uint8_t', 'len')], is_const=True)
    cls.add_method('CopyAllFrom', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    cls.add_method('CopyAllTo', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint8_t', 'len')], is_const=True)
    cls.add_method('CopyFrom', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    cls.add_method('CopyTo', 'uint32_t', [param('uint8_t *', 'buffer')], is_const=True)
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'buffer')])
    cls.add_method('GetLength', 'uint8_t', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('IsInvalid', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool', [param('uint8_t', 'type')], is_const=True)
    cls.add_method('Register', 'uint8_t', [], is_static=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'buffer')], is_const=True)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Register ns3::AttributeConstructionList (attribute-construction-list.h, module 'core')."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    cls.add_constructor([])
    # Add(name, checker, value): record one attribute entry.
    cls.add_method('Add', 'void', [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    # Const iteration over the recorded entries.
    cls.add_method('Begin', 'ns3::AttributeConstructionList::CIterator', [], is_const=True)
    cls.add_method('End', 'ns3::AttributeConstructionList::CIterator', [], is_const=True)
    # Look up the value recorded for a given checker.
    cls.add_method('Find', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Register ns3::AttributeConstructionList::Item (attribute-construction-list.h, module 'core')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    # Public data members: the (checker, name, value) triple of one entry.
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
    return
def register_Ns3Buffer_methods(root_module, cls):
    """Register ns3::Buffer (buffer.h, module 'network')."""
    # Constructors: copy, default, sized, and sized-with-initialize flag.
    cls.add_constructor([param('ns3::Buffer const &', 'o')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'dataSize')])
    cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
    # Grow the buffer at either end (by byte count or by another buffer).
    cls.add_method('AddAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('AddAtEnd', 'void', [param('ns3::Buffer const &', 'o')])
    cls.add_method('AddAtStart', 'void', [param('uint32_t', 'start')])
    # Iteration and data access.
    cls.add_method('Begin', 'ns3::Buffer::Iterator', [], is_const=True)
    cls.add_method('CopyData', 'void', [param('std::ostream *', 'os'), param('uint32_t', 'size')], is_const=True)
    cls.add_method('CopyData', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::Buffer', [param('uint32_t', 'start'), param('uint32_t', 'length')], is_const=True)
    cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('End', 'ns3::Buffer::Iterator', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetSize', 'uint32_t', [], is_const=True)
    cls.add_method('PeekData', 'uint8_t const *', [], is_const=True)
    # Shrink the buffer at either end, and serialize to a raw buffer.
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')])
    cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True)
    return
def register_Ns3BufferIterator_methods(root_module, cls):
    """Register ns3::Buffer::Iterator (buffer.h, module 'network')."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
    cls.add_constructor([])
    # IP checksum over the next 'size' bytes (optionally seeded).
    cls.add_method('CalculateIpChecksum', 'uint16_t', [param('uint16_t', 'size')])
    cls.add_method('CalculateIpChecksum', 'uint16_t', [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
    # Position queries.
    cls.add_method('GetDistanceFrom', 'uint32_t', [param('ns3::Buffer::Iterator const &', 'o')], is_const=True)
    cls.add_method('GetRemainingSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetSize', 'uint32_t', [], is_const=True)
    cls.add_method('IsEnd', 'bool', [], is_const=True)
    cls.add_method('IsStart', 'bool', [], is_const=True)
    # Cursor movement and single-byte peek.
    cls.add_method('Next', 'void', [])
    cls.add_method('Next', 'void', [param('uint32_t', 'delta')])
    cls.add_method('PeekU8', 'uint8_t', [])
    cls.add_method('Prev', 'void', [])
    cls.add_method('Prev', 'void', [param('uint32_t', 'delta')])
    # Bulk reads.
    cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('Read', 'void', [param('ns3::Buffer::Iterator', 'start'), param('uint32_t', 'size')])
    # Integer reads for 16/32/64-bit widths: little-endian-to-host
    # (Lsbtoh), network-to-host (Ntoh), then host order; 8-bit last.
    for stem in ('ReadLsbtohU', 'ReadNtohU', 'ReadU'):
        for width in (16, 32, 64):
            cls.add_method('%s%d' % (stem, width), 'uint%d_t' % width, [])
    cls.add_method('ReadU8', 'uint8_t', [])
    # Bulk writes.
    cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('Write', 'void', [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
    # Integer writes for 16/32/64-bit widths: host-to-little-endian
    # (Htolsb), host-to-network (Hton), then host order; 8-bit last
    # (plus a repeated-byte overload).
    for stem in ('WriteHtolsbU', 'WriteHtonU', 'WriteU'):
        for width in (16, 32, 64):
            cls.add_method('%s%d' % (stem, width), 'void', [param('uint%d_t' % width, 'data')])
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'data')])
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'data'), param('uint32_t', 'len')])
    return
def register_Ns3ByteTagIterator_methods(root_module, cls):
    """Register ns3::ByteTagIterator (packet.h, module 'network')."""
    # Copy constructor plus the HasNext/Next iteration protocol.
    cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::ByteTagIterator::Item', [])
    return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
    """Register ns3::ByteTagIterator::Item (packet.h, module 'network')."""
    cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
    # Byte range covered by the tag, the tag payload, and its TypeId.
    cls.add_method('GetEnd', 'uint32_t', [], is_const=True)
    cls.add_method('GetStart', 'uint32_t', [], is_const=True)
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3ByteTagList_methods(root_module, cls):
    """Register ns3::ByteTagList (byte-tag-list.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
    # Add a tag over a byte range, or merge another tag list.
    cls.add_method('Add', 'ns3::TagBuffer', [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
    cls.add_method('Add', 'void', [param('ns3::ByteTagList const &', 'o')])
    # Offset bookkeeping for append/prepend/shift of the tagged bytes.
    cls.add_method('AddAtEnd', 'void', [param('int32_t', 'appendOffset')])
    cls.add_method('AddAtStart', 'void', [param('int32_t', 'prependOffset')])
    cls.add_method('Adjust', 'void', [param('int32_t', 'adjustment')])
    # Iteration over a byte window, and wholesale removal.
    cls.add_method('Begin', 'ns3::ByteTagList::Iterator', [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')], is_const=True)
    cls.add_method('RemoveAll', 'void', [])
    return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
    """Register ns3::ByteTagList::Iterator (byte-tag-list.h, module 'network')."""
    cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
    cls.add_method('GetOffsetStart', 'uint32_t', [], is_const=True)
    # HasNext/Next iteration protocol.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::ByteTagList::Iterator::Item', [])
    return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
    """Register ns3::ByteTagList::Iterator::Item (byte-tag-list.h, module 'network')."""
    # Copy constructor and construction from a TagBuffer.
    cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
    cls.add_constructor([param('ns3::TagBuffer', 'buf')])
    # Public data members.
    cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
    cls.add_instance_attribute('end', 'int32_t', is_const=False)
    cls.add_instance_attribute('size', 'uint32_t', is_const=False)
    cls.add_instance_attribute('start', 'int32_t', is_const=False)
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Bind ns3::CallbackBase (callback.h, module 'core')."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    cls.add_constructor([])
    # Ptr<CallbackImplBase> GetImpl() const
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True)
    # Protected constructor taking the implementation pointer.
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    return
def register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::AttributeAccessor> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::AttributeAccessor > const &', 'arg0')])
    # static void Delete(ns3::AttributeAccessor *)
    cls.add_method('Delete', 'void', [param('ns3::AttributeAccessor *', 'object')], is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::AttributeChecker> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::AttributeChecker > const &', 'arg0')])
    # static void Delete(ns3::AttributeChecker *)
    cls.add_method('Delete', 'void', [param('ns3::AttributeChecker *', 'object')], is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::AttributeValue> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::AttributeValue > const &', 'arg0')])
    # static void Delete(ns3::AttributeValue *)
    cls.add_method('Delete', 'void', [param('ns3::AttributeValue *', 'object')], is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::CallbackImplBase> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::CallbackImplBase > const &', 'arg0')])
    # static void Delete(ns3::CallbackImplBase *)
    cls.add_method('Delete', 'void', [param('ns3::CallbackImplBase *', 'object')], is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::Hash::Implementation> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::Hash::Implementation > const &', 'arg0')])
    # static void Delete(ns3::Hash::Implementation *)
    cls.add_method('Delete', 'void', [param('ns3::Hash::Implementation *', 'object')], is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::NixVector> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::NixVector > const &', 'arg0')])
    # static void Delete(ns3::NixVector *)
    cls.add_method('Delete', 'void', [param('ns3::NixVector *', 'object')], is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::Packet> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::Packet > const &', 'arg0')])
    # static void Delete(ns3::Packet *)
    cls.add_method('Delete', 'void', [param('ns3::Packet *', 'object')], is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::TraceSourceAccessor> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::TraceSourceAccessor > const &', 'arg0')])
    # static void Delete(ns3::TraceSourceAccessor *)
    cls.add_method('Delete', 'void', [param('ns3::TraceSourceAccessor *', 'object')], is_static=True)
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Bind ns3::Hasher (hash.h, module 'core')."""
    # Constructors: copy, default, and from a hash implementation pointer.
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    # GetHash32 overloads: raw buffer + size, then std::string.
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')])
    cls.add_method('GetHash32', 'uint32_t', [param('std::string const', 's')])
    # GetHash64 overloads: raw buffer + size, then std::string.
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')])
    cls.add_method('GetHash64', 'uint64_t', [param('std::string const', 's')])
    # Hasher & clear() -- resets internal state, returns self for chaining.
    cls.add_method('clear', 'ns3::Hasher &', [])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Bind ns3::Ipv4Address (ipv4-address.h, module 'network')."""
    # Stream output and comparison operators.
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    # Constructors: copy, default, from uint32, from C string.
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'address')])
    cls.add_constructor([param('char const *', 'address')])
    cls.add_method('CombineMask', 'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    cls.add_method('ConvertFrom', 'ns3::Ipv4Address',
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('Deserialize', 'ns3::Ipv4Address',
                   [param('uint8_t const *', 'buf')], is_static=True)
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    # Static well-known addresses.
    cls.add_method('GetAny', 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetBroadcast', 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetLoopback', 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetSubnetDirectedBroadcast', 'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv4Address', [], is_static=True)
    # Predicates.
    cls.add_method('IsAny', 'bool', [], is_const=True)
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv4Address const &', 'other')], is_const=True)
    cls.add_method('IsLocalMulticast', 'bool', [], is_const=True)
    cls.add_method('IsLocalhost', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    cls.add_method('IsSubnetDirectedBroadcast', 'bool',
                   [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    # I/O helpers.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # Set overloads: from uint32 and from C string.
    cls.add_method('Set', 'void', [param('uint32_t', 'address')])
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Bind ns3::Ipv4Mask (ipv4-address.h, module 'network')."""
    # Stream output and equality operators.
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    # Constructors: copy, default, from uint32, from C string.
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'mask')])
    cls.add_constructor([param('char const *', 'mask')])
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    cls.add_method('GetInverse', 'uint32_t', [], is_const=True)
    # Static well-known masks.
    cls.add_method('GetLoopback', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetPrefixLength', 'uint16_t', [], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')], is_const=True)
    # IsMatch: true when a and b agree under this mask.
    cls.add_method('IsMatch', 'bool',
                   [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
                   is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Set', 'void', [param('uint32_t', 'mask')])
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Bind ns3::Ipv6Address (ipv6-address.h, module 'network')."""
    # Stream output and comparison operators.
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    # Constructors: default, from C string, from raw bytes, copy by ref, copy by ptr.
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'address')])
    cls.add_constructor([param('uint8_t *', 'address')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
    cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
    cls.add_method('CombinePrefix', 'ns3::Ipv6Address',
                   [param('ns3::Ipv6Prefix const &', 'prefix')])
    cls.add_method('ConvertFrom', 'ns3::Ipv6Address',
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('Deserialize', 'ns3::Ipv6Address',
                   [param('uint8_t const *', 'buf')], is_static=True)
    # Static well-known addresses.
    cls.add_method('GetAllHostsMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAllNodesMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAllRoutersMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAny', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    cls.add_method('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetLoopback', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetZero', 'ns3::Ipv6Address', [], is_static=True)
    # Predicates (IsAllHostsMulticast is flagged deprecated upstream).
    cls.add_method('IsAllHostsMulticast', 'bool', [], deprecated=True, is_const=True)
    cls.add_method('IsAllNodesMulticast', 'bool', [], is_const=True)
    cls.add_method('IsAllRoutersMulticast', 'bool', [], is_const=True)
    cls.add_method('IsAny', 'bool', [], is_const=True)
    cls.add_method('IsDocumentation', 'bool', [], is_const=True)
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv6Address const &', 'other')], is_const=True)
    cls.add_method('IsIpv4MappedAddress', 'bool', [], is_const=True)
    cls.add_method('IsLinkLocal', 'bool', [], is_const=True)
    cls.add_method('IsLinkLocalMulticast', 'bool', [], is_const=True)
    cls.add_method('IsLocalhost', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    cls.add_method('IsSolicitedMulticast', 'bool', [], is_const=True)
    # MakeAutoconfiguredAddress overloads: one per MAC address type.
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac8Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    # MakeAutoconfiguredLinkLocalAddress overloads: one per MAC address type.
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac16Address', 'mac')], is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac48Address', 'mac')], is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac64Address', 'mac')], is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac8Address', 'mac')], is_static=True)
    cls.add_method('MakeIpv4MappedAddress', 'ns3::Ipv6Address',
                   [param('ns3::Ipv4Address', 'addr')], is_static=True)
    cls.add_method('MakeSolicitedAddress', 'ns3::Ipv6Address',
                   [param('ns3::Ipv6Address', 'addr')], is_static=True)
    # I/O helpers.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # Set overloads: from C string and from raw bytes.
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    cls.add_method('Set', 'void', [param('uint8_t *', 'address')])
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    """Bind ns3::Ipv6Prefix (ipv6-address.h, module 'network')."""
    # Stream output and equality operators.
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    # Constructors: default, raw bytes, C string, prefix length,
    # copy by ref, copy by ptr.
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t *', 'prefix')])
    cls.add_constructor([param('char const *', 'prefix')])
    cls.add_constructor([param('uint8_t', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # Static well-known prefixes.
    cls.add_method('GetLoopback', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('GetPrefixLength', 'uint8_t', [], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv6Prefix const &', 'other')], is_const=True)
    # IsMatch: true when a and b agree under this prefix.
    cls.add_method('IsMatch', 'bool',
                   [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
                   is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    return
def register_Ns3Mac48Address_methods(root_module, cls):
    """Bind ns3::Mac48Address (mac48-address.h, module 'network')."""
    # Comparison operators and stream output.
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    cls.add_output_stream_operator()
    # Constructors: copy, default, from C string.
    cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'str')])
    cls.add_method('Allocate', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('ConvertFrom', 'ns3::Mac48Address',
                   [param('ns3::Address const &', 'address')], is_static=True)
    # Raw byte-buffer accessors.
    cls.add_method('CopyFrom', 'void', [param('uint8_t const *', 'buffer')])
    cls.add_method('CopyTo', 'void', [param('uint8_t *', 'buffer')], is_const=True)
    cls.add_method('GetBroadcast', 'ns3::Mac48Address', [], is_static=True)
    # Multicast mapping: overloads for IPv4 and IPv6 group addresses.
    cls.add_method('GetMulticast', 'ns3::Mac48Address',
                   [param('ns3::Ipv4Address', 'address')], is_static=True)
    cls.add_method('GetMulticast', 'ns3::Mac48Address',
                   [param('ns3::Ipv6Address', 'address')], is_static=True)
    cls.add_method('GetMulticast6Prefix', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('GetMulticastPrefix', 'ns3::Mac48Address', [], is_static=True)
    # Predicates.
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    cls.add_method('IsGroup', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')], is_static=True)
    return
def register_Ns3Mac8Address_methods(root_module, cls):
    """Bind ns3::Mac8Address (mac8-address.h, module 'network')."""
    # Comparison operators and stream output.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    # Constructors: copy, default, from a single byte.
    cls.add_constructor([param('ns3::Mac8Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t', 'addr')])
    cls.add_method('Allocate', 'ns3::Mac8Address', [], is_static=True)
    cls.add_method('ConvertFrom', 'ns3::Mac8Address',
                   [param('ns3::Address const &', 'address')], is_static=True)
    # Raw byte-buffer accessors.
    cls.add_method('CopyFrom', 'void', [param('uint8_t const *', 'pBuffer')])
    cls.add_method('CopyTo', 'void', [param('uint8_t *', 'pBuffer')], is_const=True)
    cls.add_method('GetBroadcast', 'ns3::Mac8Address', [], is_static=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')], is_static=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Bind ns3::ObjectBase (object-base.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    # Attribute getters; the value is returned through a by-reference out parameter.
    cls.add_method('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    cls.add_method('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    # Run-time type identification: per-instance (pure virtual) and static.
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Attribute setters.
    cls.add_method('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    cls.add_method('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # Trace-source connect/disconnect, each with and without a context string
    # (registration order matches the generator: Connect pair, then Disconnect pair).
    for verb in ('TraceConnect', 'TraceDisconnect'):
        cls.add_method(verb, 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
        cls.add_method(verb + 'WithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    # Protected construction hooks.
    cls.add_method('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], visibility='protected')
    cls.add_method('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Bind ns3::ObjectDeleter (object.h, module 'core')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    # Static Delete helper taking a raw ns3::Object pointer.
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True)
    return
def register_Ns3PacketMetadata_methods(root_module, cls):
    """Bind ns3::PacketMetadata (packet-metadata.h, module 'network')."""
    # Constructors: (uid, size) and copy.
    cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
    cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
    # Mutators that extend the recorded byte layout.
    cls.add_method('AddAtEnd', 'void', [param('ns3::PacketMetadata const &', 'o')])
    cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    # Const inspection helpers.
    cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [param('ns3::Buffer', 'buffer')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::PacketMetadata', [param('uint32_t', 'start'), param('uint32_t', 'end')], is_const=True)
    # Raw-buffer deserialization; returns the number of bytes consumed (uint32_t).
    cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Static global switches.
    cls.add_method('Enable', 'void', [], is_static=True)
    cls.add_method('EnableChecking', 'void', [], is_static=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetUid', 'uint64_t', [], is_const=True)
    # Mutators that trim the recorded byte layout.
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')])
    cls.add_method('RemoveHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('RemoveTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    # Raw-buffer serialization (const).
    cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True)
    return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
    """Bind ns3::PacketMetadata::Item (packet-metadata.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
    # Public data members, all writable from Python (is_const=False).
    for attr_name, attr_type in (
            ('current', 'ns3::Buffer::Iterator'),
            ('currentSize', 'uint32_t'),
            ('currentTrimedFromEnd', 'uint32_t'),
            ('currentTrimedFromStart', 'uint32_t'),
            ('isFragment', 'bool'),
            ('tid', 'ns3::TypeId'),
            ('type', 'ns3::PacketMetadata::Item::ItemType')):
        cls.add_instance_attribute(attr_name, attr_type, is_const=False)
    return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
    """Bind ns3::PacketMetadata::ItemIterator (packet-metadata.h, module 'network')."""
    cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
    cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
    # HasNext()/Next() iteration pair.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketMetadata::Item', [])
    return
def register_Ns3PacketTagIterator_methods(root_module, cls):
    """Bind ns3::PacketTagIterator (packet.h, module 'network')."""
    cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
    # HasNext()/Next() iteration pair.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketTagIterator::Item', [])
    return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
    """Bind ns3::PacketTagIterator::Item (packet.h, module 'network')."""
    cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
    # GetTag fills the caller-supplied Tag through a by-reference parameter.
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3PacketTagList_methods(root_module, cls):
    """Bind ns3::PacketTagList (packet-tag-list.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
    # NOTE: Add is declared const in the C++ signature, so it is bound const here.
    cls.add_method('Add', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('Head', 'ns3::PacketTagList::TagData const *', [], is_const=True)
    # Peek is const; Remove/RemoveAll/Replace mutate the list.
    cls.add_method('Peek', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('Remove', 'bool', [param('ns3::Tag &', 'tag')])
    cls.add_method('RemoveAll', 'void', [])
    cls.add_method('Replace', 'bool', [param('ns3::Tag &', 'tag')])
    return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
    """Bind ns3::PacketTagList::TagData (packet-tag-list.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
    # Public data members, all writable from Python (is_const=False).
    for attr_name, attr_type in (
            ('count', 'uint32_t'),
            ('data', 'uint8_t [ 1 ]'),
            ('next', 'ns3::PacketTagList::TagData *'),
            ('size', 'uint32_t'),
            ('tid', 'ns3::TypeId')):
        cls.add_instance_attribute(attr_name, attr_type, is_const=False)
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Bind the SimpleRefCount<Object, ObjectBase, ObjectDeleter> instantiation (simple-ref-count.h, module 'core')."""
    # Default and copy constructors only; everything else lives on the derived types.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    return
def register_Ns3Tag_methods(root_module, cls):
    """Bind ns3::Tag (tag.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Tag const &', 'arg0')])
    # Serialization interface: every method here is pure virtual except the static GetTypeId.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Bind ns3::TagBuffer (tag-buffer.h, module 'network')."""
    # Constructors: copy, and a (start, end) raw-pointer range.
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    cls.add_method('CopyFrom', 'void', [param('ns3::TagBuffer', 'o')])
    # Bulk and typed readers. Generator registration order is preserved:
    # the fixed-width variants appear as 16, 32, 64, then 8 bits.
    cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('ReadDouble', 'double', [])
    for bits in (16, 32, 64, 8):
        cls.add_method('ReadU%d' % bits, 'uint%d_t' % bits, [])
    cls.add_method('TrimAtEnd', 'void', [param('uint32_t', 'trim')])
    # Bulk and typed writers, mirroring the readers.
    cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('WriteDouble', 'void', [param('double', 'v')])
    for bits in (16, 32, 64, 8):
        cls.add_method('WriteU%d' % bits, 'void', [param('uint%d_t' % bits, 'v')])
    return
def register_Ns3TopologyReaderHelper_methods(root_module, cls):
    """Bind ns3::TopologyReaderHelper (topology-reader-helper.h, module 'topology-read')."""
    # Copy constructor registered before the default one (generator order preserved).
    cls.add_constructor([param('ns3::TopologyReaderHelper const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('GetTopologyReader', 'ns3::Ptr< ns3::TopologyReader >', [])
    # Configuration setters taking const std::string values.
    cls.add_method('SetFileName', 'void', [param('std::string const', 'fileName')])
    cls.add_method('SetFileType', 'void', [param('std::string const', 'fileType')])
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Bind ns3::TypeId (type-id.h, module 'core')."""
    # Comparison and stream-insertion operators (generator order preserved).
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<')
    # Constructors: by C-string name, default, and copy.
    cls.add_constructor([param('char const *', 'name')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    # Attribute registration (fluent: both overloads return ns3::TypeId).
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    # Trace-source registration; the three-argument overload is flagged deprecated.
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')], deprecated=True)
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    # Const queries over registered attributes.
    cls.add_method('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('std::size_t', 'i')], is_const=True)
    cls.add_method('GetAttributeFullName', 'std::string', [param('std::size_t', 'i')], is_const=True)
    cls.add_method('GetAttributeN', 'std::size_t', [], is_const=True)
    cls.add_method('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True)
    cls.add_method('GetGroupName', 'std::string', [], is_const=True)
    cls.add_method('GetHash', 'ns3::TypeId::hash_t', [], is_const=True)
    cls.add_method('GetName', 'std::string', [], is_const=True)
    cls.add_method('GetParent', 'ns3::TypeId', [], is_const=True)
    # Static access to the global TypeId registry.
    cls.add_method('GetRegistered', 'ns3::TypeId', [param('uint16_t', 'i')], is_static=True)
    cls.add_method('GetRegisteredN', 'uint16_t', [], is_static=True)
    cls.add_method('GetSize', 'std::size_t', [], is_const=True)
    # Const queries over registered trace sources.
    cls.add_method('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('std::size_t', 'i')], is_const=True)
    cls.add_method('GetTraceSourceN', 'std::size_t', [], is_const=True)
    cls.add_method('GetUid', 'uint16_t', [], is_const=True)
    cls.add_method('HasConstructor', 'bool', [], is_const=True)
    cls.add_method('HasParent', 'bool', [], is_const=True)
    cls.add_method('HideFromDocumentation', 'ns3::TypeId', [])
    cls.add_method('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True)
    cls.add_method('LookupAttributeByName', 'bool', [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], is_const=True)
    # Lookups by hash and by name. NOTE(review): the C++ signatures take
    # ns3::TypeId::hash_t but the generator wrapped the parameter as uint32_t.
    cls.add_method('LookupByHash', 'ns3::TypeId', [param('uint32_t', 'hash')], is_static=True)
    cls.add_method('LookupByHashFailSafe', 'bool', [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')], is_static=True)
    cls.add_method('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True)
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True)
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name'), param('ns3::TypeId::TraceSourceInformation *', 'info')], is_const=True)
    cls.add_method('MustHideFromDocumentation', 'bool', [], is_const=True)
    # Mutators (mostly fluent, returning ns3::TypeId).
    cls.add_method('SetAttributeInitialValue', 'bool', [param('std::size_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    cls.add_method('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')])
    cls.add_method('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')])
    cls.add_method('SetSize', 'ns3::TypeId', [param('std::size_t', 'size')])
    cls.add_method('SetUid', 'void', [param('uint16_t', 'uid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Bind ns3::TypeId::AttributeInformation (type-id.h, module 'core')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    # Public data members, all writable from Python (is_const=False).
    for attr_name, attr_type in (
            ('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >'),
            ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
            ('flags', 'uint32_t'),
            ('help', 'std::string'),
            ('initialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
            ('name', 'std::string'),
            ('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
            ('supportLevel', 'ns3::TypeId::SupportLevel'),
            ('supportMsg', 'std::string')):
        cls.add_instance_attribute(attr_name, attr_type, is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Bind ns3::TypeId::TraceSourceInformation (type-id.h, module 'core')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    # Public data members, all writable from Python (is_const=False).
    for attr_name, attr_type in (
            ('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >'),
            ('callback', 'std::string'),
            ('help', 'std::string'),
            ('name', 'std::string'),
            ('supportLevel', 'ns3::TypeId::SupportLevel'),
            ('supportMsg', 'std::string')):
        cls.add_instance_attribute(attr_name, attr_type, is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register bindings for the ns3::empty tag type (empty.h, module
    'core'): only its default and copy constructors."""
    cls.add_constructor([])  # ns3::empty::empty()
    cls.add_constructor([param('ns3::empty const &', 'arg0')])  # copy constructor
    return
def register_Ns3Chunk_methods(root_module, cls):
    """Register pybindgen bindings for the abstract ns3::Chunk class
    (chunk.h, module 'network'): default/copy constructors, both
    Deserialize overloads, the static GetTypeId, and the pure-virtual
    Print.  Generated code: the order of the overloaded Deserialize
    registrations is significant for dispatch, so calls are left verbatim.
    """
    ## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
    cls.add_constructor([])
    ## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [constructor]
    cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
    ## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    ## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')],
                   is_virtual=True)
    ## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Header_methods(root_module, cls):
    """Register pybindgen bindings for the abstract ns3::Header class
    (header.h, module 'network'): operator<< support, constructors, and
    the pure-virtual Deserialize/GetSerializedSize/Print/Serialize API
    plus the static GetTypeId.  Generated code left verbatim.
    """
    cls.add_output_stream_operator()
    ## header.h (module 'network'): ns3::Header::Header() [constructor]
    cls.add_constructor([])
    ## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [constructor]
    cls.add_constructor([param('ns3::Header const &', 'arg0')])
    ## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    ## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Object_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Object (object.h, module
    'core'): public lifecycle/aggregation API (AggregateObject, Dispose,
    Initialize, GetAggregateIterator, ...) plus the protected copy
    constructor and the protected virtual hooks DoDispose/DoInitialize/
    NotifyNewAggregate.  Generated code left verbatim.
    """
    ## object.h (module 'core'): ns3::Object::Object() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
    cls.add_method('AggregateObject',
                   'void',
                   [param('ns3::Ptr< ns3::Object >', 'other')])
    ## object.h (module 'core'): void ns3::Object::Dispose() [member function]
    cls.add_method('Dispose',
                   'void',
                   [])
    ## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
    cls.add_method('GetAggregateIterator',
                   'ns3::Object::AggregateIterator',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## object.h (module 'core'): void ns3::Object::Initialize() [member function]
    cls.add_method('Initialize',
                   'void',
                   [])
    ## object.h (module 'core'): bool ns3::Object::IsInitialized() const [member function]
    cls.add_method('IsInitialized',
                   'bool',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [constructor]
    cls.add_constructor([param('ns3::Object const &', 'o')],
                        visibility='protected')
    ## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register pybindgen bindings for the nested iterator class
    ns3::Object::AggregateIterator (object.h, module 'core'):
    constructors plus HasNext/Next.  Generated code left verbatim.
    """
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [constructor]
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
    cls.add_method('HasNext',
                   'bool',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::Ptr<const ns3::Object> ns3::Object::AggregateIterator::Next() [member function]
    cls.add_method('Next',
                   'ns3::Ptr< ns3::Object const >',
                   [])
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeAccessor, ...>
    reference-counting base (simple-ref-count.h, module 'core'): default
    and copy constructors."""
    copy_ctor_arg = 'ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_ctor_arg, 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeChecker, ...>
    reference-counting base (simple-ref-count.h, module 'core'): default
    and copy constructors."""
    copy_ctor_arg = 'ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_ctor_arg, 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeValue, ...>
    reference-counting base (simple-ref-count.h, module 'core'): default
    and copy constructors."""
    copy_ctor_arg = 'ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_ctor_arg, 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::CallbackImplBase, ...>
    reference-counting base (simple-ref-count.h, module 'core'): default
    and copy constructors."""
    copy_ctor_arg = 'ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_ctor_arg, 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Hash::Implementation,
    ...> reference-counting base (simple-ref-count.h, module 'core'):
    default and copy constructors."""
    copy_ctor_arg = 'ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_ctor_arg, 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::NixVector, ...>
    reference-counting base (simple-ref-count.h, module 'core'): default
    and copy constructors."""
    copy_ctor_arg = 'ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_ctor_arg, 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Packet, ...>
    reference-counting base (simple-ref-count.h, module 'core'): default
    and copy constructors."""
    copy_ctor_arg = 'ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_ctor_arg, 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::TraceSourceAccessor,
    ...> reference-counting base (simple-ref-count.h, module 'core'):
    default and copy constructors."""
    copy_ctor_arg = 'ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_ctor_arg, 'o')])  # copy constructor
    return
def register_Ns3TopologyReader_methods(root_module, cls):
    """Register pybindgen bindings for the abstract ns3::TopologyReader
    class (topology-reader.h, module 'topology-read'): constructor, link
    accessors (AddLink, LinksBegin/End/Empty/Size), file-name accessors,
    the static GetTypeId, and the pure-virtual Read().  Generated code
    left verbatim.
    """
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::TopologyReader() [constructor]
    cls.add_constructor([])
    ## topology-reader.h (module 'topology-read'): void ns3::TopologyReader::AddLink(ns3::TopologyReader::Link link) [member function]
    cls.add_method('AddLink',
                   'void',
                   [param('ns3::TopologyReader::Link', 'link')])
    ## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::GetFileName() const [member function]
    cls.add_method('GetFileName',
                   'std::string',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): static ns3::TypeId ns3::TopologyReader::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::ConstLinksIterator ns3::TopologyReader::LinksBegin() const [member function]
    cls.add_method('LinksBegin',
                   'ns3::TopologyReader::ConstLinksIterator',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): bool ns3::TopologyReader::LinksEmpty() const [member function]
    cls.add_method('LinksEmpty',
                   'bool',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::ConstLinksIterator ns3::TopologyReader::LinksEnd() const [member function]
    cls.add_method('LinksEnd',
                   'ns3::TopologyReader::ConstLinksIterator',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): int ns3::TopologyReader::LinksSize() const [member function]
    cls.add_method('LinksSize',
                   'int',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): ns3::NodeContainer ns3::TopologyReader::Read() [member function]
    cls.add_method('Read',
                   'ns3::NodeContainer',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## topology-reader.h (module 'topology-read'): void ns3::TopologyReader::SetFileName(std::string const & fileName) [member function]
    cls.add_method('SetFileName',
                   'void',
                   [param('std::string const &', 'fileName')])
    return
def register_Ns3TopologyReaderLink_methods(root_module, cls):
    """Register pybindgen bindings for the nested ns3::TopologyReader::Link
    class (topology-reader.h, module 'topology-read'): constructors,
    attribute iteration/lookup, from/to node accessors, and SetAttribute.
    Generated code left verbatim.
    """
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link::Link(ns3::TopologyReader::Link const & arg0) [constructor]
    cls.add_constructor([param('ns3::TopologyReader::Link const &', 'arg0')])
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link::Link(ns3::Ptr<ns3::Node> fromPtr, std::string const & fromName, ns3::Ptr<ns3::Node> toPtr, std::string const & toName) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'fromPtr'), param('std::string const &', 'fromName'), param('ns3::Ptr< ns3::Node >', 'toPtr'), param('std::string const &', 'toName')])
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link::ConstAttributesIterator ns3::TopologyReader::Link::AttributesBegin() const [member function]
    cls.add_method('AttributesBegin',
                   'ns3::TopologyReader::Link::ConstAttributesIterator',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link::ConstAttributesIterator ns3::TopologyReader::Link::AttributesEnd() const [member function]
    cls.add_method('AttributesEnd',
                   'ns3::TopologyReader::Link::ConstAttributesIterator',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::Link::GetAttribute(std::string const & name) const [member function]
    cls.add_method('GetAttribute',
                   'std::string',
                   [param('std::string const &', 'name')],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): bool ns3::TopologyReader::Link::GetAttributeFailSafe(std::string const & name, std::string & value) const [member function]
    cls.add_method('GetAttributeFailSafe',
                   'bool',
                   [param('std::string const &', 'name'), param('std::string &', 'value')],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): ns3::Ptr<ns3::Node> ns3::TopologyReader::Link::GetFromNode() const [member function]
    cls.add_method('GetFromNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::Link::GetFromNodeName() const [member function]
    cls.add_method('GetFromNodeName',
                   'std::string',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): ns3::Ptr<ns3::Node> ns3::TopologyReader::Link::GetToNode() const [member function]
    cls.add_method('GetToNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::Link::GetToNodeName() const [member function]
    cls.add_method('GetToNodeName',
                   'std::string',
                   [],
                   is_const=True)
    ## topology-reader.h (module 'topology-read'): void ns3::TopologyReader::Link::SetAttribute(std::string const & name, std::string const & value) [member function]
    cls.add_method('SetAttribute',
                   'void',
                   [param('std::string const &', 'name'), param('std::string const &', 'value')])
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register pybindgen bindings for the abstract ns3::TraceSourceAccessor
    class (trace-source-accessor.h, module 'core'): constructors plus the
    pure-virtual Connect/ConnectWithoutContext/Disconnect/
    DisconnectWithoutContext methods.  Generated code left verbatim.
    """
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [constructor]
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
    cls.add_constructor([])
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Connect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('ConnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Disconnect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('DisconnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Trailer_methods(root_module, cls):
    """Register pybindgen bindings for the abstract ns3::Trailer class
    (trailer.h, module 'network'): operator<< support, constructors, both
    Deserialize overloads, GetSerializedSize, the static GetTypeId, Print
    and Serialize.  Generated code: overload registration order is
    significant, so the calls are left verbatim.
    """
    cls.add_output_stream_operator()
    ## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
    cls.add_constructor([])
    ## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [constructor]
    cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'end')],
                   is_pure_virtual=True, is_virtual=True)
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')],
                   is_virtual=True)
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register pybindgen bindings for the abstract ns3::AttributeAccessor
    class (attribute.h, module 'core'): constructors plus the
    pure-virtual Get/HasGetter/HasSetter/Set interface.  Generated code
    left verbatim.
    """
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register pybindgen bindings for the abstract ns3::AttributeChecker
    class (attribute.h, module 'core'): constructors, the pure-virtual
    Check/Copy/Create and type-information methods, and the non-virtual
    CreateValidValue helper.  Generated code left verbatim.
    """
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy',
                   'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register pybindgen bindings for the abstract ns3::AttributeValue
    class (attribute.h, module 'core'): constructors plus the
    pure-virtual Copy/DeserializeFromString/SerializeToString interface.
    Generated code left verbatim.
    """
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register bindings for ns3::CallbackChecker (callback.h, module
    'core'): default and copy constructors only."""
    cls.add_constructor([])  # ns3::CallbackChecker()
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register pybindgen bindings for ns3::CallbackImplBase (callback.h,
    module 'core'): constructors, the pure-virtual GetTypeid/IsEqual
    methods, the protected static Demangle helper, and one registration
    per template instantiation of the protected static GetCppTypeid<T>().
    Generated code: each GetCppTypeid call differs only in its
    template_parameters list, so the calls are left verbatim.
    """
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    ## callback.h (module 'core'): std::string ns3::CallbackImplBase::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<const ns3::CallbackImplBase> other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::Demangle(std::string const & mangled) [member function]
    cls.add_method('Demangle',
                   'std::string',
                   [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::ObjectBase*'])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'void'])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::NetDevice> '])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::Packet const> '])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'unsigned short'])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::Address const&'])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::NetDevice::PacketType'])
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register pybindgen bindings for ns3::CallbackValue (callback.h,
    module 'core'): three constructors (copy, default, from
    CallbackBase), the concrete AttributeValue overrides Copy/
    DeserializeFromString/SerializeToString, and the Set mutator.
    Generated code left verbatim.
    """
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3EmptyAttributeAccessor_methods(root_module, cls):
    """Register pybindgen bindings for ns3::EmptyAttributeAccessor
    (attribute.h, module 'core'): constructors plus the concrete
    Get/HasGetter/HasSetter/Set overrides of AttributeAccessor.
    Generated code left verbatim.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor(ns3::EmptyAttributeAccessor const & arg0) [constructor]
    cls.add_constructor([param('ns3::EmptyAttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3EmptyAttributeChecker_methods(root_module, cls):
    """Register bindings for ns3::EmptyAttributeChecker (attribute.h, module 'core').

    Adds constructors and the Check/Copy/Create/GetUnderlyingTypeInformation/
    GetValueTypeName/HasUnderlyingTypeInformation virtual member functions to
    *cls*.  *root_module* is unused here; kept for signature uniformity.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker(ns3::EmptyAttributeChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::EmptyAttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy',
                   'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeChecker::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register bindings for ns3::EmptyAttributeValue (attribute.h, module 'core').

    Adds constructors plus the Copy/DeserializeFromString/SerializeToString
    virtual member functions; these three are registered with
    visibility='private', mirroring their C++ access level.  *root_module* is
    unused here; kept for signature uniformity.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3InetTopologyReader_methods(root_module, cls):
    """Register bindings for ns3::InetTopologyReader (inet-topology-reader.h,
    module 'topology-read'): static GetTypeId, the default constructor, and the
    virtual Read() member function.  *root_module* is unused here; kept for
    signature uniformity.
    """
    ## inet-topology-reader.h (module 'topology-read'): static ns3::TypeId ns3::InetTopologyReader::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## inet-topology-reader.h (module 'topology-read'): ns3::InetTopologyReader::InetTopologyReader() [constructor]
    cls.add_constructor([])
    ## inet-topology-reader.h (module 'topology-read'): ns3::NodeContainer ns3::InetTopologyReader::Read() [member function]
    cls.add_method('Read',
                   'ns3::NodeContainer',
                   [],
                   is_virtual=True)
    return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv4AddressChecker (ipv4-address.h, module
    'network'): only the default and copy constructors.  *root_module* is
    unused here; kept for signature uniformity.
    """
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
    return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv4AddressValue (ipv4-address.h, module
    'network'): three constructors plus the Copy/DeserializeFromString/Get/
    SerializeToString/Set member functions.  *root_module* is unused here;
    kept for signature uniformity.
    """
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv4Address const &', 'value')])
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv4MaskChecker (ipv4-address.h, module
    'network'): only the default and copy constructors.  *root_module* is
    unused here; kept for signature uniformity.
    """
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
    return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv4MaskValue (ipv4-address.h, module
    'network'): three constructors plus the Copy/DeserializeFromString/Get/
    SerializeToString/Set member functions.  *root_module* is unused here;
    kept for signature uniformity.
    """
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv4Mask',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv4Mask const &', 'value')])
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv6AddressChecker (ipv6-address.h, module
    'network'): only the default and copy constructors.  *root_module* is
    unused here; kept for signature uniformity.
    """
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
    return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv6AddressValue (ipv6-address.h, module
    'network'): three constructors plus the Copy/DeserializeFromString/Get/
    SerializeToString/Set member functions.  *root_module* is unused here;
    kept for signature uniformity.
    """
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv6Address',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv6Address const &', 'value')])
    return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv6PrefixChecker (ipv6-address.h, module
    'network'): only the default and copy constructors.  *root_module* is
    unused here; kept for signature uniformity.
    """
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
    return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv6PrefixValue (ipv6-address.h, module
    'network'): three constructors plus the Copy/DeserializeFromString/Get/
    SerializeToString/Set member functions.  *root_module* is unused here;
    kept for signature uniformity.
    """
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv6Prefix',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv6Prefix const &', 'value')])
    return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
    """Register bindings for ns3::Mac48AddressChecker (mac48-address.h, module
    'network'): only the default and copy constructors.  *root_module* is
    unused here; kept for signature uniformity.
    """
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker() [constructor]
    cls.add_constructor([])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker(ns3::Mac48AddressChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::Mac48AddressChecker const &', 'arg0')])
    return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
    """Register bindings for ns3::Mac48AddressValue (mac48-address.h, module
    'network'): three constructors plus the Copy/DeserializeFromString/Get/
    SerializeToString/Set member functions.  *root_module* is unused here;
    kept for signature uniformity.
    """
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue() [constructor]
    cls.add_constructor([])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48Address const & value) [constructor]
    cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48AddressValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
    ## mac48-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac48AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## mac48-address.h (module 'network'): bool ns3::Mac48AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## mac48-address.h (module 'network'): ns3::Mac48Address ns3::Mac48AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Mac48Address',
                   [],
                   is_const=True)
    ## mac48-address.h (module 'network'): std::string ns3::Mac48AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## mac48-address.h (module 'network'): void ns3::Mac48AddressValue::Set(ns3::Mac48Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Mac48Address const &', 'value')])
    return
def register_Ns3NetDevice_methods(root_module, cls):
    """Register bindings for the abstract ns3::NetDevice interface
    (net-device.h, module 'network').

    Adds the two constructors and the full set of getters/setters and
    send/receive-callback registration methods; most are registered with
    is_pure_virtual=True, matching the pure-virtual C++ interface.
    *root_module* is unused here; kept for signature uniformity.
    """
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
    cls.add_constructor([])
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [constructor]
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    ## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::NetDevice::PromiscReceiveCallback cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::NetDevice::ReceiveCallback cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3NixVector_methods(root_module, cls):
    """Register bindings for ns3::NixVector (nix-vector.h, module 'network').

    Adds the output-stream operator, two constructors, and the neighbor-index,
    bit-count, copy, and (de)serialization member functions.  *root_module* is
    unused here; kept for signature uniformity.
    """
    cls.add_output_stream_operator()
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
    cls.add_constructor([])
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [constructor]
    cls.add_constructor([param('ns3::NixVector const &', 'o')])
    ## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
    cls.add_method('AddNeighborIndex',
                   'void',
                   [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
    cls.add_method('BitCount',
                   'uint32_t',
                   [param('uint32_t', 'numberOfNeighbors')],
                   is_const=True)
    ## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
    cls.add_method('ExtractNeighborIndex',
                   'uint32_t',
                   [param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
    cls.add_method('GetRemainingBits',
                   'uint32_t',
                   [])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3Node_methods(root_module, cls):
    """Register bindings for ns3::Node (node.h, module 'network').

    Adds three constructors, the application/device accessors, protocol- and
    device-listener (un)registration methods, and the protected virtual
    DoDispose/DoInitialize lifecycle hooks.  *root_module* is unused here;
    kept for signature uniformity.
    """
    ## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [constructor]
    cls.add_constructor([param('ns3::Node const &', 'arg0')])
    ## node.h (module 'network'): ns3::Node::Node() [constructor]
    cls.add_constructor([])
    ## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
    cls.add_constructor([param('uint32_t', 'systemId')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
    cls.add_method('AddApplication',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::Application >', 'application')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddDevice',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
    cls.add_method('ChecksumEnabled',
                   'bool',
                   [],
                   is_static=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
    cls.add_method('GetApplication',
                   'ns3::Ptr< ns3::Application >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
    cls.add_method('GetId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): ns3::Time ns3::Node::GetLocalTime() const [member function]
    cls.add_method('GetLocalTime',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
    cls.add_method('GetNApplications',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
    cls.add_method('GetNDevices',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Node::DeviceAdditionListener listener) [member function]
    cls.add_method('RegisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Node::ProtocolHandler handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
    cls.add_method('RegisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
    ## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Node::DeviceAdditionListener listener) [member function]
    cls.add_method('UnregisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Node::ProtocolHandler handler) [member function]
    cls.add_method('UnregisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
    ## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## node.h (module 'network'): void ns3::Node::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3OrbisTopologyReader_methods(root_module, cls):
    """Register bindings for ns3::OrbisTopologyReader (orbis-topology-reader.h,
    module 'topology-read'): static GetTypeId, the default constructor, and the
    virtual Read() member function.  *root_module* is unused here; kept for
    signature uniformity.
    """
    ## orbis-topology-reader.h (module 'topology-read'): static ns3::TypeId ns3::OrbisTopologyReader::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## orbis-topology-reader.h (module 'topology-read'): ns3::OrbisTopologyReader::OrbisTopologyReader() [constructor]
    cls.add_constructor([])
    ## orbis-topology-reader.h (module 'topology-read'): ns3::NodeContainer ns3::OrbisTopologyReader::Read() [member function]
    cls.add_method('Read',
                   'ns3::NodeContainer',
                   [],
                   is_virtual=True)
    return
def register_Ns3Packet_methods(root_module, cls):
    """Register ns3::Packet bindings: constructors, tag/header/trailer ops, accessors.

    Appears to be pybindgen-generated code; regenerate rather than hand-edit.
    """
    cls.add_output_stream_operator()
    ## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
    cls.add_constructor([])
    ## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [constructor]
    cls.add_constructor([param('ns3::Packet const &', 'o')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
    cls.add_constructor([param('uint32_t', 'size')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'packet')])
    ## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddByteTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
    cls.add_method('AddHeader',
                   'void',
                   [param('ns3::Header const &', 'header')])
    ## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddPacketTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
    cls.add_method('AddPaddingAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
    cls.add_method('AddTrailer',
                   'void',
                   [param('ns3::Trailer const &', 'trailer')])
    ## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
    cls.add_method('BeginItem',
                   'ns3::PacketMetadata::ItemIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::Packet >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::Ptr< ns3::Packet >',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')],
                   is_const=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
    cls.add_method('EnableChecking',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
    cls.add_method('EnablePrinting',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
    cls.add_method('FindFirstMatchingByteTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
    cls.add_method('GetByteTagIterator',
                   'ns3::ByteTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
    cls.add_method('GetNixVector',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
    cls.add_method('GetPacketTagIterator',
                   'ns3::PacketTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint64_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
    cls.add_method('PeekHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header, uint32_t size) const [member function]
    cls.add_method('PeekHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
    cls.add_method('PeekPacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('PeekTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
    cls.add_method('PrintByteTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
    cls.add_method('PrintPacketTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
    cls.add_method('RemoveAllByteTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
    cls.add_method('RemoveAllPacketTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
    cls.add_method('RemoveHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header, uint32_t size) [member function]
    cls.add_method('RemoveHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header'), param('uint32_t', 'size')])
    ## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('RemovePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('RemoveTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('ReplacePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> nixVector) [member function]
    cls.add_method('SetNixVector',
                   'void',
                   [param('ns3::Ptr< ns3::NixVector >', 'nixVector')])
    ## packet.h (module 'network'): std::string ns3::Packet::ToString() const [member function]
    cls.add_method('ToString',
                   'std::string',
                   [],
                   is_const=True)
    return
def register_Ns3RocketfuelTopologyReader_methods(root_module, cls):
    """Register ns3::RocketfuelTopologyReader bindings (GetTypeId, ctor, Read).

    Appears to be pybindgen-generated code; regenerate rather than hand-edit.
    """
    ## rocketfuel-topology-reader.h (module 'topology-read'): static ns3::TypeId ns3::RocketfuelTopologyReader::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## rocketfuel-topology-reader.h (module 'topology-read'): ns3::RocketfuelTopologyReader::RocketfuelTopologyReader() [constructor]
    cls.add_constructor([])
    ## rocketfuel-topology-reader.h (module 'topology-read'): ns3::NodeContainer ns3::RocketfuelTopologyReader::Read() [member function]
    cls.add_method('Read',
                   'ns3::NodeContainer',
                   [],
                   is_virtual=True)
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register ns3::TypeIdChecker constructors (default and copy)."""
    ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register ns3::TypeIdValue bindings (attribute-value get/set/serialize).

    Appears to be pybindgen-generated code; regenerate rather than hand-edit.
    """
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register ns3::AddressChecker constructors (default and copy)."""
    ## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Register ns3::AddressValue bindings (attribute-value get/set/serialize).

    Appears to be pybindgen-generated code; regenerate rather than hand-edit.
    """
    ## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
    cls.add_constructor([param('ns3::Address const &', 'value')])
    ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
    ## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Address',
                   [],
                   is_const=True)
    ## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Address const &', 'value')])
    return
def register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register CallbackImpl<ObjectBase*()> bindings; operator() is exposed as __call__."""
    ## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): ns3::ObjectBase * ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()() [member operator]
    cls.add_method('operator()',
                   'ns3::ObjectBase *',
                   [],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register CallbackImpl bindings for the protocol-handler signature; operator() -> __call__."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::NetDevice> arg0, ns3::Ptr<const ns3::Packet> arg1, short unsigned int arg2, ns3::Address const & arg3, ns3::Address const & arg4, ns3::NetDevice::PacketType arg5) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'arg0'), param('ns3::Ptr< ns3::Packet const >', 'arg1'), param('short unsigned int', 'arg2'), param('ns3::Address const &', 'arg3'), param('ns3::Address const &', 'arg4'), param('ns3::NetDevice::PacketType', 'arg5')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register CallbackImpl<void(Ptr<NetDevice>)> bindings; operator() -> __call__."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::NetDevice> arg0) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'arg0')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register the abstract ns3::Hash::Implementation interface (GetHash32/64, clear)."""
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
    cls.add_constructor([])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_pure_virtual=True, is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register ns3::Hash::Function::Fnv1a bindings (ctors, GetHash32/64, clear)."""
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
    cls.add_constructor([])
    ## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register ns3::Hash::Function::Hash32 bindings (function-pointer adapter, 32-bit only)."""
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register ns3::Hash::Function::Hash64 bindings (function-pointer adapter, 32/64-bit)."""
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    """Register ns3::Hash::Function::Murmur3 bindings (ctors, GetHash32/64, clear)."""
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
    cls.add_constructor([])
    ## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_functions(root_module):
    """Register free functions, recursing into the nested C++ namespaces."""
    module = root_module
    register_functions_ns3_FatalImpl(module.add_cpp_namespace('FatalImpl'), root_module)
    register_functions_ns3_Hash(module.add_cpp_namespace('Hash'), root_module)
    return
def register_functions_ns3_FatalImpl(module, root_module):
    """No free functions to register in ns3::FatalImpl."""
    return
def register_functions_ns3_Hash(module, root_module):
    """Recurse into the nested ns3::Hash::Function namespace."""
    register_functions_ns3_Hash_Function(module.add_cpp_namespace('Function'), root_module)
    return
def register_functions_ns3_Hash_Function(module, root_module):
    """No free functions to register in ns3::Hash::Function."""
    return
def main():
    """Build the module description and emit the generated C++ bindings to stdout."""
    out = FileCodeSink(sys.stdout)
    root_module = module_init()
    register_types(root_module)
    register_methods(root_module)
    register_functions(root_module)
    root_module.generate(out)
if __name__ == '__main__':
    main()
|
tomhenderson/ns-3-dev-git
|
src/topology-read/bindings/modulegen__gcc_LP64.py
|
Python
|
gpl-2.0
| 251,206 | 0.014602 |
# Copyright (C) 2013-2018 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Tris Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from random import choice
from ..helpers.command import Command
from ..helpers.misc import get_fortune
from ..helpers.textutils import gen_lenny
from ..helpers.web import get_urban
def gen_fortune(send):
    """Fetch a fortune (with the '-o' flag) and emit it line by line via *send*."""
    fortune_lines = get_fortune('-o').splitlines()
    for fortune_line in fortune_lines:
        send(fortune_line)
def gen_urban(send, session, key):
    """Look up a random Urban Dictionary entry and emit it via *send*.

    A link to the full definition is sent only when one is returned.
    """
    definition, full_url = get_urban("", session, key)
    send(definition)
    if full_url:
        send("See full definition at %s" % full_url)
@Command('botspam', ['config', 'db'])
def cmd(send, _, args):
    """Abuses the bot.
    Syntax: {command}
    """
    # Wrap the outgoing channel so every message is lenny-fied first.
    def send_lenny(text):
        send(gen_lenny(text))
    bitly_key = args['config']['api']['bitlykey']
    spammers = [
        lambda: gen_fortune(send_lenny),
        lambda: gen_urban(send_lenny, args['db'], bitly_key),
    ]
    # Pick one spam generator at random and run it.
    choice(spammers)()
|
tjcsl/cslbot
|
cslbot/commands/botspam.py
|
Python
|
gpl-2.0
| 1,606 | 0.001868 |
"""Support for Nanoleaf Lights."""
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_HS_COLOR,
ATTR_TRANSITION, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS,
SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT,
SUPPORT_TRANSITION, Light)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_TOKEN
import homeassistant.helpers.config_validation as cv
from homeassistant.util import color as color_util
from homeassistant.util.color import \
color_temperature_mired_to_kelvin as mired_to_kelvin
from homeassistant.util.json import load_json, save_json
# Module-level logger for this platform.
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = 'Nanoleaf'
# Key under hass.data where known devices are stored (host -> Nanoleaf).
DATA_NANOLEAF = 'nanoleaf'
# JSON file (in the HA config dir) persisting per-host auth tokens.
CONFIG_FILE = '.nanoleaf.conf'
ICON = 'mdi:triangle-outline'
# Feature bitmask advertised by NanoleafLight.supported_features.
SUPPORT_NANOLEAF = (SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT |
                    SUPPORT_COLOR | SUPPORT_TRANSITION)
# YAML configuration schema: host and token required, name optional.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_HOST): cv.string,
    vol.Required(CONF_TOKEN): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Nanoleaf light.

    Handles both discovery (token loaded from/persisted to CONFIG_FILE) and
    manual YAML configuration (host/token/name from `config`). Verifies the
    device is reachable before registering the entity.
    """
    from pynanoleaf import Nanoleaf, Unavailable
    # Registry of known devices, keyed by host.
    hass.data.setdefault(DATA_NANOLEAF, {})
    token = ''
    if discovery_info is not None:
        host = discovery_info['host']
        name = discovery_info['hostname']
        # if device already exists via config, skip discovery setup
        if host in hass.data[DATA_NANOLEAF]:
            return
        _LOGGER.info("Discovered a new Nanoleaf: %s", discovery_info)
        conf = load_json(hass.config.path(CONFIG_FILE))
        if conf.get(host, {}).get('token'):
            token = conf[host]['token']
    else:
        host = config[CONF_HOST]
        name = config[CONF_NAME]
        token = config[CONF_TOKEN]
    nanoleaf_light = Nanoleaf(host)
    if not token:
        # No token yet (fresh discovery): request one from the device, which
        # only succeeds while the user holds the power button, then persist it.
        token = nanoleaf_light.request_token()
        if not token:
            # BUGFIX: the original message concatenated "on %s" and "for ..."
            # without a separating space ("...on <name>for 5-7 seconds?").
            _LOGGER.error("Could not generate the auth token, did you press "
                          "and hold the power button on %s "
                          "for 5-7 seconds?", name)
            return
        # Re-read the file so tokens saved for other hosts are preserved.
        conf = load_json(hass.config.path(CONFIG_FILE))
        conf[host] = {'token': token}
        save_json(hass.config.path(CONFIG_FILE), conf)
    nanoleaf_light.token = token
    try:
        # Property access triggers a network round-trip; raises Unavailable
        # when the device cannot be reached.
        nanoleaf_light.available
    except Unavailable:
        _LOGGER.error(
            "Could not connect to Nanoleaf Light: %s on %s", name, host)
        return
    hass.data[DATA_NANOLEAF][host] = nanoleaf_light
    add_entities([NanoleafLight(nanoleaf_light, name)], True)
class NanoleafLight(Light):
"""Representation of a Nanoleaf Light."""
def __init__(self, light, name):
"""Initialize an Nanoleaf light."""
self._available = True
self._brightness = None
self._color_temp = None
self._effect = None
self._effects_list = None
self._light = light
self._name = name
self._hs_color = None
self._state = None
@property
def available(self):
"""Return availability."""
return self._available
@property
def brightness(self):
"""Return the brightness of the light."""
if self._brightness is not None:
return int(self._brightness * 2.55)
return None
@property
def color_temp(self):
"""Return the current color temperature."""
if self._color_temp is not None:
return color_util.color_temperature_kelvin_to_mired(
self._color_temp)
return None
@property
def effect(self):
"""Return the current effect."""
return self._effect
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._effects_list
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return 154
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return 833
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def is_on(self):
"""Return true if light is on."""
return self._state
@property
def hs_color(self):
"""Return the color in HS."""
return self._hs_color
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_NANOLEAF
def turn_on(self, **kwargs):
"""Instruct the light to turn on."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
hs_color = kwargs.get(ATTR_HS_COLOR)
color_temp_mired = kwargs.get(ATTR_COLOR_TEMP)
effect = kwargs.get(ATTR_EFFECT)
transition = kwargs.get(ATTR_TRANSITION)
if hs_color:
hue, saturation = hs_color
self._light.hue = int(hue)
self._light.saturation = int(saturation)
if color_temp_mired:
self._light.color_temperature = mired_to_kelvin(color_temp_mired)
if transition:
if brightness: # tune to the required brightness in n seconds
self._light.brightness_transition(
int(brightness / 2.55), int(transition))
else: # If brightness is not specified, assume full brightness
self._light.brightness_transition(100, int(transition))
else: # If no transition is occurring, turn on the light
self._light.on = True
if brightness:
self._light.brightness = int(brightness / 2.55)
if effect:
self._light.effect = effect
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
transition = kwargs.get(ATTR_TRANSITION)
if transition:
self._light.brightness_transition(0, int(transition))
else:
self._light.on = False
    def update(self):
        """Fetch new state data for this light."""
        # Local import of the third-party dependency; only needed here.
        from pynanoleaf import Unavailable
        try:
            self._available = self._light.available
            self._brightness = self._light.brightness
            self._color_temp = self._light.color_temperature
            self._effect = self._light.effect
            self._effects_list = self._light.effects
            self._hs_color = self._light.hue, self._light.saturation
            self._state = self._light.on
        except Unavailable as err:
            # Keep the entity registered but mark it unavailable on error.
            _LOGGER.error("Could not update status for %s (%s)",
                          self.name, err)
            self._available = False
|
MartinHjelmare/home-assistant
|
homeassistant/components/nanoleaf/light.py
|
Python
|
apache-2.0
| 6,951 | 0 |
#!/usr/bin/python
import sys
import plugins
import flask
import argparse
import os
import urllib2, urllib
import threading
import time
import socket
import subprocess
import random
import json
import signal
import traceback
from uuid import uuid4 as generateUUID
from killerbee import kbutils
from beekeeperwids.utils.errors import ErrorCodes as ec
from beekeeperwids.utils import KBLogUtil, KBInterface
from beekeeperwids.drone.plugins.capture import CapturePlugin
class DroneDaemon:
    """KillerBee WIDS drone daemon.

    Owns the attached radio interfaces, runs capture plugins on them and
    exposes a small REST API (/task, /detask, /status) used to (de)task
    the drone remotely.

    Fixes over the previous revision:
      * handleUnknownException called the nonexistent self.formatResult().
      * taskPlugin/startPlugin swallowed exceptions without returning a
        value, crashing callers that unpack ``(error, data)``.
      * detaskPlugin returned a JSON string (or fell through to None)
        where callers expect an ``(error, data)`` tuple.
      * processStatusGetRequest failed to return its error response.
    """
    def __init__(self, name, port):
        signal.signal(signal.SIGINT, self.SIGINT)
        self.port = port
        self.name = name
        self.logutil = KBLogUtil(self.name, 'Daemon', os.getpid())
        self.interfaces = {}   # device string -> KBInterface
        self.plugins = {}      # (plugin name, channel) -> plugin instance
        self.pid = os.getpid()
    def SIGINT(self, s, f):
        #TODO find a cleaner way to do only handle signals from the parent process ?
        # Forked children inherit this handler; only the recorded parent
        # PID performs the shutdown.
        if self.pid == os.getpid():
            self.logutil.log("SIGINT")
            signal.signal(signal.SIGINT, signal.SIG_IGN)
            self.shutdown = True
            self.shutdownDaemon()
    def handleException(self):
        """Log the current traceback; return an (error, data) tuple for
        the internal task/detask helpers."""
        etb = traceback.format_exc()
        print(etb)
        self.logutil.trace(etb)
        return (ec.ERROR_UnknownException, str(etb))
    def runChecks(self):
        """Abort startup early when the REST port is already bound."""
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.bind(('', self.port))
            s.close()
        except socket.error:
            print("Error Starting Drone:")
            print("Socket TCP {0} already bound".format(self.port))
            sys.exit()
    def startDaemon(self):
        """Run pre-flight checks, then start logging and the REST server."""
        self.runChecks()
        self.logutil.writePID()
        self.logutil.startlog()
        self.logutil.log("Starting DroneDaemon")
        self.enumerateInterfaces()
        self.startRestServer()
    def shutdownDaemon(self):
        """Stop all plugins, clean up logging and exit the process."""
        self.logutil.log('Initiating shutdown')
        self.stopRunningPlugins()
        self.logutil.log('Completed shutdown')
        self.logutil.cleanup()
        # TODO: verify that all subprocess have been terminated
        sys.exit()
    def startRestServer(self):
        """Start the (blocking) Flask REST server."""
        self.logutil.log('Starting REST Server on port {0}'.format(self.port))
        app = flask.Flask(__name__)
        app.add_url_rule('/task', None, self.processTaskRequest, methods=['POST'])
        app.add_url_rule('/detask', None, self.processDetaskRequest, methods=['POST'])
        app.add_url_rule('/status', None, self.processStatusGetRequest, methods=['POST'])
        app.run(port=self.port, threaded=True)
    def handleUnknownException(self):
        """Log the current traceback; return a JSON REST error response."""
        etb = traceback.format_exc()
        self.logutil.trace(etb)
        # Was self.formatResult(...), which does not exist.
        return self.formatResponse(error=ec.ERROR_UnknownException, data=str(etb))
    def formatResponse(self, error, data):
        """Serialize an (error, data) pair as the standard JSON envelope."""
        return json.dumps({'error': error, 'data': data})
    def processTaskRequest(self):
        """REST handler: task a plugin with a new capture job."""
        self.logutil.log('Processing Task Request')
        try:
            data = json.loads(flask.request.data)
            uuid = data.get('uuid')
            plugin = data.get('plugin')
            channel = data.get('channel')
            parameters = data.get('parameters')
            self.logutil.log('Processing Task Request: {0} ({1})'.format(uuid, plugin))
            (error, data) = self.taskPlugin(plugin, channel, uuid, parameters)
            return self.formatResponse(error, data)
        except Exception:
            return self.handleUnknownException()
    def processDetaskRequest(self):
        """REST handler: remove a task from whichever plugin runs it."""
        self.logutil.log('Processing Detask Request')
        try:
            data = json.loads(flask.request.data)
            uuid = data.get('uuid')
            (error, data) = self.detaskPlugin(uuid)
            return self.formatResponse(error, None)
        except Exception:
            return self.handleUnknownException()
    def processStatusGetRequest(self):
        """REST handler: report daemon config, interfaces and plugins."""
        self.logutil.log('Processing Status Get Request')
        try:
            status = {}
            status['config'] = {}
            status['config']['pid'] = self.pid
            status['config']['name'] = self.name
            status['interfaces'] = list((interface.info() for interface in self.interfaces.values()))
            status['plugins'] = list((plugin.info() for plugin in self.plugins.values()))
            return self.formatResponse(None, status)
        except Exception:
            # Was missing `return`, so failures produced an empty response.
            return self.handleUnknownException()
    def loadPluginClass(self, plugin):
        """Map a plugin name to its class; returns None when unknown."""
        if plugin == 'CapturePlugin':
            return CapturePlugin
    def taskPlugin(self, plugin, channel, uuid, parameters):
        """Task (plugin, channel) with *uuid*; returns (error, data)."""
        self.logutil.debug('Tasking Plugin ({0},{1})'.format(plugin, channel))
        pluginObject = self.plugins.get((plugin, channel), None)
        if pluginObject is None:
            self.logutil.log('No Instance of ({0},{1}) Found - Starting New one'.format(plugin, channel))
            (error, data) = self.startPlugin(plugin, channel)
            if error is None:
                pluginObject = data
            else:
                return (error, data)
        try:
            self.logutil.log('Tasking Plugin: ({0}, ch.{1}) with Task {2}'.format(plugin, channel, uuid))
            success = pluginObject.task(uuid, parameters)
            if success == False:
                error = ec.ERROR_DRONE_UnknownTaskingFailure
            else:
                error = None
            return (error, None)
        except Exception:
            # Was an unreturned call, which made callers unpack None.
            return self.handleException()
    def startPlugin(self, plugin, channel):
        """Start a new plugin instance on a free interface; (error, data)."""
        self.logutil.debug('Starting Plugin ({0},{1})'.format(plugin, channel))
        try:
            interface = self.getAvailableInterface()
            if interface is None:
                self.logutil.log('Failed to Start Plugin - No Avilable Interfaces')
                error = ec.ERROR_DRONE_UnavailableInterface
                return (error, None)
            pluginClass = self.loadPluginClass(plugin)
            if pluginClass is None:
                self.logutil.log('Failed to Start Plugin - Plugin Module: {0} does not exist'.format(plugin))
                error = ec.ERROR_DroneFailedToLoadPlugin
                return (error, plugin)
            self.logutil.log('Acquired Interface: {0}'.format(interface.device))
            self.logutil.log('Loaded Plugin Class: {0}'.format(pluginClass))
            pluginObject = pluginClass([interface], channel, self.name)
            self.plugins[(plugin, channel)] = pluginObject
            self.logutil.log('Successfully Started Plugin')
            time.sleep(0.5)
            error = None
            data = pluginObject
            return (error, data)
        except Exception:
            # Was an unreturned call, which made callers unpack None.
            return self.handleException()
    def detaskPlugin(self, uuid):
        """Detask *uuid* from whichever plugin runs it; (error, data)."""
        self.logutil.log('Processing Detask Request for {0}'.format(uuid))
        try:
            for pluginKey, pluginObject in self.plugins.items():
                for task_uuid in pluginObject.tasks.keys():
                    if task_uuid == uuid:
                        detask_success = pluginObject.detask(uuid)
                        if detask_success == False:
                            error = ec.ERROR_DroneUnknownDetaskingFailure
                            return (error, None)
                        time.sleep(2)
                        if pluginObject.active == False:
                            del(self.plugins[pluginKey])
                        self.logutil.log('Succesfully detasked {0} from {1}'.format(uuid, str(pluginObject.desc)))
                        return (None, None)
            # Previously fell through and returned None (unpack error).
            # NOTE(review): no dedicated "task not found" error code is
            # visible here; reusing the generic detasking failure code.
            return (ec.ERROR_DroneUnknownDetaskingFailure, None)
        except Exception:
            return self.handleException()
    def stopRunningPlugins(self):
        """Shut down every active plugin before daemon exit."""
        self.logutil.log('Stopping Running Plugins')
        for plugin in self.plugins.values():
            if plugin.active == True:
                self.logutil.log("Stopping Plugin: {0}".format(plugin.desc))
                plugin.shutdown()
                if plugin.active:
                    print("had a problem shutting down plugin")
        self.logutil.log('Running plugins have been terminated')
    def getAvailableInterface(self):
        """Return the first idle KBInterface, or None when all are busy."""
        for interface in self.interfaces.values():
            if not interface.active:
                return interface
        return None
    def enumerateInterfaces(self):
        """Discover attached KillerBee devices and wrap them as interfaces."""
        self.logutil.log("Enumerating Interfaces")
        try:
            for interface in kbutils.devlist():
                device = interface[0]
                description = interface[1]
                self.logutil.log("Added new interface: {0}".format(device))
                self.interfaces[device] = KBInterface(device)
        except Exception:
            # Logs the failure; the returned response string is unused here.
            self.handleUnknownException()
|
riverloopsec/beekeeperwids
|
beekeeperwids/drone/daemon.py
|
Python
|
gpl-2.0
| 8,678 | 0.005646 |
__copyright__ = "Copyright 2017 Birkbeck, University of London"
__author__ = "Martin Paul Eve & Andy Byers"
__license__ = "AGPL v3"
__maintainer__ = "Birkbeck Centre for Technology and Publishing"
from django.db import models
from django.utils import timezone
from events import logic as event_logic
from utils import setting_handler
class ProofingAssignment(models.Model):
    """Links an article to the account managing its proofing workflow."""
    article = models.OneToOneField('submission.Article')
    proofing_manager = models.ForeignKey('core.Account', null=True, on_delete=models.SET_NULL)
    editor = models.ForeignKey('core.Account', null=True, related_name='proofing_editor')
    assigned = models.DateTimeField(default=timezone.now)
    notified = models.BooleanField(default=False)
    completed = models.DateTimeField(blank=True, null=True)
    class Meta:
        unique_together = ('article', 'proofing_manager')
    @property
    def current_proofing_round_number(self):
        """Return the highest round number, or 0 when no rounds exist."""
        try:
            return self.proofinground_set.all().order_by('-number')[0].number
        except IndexError:
            return 0
    def current_proofing_round(self):
        """Return the latest ProofingRound, or None when no rounds exist."""
        try:
            return self.proofinground_set.all().order_by('-number')[0]
        except IndexError:
            return None
    def add_new_proofing_round(self):
        """Create and return the next ProofingRound for this assignment."""
        new_round_number = self.current_proofing_round_number + 1
        return ProofingRound.objects.create(assignment=self,
                                            number=new_round_number)
    def user_is_manager(self, user):
        """Return True when *user* is this assignment's proofing manager."""
        # Direct comparison replaces the old if/else returning True/False.
        return user == self.proofing_manager
    def __str__(self):
        return 'Proofing Assignment {pk}'.format(pk=self.pk)
class ProofingRound(models.Model):
    """One numbered pass of proofing for a ProofingAssignment."""
    assignment = models.ForeignKey(ProofingAssignment)
    number = models.PositiveIntegerField(default=1)
    date_started = models.DateTimeField(default=timezone.now)
    class Meta:
        # Newest round first.
        ordering = ('-number',)
    def __str__(self):
        return "Round #{0} for Article {1}".format(self.number, self.assignment.article.title)
    @property
    def has_active_tasks(self):
        """Return True while any proofing task in this round is incomplete."""
        if self.proofingtask_set.filter(completed__isnull=True):
            return True
        else:
            return False
    @property
    def active_proofreaders(self):
        """Return the accounts assigned as proofreaders in this round."""
        return [task.proofreader for task in self.proofingtask_set.all()]
    @property
    def typeset_tasks(self):
        """Return every typesetter correction task spawned from this round."""
        typeset_tasks = list()
        for p_task in self.proofingtask_set.all():
            for t_task in p_task.typesetterproofingtask_set.all():
                typeset_tasks.append(t_task)
        return typeset_tasks
    def delete_round_relations(self, request, article, tasks, corrections):
        """Delete this round's tasks and corrections.

        Fires the relevant cancellation event for each task/correction
        that has not already completed (or been cancelled) before it is
        deleted.
        """
        for task in tasks:
            if not task.completed:
                kwargs = {
                    'article': article,
                    'proofing_task': task,
                    'request': request,
                }
                event_logic.Events.raise_event(
                    event_logic.Events.ON_CANCEL_PROOFING_TASK,
                    task_object=article,
                    **kwargs,
                )
            task.delete()
        for correction in corrections:
            if not correction.completed and not correction.cancelled:
                kwargs = {
                    'article': article,
                    'correction': correction,
                    'request': request,
                }
                event_logic.Events.raise_event(
                    event_logic.Events.ON_CORRECTIONS_CANCELLED,
                    task_object=article,
                    **kwargs,
                )
            correction.delete()
    def can_add_another_proofreader(self, journal):
        """
        Checks if this round can have another proofreader.
        :param journal: Journal object
        :return: Boolean, True or False
        """
        limit = setting_handler.get_setting(
            'general',
            'max_proofreaders',
            journal,
        ).processed_value
        # A limit of 0 means "unlimited proofreaders".
        if not limit == 0:
            current_num_proofers = ProofingTask.objects.filter(
                round=self,
            ).count()
            if current_num_proofers >= limit:
                return False
        return True
class ProofingTask(models.Model):
    """A single proofreader's assignment within a proofing round."""
    round = models.ForeignKey(ProofingRound)
    proofreader = models.ForeignKey('core.Account', null=True, on_delete=models.SET_NULL)
    assigned = models.DateTimeField(default=timezone.now)
    notified = models.BooleanField(default=False)
    due = models.DateTimeField(default=None, verbose_name="Date Due")
    accepted = models.DateTimeField(blank=True, null=True)
    completed = models.DateTimeField(blank=True, null=True)
    cancelled = models.BooleanField(default=False)
    acknowledged = models.DateTimeField(blank=True, null=True)
    task = models.TextField(verbose_name="Proofing Task")
    galleys_for_proofing = models.ManyToManyField('core.Galley')
    proofed_files = models.ManyToManyField('core.File')
    notes = models.ManyToManyField('proofing.Note')
    def __str__(self):
        return "{0} proofing {1} in round {2}".format(self.proofreader.full_name(),
                                                      self.round.assignment.article.title,
                                                      self.round.number)
    @property
    def assignment(self):
        """Shortcut to the parent ProofingAssignment."""
        return self.round.assignment
    def typesetter_tasks(self):
        """Return the correction tasks spawned from this proofing task."""
        return self.typesetterproofingtask_set.all()
    def status(self):
        """Return the task state as a {'slug', 'friendly'} dict."""
        if self.cancelled:
            return {'slug': 'cancelled', 'friendly': 'Task cancelled'}
        elif self.assigned and not self.accepted and not self.completed:
            return {'slug': 'assigned', 'friendly': 'Awaiting response'}
        elif self.assigned and self.accepted and not self.completed:
            return {'slug': 'accepted', 'friendly': 'Task accepted, underway'}
        elif self.assigned and not self.accepted and self.completed:
            return {'slug': 'declined', 'friendly': 'Task declined'}
        elif self.completed:
            return {'slug': 'completed', 'friendly': 'Task completed'}
    def galley_files(self):
        """Return the File objects behind the galleys assigned for proofing."""
        return [galley.file for galley in self.galleys_for_proofing.all()]
    def actor(self):
        """Return the account responsible for acting on this task."""
        return self.proofreader
    def review_comments(self):
        """Return all notes as one HTML fragment, ordered by galley."""
        # Build a list and join once instead of quadratic `+=` in a loop.
        fragments = [
            "Comment by: {0} for Galley {1}<br>{2}<br>".format(
                note.creator.full_name(), note.galley, note.text)
            for note in self.notes.all().order_by('galley')
        ]
        return ''.join(fragments)
    def reset(self):
        """Clear completion/cancellation state so the task can run again."""
        self.completed = None
        self.cancelled = False
        self.accepted = None
        self.save()
class TypesetterProofingTask(models.Model):
    """A correction task assigned to a typesetter from a proofing task."""
    proofing_task = models.ForeignKey(ProofingTask)
    typesetter = models.ForeignKey('core.Account', null=True, on_delete=models.SET_NULL)
    assigned = models.DateTimeField(default=timezone.now)
    notified = models.BooleanField(default=False)
    due = models.DateTimeField(blank=True, null=True)
    accepted = models.DateTimeField(blank=True, null=True)
    completed = models.DateTimeField(blank=True, null=True)
    cancelled = models.BooleanField(default=False)
    acknowledged = models.DateTimeField(blank=True, null=True)
    task = models.TextField(verbose_name="Typesetter Task")
    galleys = models.ManyToManyField('core.Galley')
    files = models.ManyToManyField('core.File')
    notes = models.TextField(verbose_name="Correction Note", blank=True, null=True)
    class Meta:
        verbose_name = 'Correction Task'
    def __str__(self):
        return "Correction Task Proof ID: {0}, Proofreader {1}, Due: {2}".format(self.proofing_task.pk,
                                                                                 self.typesetter.full_name(),
                                                                                 self.due)
    def status(self):
        """Return the task state as a {'slug', 'friendly'} dict."""
        if self.cancelled:
            return {'slug': 'cancelled', 'friendly': 'Cancelled'}
        elif self.assigned and not self.accepted and not self.completed:
            return {'slug': 'assigned', 'friendly': 'Awaiting response'}
        elif self.assigned and self.accepted and not self.completed:
            return {'slug': 'accepted', 'friendly': 'Underway'}
        elif self.assigned and not self.accepted and self.completed:
            return {'slug': 'declined', 'friendly': 'Declined'}
        elif self.completed:
            return {'slug': 'completed', 'friendly': 'Completed'}
    def actor(self):
        """Return the account responsible for acting on this task."""
        return self.typesetter
class Note(models.Model):
    """A user comment attached to a galley during proofing."""
    galley = models.ForeignKey('core.Galley')
    creator = models.ForeignKey('core.Account', related_name='proofing_note_creator',
                                null=True, on_delete=models.SET_NULL)
    text = models.TextField()
    date_time = models.DateTimeField(auto_now_add=True)
    class Meta:
        # Newest note first.
        ordering = ('-date_time',)
    def __str__(self):
        return "{0} - {1} {2}".format(self.pk, self.creator.full_name(), self.galley)
|
BirkbeckCTP/janeway
|
src/proofing/models.py
|
Python
|
agpl-3.0
| 9,317 | 0.001717 |
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016, 2017
class DataAlreadyExistsError(RuntimeError):
    """Raised when adding data under a label that is already present."""
    def __init__(self, label):
        message = str("Data with label '%s' already exists and cannot be added" % (label))
        # Forward to RuntimeError so str(exc) and exc.args carry the
        # message; the old code only set self.message, leaving str(exc)
        # empty.
        super(DataAlreadyExistsError, self).__init__(message)
        self.message = message
def get_patient_id(d):
    """Return the patient identifier stored in observation dict *d*."""
    patient = d['patient']
    return patient['identifier']
def get_index_by_label(d, label):
    """Return the position in d['data'] of the entry labelled *label*.

    Returns None when no entry carries that label.
    """
    for idx, entry in enumerate(d['data']):
        if entry['label'] == label:
            return idx
    return None
def get_sampled_data_values(d, label):
    """Return the sampled-data values list for the entry labelled *label*."""
    entry = d['data'][get_index_by_label(d, label)]
    return entry['valueSampledData']['values']
def get_coordinate_data_values(d, label):
    """Return the coordinate-data values list for the entry labelled *label*."""
    entry = d['data'][get_index_by_label(d, label)]
    return entry['valueCoordinateData']['values']
def get_period_value(d, label):
    """Return the sampling period value for the entry labelled *label*."""
    entry = d['data'][get_index_by_label(d, label)]
    return entry['valueSampledData']['period']['value']
def get_sampled_data_unit(d, label):
    """Return the measurement unit for the entry labelled *label*."""
    entry = d['data'][get_index_by_label(d, label)]
    return entry['valueSampledData']['unit']
def get_period_unit(d, label):
    """Return the sampling period unit for the entry labelled *label*."""
    entry = d['data'][get_index_by_label(d, label)]
    return entry['valueSampledData']['period']['unit']
def get_gain(d, label):
    """Return the gain factor for the entry labelled *label*."""
    entry = d['data'][get_index_by_label(d, label)]
    return entry['valueSampledData']['gain']
def get_initValue(d, label):
    """Return the initial value for the entry labelled *label*."""
    entry = d['data'][get_index_by_label(d, label)]
    return entry['valueSampledData']['initVal']
def get_patient_ID(d):
    """Return the patient identifier (duplicate of get_patient_id)."""
    patient = d['patient']
    return patient['identifier']
def add_sampled_data(d, label, sampled_data, period_value, period_unit, update_if_exists=False):
    """Add (or update) a sampled-data entry labelled *label* in d['data'].

    Raises DataAlreadyExistsError when the label exists and
    update_if_exists is False.

    Bug fixes: entries were written under the key 'valuesSampledData'
    while every getter in this module reads 'valueSampledData'; the
    update branch also dropped the 'label' key, breaking subsequent
    get_index_by_label() lookups.
    NOTE(review): the getters also expect 'unit', 'gain' and 'initVal'
    keys that this writer never sets — confirm against callers.
    """
    value = {
        'label': label,
        'valueSampledData': {
            'values': sampled_data,
            'period': {'value': period_value, 'unit': period_unit},
        },
    }
    data_idx = get_index_by_label(d, label)
    if data_idx is None:
        d['data'].append(value)
    elif update_if_exists:
        d['data'][data_idx] = value
    else:
        raise DataAlreadyExistsError(label=label)
def add_coordinate_data(d, label, coords, replace_if_exists=False):
    """Add (or replace) a coordinate-data entry labelled *label* in d['data'].

    Raises DataAlreadyExistsError when the label exists and
    replace_if_exists is False.

    Bug fix: the replacement entry previously omitted the 'label' key,
    which broke every subsequent get_index_by_label() lookup.
    """
    value = {'label': label, 'valueCoordinateData': {'values': coords}}
    data_idx = get_index_by_label(d, label)
    if data_idx is None:
        d['data'].append(value)
    elif replace_if_exists:
        d['data'][data_idx] = value
    else:
        raise DataAlreadyExistsError(label=label)
|
IBMStreams/streamsx.health
|
samples/HealthcareJupyterDemo/package/healthdemo/utils.py
|
Python
|
apache-2.0
| 2,562 | 0.015613 |
#!/usr/bin/env python
# encoding: utf-8
## Python impl of JFRED, developed by Robby Garner and Paco Nathan
## See: http://www.robitron.com/JFRED.php
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import fred_fuzzy
import random
import re
import sys
######################################################################
## rule classes
######################################################################
class ParseError (Exception):
    """Raised when a JFRED rule description cannot be parsed."""
    def __init__ (self, value):
        # Forward to Exception so args/pickling behave normally; the
        # original left args empty.
        super(ParseError, self).__init__(value)
        self.value = value
    def __str__ (self):
        return repr(self.value)
class Rules (object):
    """Compiled rule base: indexes fuzzy sets, action/intro rules and
    regex phrases, and selects responses for input utterances.

    Bug fixes: ``random.random`` was compared to 0.03 without being
    called (a function object is never < 0.03), and the bare name
    ``choice`` was used without ever being imported; both now go
    through the ``random`` module. Error printing is also written in a
    Python 2/3 compatible form with identical output.
    """
    def __init__ (self, lang, rule_dict, first_action, fuzzy_dict):
        self.lang = lang
        self.rule_dict = rule_dict
        self.first_action = first_action
        # 1. create an inverted index for the fuzzy sets
        self.fuzzy_sets = {}
        for (name, r) in fuzzy_dict.items():
            self.fuzzy_sets[name] = map(lambda x: (self.rule_dict[r.members[x]], r.weights[x]), range(0, len(r.members)))
        # 2. randomly shuffle the order of responses within all the
        # action rules, and establish priority rankings (later)
        self.action_rules = [r for r in self.rule_dict.values() if isinstance(r, ActionRule)]
        # 3. randomly shuffle the intro rule(s)
        self.intro_rules = [r for r in self.rule_dict.values() if isinstance(r, IntroRule)]
        # 4. create an inverted index for the regex phrases
        self.regex_phrases = {}
        for r in self.rule_dict.values():
            if isinstance(r, RegexRule):
                try:
                    invoked = set(map(lambda x: self.rule_dict[x], r.invokes.split(" ")))
                    for phrase in r.vector:
                        phrase_tuple = tuple(self.lang.parse(phrase))
                        self.regex_phrases[phrase_tuple] = invoked
                except KeyError as e:
                    print("ERROR: references unknown action rule %s" % e)
                    sys.exit(1)
    def choose_first (self):
        """Fire the configured opening action rule."""
        return self.first_action.fire()
    @staticmethod
    def find_sublist (sub, bigger):
        """Return the index just past sub[0] where *sub* occurs in
        *bigger*, or -1 when absent (0 for an empty *sub*)."""
        # kudos to nosklo
        # http://stackoverflow.com/questions/2250633/python-find-a-list-within-members-of-another-listin-order
        if not bigger:
            return -1
        if not sub:
            return 0
        first, remainder = sub[0], sub[1:]
        pos = 0
        try:
            while True:
                pos = bigger.index(first, pos) + 1
                if not remainder or bigger[pos:pos+len(remainder)] == remainder:
                    return pos
        except ValueError:
            return -1
    def choose_rule (self, utterance):
        """Select a rule for *utterance*; return (response, rule, weight)."""
        stimulus = self.lang.parse(utterance)
        fuzzy_union = fred_fuzzy.FuzzyUnion()
        # 1. select an optional introduction (p <= 0.03)
        response = ""
        if random.random() < 0.03:
            response = random.choice(self.intro_rules).fire()
        # 2. "Fred.chooseReply()"
        # based on key words from the input stream
        # 2.1 regex matches => invoked action rules r=200
        for (phrase, rules) in self.regex_phrases.items():
            if Rules.find_sublist(phrase, stimulus) >= 0:
                for rule in rules:
                    fuzzy_union.add_rule(rule, 2.0)
        # 2.2 fuzzy rules => invoked action rules
        for (fuzzy_term, members) in self.fuzzy_sets.items():
            if fuzzy_term in stimulus:
                for rule, weight in members:
                    fuzzy_union.add_rule(rule, weight)
        # 2.3 action rules r=100
        if fuzzy_union.is_empty():
            for rule in self.action_rules:
                if rule.repeat or rule.count < 1:
                    fuzzy_union.add_rule(rule, 1.0)
        # select an action rule to use for a response template
        selected_rule, weight = fuzzy_union.select_rule()
        response_template = selected_rule.fire()
        # 3. test for "bind" points in the selected response template
        if selected_rule.bind and response_template.find("[]") > 0:
            pos = stimulus.index(selected_rule.bind) + 1
            fragment = stimulus[pos:]
            # 3.1 invert the verb tense, possessives, contractions, negations...
            # NB: some kind of context-free grammar might work better here
            replacement = " ".join(self.lang.invert(fragment))
            response_template = response_template.replace("[]", replacement)
        response += response_template
        # 4. decide whether the current query differs from the
        # previous one...
        # 5. "Fred.logChat()" keep track of what's been said
        return response, selected_rule, weight
class Rule (object):
    """Base class for all JFRED rules; handles rule-file parsing."""
    # Matches "elem: value" lines in a rule description.
    rule_pat = re.compile("(\S+)\:\s+(\S+)")
    def __init__ (self):
        self.name = None
        self.vector = None   # the rule's body lines (responses/phrases)
        self.count = 0       # number of times this rule has fired
    def parse (self, name, vector, attrib):
        """Populate this rule from parsed pieces; returns self for chaining."""
        self.name = name.lower()
        self.vector = vector
        return self
    def fire (self):
        """Record a firing and return one randomly chosen body line."""
        self.count += 1
        return random.choice(self.vector)
    @staticmethod
    def parse_lines (rule_lines):
        """
        parse the raw text lines for one JFRED rule
        """
        # First line carries "kind: name"; remaining lines are either
        # "elem: value" attributes or body lines.
        first_line = rule_lines.pop(0)
        m = Rule.rule_pat.match(first_line)
        if not m:
            raise ParseError("unrecognized rule format: " + first_line)
        (kind, name) = m.group(1).lower().strip(), m.group(2).lower().strip()
        if not kind in ["intro", "action", "response", "regex", "fuzzy"]:
            raise ParseError("bad rule type: " + kind)
        vector = []
        attrib = {}
        for line in rule_lines:
            m = Rule.rule_pat.match(line)
            if m:
                (elem, value) = m.group(1).lower().strip(), m.group(2).strip()
                if not elem in ["priority", "requires", "equals", "bind", "invokes", "url", "next", "repeat", "expect"]:
                    raise ParseError("bad rule elem: " + elem)
                else:
                    attrib[elem] = value
            else:
                vector.append(line)
        # Dispatch on the rule kind to the matching subclass.
        rule = None
        if kind == "intro":
            rule = IntroRule().parse(name, vector, attrib)
        elif kind == "action":
            rule = ActionRule().parse(name, vector, attrib)
        elif kind == "response":
            rule = ResponseRule().parse(name, vector, attrib)
        elif kind == "regex":
            rule = RegexRule().parse(name, vector, attrib)
        elif kind == "fuzzy":
            rule = FuzzyRule().parse(name, vector, attrib)
        return rule
    @staticmethod
    def parse_file (lang, filename):
        """
        read a JFRED rule file, return a Rules object
        """
        # Rules are blocks of non-blank lines separated by blank lines;
        # '#' lines are comments.
        rule_dict = {}
        first_action = None
        fuzzy_dict = {}
        with open(filename, "r") as f:
            rule_lines = []
            for line in f:
                line = line.strip()
                if line.startswith("#"):
                    pass
                elif len(line) == 0:
                    if len(rule_lines) > 0:
                        try:
                            rule = Rule.parse_lines(rule_lines)
                        except ParseError:
                            print "ERROR: cannot parse rule description", rule_lines
                            sys.exit(1)
                        else:
                            # Fuzzy rules are indexed separately; the
                            # first action rule becomes the opener.
                            if isinstance(rule, FuzzyRule):
                                fuzzy_dict[rule.name] = rule
                            else:
                                rule_dict[rule.name] = rule
                            if not first_action and isinstance(rule, ActionRule):
                                first_action = rule
                            rule_lines = []
                else:
                    rule_lines.append(line)
        # NOTE(review): a trailing rule with no blank line after it is
        # never flushed — confirm rule files always end with a blank line.
        return Rules(lang, rule_dict, first_action, fuzzy_dict)
class IntroRule (Rule):
    """Rule whose body lines may open a conversation turn."""
    def __init__ (self):
        super(IntroRule, self).__init__()
    def parse (self, name, vector, attrib):
        super(IntroRule, self).parse(name, vector, attrib)
        # Intro rules accept no attributes at all.
        if attrib:
            raise ParseError("unrecognized rule element: " + str(attrib))
        return self
class ActionRule (Rule):
    """Rule providing response templates, selected by the fuzzy engine."""
    def __init__ (self):
        super(ActionRule, self).__init__()
        self.priority = 0     # ranking hint (currently unused downstream)
        self.repeat = False   # may this rule fire more than once?
        self.requires = None
        self.expect = []      # rule names expected to follow
        self.bind = None      # stimulus word where "[]" substitution anchors
        self.next = None
        self.url = None
    def parse (self, name, vector, attrib):
        """Consume known attributes from *attrib*; any leftovers are errors."""
        super(ActionRule, self).parse(name, vector, attrib)
        if "priority" in attrib:
            self.priority = int(attrib["priority"])
            del attrib["priority"]
        if "repeat" in attrib:
            self.repeat = (attrib["repeat"].lower() == "true")
            del attrib["repeat"]
        if "requires" in attrib:
            self.requires = attrib["requires"].lower()
            del attrib["requires"]
        if "expect" in attrib:
            self.expect = attrib["expect"].lower().split(" ")
            del attrib["expect"]
        if "bind" in attrib:
            self.bind = attrib["bind"].lower()
            del attrib["bind"]
        if "next" in attrib:
            self.next = attrib["next"].lower()
            del attrib["next"]
        if "url" in attrib:
            self.url = attrib["url"].lower()
            del attrib["url"]
        if len(attrib) > 0:
            raise ParseError("unrecognized rule element: " + str(attrib))
        if not self.bind:
            # correct for missing "bind:" attributes
            self.bind = self.name
        return self
class ResponseRule (Rule):
    """Rule holding canned response lines referenced by other rules."""
    def __init__ (self):
        super(ResponseRule, self).__init__()
    def parse (self, name, vector, attrib):
        super(ResponseRule, self).parse(name, vector, attrib)
        # Response rules accept no attributes at all.
        if attrib:
            raise ParseError("unrecognized rule element: " + str(attrib))
        return self
class RegexRule (Rule):
    """Rule whose phrases, when matched, invoke named action rules."""
    def __init__ (self):
        super(RegexRule, self).__init__()
        self.invokes = None
    def parse (self, name, vector, attrib):
        super(RegexRule, self).parse(name, vector, attrib)
        # pop() combines the membership test and deletion of the old code.
        invokes = attrib.pop("invokes", None)
        if invokes is not None:
            self.invokes = invokes.lower()
        if not self.invokes:
            raise ParseError("regex rule must invoke: " + name)
        if attrib:
            raise ParseError("unrecognized rule element: " + str(attrib))
        return self
class FuzzyRule (Rule):
    """Rule mapping a fuzzy term to weighted member rules."""
    def __init__ (self):
        super(FuzzyRule, self).__init__()
        self.weights = []
        self.members = []
    def parse (self, name, vector, attrib):
        super(FuzzyRule, self).parse(name, vector, attrib)
        if attrib:
            raise ParseError("unrecognized rule element: " + str(attrib))
        # Each body line is "<integer weight>\t<rule name>".
        for line in self.vector:
            weight, rule = line.split("\t")
            self.members.append(rule.lower())
            self.weights.append(float(int(weight)))
        # Normalize the weights so they sum to 1.
        total = sum(self.weights)
        self.weights = [w / total for w in self.weights]
        self.vector = []
        return self
if __name__=='__main__':
    # Rule.parse_file takes (lang, filename) and returns a Rules object,
    # not a (rule_dict, first_action) tuple — the old call crashed.
    # NOTE(review): no language helper is available in this smoke test;
    # passing None only works for rule files without regex rules.
    rules = Rule.parse_file(None, sys.argv[1])
    print(len(rules.rule_dict))
    print(rules.first_action)
|
liber118/pyFRED
|
src/fred_rules.py
|
Python
|
apache-2.0
| 11,958 | 0.005101 |
#! /usr/bin/python3
def main():
    """Read problems from stdin until EOF.

    Each problem is a line "n name1 r1 name2 r2 ..." describing a
    mixed-radix unit system, followed by a line holding an integer m.
    Prints m expressed (with round-half-up) in the one and two largest
    units.  Assumes m >= 0.
    """
    try:
        while True:
            tokens = input().strip().split(' ')
            n = int(tokens[0])
            names = tokens[1::2]                       # unit names
            radices = [0] + [int(t) for t in tokens[2::2]]
            m = int(input())
            # Convert m into mixed-radix digits, most significant first.
            digits = [0] * len(radices)
            for i in range(len(radices) - 1, 0, -1):
                digits[i] = m % radices[i]
                # BUG FIX: was int(m / r) — float true division loses
                # precision for large m; use integer floor division.
                m //= radices[i]
            digits[0] = m
            # Express m in the largest unit, rounding half up.
            carry = 1 if digits[1] * 2 >= radices[1] else 0
            print("{} {}".format(digits[0] + carry, names[0]))
            # Express m in the two largest units, rounding half up.
            carry = 1 if n > 2 and digits[2] * 2 >= radices[2] else 0
            if digits[1] + carry >= radices[1]:
                print("{} {} {} {}".format(digits[0] + 1, names[0], 0,
                                           names[1]))
            else:
                print("{} {} {} {}".format(digits[0], names[0],
                                           digits[1] + carry, names[1]))
    except EOFError:
        pass
if __name__ == '__main__':
    # Run only when executed as a script (not on import).
    main()
|
zyoohv/zyoohv.github.io
|
code_repository/tencent_ad_contest/tencent_contest/model/main.py
|
Python
|
mit
| 1,329 | 0.000752 |
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ContributorOrcid(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    def __init__(self, uri=None, path=None, host=None):
        """
        ContributorOrcid - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        # Declared attribute types, used by to_dict() for recursion.
        self.swagger_types = {
            'uri': 'str',
            'path': 'str',
            'host': 'str'
        }
        # Maps Python attribute names to their JSON keys.
        self.attribute_map = {
            'uri': 'uri',
            'path': 'path',
            'host': 'host'
        }
        self._uri = uri
        self._path = path
        self._host = host
    @property
    def uri(self):
        """
        Gets the uri of this ContributorOrcid.

        :return: The uri of this ContributorOrcid.
        :rtype: str
        """
        return self._uri
    @uri.setter
    def uri(self, uri):
        """
        Sets the uri of this ContributorOrcid.

        :param uri: The uri of this ContributorOrcid.
        :type: str
        """
        self._uri = uri
    @property
    def path(self):
        """
        Gets the path of this ContributorOrcid.

        :return: The path of this ContributorOrcid.
        :rtype: str
        """
        return self._path
    @path.setter
    def path(self, path):
        """
        Sets the path of this ContributorOrcid.

        :param path: The path of this ContributorOrcid.
        :type: str
        """
        self._path = path
    @property
    def host(self):
        """
        Gets the host of this ContributorOrcid.

        :return: The host of this ContributorOrcid.
        :rtype: str
        """
        return self._host
    @host.setter
    def host(self, host):
        """
        Sets the host of this ContributorOrcid.

        :param host: The host of this ContributorOrcid.
        :type: str
        """
        self._host = host
    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # Recursively serialize nested models, lists and dicts.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, ContributorOrcid):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
Royal-Society-of-New-Zealand/NZ-ORCID-Hub
|
orcid_api/models/contributor_orcid.py
|
Python
|
mit
| 3,922 | 0.00051 |
import itertools
from .core import frequencies
from ..compatibility import map
def countby(func, seq):
    """ Count elements of a collection by a key function

    >>> countby(len, ['cat', 'mouse', 'dog'])
    {3: 2, 5: 1}

    >>> def iseven(x): return x % 2 == 0
    >>> countby(iseven, [1, 2, 3])  # doctest:+SKIP
    {True: 1, False: 2}

    See Also:
        groupby
    """
    keys = map(func, seq)
    return frequencies(keys)
def partitionby(func, seq):
    """ Partition a sequence according to a function

    Partition `s` into a sequence of lists such that, when traversing
    `s`, every time the output of `func` changes a new list is started
    and that and subsequent items are collected into that list.

    >>> is_space = lambda c: c == " "
    >>> list(partitionby(is_space, "I have space"))
    [('I',), (' ',), ('h', 'a', 'v', 'e'), (' ',), ('s', 'p', 'a', 'c', 'e')]

    >>> is_large = lambda x: x > 10
    >>> list(partitionby(is_large, [1, 2, 1, 99, 88, 33, 99, -1, 5]))
    [(1, 2, 1), (99, 88, 33, 99), (-1, 5)]

    See also:
        partition
        groupby
        itertools.groupby
    """
    # groupby starts a new group whenever the key value changes; the
    # group iterators are materialized as tuples before the next group
    # is requested (Python 2 and 3 compatible).
    grouped = itertools.groupby(seq, key=func)
    return (tuple(group) for _, group in grouped)
|
obmarg/toolz
|
toolz/itertoolz/recipes.py
|
Python
|
bsd-3-clause
| 1,295 | 0 |
import six
from sqlalchemy_utils.utils import str_coercible
from .weekday import WeekDay
@str_coercible
class WeekDays(object):
    """A set of WeekDay values, convertible to/from a 7-character bit
    string ('1' = day present, position = WeekDay index).

    Bug fix: the class defined __eq__ without __ne__; on Python 2 (this
    module supports it via six) `!=` fell back to identity comparison.
    """
    def __init__(self, bit_string_or_week_days):
        if isinstance(bit_string_or_week_days, six.string_types):
            self._days = set()
            if len(bit_string_or_week_days) != WeekDay.NUM_WEEK_DAYS:
                raise ValueError(
                    'Bit string must be {0} characters long.'.format(
                        WeekDay.NUM_WEEK_DAYS
                    )
                )
            for index, bit in enumerate(bit_string_or_week_days):
                if bit not in '01':
                    raise ValueError(
                        'Bit string may only contain zeroes and ones.'
                    )
                if bit == '1':
                    self._days.add(WeekDay(index))
        elif isinstance(bit_string_or_week_days, WeekDays):
            self._days = bit_string_or_week_days._days
        else:
            self._days = set(bit_string_or_week_days)
    def __eq__(self, other):
        if isinstance(other, WeekDays):
            return self._days == other._days
        elif isinstance(other, six.string_types):
            return self.as_bit_string() == other
        else:
            return NotImplemented
    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; propagate
        # NotImplemented so reflected comparison still works.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
    def __iter__(self):
        for day in sorted(self._days):
            yield day
    def __contains__(self, value):
        return value in self._days
    def __repr__(self):
        return '%s(%r)' % (
            self.__class__.__name__,
            self.as_bit_string()
        )
    def __unicode__(self):
        return u', '.join(six.text_type(day) for day in self)
    def as_bit_string(self):
        """Return the 7-character '0'/'1' representation of this set."""
        return ''.join(
            '1' if WeekDay(index) in self._days else '0'
            for index in six.moves.xrange(WeekDay.NUM_WEEK_DAYS)
        )
|
cheungpat/sqlalchemy-utils
|
sqlalchemy_utils/primitives/weekdays.py
|
Python
|
bsd-3-clause
| 1,866 | 0 |
import os
import unittest
from dateutil.parser import parse as dtparse
import numpy as np
from pocean.dsg import ContiguousRaggedTrajectoryProfile
import logging
from pocean import logger
# Route pocean's logger to stderr at INFO level for this test module.
logger.level = logging.INFO
logger.handlers = [logging.StreamHandler()]
class TestContinousRaggedTrajectoryProfile(unittest.TestCase):
    """Tests for the CF contiguous-ragged trajectory-profile reader.

    NOTE(review): the class name misspells "Continuous"; kept as-is since
    renaming would change test discovery output.
    """

    def setUp(self):
        # Paths to the three sample NetCDF files shipped under resources/.
        self.single = os.path.join(os.path.dirname(__file__), 'resources', 'cr-single.nc')
        self.multi = os.path.join(os.path.dirname(__file__), 'resources', 'cr-multiple.nc')
        self.missing_time = os.path.join(os.path.dirname(__file__), 'resources', 'cr-missing-time.nc')

    def test_crtp_load(self):
        """Each sample file can be opened and closed without error."""
        ContiguousRaggedTrajectoryProfile(self.single).close()
        ContiguousRaggedTrajectoryProfile(self.multi).close()
        ContiguousRaggedTrajectoryProfile(self.missing_time).close()

    def test_crtp_dataframe(self):
        """to_dataframe() succeeds for every sample file."""
        with ContiguousRaggedTrajectoryProfile(self.single) as s:
            s.to_dataframe()
        with ContiguousRaggedTrajectoryProfile(self.multi) as m:
            m.to_dataframe()
        with ContiguousRaggedTrajectoryProfile(self.missing_time) as t:
            t.to_dataframe()

    def test_crtp_calculated_metadata(self):
        """calculated_metadata() reports the expected extents for each file."""
        with ContiguousRaggedTrajectoryProfile(self.single) as st:
            s = st.calculated_metadata()
            assert s.min_t == dtparse('2014-11-25 18:57:30')
            assert s.max_t == dtparse('2014-11-27 07:10:30')
            assert len(s.trajectories) == 1
            traj = s.trajectories["sp025-20141125T1730"]
            assert traj.min_z == 0
            assert np.isclose(traj.max_z, 504.37827)
            assert traj.min_t == dtparse('2014-11-25 18:57:30')
            assert traj.max_t == dtparse('2014-11-27 07:10:30')
            assert np.isclose(traj.first_loc.x, -119.79025)
            assert np.isclose(traj.first_loc.y, 34.30818)
            assert len(traj.profiles) == 17

        with ContiguousRaggedTrajectoryProfile(self.multi) as mt:
            m = mt.calculated_metadata()
            assert m.min_t == dtparse('1990-01-01 00:00:00')
            assert m.max_t == dtparse('1990-01-03 02:00:00')
            assert len(m.trajectories) == 5
            # First trajectory
            traj0 = m.trajectories[0]
            assert traj0.min_z == 0
            assert traj0.max_z == 43
            assert traj0.min_t == dtparse('1990-01-02 05:00:00')
            assert traj0.max_t == dtparse('1990-01-03 01:00:00')
            assert traj0.first_loc.x == -60
            assert traj0.first_loc.y == 53
            assert len(traj0.profiles) == 4
            assert traj0.profiles[0].t == dtparse('1990-01-03 01:00:00')
            assert traj0.profiles[0].x == -60
            assert traj0.profiles[0].y == 49

            # Last trajectory
            traj4 = m.trajectories[4]
            assert traj4.min_z == 0
            assert traj4.max_z == 38
            assert traj4.min_t == dtparse('1990-01-02 14:00:00')
            assert traj4.max_t == dtparse('1990-01-02 15:00:00')
            assert traj4.first_loc.x == -67
            assert traj4.first_loc.y == 47
            assert len(traj4.profiles) == 4
            # NOTE(review): profiles appears to be keyed by profile id
            # (index 19 with only 4 profiles) — confirm against the reader.
            assert traj4.profiles[19].t == dtparse('1990-01-02 14:00:00')
            assert traj4.profiles[19].x == -44
            assert traj4.profiles[19].y == 47

        with ContiguousRaggedTrajectoryProfile(self.missing_time) as mmt:
            t = mmt.calculated_metadata()
            assert t.min_t == dtparse('2014-11-16 21:32:29.952500')
            assert t.max_t == dtparse('2014-11-17 07:59:08.398500')
            assert len(t.trajectories) == 1

            traj = t.trajectories["UW157-20141116T211809"]
            assert np.isclose(traj.min_z, 0.47928014)
            assert np.isclose(traj.max_z, 529.68005)
            assert traj.min_t == dtparse('2014-11-16 21:32:29.952500')
            assert traj.max_t == dtparse('2014-11-17 07:59:08.398500')
            assert np.isclose(traj.first_loc.x, -124.681526638573)
            assert np.isclose(traj.first_loc.y, 43.5022166666667)
            assert len(traj.profiles) == 13

    def test_just_missing_time(self):
        """Duplicate of the missing-time branch above, runnable in isolation."""
        with ContiguousRaggedTrajectoryProfile(self.missing_time) as mmt:
            t = mmt.calculated_metadata()
            assert t.min_t == dtparse('2014-11-16 21:32:29.952500')
            assert t.max_t == dtparse('2014-11-17 07:59:08.398500')
            assert len(t.trajectories) == 1

            traj = t.trajectories["UW157-20141116T211809"]
            assert np.isclose(traj.min_z, 0.47928014)
            assert np.isclose(traj.max_z, 529.68005)
            assert traj.min_t == dtparse('2014-11-16 21:32:29.952500')
            assert traj.max_t == dtparse('2014-11-17 07:59:08.398500')
            assert np.isclose(traj.first_loc.x, -124.681526638573)
            assert np.isclose(traj.first_loc.y, 43.5022166666667)
            assert len(traj.profiles) == 13
|
joefutrelle/pocean-core
|
pocean/tests/dsg/trajectoryProfile/test_trajectoryProfile_cr.py
|
Python
|
mit
| 4,960 | 0.000605 |
#!/usr/bin/env python3
####################################
# ACE3 automatic deployment script #
# ================================ #
# This is not meant to be run #
# directly! #
####################################
import os
import sys
import shutil
import traceback
import subprocess as sp
from pygithub3 import Github
# GitHub issue whose body is regenerated with the translation report.
TRANSLATIONISSUE = 367
# Issue-body template; the stringtable diagnostic is substituted into {}.
TRANSLATIONBODY = """**How to translate ACE3:**
https://github.com/acemod/ACE3/blob/master/documentation/development/how-to-translate-ace3.md
{}
"""

# Target repository coordinates.
REPOUSER = "acemod"
REPONAME = "ACE3"
REPOPATH = "{}/{}".format(REPOUSER,REPONAME)

# Identity used for automated commits by the CI bot.
USERNAME = "ACE3 Travis"
USEREMAIL = "travis@ace3mod.com"
def update_translations(token):
    """Regenerate the stringtable diagnostic and post it to the tracking issue."""
    raw_report = sp.check_output(
        ["python3", "tools/stringtablediag.py", "--markdown"])
    report = raw_report.decode("utf-8")
    issue = Github(token).get_repo(REPOPATH).get_issue(TRANSLATIONISSUE)
    issue.edit(body=TRANSLATIONBODY.format(report))
def main():
    """Entry point: read the GitHub token and refresh the translation issue.

    Returns 0 on success and 1 on any failure, suitable for ``sys.exit``.
    """
    print("Obtaining token ...")
    try:
        token = os.environ["GH_TOKEN"]
    except KeyError:
        # Only a missing environment variable is expected here; the original
        # bare `except:` would also have swallowed KeyboardInterrupt/SystemExit.
        print("Could not obtain token.")
        print(traceback.format_exc())
        return 1
    else:
        print("done.")

    print("\nUpdating translation issue ...")
    try:
        update_translations(token)
    except Exception:
        # Network/API errors from GitHub are unpredictable; report and fail,
        # but never swallow interpreter-exit exceptions.
        print("Failed to update translation issue.")
        print(traceback.format_exc())
        return 1
    else:
        print("done.")

    return 0
# Script entry point: propagate main()'s status code to the shell.
if __name__ == "__main__":
    sys.exit(main())
|
NemesisRE/ACE3
|
tools/deploy.py
|
Python
|
gpl-2.0
| 1,499 | 0.002668 |
import re
from django.core.urlresolvers import reverse
def test_view_with_scss_file(client, precompiled):
    """
    Test view that renders *SCSS file* that *imports SCSS file from another Django app*.

    :param client: ``pytest-django`` fixture: Django test client
    :param precompiled: custom fixture that asserts pre-compiled content
    """
    status = client.get(reverse('scss-file')).status_code
    assert status == 200
    compiled_css = precompiled('app/layout.scss', 'css').strip()
    assert compiled_css == '.title {\n font: bold 30px Arial, sans-serif;\n}'
def test_view_with_inline_scss(client):
    """
    Test view that renders *inline SCSS* that *imports SCSS file from another Django app*.

    :param client: ``pytest-django`` fixture: Django test client
    """
    response = client.get(reverse('scss-inline'))
    assert response.status_code == 200
    pattern = r'<style type="text/css">.title \{\n\s*font: bold 30px Arial, sans-serif;\n\}\s*</style>'
    body = response.content.decode('utf8')
    assert re.search(pattern, body)
def test_view_with_es6_file(client, precompiled):
    """
    Test view that renders *ES6 file* into *ES5 file*.

    :param client: ``pytest-django`` fixture: Django test client
    :param precompiled: custom fixture that asserts pre-compiled content
    """
    response = client.get(reverse('es6-file'))
    assert response.status_code == 200
    # Expected value is the exact Browserify/Babel bundle of app/scripts.js;
    # compared verbatim, so a compiler-version bump will break this assertion.
    assert precompiled('app/scripts.js', 'js') == (
        '(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=='
        '"function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f='
        'new Error("Cannot find module \'"+o+"\'");throw f.code="MODULE_NOT_FOUND",f}'
        'var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];'
        'return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=='
        '"function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:['
        'function(require,module,exports){\n'
        '\'use strict\';\n'
        '\n'
        'var _framework = require(\'base/framework\');\n'
        '\n'
        'var _framework2 = _interopRequireDefault(_framework);\n'
        '\n'
        'function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : '
        '{ default: obj }; }\n'
        '\n'
        'new _framework2.default();\n'
        'new _framework2.default(\'1.0.1\');\n'
        '\n'
        '},{"base/framework":2}],2:[function(require,module,exports){\n'
        '\'use strict\';\n'
        '\n'
        'Object.defineProperty(exports, "__esModule", {\n'
        ' value: true\n'
        '});\n'
        '\n'
        'function _classCallCheck(instance, Constructor) {'
        ' if (!(instance instanceof Constructor)) {'
        ' throw new TypeError("Cannot call a class as a function"); } }\n'
        '\n'
        'var version = exports.version = \'1.0\';\n'
        '\n'
        'var _class = function _class(customVersion) {\n'
        ' _classCallCheck(this, _class);\n'
        '\n'
        ' console.log(\'Framework v\' + (customVersion || version) + \' initialized\');\n'
        '};\n'
        '\n'
        'exports.default = _class;\n'
        '\n'
        '},{}]},{},[1]);\n'
    )
def test_view_with_inline_es6(client):
    """
    Test view that renders *inline ES6* into *inline ES5*.

    :param client: ``pytest-django`` fixture: Django test client
    """
    response = client.get(reverse('es6-inline'))
    assert response.status_code == 200
    expected = (
        b'"use strict";\n'
        b'\n'
        b'var square = function square(x) {\n'
        b' return x * x;\n'
        b'};\n'
        b'console.log("Square of 2:", square(2));'
    )
    assert expected in response.content
|
kottenator/django-compressor-toolkit
|
tests/integration_tests/test_views.py
|
Python
|
mit
| 3,738 | 0.002943 |
# Copyright (C) 2011, 2012 Abhijit Mahabal
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this
# program. If not, see <http://www.gnu.org/licenses/>
"""Defines the base GUI for the GUI run-mode."""
import logging
import threading
from tkinter import Button, Frame, Label, StringVar, Tk
from tkinter.constants import LEFT
from tkinter.messagebox import askyesno, showinfo
from farg.core.exceptions import AnswerFoundException
from farg.core.ltm.manager import LTMManager
from farg.core.question.question import BooleanQuestion
from farg.core.ui.gui.central_pane import CentralPane
import farg.flags as farg_flags
class RunForNSteps(threading.Thread):
    """Runs controller for up to n steps.

    This does not update the GUI directly; It however results in changing the
    state of the
    attribute "controller" that it holds. This is shared by the GUI and used to
    update itself.

    Before each step, checks that we have not been asked to pause.
    """

    def __init__(self, *, controller, gui, num_steps=1000):
        threading.Thread.__init__(self)
        #: Controller for the whole app.
        self.controller = controller
        #: Maximum number of steps this thread will run (a budget, not a tally).
        self.num_steps = num_steps
        #: GUI being displayed. We need this to communicate some state (such as "we have found
        #: an answer and can now quit.").
        self.gui = gui

    def run(self):  # Name stipulated by Thread. pylint: disable=C0103
        try:
            self.controller.RunUptoNSteps(self.num_steps)
        except AnswerFoundException:
            # Signal the GUI (from this worker thread) that it should shut down;
            # the GUI polls this flag in UpdateDisplay.
            self.gui.quitting_called_from_thread = True
            return
class GUI:
    """Base-class of GUI for an application.

    Provides a :py:mod:`tkinter` based interface to display various components
    such as the workspace,
    and for interacting with the user (such as asking questions).

    **Supported Views**

    The central part of the window---everything except the row of buttons at the
    top---is controlled by
    an instance of the class
    :py:class:`~farg.core.ui.gui.central_pane.CentralPane` (see which for
    further details).The top-left corner of the window allows switching between
    different views.

    **Key Bindings**

    The UI allows running the app at various speeds---full steam ahead,
    step-by-step, or with long
    strides. These keyboard bindings are provided:

    * 'q' for Quit
    * 'c' for Continue (full-steam ahead!)
    * 'p' for Pause while running
    * 's' for Step (run one codelet)
    * 'l' for taking a 10-codelet stride
    * 'k' for taking a 100-codelet stride.
    """

    #: Size and location of the window.
    geometry = '1280x980-0+0'  # Not a const. pylint: disable=C6409

    #: Class handling the central part of the display.
    central_pane_class = CentralPane  # Not a const. pylint: disable=C6409

    def __init__(self, *, controller_class, stopping_condition_fn=None):
        #: Guards the run-state flags and the stepping thread below.
        #: Note: a plain (non-reentrant) Lock; methods must not nest `with`.
        self.run_state_lock = threading.Lock()
        self.pause_stepping = False
        self.quitting = False
        #: Set by the worker thread when an answer is found; polled by
        #: UpdateDisplay to trigger a clean shutdown from the main thread.
        self.quitting_called_from_thread = False
        self.stepping_thread = None
        #: Button pane.
        self.buttons_pane = None  # Set up later.
        #: Central pane (a canvas).
        self.central_pane = None  # Set up later.
        #: A Tk variable tracking codelet count.
        self.codelet_count_var = None  # Set up later.
        self.controller = controller_class(
            ui=self, controller_depth=0, stopping_condition=stopping_condition_fn)
        self.mw = Tk()
        # mw.geometry(self.geometry)
        self.mw.bind('<KeyPress-q>', lambda e: self.Quit())
        self.mw.bind('<KeyPress-s>', lambda e: self.StepsInAnotherThread(1))
        self.mw.bind('<KeyPress-l>', lambda e: self.StepsInAnotherThread(10))
        self.mw.bind('<KeyPress-k>', lambda e: self.StepsInAnotherThread(100))
        self.mw.bind('<KeyPress-c>', lambda e: self.StartThreaded())
        self.mw.bind('<KeyPress-p>', lambda e: self.Pause())
        self.items_to_refresh = []
        self.SetupWindows()
        self.RegisterQuestionHandlers()

    def UpdateDisplay(self):
        """Refresh the display. Erases everything and draws it again."""
        if self.quitting_called_from_thread:
            self.Quit()
        for item in self.items_to_refresh:
            try:
                item.ReDraw()
            except RuntimeError as error:
                # This may occur because the object being updated may have changed.
                # Log a warning and continue.
                # (logging.warn is a deprecated alias of logging.warning.)
                logging.warning('Runtime error while updating: %s', error)
        self.codelet_count_var.set('%d' % self.controller.steps_taken)

    def SetupWindows(self):
        """Sets up frames in the GUI."""
        self.buttons_pane = Frame(self.mw)
        self.PopulateButtonPane(self.buttons_pane)
        self.buttons_pane.grid(row=0, column=0, columnspan=2)
        self.PopulateCentralPane()

    def StepsInAnotherThread(self, num_steps):
        """Launch a worker thread running the controller for *num_steps* steps.

        No-op if we are quitting or a previous worker is still alive.
        """
        with self.run_state_lock:
            if self.quitting:
                return
            if self.stepping_thread:
                if self.stepping_thread.is_alive():
                    return
                else:
                    self.stepping_thread = None
            self.stepping_thread = RunForNSteps(
                controller=self.controller, num_steps=num_steps, gui=self)
            self.pause_stepping = False
            self.stepping_thread.start()

    def StartThreaded(self):
        """Run "full steam ahead" (a large step budget) in a worker thread."""
        self.StepsInAnotherThread(10000)

    def Pause(self):
        """Ask the worker to pause and wait for it to finish."""
        with self.run_state_lock:
            self.pause_stepping = True
        # Join outside the lock: run_state_lock is non-reentrant and the
        # worker may still need to interact with shared state while winding down.
        if self.stepping_thread:
            self.stepping_thread.join()
            self.stepping_thread = None

    def Quit(self):
        """Called when quitting.

        Ensures that all threads have exited, and LTMs saved.
        """
        with self.run_state_lock:
            self.quitting = True
            self.pause_stepping = True
        # Pause() takes run_state_lock itself, so it must be called after
        # the `with` block above (the lock is not reentrant).
        self.Pause()
        self.mw.quit()
        LTMManager.SaveAllOpenLTMS()

    def PopulateButtonPane(self, frame):
        """Adds buttons to the top row."""
        Button(frame, text='Start', command=self.StartThreaded).pack(side=LEFT)
        Button(frame, text='Pause', command=self.Pause).pack(side=LEFT)
        Button(frame, text='Quit', command=self.Quit).pack(side=LEFT)
        self.codelet_count_var = StringVar()
        self.codelet_count_var.set('0')
        Label(
            frame,
            textvariable=self.codelet_count_var,
            font=('Helvetica', 28, 'bold')).pack(side=LEFT)

    def PopulateCentralPane(self):
        """Sets up the display in the central part.

        If an item must be refreshed, add it to items_to_refresh.
        """
        height = farg_flags.FargFlags.gui_canvas_height
        width = farg_flags.FargFlags.gui_canvas_width
        canvas = self.central_pane_class(
            self.mw,
            self.controller,
            height=int(height),
            width=int(width),
            background='#EEFFFF')
        canvas.grid(row=1, column=0)
        self.central_pane = canvas
        self.items_to_refresh.append(canvas)
        canvas.ReDraw()

    def PopulateInteractionPane(self):
        """Sets up the interaction pane at the bottom."""
        pass

    def AskQuestion(self, question):
        """Asks the question (by delegating to the Ask method of the question)."""
        return question.Ask(self)

    def RegisterQuestionHandlers(self):  # Needs to be a method. pylint: disable=R0201
        """Registers how to ask a given type of question."""

        def BooleanQuestionHandler(question, ui):  # pylint: disable=W0613
            return askyesno('', question.question_string)

        BooleanQuestion.Ask = BooleanQuestionHandler

    def DisplayMessage(self, message):  # Needs to be a method. pylint: disable=R0201
        """Show *message* to the user in a modal info dialog."""
        showinfo('', message)
|
amahabal/PySeqsee
|
farg/core/ui/gui/__init__.py
|
Python
|
gpl-3.0
| 7,857 | 0.005982 |
from os.path import join
import pytest
from cobra.io import load_json_model, write_sbml_model
def test_load_json_model_valid(data_directory, tmp_path):
    """Test loading a valid annotation from JSON."""
    model = load_json_model(join(data_directory, "valid_annotation_format.json"))
    expected = {
        "bigg.reaction": [["is", "PFK26"]],
        "kegg.reaction": [["is", "R02732"]],
        "rhea": [["is", "15656"]],
    }
    for met in model.metabolites:
        assert met.annotation == expected
    # Round-trip: the loaded model must also be serializable as SBML.
    output_path = join(str(tmp_path), "valid_annotation_output.xml")
    write_sbml_model(model, output_path)
def test_load_json_model_invalid(data_directory):
    """Test that loading an invalid annotation from JSON raises TypeError."""
    path = join(data_directory, "invalid_annotation_format.json")
    with pytest.raises(TypeError):
        # The return value was previously bound to an unused local (F841);
        # only the raised exception matters here.
        load_json_model(path)
|
opencobra/cobrapy
|
src/cobra/test/test_io/test_annotation_format.py
|
Python
|
gpl-2.0
| 944 | 0 |
# Copyright 2015 Tesora Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from proboscis import SkipTest
from trove.common import exception
from trove.common.utils import poll_until
from trove.tests.scenario.runners.test_runners import TestRunner
from troveclient.compat import exceptions
class DatabaseActionsRunner(TestRunner):
    """Scenario runner exercising Trove's database CRUD API on an instance.

    Database definitions come from ``self.test_helper``; ``self.db_defs``
    caches the definitions created by ``run_databases_create`` so the later
    list/delete scenarios can refer to them.
    """

    def __init__(self):
        super(DatabaseActionsRunner, self).__init__()
        # Definitions of databases created during the run (populated by
        # run_databases_create, consumed by the list/delete tests).
        self.db_defs = []

    @property
    def first_db_def(self):
        """First created database definition; skips the test if none exist."""
        if self.db_defs:
            return self.db_defs[0]
        raise SkipTest("No valid database definitions provided.")

    @property
    def non_existing_db_def(self):
        """A database definition guaranteed not to exist on the instance."""
        db_def = self.test_helper.get_non_existing_database_definition()
        if db_def:
            return db_def
        raise SkipTest("No valid database definitions provided.")

    def run_databases_create(self, expected_http_code=202):
        databases = self.test_helper.get_valid_database_definitions()
        if databases:
            self.db_defs = self.assert_databases_create(
                self.instance_info.id, databases, expected_http_code)
        else:
            raise SkipTest("No valid database definitions provided.")

    def assert_databases_create(self, instance_id, serial_databases_def,
                                expected_http_code):
        """Create the databases and wait until they all appear in listings."""
        self.auth_client.databases.create(instance_id, serial_databases_def)
        self.assert_client_code(expected_http_code)
        self._wait_for_database_create(instance_id, serial_databases_def)
        return serial_databases_def

    def run_databases_list(self, expected_http_code=200):
        self.assert_databases_list(
            self.instance_info.id, self.db_defs, expected_http_code)

    def assert_databases_list(self, instance_id, expected_database_defs,
                              expected_http_code, limit=2):
        """Verify listing contents, exclusion of system DBs, and pagination."""
        full_list = self.auth_client.databases.list(instance_id)
        self.assert_client_code(expected_http_code)
        listed_databases = {database.name: database for database in full_list}
        self.assert_is_none(full_list.next,
                            "Unexpected pagination in the list.")
        for database_def in expected_database_defs:
            database_name = database_def['name']
            self.assert_true(
                database_name in listed_databases,
                "Database not included in the 'database-list' output: %s" %
                database_name)

        # Check that the system (ignored) databases are not included in the
        # output.
        system_databases = self.get_system_databases()
        self.assert_false(
            any(name in listed_databases for name in system_databases),
            "System databases should not be included in the 'database-list' "
            "output.")

        # Test list pagination.
        list_page = self.auth_client.databases.list(instance_id, limit=limit)
        self.assert_client_code(expected_http_code)

        self.assert_true(len(list_page) <= limit)
        if len(full_list) > limit:
            self.assert_is_not_none(list_page.next, "List page is missing.")
        else:
            self.assert_is_none(list_page.next, "An extra page in the list.")
        marker = list_page.next

        self.assert_pagination_match(list_page, full_list, 0, limit)
        if marker:
            last_database = list_page[-1]
            expected_marker = last_database.name
            self.assert_equal(expected_marker, marker,
                              "Pagination marker should be the last element "
                              "in the page.")
            list_page = self.auth_client.databases.list(
                instance_id, marker=marker)
            self.assert_client_code(expected_http_code)
            self.assert_pagination_match(
                list_page, full_list, limit, len(full_list))

    def _wait_for_database_create(self, instance_id, expected_database_defs):
        """Poll until every expected database name shows up in the listing."""
        expected_db_names = {db_def['name']
                             for db_def in expected_database_defs}
        self.report.log("Waiting for all created databases to appear in the "
                        "listing: %s" % expected_db_names)

        def _all_exist():
            all_dbs = self._get_db_names(instance_id)
            return all(db in all_dbs for db in expected_db_names)

        try:
            poll_until(_all_exist, time_out=self.GUEST_CAST_WAIT_TIMEOUT_SEC)
            self.report.log("All databases now exist on the instance.")
        except exception.PollTimeOut:
            self.fail("Some databases were not created within the poll "
                      "timeout: %ds" % self.GUEST_CAST_WAIT_TIMEOUT_SEC)

    def _get_db_names(self, instance_id):
        # Returns a name -> database mapping; callers use it for membership
        # tests on the names.
        full_list = self.auth_client.databases.list(instance_id)
        return {database.name: database for database in full_list}

    def run_database_create_with_no_attributes(
            self, expected_exception=exceptions.BadRequest,
            expected_http_code=400):
        self.assert_databases_create_failure(
            self.instance_info.id, {}, expected_exception, expected_http_code)

    def run_database_create_with_blank_name(
            self, expected_exception=exceptions.BadRequest,
            expected_http_code=400):
        self.assert_databases_create_failure(
            self.instance_info.id, {'name': ''},
            expected_exception, expected_http_code)

    def run_existing_database_create(
            self, expected_exception=exceptions.BadRequest,
            expected_http_code=400):
        self.assert_databases_create_failure(
            self.instance_info.id, self.first_db_def,
            expected_exception, expected_http_code)

    def assert_databases_create_failure(
            self, instance_id, serial_databases_def,
            expected_exception, expected_http_code):
        """Assert that creating the given definition fails as expected."""
        self.assert_raises(
            expected_exception,
            expected_http_code,
            self.auth_client.databases.create,
            instance_id,
            serial_databases_def)

    def run_system_database_create(
            self, expected_exception=exceptions.BadRequest,
            expected_http_code=400):
        # TODO(pmalik): Actions on system users and databases should probably
        # return Forbidden 403 instead. The current error messages are
        # confusing (talking about a malformed request).
        system_databases = self.get_system_databases()
        database_defs = [{'name': name} for name in system_databases]
        if system_databases:
            self.assert_databases_create_failure(
                self.instance_info.id, database_defs,
                expected_exception, expected_http_code)

    def run_database_delete(self, expected_http_code=202):
        for database_def in self.db_defs:
            self.assert_database_delete(
                self.instance_info.id, database_def['name'],
                expected_http_code)

    def assert_database_delete(
            self,
            instance_id,
            database_name,
            expected_http_code):
        """Delete the database and wait for it to vanish from listings."""
        self.auth_client.databases.delete(instance_id, database_name)
        self.assert_client_code(expected_http_code)
        self._wait_for_database_delete(instance_id, database_name)

    def _wait_for_database_delete(self, instance_id, deleted_database_name):
        """Poll until the deleted database no longer appears in the listing."""
        self.report.log("Waiting for deleted database to disappear from the "
                        "listing: %s" % deleted_database_name)

        def _db_is_gone():
            all_dbs = self._get_db_names(instance_id)
            return deleted_database_name not in all_dbs

        try:
            poll_until(_db_is_gone, time_out=self.GUEST_CAST_WAIT_TIMEOUT_SEC)
            self.report.log("Database is now gone from the instance.")
        except exception.PollTimeOut:
            self.fail("Database still listed after the poll timeout: %ds" %
                      self.GUEST_CAST_WAIT_TIMEOUT_SEC)

    def run_nonexisting_database_delete(self, expected_http_code=202):
        # Deleting a non-existing database is expected to succeed as if the
        # database was deleted.
        self.assert_database_delete(
            self.instance_info.id, self.non_existing_db_def['name'],
            expected_http_code)

    def run_system_database_delete(
            self, expected_exception=exceptions.BadRequest,
            expected_http_code=400):
        # TODO(pmalik): Actions on system users and databases should probably
        # return Forbidden 403 instead. The current error messages are
        # confusing (talking about a malformed request).
        system_databases = self.get_system_databases()
        if system_databases:
            for name in system_databases:
                self.assert_database_delete_failure(
                    self.instance_info.id, name,
                    expected_exception, expected_http_code)

    def assert_database_delete_failure(
            self, instance_id, database_name,
            expected_exception, expected_http_code):
        """Assert that deleting the named database fails as expected."""
        self.assert_raises(expected_exception, expected_http_code,
                           self.auth_client.databases.delete,
                           instance_id, database_name)

    def get_system_databases(self):
        """Names of datastore-internal databases the API must not expose."""
        return self.get_datastore_config_property('ignore_dbs')
|
Tesora-Release/tesora-trove
|
trove/tests/scenario/runners/database_actions_runners.py
|
Python
|
apache-2.0
| 9,910 | 0 |
# Copyright 2015 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Volume driver for Dell Storage Center."""
from oslo_log import log as logging
from oslo_utils import excutils
from cinder import exception
from cinder.i18n import _, _LE, _LI
from cinder.volume import driver
from cinder.volume.drivers.dell import dell_storagecenter_common
# Module-level logger shared by the driver implementation below.
LOG = logging.getLogger(__name__)
class DellStorageCenterISCSIDriver(dell_storagecenter_common.DellCommonDriver,
                                   driver.ISCSIDriver):
    """Implements commands for Dell StorageCenter ISCSI management.

    To enable the driver add the following line to the cinder configuration:
        volume_driver=cinder.volume.drivers.dell.DellStorageCenterISCSIDriver

    Version history:
        1.0.0 - Initial driver
        1.1.0 - Added extra spec support for Storage Profile selection
        1.2.0 - Added consistency group support.
        2.0.0 - Switched to inheriting functional objects rather than volume
                driver.
        2.1.0 - Added support for ManageableVD.
        2.2.0 - Driver retype support for switching volume's Storage Profile.
                Added API 2.2 support.
        2.3.0 - Added Legacy Port Mode Support
        2.3.1 - Updated error handling.
    """

    VERSION = '2.3.1'

    def __init__(self, *args, **kwargs):
        super(DellStorageCenterISCSIDriver, self).__init__(*args, **kwargs)
        # Backend name falls back to a fixed default when not configured.
        self.backend_name = (
            self.configuration.safe_get('volume_backend_name')
            or 'Dell-iSCSI')

    def initialize_connection(self, volume, connector):
        """Map *volume* to the server identified by *connector*.

        :returns: dict with 'driver_volume_type' and iSCSI connection 'data'.
        :raises VolumeBackendAPIException: if the volume cannot be mapped.
        """
        # Initialize_connection will find or create a server identified by the
        # connector on the Dell backend. It will then map the volume to it
        # and return the properties as follows..
        # {'driver_volume_type': 'iscsi',
        #  data = {'target_discovered': False,
        #          'target_iqn': preferred iqn,
        #          'target_iqns': all iqns,
        #          'target_portal': preferred portal,
        #          'target_portals': all portals,
        #          'target_lun': preferred lun,
        #          'target_luns': all luns,
        #          'access_mode': access_mode
        #          }

        # We use id to name the volume name as it is a
        # known unique name.
        volume_name = volume.get('id')
        initiator_name = connector.get('initiator')
        multipath = connector.get('multipath', False)
        LOG.info(_LI('initialize_ connection: %(vol)s:%(initiator)s'),
                 {'vol': volume_name,
                  'initiator': initiator_name})
        with self._client.open_connection() as api:
            try:
                # Find our server.
                server = api.find_server(initiator_name)
                # No? Create it.
                if server is None:
                    server = api.create_server(initiator_name)
                # Find the volume on the storage center.
                scvolume = api.find_volume(volume_name)

                # if we have a server and a volume lets bring them together.
                if server is not None and scvolume is not None:
                    mapping = api.map_volume(scvolume,
                                             server)
                    if mapping is not None:
                        # Since we just mapped our volume we had best update
                        # our sc volume object.
                        scvolume = api.find_volume(volume_name)

                        # Our return.
                        iscsiprops = {}
                        ip = None
                        port = None
                        if not multipath:
                            # We want to make sure we point to the specified
                            # ip address for our target_portal return. This
                            # isn't an issue with multipath since it should
                            # try all the alternate portal.
                            ip = self.configuration.iscsi_ip_address
                            port = self.configuration.iscsi_port

                        # Three cases that should all be satisfied with the
                        # same return of Target_Portal and Target_Portals.
                        # 1. Nova is calling us so we need to return the
                        #    Target_Portal stuff.  It should ignore the
                        #    Target_Portals stuff.
                        # 2. OS brick is calling us in multipath mode so we
                        #    want to return Target_Portals.  It will ignore
                        #    the Target_Portal stuff.
                        # 3. OS brick is calling us in single path mode so
                        #    we want to return Target_Portal and
                        #    Target_Portals as alternates.
                        iscsiprops = (api.find_iscsi_properties(scvolume,
                                                                ip,
                                                                port))

                        # Return our iscsi properties.
                        return {'driver_volume_type': 'iscsi',
                                'data': iscsiprops}
            # Re-raise any backend exception.
            except exception.VolumeBackendAPIException:
                with excutils.save_and_reraise_exception():
                    LOG.error(_LE('Failed to initialize connection'))
            # If there is a data structure issue then detail the exception
            # and bail with a Backend Exception.
            except Exception as error:
                LOG.error(error)
                raise exception.VolumeBackendAPIException(error)

        # We get here because our mapping is none or we have no valid iqn to
        # return so blow up.
        raise exception.VolumeBackendAPIException(
            _('Unable to map volume'))

    def terminate_connection(self, volume, connector, force=False, **kwargs):
        """Unmap *volume* from the server identified by *connector*.

        :raises VolumeBackendAPIException: if the unmap does not succeed.
        """
        # Grab some initial info.
        initiator_name = connector.get('initiator')
        volume_name = volume.get('id')
        LOG.debug('Terminate connection: %(vol)s:%(initiator)s',
                  {'vol': volume_name,
                   'initiator': initiator_name})
        with self._client.open_connection() as api:
            try:
                scserver = api.find_server(initiator_name)
                # Find the volume on the storage center.
                scvolume = api.find_volume(volume_name)

                # If we have a server and a volume lets pull them apart.
                if (scserver is not None and
                        scvolume is not None and
                        api.unmap_volume(scvolume, scserver) is True):
                    LOG.debug('Connection terminated')
                    return
            except Exception:
                with excutils.save_and_reraise_exception():
                    LOG.error(_LE('Failed to terminate connection '
                                  '%(initiator)s %(vol)s'),
                              {'initiator': initiator_name,
                               'vol': volume_name})
        # Reached when server/volume lookup fails or unmap returns False.
        raise exception.VolumeBackendAPIException(
            _('Terminate connection failed'))
|
nikesh-mahalka/cinder
|
cinder/volume/drivers/dell/dell_storagecenter_iscsi.py
|
Python
|
apache-2.0
| 7,849 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-01 20:22
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the ``Sequence`` model to ``StoreSequence``."""

    # Must run after the last migration that touched Sequence.
    dependencies = [
        ('valet', '0003_sequence_driver'),
    ]

    operations = [
        # RenameModel preserves the table data; only the model (and its
        # default table name) changes.
        migrations.RenameModel(
            old_name='Sequence',
            new_name='StoreSequence',
        ),
    ]
|
rayhu-osu/vcube
|
valet/migrations/0004_auto_20170801_1622.py
|
Python
|
mit
| 418 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------------
# File Name: try.py
# Author: Zhao Yanbai
# Wed Dec 28 21:41:17 2011
# Description: none
# --------------------------------------------------------------------------
try:
s = input("Enter an integer: ")
n = int(s)
print "valid integer entered: ", n
except NameError as nerr:
print nerr
except ValueError as verr:
print verr
|
acevest/acecode
|
learn/python/try.py
|
Python
|
gpl-2.0
| 456 | 0.013158 |
from __future__ import absolute_import, division, print_function
import copy
from ._compat import iteritems
from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError
def asdict(
    inst,
    recurse=True,
    filter=None,
    dict_factory=dict,
    retain_collection_types=False,
):
    """
    Return the ``attrs`` attribute values of *inst* as a dict.

    Optionally recurse into other ``attrs``-decorated classes.

    :param inst: Instance of an ``attrs``-decorated class.
    :param bool recurse: Recurse into classes that are also
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the :class:`attr.Attribute` as the first argument and the
        value as the second argument.
    :param callable dict_factory: A callable to produce dictionaries from. For
        example, to produce ordered dictionaries instead of normal Python
        dictionaries, pass in ``collections.OrderedDict``.
    :param bool retain_collection_types: Do not convert to ``list`` when
        encountering an attribute whose type is ``tuple`` or ``set``. Only
        meaningful if ``recurse`` is ``True``.

    :rtype: return type of *dict_factory*

    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 16.0.0 *dict_factory*
    .. versionadded:: 16.1.0 *retain_collection_types*
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()
    for a in attrs:
        v = getattr(inst, a.name)
        # The filter sees the Attribute object and the value; a falsy
        # return drops the attribute entirely.
        if filter is not None and not filter(a, v):
            continue
        if recurse is True:
            if has(v.__class__):
                # Nested attrs instance: serialize it recursively.
                rv[a.name] = asdict(
                    v, True, filter, dict_factory, retain_collection_types
                )
            elif isinstance(v, (tuple, list, set)):
                # Serialize each element; keep the tuple/set type only when
                # retain_collection_types is set, otherwise fall back to list.
                cf = v.__class__ if retain_collection_types is True else list
                rv[a.name] = cf(
                    [
                        _asdict_anything(
                            i, filter, dict_factory, retain_collection_types
                        )
                        for i in v
                    ]
                )
            elif isinstance(v, dict):
                # Both keys and values of mappings are serialized.
                df = dict_factory
                rv[a.name] = df(
                    (
                        _asdict_anything(
                            kk, filter, df, retain_collection_types
                        ),
                        _asdict_anything(
                            vv, filter, df, retain_collection_types
                        ),
                    )
                    for kk, vv in iteritems(v)
                )
            else:
                rv[a.name] = v
        else:
            rv[a.name] = v
    return rv
def _asdict_anything(val, filter, dict_factory, retain_collection_types):
    """
    ``asdict`` only works on attrs instances, this works on anything.

    Recursion helper: dispatches on the runtime type of *val* and applies
    the same *filter*/*dict_factory*/*retain_collection_types* options as
    :func:`asdict`.
    """
    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
        # Attrs class.
        rv = asdict(val, True, filter, dict_factory, retain_collection_types)
    elif isinstance(val, (tuple, list, set)):
        # Sequence/set: recurse into every member.
        cf = val.__class__ if retain_collection_types is True else list
        rv = cf(
            [
                _asdict_anything(
                    i, filter, dict_factory, retain_collection_types
                )
                for i in val
            ]
        )
    elif isinstance(val, dict):
        # Mapping: recurse into keys and values alike.
        df = dict_factory
        rv = df(
            (
                _asdict_anything(kk, filter, df, retain_collection_types),
                _asdict_anything(vv, filter, df, retain_collection_types),
            )
            for kk, vv in iteritems(val)
        )
    else:
        # Plain value: returned unchanged.
        rv = val
    return rv
def astuple(
    inst,
    recurse=True,
    filter=None,
    tuple_factory=tuple,
    retain_collection_types=False,
):
    """
    Return the ``attrs`` attribute values of *inst* as a tuple.

    Optionally recurse into other ``attrs``-decorated classes.

    :param inst: Instance of an ``attrs``-decorated class.
    :param bool recurse: Recurse into classes that are also
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the :class:`attr.Attribute` as the first argument and the
        value as the second argument.
    :param callable tuple_factory: A callable to produce tuples from. For
        example, to produce lists instead of tuples.
    :param bool retain_collection_types: Do not convert to ``list``
        or ``dict`` when encountering an attribute which type is
        ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
        ``True``.

    :rtype: return type of *tuple_factory*

    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 16.2.0
    """
    attrs = fields(inst.__class__)
    rv = []
    retain = retain_collection_types  # Very long. :/
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue
        if recurse is True:
            if has(v.__class__):
                # Nested attrs instance: recurse with the same options.
                rv.append(
                    astuple(
                        v,
                        recurse=True,
                        filter=filter,
                        tuple_factory=tuple_factory,
                        retain_collection_types=retain,
                    )
                )
            elif isinstance(v, (tuple, list, set)):
                # Only members that are attrs instances are converted;
                # everything else passes through untouched.
                cf = v.__class__ if retain is True else list
                rv.append(
                    cf(
                        [
                            astuple(
                                j,
                                recurse=True,
                                filter=filter,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(j.__class__)
                            else j
                            for j in v
                        ]
                    )
                )
            elif isinstance(v, dict):
                # Mappings keep their type only when retain is set.
                df = v.__class__ if retain is True else dict
                rv.append(
                    df(
                        (
                            astuple(
                                kk,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(kk.__class__)
                            else kk,
                            astuple(
                                vv,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(vv.__class__)
                            else vv,
                        )
                        for kk, vv in iteritems(v)
                    )
                )
            else:
                rv.append(v)
        else:
            rv.append(v)
    # When the caller asked for a list we already have one; avoid an
    # extra copy through tuple_factory.
    return rv if tuple_factory is list else tuple_factory(rv)
def has(cls):
    """
    Check whether *cls* is a class with ``attrs`` attributes.

    :param type cls: Class to introspect.
    :raise TypeError: If *cls* is not a class.

    :rtype: :class:`bool`
    """
    # attrs-decorated classes carry an ``__attrs_attrs__`` tuple; anything
    # else either lacks the attribute or (defensively) has it set to None.
    try:
        return cls.__attrs_attrs__ is not None
    except AttributeError:
        return False
def assoc(inst, **changes):
    """
    Copy *inst* and apply *changes*.

    :param inst: Instance of a class with ``attrs`` attributes.
    :param changes: Keyword changes in the new copy.

    :return: A copy of inst with *changes* incorporated.

    :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
        be found on *cls*.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. deprecated:: 17.1.0
        Use :func:`evolve` instead.
    """
    import warnings

    warnings.warn(
        "assoc is deprecated and will be removed after 2018/01.",
        DeprecationWarning,
        stacklevel=2,
    )
    # Shallow-copy the instance, then overwrite the requested attributes
    # directly via _obj_setattr (bypassing __init__, validators and
    # converters -- one reason this helper is deprecated).
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for k, v in iteritems(changes):
        # fields() returns a namedtuple-like object, so unknown names
        # fall back to the NOTHING sentinel.
        a = getattr(attrs, k, NOTHING)
        if a is NOTHING:
            raise AttrsAttributeNotFoundError(
                "{k} is not an attrs attribute on {cl}.".format(
                    k=k, cl=new.__class__
                )
            )
        _obj_setattr(new, k, v)
    return new
def evolve(inst, **changes):
    """
    Create a new instance, based on *inst* with *changes* applied.

    :param inst: Instance of a class with ``attrs`` attributes.
    :param changes: Keyword changes in the new copy.

    :return: A copy of inst with *changes* incorporated.

    :raise TypeError: If *attr_name* couldn't be found in the class
        ``__init__``.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    ..  versionadded:: 17.1.0
    """
    klass = inst.__class__
    for attrib in fields(klass):
        # Attributes excluded from __init__ cannot be passed to the
        # constructor, so they are skipped entirely.
        if not attrib.init:
            continue
        attr_name = attrib.name
        # A private attribute ("_x") is exposed as "x" in __init__.
        init_name = attr_name[1:] if attr_name.startswith("_") else attr_name
        # Explicit changes win; everything else is carried over.
        changes.setdefault(init_name, getattr(inst, attr_name))
    return klass(**changes)
|
fnaum/rez
|
src/rez/vendor/attr/_funcs.py
|
Python
|
lgpl-3.0
| 9,725 | 0 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the shared anti-decay veteran-reward tangible template.

    NOTE(review): this file is autogenerated (see header); edits outside
    the MODIFICATIONS markers may be lost on regeneration.
    """
    result = Tangible()

    result.template = "object/tangible/veteran_reward/shared_antidecay.iff"
    # -1 presumably means "no attribute template" -- TODO confirm against
    # the swgpy template loader.
    result.attribute_template_id = -1
    result.stfName("item_n","veteran_reward_antidecay")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/veteran_reward/shared_antidecay.py
|
Python
|
mit
| 459 | 0.045752 |
from __future__ import annotations
import numbers
from typing import (
TYPE_CHECKING,
overload,
)
import warnings
import numpy as np
from pandas._libs import (
lib,
missing as libmissing,
)
from pandas._typing import (
ArrayLike,
AstypeArg,
Dtype,
DtypeObj,
npt,
type_t,
)
from pandas.compat.numpy import function as nv
from pandas.core.dtypes.common import (
is_bool_dtype,
is_float,
is_float_dtype,
is_integer_dtype,
is_list_like,
is_numeric_dtype,
pandas_dtype,
)
from pandas.core.dtypes.dtypes import (
ExtensionDtype,
register_extension_dtype,
)
from pandas.core.dtypes.missing import isna
from pandas.core import ops
from pandas.core.arrays import ExtensionArray
from pandas.core.arrays.masked import (
BaseMaskedArray,
BaseMaskedDtype,
)
if TYPE_CHECKING:
import pyarrow
@register_extension_dtype
class BooleanDtype(BaseMaskedDtype):
    """
    Extension dtype for boolean data.

    .. versionadded:: 1.0.0

    .. warning::

       BooleanDtype is considered experimental. The implementation and
       parts of the API may change without warning.

    Attributes
    ----------
    None

    Methods
    -------
    None

    Examples
    --------
    >>> pd.BooleanDtype()
    BooleanDtype
    """

    name = "boolean"

    # https://github.com/python/mypy/issues/4125
    # error: Signature of "type" incompatible with supertype "BaseMaskedDtype"
    @property
    def type(self) -> type:  # type: ignore[override]
        return np.bool_

    @property
    def kind(self) -> str:
        # 'b' is the numpy kind code for boolean.
        return "b"

    @property
    def numpy_dtype(self) -> np.dtype:
        return np.dtype("bool")

    @classmethod
    def construct_array_type(cls) -> type_t[BooleanArray]:
        """
        Return the array type associated with this dtype.

        Returns
        -------
        type
        """
        return BooleanArray

    def __repr__(self) -> str:
        return "BooleanDtype"

    @property
    def _is_boolean(self) -> bool:
        return True

    @property
    def _is_numeric(self) -> bool:
        # Booleans participate in numeric reductions (sum, mean, ...).
        return True

    def __from_arrow__(
        self, array: pyarrow.Array | pyarrow.ChunkedArray
    ) -> BooleanArray:
        """
        Construct BooleanArray from pyarrow Array/ChunkedArray.
        """
        import pyarrow

        if array.type != pyarrow.bool_():
            raise TypeError(f"Expected array of boolean type, got {array.type} instead")

        if isinstance(array, pyarrow.Array):
            chunks = [array]
        else:
            # pyarrow.ChunkedArray
            chunks = array.chunks

        results = []
        for arr in chunks:
            # Arrow stores a validity bitmap (buffer 0) and the packed
            # values (buffer 1); decode each into a plain numpy array.
            buflist = arr.buffers()
            data = pyarrow.BooleanArray.from_buffers(
                arr.type, len(arr), [None, buflist[1]], offset=arr.offset
            ).to_numpy(zero_copy_only=False)
            if arr.null_count != 0:
                mask = pyarrow.BooleanArray.from_buffers(
                    arr.type, len(arr), [None, buflist[0]], offset=arr.offset
                ).to_numpy(zero_copy_only=False)
                # Arrow's bitmap marks *valid* entries; pandas' mask marks
                # *missing* ones, hence the inversion.
                mask = ~mask
            else:
                mask = np.zeros(len(arr), dtype=bool)

            bool_arr = BooleanArray(data, mask)
            results.append(bool_arr)

        if not results:
            # Empty ChunkedArray: return a length-0 BooleanArray.
            return BooleanArray(
                np.array([], dtype=np.bool_), np.array([], dtype=np.bool_)
            )
        else:
            return BooleanArray._concat_same_type(results)

    def _get_common_dtype(self, dtypes: list[DtypeObj]) -> DtypeObj | None:
        # Handle only boolean + np.bool_ -> boolean, since other cases like
        # Int64 + boolean -> Int64 will be handled by the other type
        if all(
            isinstance(t, BooleanDtype)
            or (isinstance(t, np.dtype) and (np.issubdtype(t, np.bool_)))
            for t in dtypes
        ):
            return BooleanDtype()
        else:
            return None
def coerce_to_array(
    values, mask=None, copy: bool = False
) -> tuple[np.ndarray, np.ndarray]:
    """
    Coerce the input values array to numpy arrays with a mask.

    Parameters
    ----------
    values : 1D list-like
    mask : bool 1D array, optional
    copy : bool, default False
        if True, copy the input

    Returns
    -------
    tuple of (values, mask)
    """
    if isinstance(values, BooleanArray):
        if mask is not None:
            raise ValueError("cannot pass mask for BooleanArray input")
        # Already coerced: just unwrap (and optionally copy) its buffers.
        values, mask = values._data, values._mask
        if copy:
            values = values.copy()
            mask = mask.copy()
        return values, mask

    mask_values = None
    if isinstance(values, np.ndarray) and values.dtype == np.bool_:
        if copy:
            values = values.copy()
    elif isinstance(values, np.ndarray) and is_numeric_dtype(values.dtype):
        # Numeric ndarray: NaN positions become mask entries; the remaining
        # values must round-trip through bool losslessly (i.e. be exactly
        # 0/1), otherwise the input is not bool-like.
        mask_values = isna(values)

        values_bool = np.zeros(len(values), dtype=bool)
        values_bool[~mask_values] = values[~mask_values].astype(bool)

        if not np.all(
            values_bool[~mask_values].astype(values.dtype) == values[~mask_values]
        ):
            raise TypeError("Need to pass bool-like values")

        values = values_bool
    else:
        # Generic list-like: infer the dtype and only accept booleans,
        # empties, or 0/1-valued integer/float-like content.
        values_object = np.asarray(values, dtype=object)

        inferred_dtype = lib.infer_dtype(values_object, skipna=True)
        integer_like = ("floating", "integer", "mixed-integer-float")
        if inferred_dtype not in ("boolean", "empty") + integer_like:
            raise TypeError("Need to pass bool-like values")

        mask_values = isna(values_object)
        values = np.zeros(len(values), dtype=bool)
        values[~mask_values] = values_object[~mask_values].astype(bool)

        # if the values were integer-like, validate it were actually 0/1's
        if (inferred_dtype in integer_like) and not (
            np.all(
                values[~mask_values].astype(float)
                == values_object[~mask_values].astype(float)
            )
        ):
            raise TypeError("Need to pass bool-like values")

    if mask is None and mask_values is None:
        mask = np.zeros(len(values), dtype=bool)
    elif mask is None:
        mask = mask_values
    else:
        if isinstance(mask, np.ndarray) and mask.dtype == np.bool_:
            if mask_values is not None:
                # Combine the explicit mask with the NaN positions found
                # during value coercion above.
                mask = mask | mask_values
            else:
                if copy:
                    mask = mask.copy()
        else:
            mask = np.array(mask, dtype=bool)
            if mask_values is not None:
                mask = mask | mask_values

    if values.shape != mask.shape:
        raise ValueError("values.shape and mask.shape must match")

    return values, mask
class BooleanArray(BaseMaskedArray):
    """
    Array of boolean (True/False) data with missing values.

    This is a pandas Extension array for boolean data, under the hood
    represented by 2 numpy arrays: a boolean array with the data and
    a boolean array with the mask (True indicating missing).

    BooleanArray implements Kleene logic (sometimes called three-value
    logic) for logical operations. See :ref:`boolean.kleene` for more.

    To construct an BooleanArray from generic array-like input, use
    :func:`pandas.array` specifying ``dtype="boolean"`` (see examples
    below).

    .. versionadded:: 1.0.0

    .. warning::

       BooleanArray is considered experimental. The implementation and
       parts of the API may change without warning.

    Parameters
    ----------
    values : numpy.ndarray
        A 1-d boolean-dtype array with the data.
    mask : numpy.ndarray
        A 1-d boolean-dtype array indicating missing values (True
        indicates missing).
    copy : bool, default False
        Whether to copy the `values` and `mask` arrays.

    Attributes
    ----------
    None

    Methods
    -------
    None

    Returns
    -------
    BooleanArray

    Examples
    --------
    Create an BooleanArray with :func:`pandas.array`:

    >>> pd.array([True, False, None], dtype="boolean")
    <BooleanArray>
    [True, False, <NA>]
    Length: 3, dtype: boolean
    """

    # The value used to fill '_data' to avoid upcasting
    _internal_fill_value = False
    # Fill values used for any/all
    _truthy_value = True
    _falsey_value = False
    # String spellings accepted by _from_sequence_of_strings.
    _TRUE_VALUES = {"True", "TRUE", "true", "1", "1.0"}
    _FALSE_VALUES = {"False", "FALSE", "false", "0", "0.0"}

    def __init__(self, values: np.ndarray, mask: np.ndarray, copy: bool = False):
        if not (isinstance(values, np.ndarray) and values.dtype == np.bool_):
            raise TypeError(
                "values should be boolean numpy array. Use "
                "the 'pd.array' function instead"
            )
        self._dtype = BooleanDtype()
        super().__init__(values, mask, copy=copy)

    @property
    def dtype(self) -> BooleanDtype:
        return self._dtype

    @classmethod
    def _from_sequence(
        cls, scalars, *, dtype: Dtype | None = None, copy: bool = False
    ) -> BooleanArray:
        if dtype:
            assert dtype == "boolean"
        values, mask = coerce_to_array(scalars, copy=copy)
        return BooleanArray(values, mask)

    @classmethod
    def _from_sequence_of_strings(
        cls,
        strings: list[str],
        *,
        dtype: Dtype | None = None,
        copy: bool = False,
        true_values: list[str] | None = None,
        false_values: list[str] | None = None,
    ) -> BooleanArray:
        true_values_union = cls._TRUE_VALUES.union(true_values or [])
        false_values_union = cls._FALSE_VALUES.union(false_values or [])

        def map_string(s):
            # NA passes through unchanged; anything outside the accepted
            # spellings is rejected.
            if isna(s):
                return s
            elif s in true_values_union:
                return True
            elif s in false_values_union:
                return False
            else:
                raise ValueError(f"{s} cannot be cast to bool")

        scalars = [map_string(x) for x in strings]
        return cls._from_sequence(scalars, dtype=dtype, copy=copy)

    _HANDLED_TYPES = (np.ndarray, numbers.Number, bool, np.bool_)

    def _coerce_to_array(self, value) -> tuple[np.ndarray, np.ndarray]:
        return coerce_to_array(value)

    @overload
    def astype(self, dtype: npt.DTypeLike, copy: bool = ...) -> np.ndarray:
        ...

    @overload
    def astype(self, dtype: ExtensionDtype, copy: bool = ...) -> ExtensionArray:
        ...

    @overload
    def astype(self, dtype: AstypeArg, copy: bool = ...) -> ArrayLike:
        ...

    def astype(self, dtype: AstypeArg, copy: bool = True) -> ArrayLike:
        """
        Cast to a NumPy array or ExtensionArray with 'dtype'.

        Parameters
        ----------
        dtype : str or dtype
            Typecode or data-type to which the array is cast.
        copy : bool, default True
            Whether to copy the data, even if not necessary. If False,
            a copy is made only if the old dtype does not match the
            new dtype.

        Returns
        -------
        ndarray or ExtensionArray
            NumPy ndarray, BooleanArray or IntegerArray with 'dtype' for its dtype.

        Raises
        ------
        TypeError
            if incompatible type with an BooleanDtype, equivalent of same_kind
            casting
        """
        dtype = pandas_dtype(dtype)

        if isinstance(dtype, ExtensionDtype):
            return super().astype(dtype, copy)

        if is_bool_dtype(dtype):
            # astype_nansafe converts np.nan to True
            if self._hasna:
                raise ValueError("cannot convert float NaN to bool")
            else:
                return self._data.astype(dtype, copy=copy)

        # for integer, error if there are missing values
        if is_integer_dtype(dtype) and self._hasna:
            raise ValueError("cannot convert NA to integer")

        # for float dtype, ensure we use np.nan before casting (numpy cannot
        # deal with pd.NA)
        na_value = self._na_value
        if is_float_dtype(dtype):
            na_value = np.nan
        # coerce
        return self.to_numpy(dtype=dtype, na_value=na_value, copy=False)

    def _values_for_argsort(self) -> np.ndarray:
        """
        Return values for sorting.

        Returns
        -------
        ndarray
            The transformed values should maintain the ordering between values
            within the array.

        See Also
        --------
        ExtensionArray.argsort : Return the indices that would sort this array.
        """
        # Missing values sort first: they are mapped to -1, which is below
        # both False (0) and True (1).
        data = self._data.copy()
        data[self._mask] = -1
        return data

    def any(self, *, skipna: bool = True, axis: int | None = 0, **kwargs):
        """
        Return whether any element is True.

        Returns False unless there is at least one element that is True.
        By default, NAs are skipped. If ``skipna=False`` is specified and
        missing values are present, similar :ref:`Kleene logic <boolean.kleene>`
        is used as for logical operations.

        Parameters
        ----------
        skipna : bool, default True
            Exclude NA values. If the entire array is NA and `skipna` is
            True, then the result will be False, as for an empty array.
            If `skipna` is False, the result will still be True if there is
            at least one element that is True, otherwise NA will be returned
            if there are NA's present.
        axis : int or None, default 0
        **kwargs : any, default None
            Additional keywords have no effect but might be accepted for
            compatibility with NumPy.

        Returns
        -------
        bool or :attr:`pandas.NA`

        See Also
        --------
        numpy.any : Numpy version of this method.
        BooleanArray.all : Return whether all elements are True.

        Examples
        --------
        The result indicates whether any element is True (and by default
        skips NAs):

        >>> pd.array([True, False, True]).any()
        True
        >>> pd.array([True, False, pd.NA]).any()
        True
        >>> pd.array([False, False, pd.NA]).any()
        False
        >>> pd.array([], dtype="boolean").any()
        False
        >>> pd.array([pd.NA], dtype="boolean").any()
        False

        With ``skipna=False``, the result can be NA if this is logically
        required (whether ``pd.NA`` is True or False influences the result):

        >>> pd.array([True, False, pd.NA]).any(skipna=False)
        True
        >>> pd.array([False, False, pd.NA]).any(skipna=False)
        <NA>
        """
        kwargs.pop("axis", None)
        nv.validate_any((), kwargs)

        values = self._data.copy()
        # Masked entries are filled with False so they cannot make the
        # result True; the Kleene correction happens below for skipna=False.
        np.putmask(values, self._mask, False)
        result = values.any(axis=axis)

        if skipna:
            return result
        else:
            if result or self.size == 0 or not self._mask.any():
                return result
            else:
                return self.dtype.na_value

    def all(self, *, skipna: bool = True, axis: int | None = 0, **kwargs):
        """
        Return whether all elements are True.

        Returns True unless there is at least one element that is False.
        By default, NAs are skipped. If ``skipna=False`` is specified and
        missing values are present, similar :ref:`Kleene logic <boolean.kleene>`
        is used as for logical operations.

        Parameters
        ----------
        skipna : bool, default True
            Exclude NA values. If the entire array is NA and `skipna` is
            True, then the result will be True, as for an empty array.
            If `skipna` is False, the result will still be False if there is
            at least one element that is False, otherwise NA will be returned
            if there are NA's present.
        axis : int or None, default 0
        **kwargs : any, default None
            Additional keywords have no effect but might be accepted for
            compatibility with NumPy.

        Returns
        -------
        bool or :attr:`pandas.NA`

        See Also
        --------
        numpy.all : Numpy version of this method.
        BooleanArray.any : Return whether any element is True.

        Examples
        --------
        The result indicates whether any element is True (and by default
        skips NAs):

        >>> pd.array([True, True, pd.NA]).all()
        True
        >>> pd.array([True, False, pd.NA]).all()
        False
        >>> pd.array([], dtype="boolean").all()
        True
        >>> pd.array([pd.NA], dtype="boolean").all()
        True

        With ``skipna=False``, the result can be NA if this is logically
        required (whether ``pd.NA`` is True or False influences the result):

        >>> pd.array([True, True, pd.NA]).all(skipna=False)
        <NA>
        >>> pd.array([True, False, pd.NA]).all(skipna=False)
        False
        """
        kwargs.pop("axis", None)
        nv.validate_all((), kwargs)

        values = self._data.copy()
        # Masked entries are filled with True so they cannot make the
        # result False; the Kleene correction happens below.
        np.putmask(values, self._mask, True)
        result = values.all(axis=axis)

        if skipna:
            return result
        else:
            if not result or self.size == 0 or not self._mask.any():
                return result
            else:
                return self.dtype.na_value

    def _logical_method(self, other, op):
        # Kleene (three-valued) logic: NA participates per kleene_or/and/xor.
        assert op.__name__ in {"or_", "ror_", "and_", "rand_", "xor", "rxor"}
        other_is_booleanarray = isinstance(other, BooleanArray)
        other_is_scalar = lib.is_scalar(other)
        mask = None

        if other_is_booleanarray:
            other, mask = other._data, other._mask
        elif is_list_like(other):
            other = np.asarray(other, dtype="bool")
            if other.ndim > 1:
                raise NotImplementedError("can only perform ops with 1-d structures")
            other, mask = coerce_to_array(other, copy=False)
        elif isinstance(other, np.bool_):
            other = other.item()

        if other_is_scalar and other is not libmissing.NA and not lib.is_bool(other):
            raise TypeError(
                "'other' should be pandas.NA or a bool. "
                f"Got {type(other).__name__} instead."
            )

        if not other_is_scalar and len(self) != len(other):
            raise ValueError("Lengths must match to compare")

        if op.__name__ in {"or_", "ror_"}:
            result, mask = ops.kleene_or(self._data, other, self._mask, mask)
        elif op.__name__ in {"and_", "rand_"}:
            result, mask = ops.kleene_and(self._data, other, self._mask, mask)
        elif op.__name__ in {"xor", "rxor"}:
            result, mask = ops.kleene_xor(self._data, other, self._mask, mask)

        # error: Argument 2 to "BooleanArray" has incompatible type "Optional[Any]";
        # expected "ndarray"
        return BooleanArray(result, mask)  # type: ignore[arg-type]

    def _cmp_method(self, other, op):
        from pandas.arrays import (
            FloatingArray,
            IntegerArray,
        )

        if isinstance(other, (IntegerArray, FloatingArray)):
            # Let the numeric array implement the comparison.
            return NotImplemented

        mask = None

        if isinstance(other, BooleanArray):
            other, mask = other._data, other._mask

        elif is_list_like(other):
            other = np.asarray(other)
            if other.ndim > 1:
                raise NotImplementedError("can only perform ops with 1-d structures")
            if len(self) != len(other):
                raise ValueError("Lengths must match to compare")

        if other is libmissing.NA:
            # numpy does not handle pd.NA well as "other" scalar (it returns
            # a scalar False instead of an array)
            result = np.zeros_like(self._data)
            mask = np.ones_like(self._data)
        else:
            # numpy will show a DeprecationWarning on invalid elementwise
            # comparisons, this will raise in the future
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", "elementwise", FutureWarning)
                with np.errstate(all="ignore"):
                    result = op(self._data, other)

        # nans propagate
        if mask is None:
            mask = self._mask.copy()
        else:
            mask = self._mask | mask

        return BooleanArray(result, mask, copy=False)

    def _arith_method(self, other, op):
        mask = None
        op_name = op.__name__

        if isinstance(other, BooleanArray):
            other, mask = other._data, other._mask

        elif is_list_like(other):
            other = np.asarray(other)
            if other.ndim > 1:
                raise NotImplementedError("can only perform ops with 1-d structures")
            if len(self) != len(other):
                raise ValueError("Lengths must match")

        # nans propagate
        if mask is None:
            mask = self._mask
            if other is libmissing.NA:
                # BUG FIX: build a fresh all-True mask instead of the old
                # in-place `mask |= True`, which wrote through the alias and
                # corrupted self._mask (e.g. `arr + pd.NA` turned every
                # element of `arr` itself into NA).
                mask = np.ones_like(mask)
        else:
            mask = self._mask | mask

        if other is libmissing.NA:
            # if other is NA, the result will be all NA and we can't run the
            # actual op, so we need to choose the resulting dtype manually
            if op_name in {"floordiv", "rfloordiv", "mod", "rmod", "pow", "rpow"}:
                dtype = "int8"
            else:
                dtype = "bool"
            result = np.zeros(len(self._data), dtype=dtype)
        else:
            if op_name in {"pow", "rpow"} and isinstance(other, np.bool_):
                # Avoid DeprecationWarning: In future, it will be an error
                # for 'np.bool_' scalars to be interpreted as an index
                other = bool(other)

            with np.errstate(all="ignore"):
                result = op(self._data, other)

        # divmod returns a tuple
        if op_name == "divmod":
            div, mod = result
            return (
                self._maybe_mask_result(div, mask, other, "floordiv"),
                self._maybe_mask_result(mod, mask, other, "mod"),
            )

        return self._maybe_mask_result(result, mask, other, op_name)

    def _maybe_mask_result(self, result, mask, other, op_name: str):
        """
        Wrap a raw op result in the masked array type matching its dtype.

        Parameters
        ----------
        result : array-like
        mask : array-like bool
        other : scalar or array-like
        op_name : str
        """
        # if we have a float operand we are by-definition
        # a float result
        # or our op is a divide
        if (is_float_dtype(other) or is_float(other)) or (
            op_name in ["rtruediv", "truediv"]
        ):
            from pandas.core.arrays import FloatingArray

            return FloatingArray(result, mask, copy=False)

        elif is_bool_dtype(result):
            return BooleanArray(result, mask, copy=False)

        elif is_integer_dtype(result):
            from pandas.core.arrays import IntegerArray

            return IntegerArray(result, mask, copy=False)
        else:
            result[mask] = np.nan
            return result

    def __abs__(self):
        # abs() is the identity for booleans; the mask is preserved.
        return self.copy()
jorisvandenbossche/pandas
|
pandas/core/arrays/boolean.py
|
Python
|
bsd-3-clause
| 23,248 | 0.000559 |
# -*- coding: utf-8 -*-
import csv
import json
from cStringIO import StringIO
from datetime import datetime
from django.conf import settings
from django.core import mail
from django.core.cache import cache
import mock
from pyquery import PyQuery as pq
from olympia import amo
from olympia.amo.tests import TestCase
from olympia.amo.tests import formset, initial
from olympia.access.models import Group, GroupUser
from olympia.addons.models import Addon, CompatOverride, CompatOverrideRange
from olympia.amo.urlresolvers import reverse
from olympia.amo.tests.test_helpers import get_image_path
from olympia.amo.utils import urlparams
from olympia.applications.models import AppVersion
from olympia.bandwagon.models import FeaturedCollection, MonthlyPick
from olympia.compat.cron import compatibility_report
from olympia.compat.models import CompatReport
from olympia.constants.base import VALIDATOR_SKELETON_RESULTS
from olympia.devhub.models import ActivityLog
from olympia.files.models import File, FileUpload
from olympia.stats.models import UpdateCount
from olympia.users.models import UserProfile
from olympia.users.utils import get_task_user
from olympia.versions.models import ApplicationsVersions, Version
from olympia.zadmin import forms, tasks
from olympia.zadmin.forms import DevMailerForm
from olympia.zadmin.models import (
EmailPreviewTopic, ValidationJob, ValidationResult)
from olympia.zadmin.tasks import updated_versions
from olympia.zadmin.views import find_files
class TestSiteEvents(TestCase):
    """CRUD smoke tests for the zadmin site-events views."""
    fixtures = ['base/users', 'zadmin/tests/siteevents']

    def setUp(self):
        super(TestSiteEvents, self).setUp()
        # The site-events views require an authenticated admin.
        self.client.login(username='admin@mozilla.com', password='password')

    def test_get(self):
        """The listing shows the single event shipped by the fixture."""
        url = reverse('zadmin.site_events')
        response = self.client.get(url)
        assert response.status_code == 200
        events = response.context['events']
        assert len(events) == 1

    def test_add(self):
        """POSTing a new event grows the listing to two entries."""
        url = reverse('zadmin.site_events')
        new_event = {
            'event_type': 2,
            'start': '2012-01-01',
            'description': 'foo',
        }
        response = self.client.post(url, new_event, follow=True)
        assert response.status_code == 200
        events = response.context['events']
        assert len(events) == 2

    def test_edit(self):
        """POSTing to an event's own URL updates it in place."""
        url = reverse('zadmin.site_events', args=[1])
        modified_event = {
            'event_type': 2,
            'start': '2012-01-01',
            'description': 'bar',
        }
        response = self.client.post(url, modified_event, follow=True)
        assert response.status_code == 200
        events = response.context['events']
        assert events[0].description == 'bar'

    def test_delete(self):
        """GETting the delete URL removes the fixture event."""
        url = reverse('zadmin.site_events.delete', args=[1])
        response = self.client.get(url, follow=True)
        assert response.status_code == 200
        events = response.context['events']
        assert len(events) == 0
class BulkValidationTest(TestCase):
    """Shared fixtures and factory helpers for bulk-validation tests."""
    fixtures = ['base/addon_3615', 'base/appversion', 'base/users']

    def setUp(self):
        super(BulkValidationTest, self).setUp()
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        self.addon = Addon.objects.get(pk=3615)
        self.creator = UserProfile.objects.get(username='editor')
        self.version = self.addon.get_version()
        # Pin the add-on's Firefox max version to 3.7a1pre so that
        # validation jobs have a newer version to target.
        ApplicationsVersions.objects.filter(
            application=amo.FIREFOX.id, version=self.version).update(
            max=AppVersion.objects.get(application=1, version='3.7a1pre'))
        self.application_version = self.version.apps.all()[0]
        self.application = self.application_version.application
        self.min = self.application_version.min
        self.max = self.application_version.max
        self.curr_max = self.appversion('3.7a1pre')
        # Counter used to generate unique filenames in create_file().
        self.counter = 0

        # Validation tasks run as the task user; point it at our creator
        # for the duration of each test (restored in tearDown).
        self.old_task_user = settings.TASK_USER_ID
        settings.TASK_USER_ID = self.creator.id

    def tearDown(self):
        # Restore the global task user swapped in setUp().
        settings.TASK_USER_ID = self.old_task_user
        super(BulkValidationTest, self).tearDown()

    def appversion(self, version, application=amo.FIREFOX.id):
        """Shortcut: fetch the AppVersion row for *version*."""
        return AppVersion.objects.get(application=application,
                                      version=version)

    def create_job(self, **kwargs):
        """Create a ValidationJob; 'current'/'target' kwargs override versions."""
        kw = dict(application=amo.FIREFOX.id,
                  curr_max_version=kwargs.pop('current', self.curr_max),
                  target_version=kwargs.pop('target',
                                            self.appversion('3.7a3')),
                  creator=self.creator)
        kw.update(kwargs)
        return ValidationJob.objects.create(**kw)

    def create_file(self, version=None, platform=amo.PLATFORM_ALL.id):
        """Create a public File on *version* (default: the fixture version)."""
        if not version:
            version = self.version
        return File.objects.create(version=version,
                                   filename='file-%s' % self.counter,
                                   platform=platform,
                                   status=amo.STATUS_PUBLIC)

    def create_result(self, job, f, **kwargs):
        """Create a completed ValidationResult for file *f* within *job*."""
        self.counter += 1
        kw = dict(file=f,
                  validation='{}',
                  errors=0,
                  warnings=0,
                  notices=0,
                  validation_job=job,
                  task_error=None,
                  valid=0,
                  completed=datetime.now())
        kw.update(kwargs)
        return ValidationResult.objects.create(**kw)

    def start_validation(self, new_max='3.7a3'):
        """POST the start-validation form targeting *new_max* and follow."""
        self.new_max = self.appversion(new_max)
        r = self.client.post(reverse('zadmin.start_validation'),
                             {'application': amo.FIREFOX.id,
                              'curr_max_version': self.curr_max.id,
                              'target_version': self.new_max.id,
                              'finish_email': 'fliggy@mozilla.com'},
                             follow=True)
        assert r.status_code == 200
class TestBulkValidation(BulkValidationTest):
    """View tests for starting bulk validation jobs and reading job state."""
    @mock.patch('olympia.zadmin.tasks.bulk_validate_file')
    def test_start(self, bulk_validate_file):
        """A valid POST creates one job with a result per file and queues
        the validation task for each."""
        new_max = self.appversion('3.7a3')
        r = self.client.post(reverse('zadmin.start_validation'),
                             {'application': amo.FIREFOX.id,
                              'curr_max_version': self.curr_max.id,
                              'target_version': new_max.id,
                              'finish_email': 'fliggy@mozilla.com'},
                             follow=True)
        self.assertNoFormErrors(r)
        self.assert3xx(r, reverse('zadmin.validation'))
        job = ValidationJob.objects.get()
        assert job.application == amo.FIREFOX.id
        assert job.curr_max_version.version == self.curr_max.version
        assert job.target_version.version == new_max.version
        assert job.finish_email == 'fliggy@mozilla.com'
        assert job.completed is None
        assert job.result_set.all().count() == len(self.version.all_files)
        assert bulk_validate_file.delay.called
    @mock.patch('olympia.zadmin.tasks.bulk_validate_file')
    def test_ignore_user_disabled_addons(self, bulk_validate_file):
        """Add-ons disabled by their author are not queued for validation."""
        self.addon.update(disabled_by_user=True)
        r = self.client.post(reverse('zadmin.start_validation'),
                             {'application': amo.FIREFOX.id,
                              'curr_max_version': self.curr_max.id,
                              'target_version': self.appversion('3.7a3').id,
                              'finish_email': 'fliggy@mozilla.com'},
                             follow=True)
        self.assertNoFormErrors(r)
        self.assert3xx(r, reverse('zadmin.validation'))
        assert not bulk_validate_file.delay.called
    @mock.patch('olympia.zadmin.tasks.bulk_validate_file')
    def test_ignore_non_public_addons(self, bulk_validate_file):
        """Disabled, incomplete and deleted add-ons are all skipped."""
        target_ver = self.appversion('3.7a3').id
        for status in (amo.STATUS_DISABLED, amo.STATUS_NULL,
                       amo.STATUS_DELETED):
            self.addon.update(status=status)
            r = self.client.post(reverse('zadmin.start_validation'),
                                 {'application': amo.FIREFOX.id,
                                  'curr_max_version': self.curr_max.id,
                                  'target_version': target_ver,
                                  'finish_email': 'fliggy@mozilla.com'},
                                 follow=True)
            self.assertNoFormErrors(r)
            self.assert3xx(r, reverse('zadmin.validation'))
            assert not bulk_validate_file.delay.called, (
                'Addon with status %s should be ignored' % status)
    @mock.patch('olympia.zadmin.tasks.bulk_validate_file')
    def test_ignore_lang_packs(self, bulk_validate_file):
        """Language-pack add-ons are excluded from bulk validation."""
        target_ver = self.appversion('3.7a3').id
        self.addon.update(type=amo.ADDON_LPAPP)
        r = self.client.post(reverse('zadmin.start_validation'),
                             {'application': amo.FIREFOX.id,
                              'curr_max_version': self.curr_max.id,
                              'target_version': target_ver,
                              'finish_email': 'fliggy@mozilla.com'},
                             follow=True)
        self.assertNoFormErrors(r)
        self.assert3xx(r, reverse('zadmin.validation'))
        assert not bulk_validate_file.delay.called, (
            'Lang pack addons should be ignored')
    @mock.patch('olympia.zadmin.tasks.bulk_validate_file')
    def test_ignore_themes(self, bulk_validate_file):
        """Theme add-ons are excluded from bulk validation."""
        target_ver = self.appversion('3.7a3').id
        self.addon.update(type=amo.ADDON_THEME)
        self.client.post(reverse('zadmin.start_validation'),
                         {'application': amo.FIREFOX.id,
                          'curr_max_version': self.curr_max.id,
                          'target_version': target_ver,
                          'finish_email': 'fliggy@mozilla.com'})
        assert not bulk_validate_file.delay.called, (
            'Theme addons should be ignored')
    @mock.patch('olympia.zadmin.tasks.bulk_validate_file')
    def test_validate_all_non_disabled_addons(self, bulk_validate_file):
        """A public add-on is picked up and its files are validated."""
        target_ver = self.appversion('3.7a3').id
        # Explicitly reset the mock's called flag before exercising the view.
        bulk_validate_file.delay.called = False
        self.addon.update(status=amo.STATUS_PUBLIC)
        r = self.client.post(reverse('zadmin.start_validation'),
                             {'application': amo.FIREFOX.id,
                              'curr_max_version': self.curr_max.id,
                              'target_version': target_ver,
                              'finish_email': 'fliggy@mozilla.com'},
                             follow=True)
        self.assertNoFormErrors(r)
        self.assert3xx(r, reverse('zadmin.validation'))
        assert bulk_validate_file.delay.called, (
            'Addon with status %s should be validated' % self.addon.status)
    def test_grid(self):
        """The validation listing renders per-job counts in fixed columns."""
        job = self.create_job()
        # One passing and one failing result.
        for res in (dict(errors=0), dict(errors=1)):
            self.create_result(job, self.create_file(), **res)
        r = self.client.get(reverse('zadmin.validation'))
        assert r.status_code == 200
        doc = pq(r.content)
        assert doc('table tr td').eq(0).text() == str(job.pk)  # ID
        assert doc('table tr td').eq(3).text() == 'Firefox'  # Application
        assert doc('table tr td').eq(4).text() == self.curr_max.version
        assert doc('table tr td').eq(5).text() == '3.7a3'
        assert doc('table tr td').eq(6).text() == '2'  # tested
        assert doc('table tr td').eq(7).text() == '1'  # failing
        assert doc('table tr td').eq(8).text()[0] == '1'  # passing
        assert doc('table tr td').eq(9).text() == '0'  # exceptions
    def test_application_versions_json(self):
        """The JSON endpoint returns non-empty, real AppVersion choices."""
        r = self.client.post(reverse('zadmin.application_versions_json'),
                             {'application': amo.FIREFOX.id})
        assert r.status_code == 200
        data = json.loads(r.content)
        empty = True
        for id, ver in data['choices']:
            empty = False
            assert AppVersion.objects.get(pk=id).version == ver
        assert not empty, "Unexpected: %r" % data
    def test_job_status(self):
        """The job-status endpoint reports progress and completion time."""
        job = self.create_job()
        def get_data():
            # Each call adds one more completed result before polling.
            self.create_result(job, self.create_file(), **{})
            r = self.client.post(reverse('zadmin.job_status'),
                                 {'job_ids': json.dumps([job.pk])})
            assert r.status_code == 200
            data = json.loads(r.content)[str(job.pk)]
            return data
        data = get_data()
        assert data['completed'] == 1
        assert data['total'] == 1
        assert data['percent_complete'] == '100'
        assert data['job_id'] == job.pk
        assert data['completed_timestamp'] == ''
        job.update(completed=datetime.now())
        data = get_data()
        assert data['completed_timestamp'] != '', (
            'Unexpected: %s' % data['completed_timestamp'])
class TestBulkUpdate(BulkValidationTest):
    """The 'notify and set max versions' flow: bumping the max appversion
    of add-ons whose files passed bulk validation, and emailing authors."""
    def setUp(self):
        super(TestBulkUpdate, self).setUp()
        self.job = self.create_job(completed=datetime.now())
        self.update_url = reverse('zadmin.notify', args=[self.job.pk])
        self.list_url = reverse('zadmin.validation')
        self.data = {'text': '{{ APPLICATION }} {{ VERSION }}',
                     'subject': '..'}
        self.version_one = Version.objects.create(addon=self.addon)
        self.version_two = Version.objects.create(addon=self.addon)
        appver = AppVersion.objects.get(application=1, version='3.7a1pre')
        for v in self.version_one, self.version_two:
            ApplicationsVersions.objects.create(
                application=amo.FIREFOX.id, version=v,
                min=appver, max=appver)
    def test_no_update_link(self):
        # The combined notify/set-max-versions link is rendered even when
        # the job's only result did not pass.
        self.create_result(self.job, self.create_file(), **{})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        assert doc('table tr td a.set-max-version').text() == (
            'Notify and set max versions')
    def test_update_link(self):
        self.create_result(self.job, self.create_file(), **{'valid': 1})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        assert doc('table tr td a.set-max-version').text() == (
            'Notify and set max versions')
    def test_update_url(self):
        # The link carries the job-specific notify URL as a data attribute.
        self.create_result(self.job, self.create_file(), **{'valid': 1})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        assert doc('table tr td a.set-max-version').attr('data-job-url') == (
            self.update_url)
    def test_update_anonymous(self):
        # Anonymous users are redirected away from the notify endpoint.
        self.client.logout()
        r = self.client.post(self.update_url)
        assert r.status_code == 302
    def test_version_pks(self):
        """updated_versions() de-duplicates versions across results."""
        for version in [self.version_one, self.version_two]:
            for x in range(0, 3):
                self.create_result(self.job, self.create_file(version))
        assert sorted(updated_versions(self.job)) == (
            [self.version_one.pk, self.version_two.pk])
    def test_update_passing_only(self):
        """Only versions whose results passed are selected for bumping."""
        self.create_result(self.job, self.create_file(self.version_one))
        self.create_result(self.job, self.create_file(self.version_two),
                           errors=1)
        assert sorted(updated_versions(self.job)) == (
            [self.version_one.pk])
    def test_update_pks(self):
        """A passing version gets its max bumped to the job target."""
        self.create_result(self.job, self.create_file(self.version))
        r = self.client.post(self.update_url, self.data)
        assert r.status_code == 302
        assert self.version.apps.all()[0].max == self.job.target_version
    def test_update_unclean_pks(self):
        # A version with any failing result is left at the current max.
        self.create_result(self.job, self.create_file(self.version))
        self.create_result(self.job, self.create_file(self.version),
                           errors=1)
        r = self.client.post(self.update_url, self.data)
        assert r.status_code == 302
        assert self.version.apps.all()[0].max == self.job.curr_max_version
    def test_update_pks_logs(self):
        """Bumping a max version writes one activity log as the task user."""
        self.create_result(self.job, self.create_file(self.version))
        assert ActivityLog.objects.for_addons(self.addon).count() == 0
        self.client.post(self.update_url, self.data)
        upd = amo.LOG.MAX_APPVERSION_UPDATED.id
        logs = ActivityLog.objects.for_addons(self.addon).filter(action=upd)
        assert logs.count() == 1
        assert logs[0].user == get_task_user()
    def test_update_wrong_version(self):
        # A version whose max does not match the job's current max is
        # left untouched.
        self.create_result(self.job, self.create_file(self.version))
        av = self.version.apps.all()[0]
        av.max = self.appversion('3.6')
        av.save()
        self.client.post(self.update_url, self.data)
        assert self.version.apps.all()[0].max == self.appversion('3.6')
    def test_update_all_within_range(self):
        # A max that lies between the job's current and target versions
        # is still bumped up to the target.
        self.create_result(self.job, self.create_file(self.version))
        # Create an appversion in between current and target.
        av = self.version.apps.all()[0]
        av.max = self.appversion('3.7a2')
        av.save()
        self.client.post(self.update_url, self.data)
        assert self.version.apps.all()[0].max == self.appversion('3.7a3')
    def test_version_comparison(self):
        # regression test for bug 691984
        job = self.create_job(completed=datetime.now(),
                              current=self.appversion('3.0.9'),
                              target=self.appversion('3.5'))
        # .* was not sorting right
        self.version.apps.all().update(max=self.appversion('3.0.*'))
        self.create_result(job, self.create_file(self.version))
        self.client.post(reverse('zadmin.notify', args=[job.pk]),
                         self.data)
        assert self.version.apps.all()[0].max == self.appversion('3.5')
    def test_update_different_app(self):
        # NOTE(review): this assigns amo.FIREFOX.id, the same application
        # the job targets -- a different app id looks intended. Also no
        # POST to the notify endpoint is made, so the max is trivially
        # unchanged; confirm the test still exercises what it claims.
        self.create_result(self.job, self.create_file(self.version))
        target = self.version.apps.all()[0]
        target.application = amo.FIREFOX.id
        target.save()
        assert self.version.apps.all()[0].max == self.curr_max
    def test_update_twice(self):
        # A second notify POST must not touch an already-bumped version
        # (modified timestamp stays the same).
        self.create_result(self.job, self.create_file(self.version))
        self.client.post(self.update_url, self.data)
        assert self.version.apps.all()[0].max == self.job.target_version
        now = self.version.modified
        self.client.post(self.update_url, self.data)
        assert self.version.modified == now
    def test_update_notify(self):
        self.create_result(self.job, self.create_file(self.version))
        self.client.post(self.update_url, self.data)
        assert len(mail.outbox) == 1
    def test_update_subject(self):
        # The subject template has access to the passing add-ons context.
        data = self.data.copy()
        data['subject'] = '{{ PASSING_ADDONS.0.name }}'
        f = self.create_file(self.version)
        self.create_result(self.job, f)
        self.client.post(self.update_url, data)
        assert mail.outbox[0].subject == (
            '%s' % self.addon.name)
    @mock.patch('olympia.zadmin.tasks.log')
    def test_bulk_email_logs_stats(self, log):
        """The task logs bump and email statistics at known positions in
        the log-call sequence."""
        log.info = mock.Mock()
        self.create_result(self.job, self.create_file(self.version))
        self.client.post(self.update_url, self.data)
        assert log.info.call_args_list[-8][0][0] == (
            '[1@None] bulk update stats for job %s: '
            '{bumped: 1, is_dry_run: 0, processed: 1}'
            % self.job.pk)
        assert log.info.call_args_list[-2][0][0] == (
            '[1@None] bulk email stats for job %s: '
            '{author_emailed: 1, is_dry_run: 0, processed: 1}'
            % self.job.pk)
    def test_application_version(self):
        # {{ APPLICATION }} {{ VERSION }} from setUp's template renders
        # as the app name and target version.
        self.create_result(self.job, self.create_file(self.version))
        self.client.post(self.update_url, self.data)
        assert mail.outbox[0].body == 'Firefox 3.7a3'
    def test_multiple_result_links(self):
        # Creates validation results for two files of the same addon:
        results = [
            self.create_result(self.job, self.create_file(self.version)),
            self.create_result(self.job, self.create_file(self.version))]
        self.client.post(self.update_url,
                         {'text': '{{ PASSING_ADDONS.0.links }}',
                          'subject': '..'})
        body = mail.outbox[0].body
        assert all((reverse('devhub.bulk_compat_result',
                            args=(self.addon.slug, result.pk))
                    in body)
                   for result in results)
    def test_notify_mail_preview(self):
        """Preview mode renders emails without sending or bumping."""
        self.create_result(self.job, self.create_file(self.version))
        self.client.post(self.update_url,
                         {'text': 'the message', 'subject': 'the subject',
                          'preview_only': 'on'})
        assert len(mail.outbox) == 0
        rs = self.job.get_notify_preview_emails()
        assert [e.subject for e in rs] == ['the subject']
        # version should not be bumped since it's in preview mode:
        assert self.version.apps.all()[0].max == self.max
        upd = amo.LOG.MAX_APPVERSION_UPDATED.id
        logs = ActivityLog.objects.for_addons(self.addon).filter(action=upd)
        assert logs.count() == 0
class TestBulkNotify(BulkValidationTest):
    """Notifying authors of add-ons whose files failed bulk validation."""
    def setUp(self):
        super(TestBulkNotify, self).setUp()
        self.job = self.create_job(completed=datetime.now())
        self.update_url = reverse('zadmin.notify', args=[self.job.pk])
        self.syntax_url = reverse('zadmin.notify.syntax')
        self.list_url = reverse('zadmin.validation')
        self.version_one = Version.objects.create(addon=self.addon)
        self.version_two = Version.objects.create(addon=self.addon)
    def test_no_notify_link(self):
        # There is no separate 'notify' link; notification is part of the
        # combined set-max-version action.
        self.create_result(self.job, self.create_file(), **{})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        assert len(doc('table tr td a.notify')) == 0
    def test_notify_link(self):
        self.create_result(self.job, self.create_file(), **{'errors': 1})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        assert doc('table tr td a.set-max-version').text() == (
            'Notify and set max versions')
    def test_notify_url(self):
        # The link carries the job-specific notify URL as a data attribute.
        self.create_result(self.job, self.create_file(), **{'errors': 1})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        assert doc('table tr td a.set-max-version').attr('data-job-url') == (
            self.update_url)
    def test_notify_anonymous(self):
        # Anonymous users are redirected away from the notify endpoint.
        self.client.logout()
        r = self.client.post(self.update_url)
        assert r.status_code == 302
    def test_notify_log(self):
        """Emailing an author of a failing add-on is activity-logged under
        the admin who triggered it."""
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        assert ActivityLog.objects.for_addons(self.addon).count() == 0
        self.client.post(self.update_url, {'text': '..', 'subject': '..'})
        upd = amo.LOG.BULK_VALIDATION_USER_EMAILED.id
        logs = (ActivityLog.objects.for_user(self.creator)
                           .filter(action=upd))
        assert logs.count() == 1
        assert logs[0].user == self.creator
    def test_compat_bump_log(self):
        """A passing result makes the same POST bump the max version and
        log it under the triggering admin."""
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 0})
        assert ActivityLog.objects.for_addons(self.addon).count() == 0
        self.client.post(self.update_url, {'text': '..', 'subject': '..'})
        upd = amo.LOG.MAX_APPVERSION_UPDATED.id
        logs = ActivityLog.objects.for_addons(self.addon).filter(action=upd)
        assert logs.count() == 1
        assert logs[0].user == self.creator
    def test_notify_mail(self):
        """The failing-addons template context is available in the subject
        and the mail goes to the add-on author."""
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        r = self.client.post(self.update_url,
                             {'text': '..',
                              'subject': '{{ FAILING_ADDONS.0.name }}'})
        assert r.status_code == 302
        assert len(mail.outbox) == 1
        assert mail.outbox[0].body == '..'
        assert mail.outbox[0].subject == self.addon.name
        assert mail.outbox[0].to == [u'del@icio.us']
    def test_result_links(self):
        # {{ FAILING_ADDONS.0.links }} expands to the devhub compat result
        # URL for the failing file.
        result = self.create_result(self.job, self.create_file(self.version),
                                    **{'errors': 1})
        r = self.client.post(self.update_url,
                             {'text': '{{ FAILING_ADDONS.0.links }}',
                              'subject': '...'})
        assert r.status_code == 302
        assert len(mail.outbox) == 1
        res = reverse('devhub.bulk_compat_result',
                      args=(self.addon.slug, result.pk))
        email = mail.outbox[0].body
        assert res in email, ('Unexpected message: %s' % email)
    def test_notify_mail_partial(self):
        # One failing and one passing result still produce a single mail.
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        self.create_result(self.job, self.create_file(self.version))
        r = self.client.post(self.update_url, {'text': '..', 'subject': '..'})
        assert r.status_code == 302
        assert len(mail.outbox) == 1
    def test_notify_mail_multiple(self):
        # Multiple failing results for one add-on are collapsed into a
        # single author email.
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        r = self.client.post(self.update_url, {'text': '..', 'subject': '..'})
        assert r.status_code == 302
        assert len(mail.outbox) == 1
    def test_notify_mail_preview(self):
        """Preview mode records emails on the job instead of sending."""
        for i in range(2):
            self.create_result(self.job, self.create_file(self.version),
                               **{'errors': 1})
        r = self.client.post(self.update_url,
                             {'text': 'the message', 'subject': 'the subject',
                              'preview_only': 'on'})
        assert r.status_code == 302
        assert len(mail.outbox) == 0
        rs = self.job.get_notify_preview_emails()
        assert [e.subject for e in rs] == ['the subject']
    def test_notify_rendering(self):
        """Template variables render in both body and subject; compat_link
        points at the devhub version-edit page."""
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        r = self.client.post(self.update_url,
                             {'text': '{{ FAILING_ADDONS.0.name }}'
                                      '{{ FAILING_ADDONS.0.compat_link }}',
                              'subject': '{{ FAILING_ADDONS.0.name }} blah'})
        assert r.status_code == 302
        assert len(mail.outbox) == 1
        url = reverse('devhub.versions.edit', args=[self.addon.pk,
                                                    self.version.pk])
        assert str(self.addon.name) in mail.outbox[0].body
        assert url in mail.outbox[0].body
        assert str(self.addon.name) in mail.outbox[0].subject
    def test_notify_unicode(self):
        # Non-ASCII add-on names must survive template rendering intact.
        self.addon.name = u'འབྲུག་ཡུལ།'
        self.addon.save()
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        r = self.client.post(self.update_url,
                             {'text': '{{ FAILING_ADDONS.0.name }}',
                              'subject': '{{ FAILING_ADDONS.0.name }} blah'})
        assert r.status_code == 302
        assert len(mail.outbox) == 1
        assert mail.outbox[0].body == self.addon.name
    def test_notify_template(self):
        # NotifyForm rejects invalid template syntax.
        for text, res in (['some sample text', True],
                          ['{{ FAILING_ADDONS.0.name }}{% if %}', False]):
            assert forms.NotifyForm(
                {'text': text, 'subject': '...'}).is_valid() == res
    def test_notify_syntax(self):
        # The syntax-check endpoint mirrors NotifyForm validation as JSON.
        for text, res in (['some sample text', True],
                          ['{{ FAILING_ADDONS.0.name }}{% if %}', False]):
            r = self.client.post(self.syntax_url, {'text': text,
                                                   'subject': '..'})
            assert r.status_code == 200
            assert json.loads(r.content)['valid'] == res
class TestBulkValidationTask(BulkValidationTest):
    """The bulk_validate_file task itself, plus file selection for jobs.

    The helpers from ``create_version`` down exercise ``find_files``:
    which files of an add-on are picked up for a given job. They rely on
    ``self.application`` / ``self.min`` / ``self.max`` set in the base
    class's setUp (not visible here).
    """
    def test_validate(self):
        """Running a job end-to-end records a failing result, completes
        the job with stats, and emails the requester."""
        self.start_validation()
        res = ValidationResult.objects.get()
        self.assertCloseToNow(res.completed)
        assert not res.task_error
        validation = json.loads(res.validation)
        assert res.errors == 1
        assert validation['messages'][0]['id'] == ['main', 'prepare_package',
                                                   'not_found']
        assert res.valid is False
        assert res.warnings == 0, [mess['message']
                                   for mess in validation['messages']]
        assert res.notices == 0
        assert validation['errors'] == 1
        self.assertCloseToNow(res.validation_job.completed)
        assert res.validation_job.stats['total'] == 1
        assert res.validation_job.stats['completed'] == 1
        assert res.validation_job.stats['passing'] == 0
        assert res.validation_job.stats['failing'] == 1
        assert res.validation_job.stats['errors'] == 0
        assert len(mail.outbox) == 1
        assert mail.outbox[0].subject == (
            'Behold! Validation results for Firefox %s->%s'
            % (self.curr_max.version, self.new_max.version))
        assert mail.outbox[0].to == ['fliggy@mozilla.com']
    @mock.patch('validator.validate.validate')
    def test_validator_bulk_compat_flag(self, validate):
        try:
            self.start_validation()
        except Exception:
            # We only care about the call to `validate()`, not the result.
            pass
        assert validate.call_args[1].get('compat_test')
    @mock.patch('olympia.zadmin.tasks.run_validator')
    def test_task_error(self, run_validator):
        """A validator crash is captured on the result and counted in the
        job's error stats."""
        run_validator.side_effect = RuntimeError('validation error')
        try:
            self.start_validation()
        # NOTE(review): bare except -- narrowing to Exception would avoid
        # swallowing KeyboardInterrupt/SystemExit.
        except:
            # the real test is how it's handled, below...
            pass
        res = ValidationResult.objects.get()
        err = res.task_error.strip()
        assert err.endswith('RuntimeError: validation error'), (
            'Unexpected: %s' % err)
        self.assertCloseToNow(res.completed)
        assert res.validation_job.stats['total'] == 1
        assert res.validation_job.stats['errors'] == 1
        assert res.validation_job.stats['passing'] == 0
        assert res.validation_job.stats['failing'] == 0
    @mock.patch('olympia.zadmin.tasks.run_validator')
    def test_validate_for_appversions(self, run_validator):
        """The validator is told to target only the job's new max version."""
        data = {
            "errors": 1,
            "warnings": 50,
            "notices": 1,
            "messages": [],
            "compatibility_summary": {
                "errors": 0,
                "warnings": 0,
                "notices": 0
            },
            "metadata": {}
        }
        run_validator.return_value = json.dumps(data)
        self.start_validation()
        assert run_validator.called
        assert run_validator.call_args[1]['for_appversions'] == (
            {amo.FIREFOX.guid: [self.new_max.version]})
    @mock.patch('olympia.zadmin.tasks.run_validator')
    def test_validate_all_tiers(self, run_validator):
        run_validator.return_value = json.dumps(VALIDATOR_SKELETON_RESULTS)
        res = self.create_result(self.create_job(), self.create_file(), **{})
        tasks.bulk_validate_file(res.id)
        assert run_validator.called
        assert run_validator.call_args[1]['test_all_tiers']
    @mock.patch('olympia.zadmin.tasks.run_validator')
    def test_merge_with_compat_summary(self, run_validator):
        """Counts from compatibility_summary are added onto the result's
        base error/warning/notice counts."""
        data = {
            "errors": 1,
            "detected_type": "extension",
            "success": False,
            "warnings": 50,
            "notices": 1,
            "ending_tier": 5,
            "messages": [
                {"description": "A global function was called ...",
                 "tier": 3,
                 "message": "Global called in dangerous manner",
                 "uid": "de93a48831454e0b9d965642f6d6bf8f",
                 "id": [],
                 "compatibility_type": None,
                 "for_appversions": None,
                 "type": "warning"},
                {"description": ("...no longer indicate the language "
                                 "of Firefox's UI..."),
                 "tier": 5,
                 "message": "navigator.language may not behave as expected",
                 "uid": "f44c1930887c4d9e8bd2403d4fe0253a",
                 "id": [],
                 "compatibility_type": "error",
                 "for_appversions": {
                     "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}": ["4.2a1pre",
                                                                "5.0a2",
                                                                "6.0a1"]},
                 "type": "warning"}],
            "compatibility_summary": {
                "notices": 1,
                "errors": 6,
                "warnings": 0},
            "metadata": {
                "version": "1.0",
                "name": "FastestFox",
                "id": "<id>"}}
        run_validator.return_value = json.dumps(data)
        res = self.create_result(self.create_job(), self.create_file(), **{})
        tasks.bulk_validate_file(res.id)
        assert run_validator.called
        res = ValidationResult.objects.get(pk=res.pk)
        assert res.errors == (
            data['errors'] + data['compatibility_summary']['errors'])
        assert res.warnings == (
            data['warnings'] + data['compatibility_summary']['warnings'])
        assert res.notices == (
            data['notices'] + data['compatibility_summary']['notices'])
    @mock.patch('validator.validate.validate')
    def test_app_version_overrides(self, validate):
        # Both min and max target-app overrides are pinned to the new max.
        validate.return_value = json.dumps(VALIDATOR_SKELETON_RESULTS)
        self.start_validation(new_max='3.7a4')
        assert validate.called
        overrides = validate.call_args[1]['overrides']
        assert overrides['targetapp_minVersion'] == {amo.FIREFOX.guid: '3.7a4'}
        assert overrides['targetapp_maxVersion'] == {amo.FIREFOX.guid: '3.7a4'}
    def create_version(self, addon, statuses, version_str=None):
        """Create a Version on *addon* with one File per status.

        *version_str*, when given, selects the max AppVersion by version
        string instead of the default ``self.max``.
        """
        max = self.max
        if version_str:
            max = AppVersion.objects.filter(version=version_str)[0]
        version = Version.objects.create(addon=addon)
        ApplicationsVersions.objects.create(application=self.application,
                                            min=self.min, max=max,
                                            version=version)
        for status in statuses:
            File.objects.create(status=status, version=version)
        return version
    def find_files(self, job_kwargs=None):
        """Run find_files for a fresh job; return the selected file ids."""
        if not job_kwargs:
            job_kwargs = {}
        job = self.create_job(**job_kwargs)
        find_files(job)
        return list(job.result_set.values_list('file_id', flat=True))
    def test_getting_disabled(self):
        self.addon.update(status=amo.STATUS_DISABLED)
        assert len(self.find_files()) == 0
    def test_getting_deleted(self):
        self.addon.update(status=amo.STATUS_DELETED)
        assert len(self.find_files()) == 0
    def test_getting_status(self):
        # Both public and nominated files of a version are selected.
        self.create_version(self.addon, [amo.STATUS_PUBLIC,
                                         amo.STATUS_NOMINATED])
        ids = self.find_files()
        assert len(ids) == 2
    def test_getting_latest_public(self):
        # Only the latest *public* version's file is selected; a newer
        # incomplete version does not shadow it.
        old_version = self.create_version(self.addon, [amo.STATUS_PUBLIC])
        self.create_version(self.addon, [amo.STATUS_NULL])
        ids = self.find_files()
        assert len(ids) == 1
        assert old_version.files.all()[0].pk == ids[0]
    def test_getting_latest_public_order(self):
        self.create_version(self.addon, [amo.STATUS_PURGATORY])
        new_version = self.create_version(self.addon, [amo.STATUS_PUBLIC])
        ids = self.find_files()
        assert len(ids) == 1
        assert new_version.files.all()[0].pk == ids[0]
    def delete_orig_version(self, fixup=True):
        # NOTE(review): the `fixup` parameter is never used in this body;
        # confirm whether callers rely on it or it can be dropped.
        # Because deleting versions resets the status...
        self.version.delete()
        # Don't really care what status this is, as long
        # as it gets past the first SQL query.
        self.addon.update(status=amo.STATUS_PUBLIC)
    def test_no_versions(self):
        self.delete_orig_version()
        assert len(self.find_files()) == 0
    def test_no_files(self):
        self.version.files.all().delete()
        self.addon.update(status=amo.STATUS_PUBLIC)
        assert len(self.find_files()) == 0
    def test_not_public(self):
        # With no public version, a preliminarily-reviewed file is used.
        version = self.create_version(self.addon, [amo.STATUS_LITE])
        self.delete_orig_version()
        ids = self.find_files()
        assert len(ids) == 1
        assert version.files.all()[0].pk == ids[0]
    def test_not_public_and_newer(self):
        self.create_version(self.addon, [amo.STATUS_LITE])
        new_version = self.create_version(self.addon, [amo.STATUS_LITE])
        self.delete_orig_version()
        ids = self.find_files()
        assert len(ids) == 1
        assert new_version.files.all()[0].pk == ids[0]
    def test_not_public_w_beta(self):
        # Beta files are selected alongside preliminary ones.
        self.create_version(self.addon, [amo.STATUS_LITE])
        self.create_version(self.addon, [amo.STATUS_BETA])
        self.delete_orig_version()
        ids = self.find_files()
        assert len(ids) == 2
    def test_not_public_w_multiple_files(self):
        self.create_version(self.addon, [amo.STATUS_BETA])
        new_version = self.create_version(self.addon, [amo.STATUS_LITE,
                                                       amo.STATUS_BETA])
        self.delete_orig_version()
        ids = self.find_files()
        assert len(ids) == 2
        assert sorted([v.id for v in new_version.files.all()]) == sorted(ids)
    def test_not_prelim_w_multiple_files(self):
        self.create_version(self.addon, [amo.STATUS_BETA])
        self.create_version(self.addon, [amo.STATUS_BETA,
                                         amo.STATUS_NOMINATED])
        self.delete_orig_version()
        ids = self.find_files()
        assert len(ids) == 3
    def test_public_partial(self):
        # Disabled files of an otherwise-selected version are skipped.
        self.create_version(self.addon, [amo.STATUS_PUBLIC])
        new_version = self.create_version(self.addon, [amo.STATUS_BETA,
                                                       amo.STATUS_DISABLED])
        ids = self.find_files()
        assert len(ids) == 2
        assert new_version.files.all()[1].pk not in ids
    def test_getting_w_unreviewed(self):
        # Both the latest public and a newer unreviewed file are selected.
        old_version = self.create_version(self.addon, [amo.STATUS_PUBLIC])
        new_version = self.create_version(self.addon, [amo.STATUS_UNREVIEWED])
        ids = self.find_files()
        assert len(ids) == 2
        old_version_pk = old_version.files.all()[0].pk
        new_version_pk = new_version.files.all()[0].pk
        assert sorted([old_version_pk, new_version_pk]) == sorted(ids)
    def test_multiple_files(self):
        self.create_version(self.addon, [amo.STATUS_PUBLIC, amo.STATUS_PUBLIC,
                                         amo.STATUS_PUBLIC])
        ids = self.find_files()
        assert len(ids) == 3
    def test_multiple_public(self):
        # Only the newest public version's file wins.
        self.create_version(self.addon, [amo.STATUS_PUBLIC])
        new_version = self.create_version(self.addon, [amo.STATUS_PUBLIC])
        ids = self.find_files()
        assert len(ids) == 1
        assert new_version.files.all()[0].pk == ids[0]
    def test_multiple_addons(self):
        # An unrelated add-on's non-public file is not selected.
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        self.create_version(addon, [amo.STATUS_PURGATORY])
        ids = self.find_files()
        assert len(ids) == 1
        assert self.version.files.all()[0].pk == ids[0]
    def test_no_app(self):
        # A version with no ApplicationsVersions rows cannot match the job.
        version = self.create_version(self.addon, [amo.STATUS_LITE])
        self.delete_orig_version()
        version.apps.all().delete()
        ids = self.find_files()
        assert len(ids) == 0
    def test_wrong_version(self):
        self.create_version(self.addon, [amo.STATUS_LITE],
                            version_str='4.0b2pre')
        self.delete_orig_version()
        ids = self.find_files()
        assert len(ids) == 0
    def test_version_slightly_newer_than_current(self):
        # addon matching current app/version but with a newer public version
        # that is within range of the target app/version.
        # See bug 658739.
        self.create_version(self.addon, [amo.STATUS_PUBLIC],
                            version_str='3.7a2')
        newer = self.create_version(self.addon, [amo.STATUS_PUBLIC],
                                    version_str='3.7a3')
        kw = dict(curr_max_version=self.appversion('3.7a2'),
                  target_version=self.appversion('3.7a4'))
        ids = self.find_files(job_kwargs=kw)
        assert newer.files.all()[0].pk == ids[0]
    def test_version_compatible_with_newer_app(self):
        # addon with a newer public version that is already compatible with
        # an app/version higher than the target.
        # See bug 658739.
        self.create_version(self.addon, [amo.STATUS_PUBLIC],
                            version_str='3.7a2')
        # A version that supports a newer Firefox than what we're targeting
        self.create_version(self.addon, [amo.STATUS_PUBLIC],
                            version_str='3.7a4')
        kw = dict(curr_max_version=self.appversion('3.7a2'),
                  target_version=self.appversion('3.7a3'))
        ids = self.find_files(job_kwargs=kw)
        assert len(ids) == 0
    def test_version_compatible_with_target_app(self):
        self.create_version(self.addon, [amo.STATUS_PUBLIC],
                            version_str='3.7a2')
        # Already has a version that supports target:
        self.create_version(self.addon, [amo.STATUS_PUBLIC],
                            version_str='3.7a3')
        kw = dict(curr_max_version=self.appversion('3.7a2'),
                  target_version=self.appversion('3.7a3'))
        ids = self.find_files(job_kwargs=kw)
        assert len(ids) == 0
    def test_version_webextension(self):
        # WebExtensions are excluded from bulk validation entirely.
        self.version.files.update(is_webextension=True)
        assert not self.find_files()
class TestTallyValidationErrors(BulkValidationTest):
    """Tallying validator messages per job and exporting them as CSV."""
    def setUp(self):
        super(TestTallyValidationErrors, self).setUp()
        # Canned validator output: one plain error and one warning whose
        # compatibility_type 'error' promotes it to an error in the tally.
        self.data = {
            "errors": 1,
            "warnings": 1,
            "notices": 0,
            "messages": [
                {"message": "message one",
                 "description": ["message one long"],
                 "id": ["path", "to", "test_one"],
                 "uid": "de93a48831454e0b9d965642f6d6bf8f",
                 "type": "error"},
                {"message": "message two",
                 "description": "message two long",
                 "id": ["path", "to", "test_two"],
                 "uid": "f44c1930887c4d9e8bd2403d4fe0253a",
                 "compatibility_type": "error",
                 "type": "warning"}],
            "metadata": {},
            "compatibility_summary": {
                "errors": 1,
                "warnings": 1,
                "notices": 0}}
    def csv(self, job_id):
        """Download the tally CSV for *job_id*; return (header, sorted rows)."""
        r = self.client.get(reverse('zadmin.validation_tally_csv',
                                    args=[job_id]))
        assert r.status_code == 200
        rdr = csv.reader(StringIO(r.content))
        header = rdr.next()
        rows = sorted((r for r in rdr), key=lambda r: r[0])
        return header, rows
    @mock.patch('olympia.zadmin.tasks.run_validator')
    def test_csv(self, run_validator):
        run_validator.return_value = json.dumps(self.data)
        self.start_validation()
        res = ValidationResult.objects.get()
        assert res.task_error is None
        header, rows = self.csv(res.validation_job.pk)
        assert header == ['message_id', 'message', 'long_message',
                          'type', 'addons_affected']
        assert rows.pop(0) == ['path.to.test_one',
                               'message one', 'message one long', 'error', '1']
        assert rows.pop(0) == ['path.to.test_two',
                               'message two', 'message two long', 'error', '1']
    def test_count_per_addon(self):
        # Tallying the same results three times triples addons_affected.
        job = self.create_job()
        data_str = json.dumps(self.data)
        for i in range(3):
            tasks.tally_validation_results(job.pk, data_str)
        header, rows = self.csv(job.pk)
        assert rows.pop(0) == ['path.to.test_one',
                               'message one', 'message one long', 'error', '3']
        assert rows.pop(0) == ['path.to.test_two',
                               'message two', 'message two long', 'error', '3']
    def test_nested_list_messages(self):
        job = self.create_job()
        self.data['messages'] = [{
            "message": "message one",
            "description": ["message one long", ["something nested"]],
            "id": ["path", "to", "test_one"],
            "uid": "de93a48831454e0b9d965642f6d6bf8f",
            "type": "error",
        }]
        data_str = json.dumps(self.data)
        # This was raising an exception. bug 733845
        tasks.tally_validation_results(job.pk, data_str)
class TestEmailPreview(TestCase):
    """CSV export of previewed emails recorded under an EmailPreviewTopic."""
    fixtures = ['base/addon_3615', 'base/users']
    def setUp(self):
        super(TestEmailPreview, self).setUp()
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        addon = Addon.objects.get(pk=3615)
        self.topic = EmailPreviewTopic(addon)
    def test_csv(self):
        # Record one previewed mail, then download the topic's CSV and
        # check the header plus the single data row (body is UTF-8 bytes).
        self.topic.send_mail('the subject', u'Hello Ivan Krsti\u0107',
                             from_email='admin@mozilla.org',
                             recipient_list=['funnyguy@mozilla.org'])
        response = self.client.get(reverse('zadmin.email_preview_csv',
                                           args=[self.topic.topic]))
        assert response.status_code == 200
        reader = csv.reader(StringIO(response.content))
        header = reader.next()
        assert header == ['from_email', 'recipient_list', 'subject',
                          'body']
        first_row = reader.next()
        assert first_row == ['admin@mozilla.org', 'funnyguy@mozilla.org',
                             'the subject', 'Hello Ivan Krsti\xc4\x87']
class TestMonthlyPick(TestCase):
    """Admin formset for managing MonthlyPick rows.

    Several assertions index ``MonthlyPick.objects.all()[1]`` and so rely
    on the insertion order of the rows created here.
    """
    fixtures = ['base/addon_3615', 'base/users']
    def setUp(self):
        super(TestMonthlyPick, self).setUp()
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        self.url = reverse('zadmin.monthly_pick')
        addon = Addon.objects.get(pk=3615)
        MonthlyPick.objects.create(addon=addon,
                                   locale='zh-CN',
                                   blurb="test data",
                                   image="http://www.google.com")
        # First initial form of the rendered formset, seeded from the row
        # created above.
        self.f = self.client.get(self.url).context['form'].initial_forms[0]
        self.initial = self.f.initial
    def test_form_initial(self):
        assert self.initial['addon'] == 3615
        assert self.initial['locale'] == 'zh-CN'
        assert self.initial['blurb'] == 'test data'
        assert self.initial['image'] == 'http://www.google.com'
    def test_success_insert(self):
        # Duplicating the initial form without an id inserts a new row.
        dupe = initial(self.f)
        del dupe['id']
        dupe.update(locale='fr')
        data = formset(initial(self.f), dupe, initial_count=1)
        self.client.post(self.url, data)
        assert MonthlyPick.objects.count() == 2
        assert MonthlyPick.objects.all()[1].locale == 'fr'
    def test_insert_no_image(self):
        # The image field is optional on insert.
        dupe = initial(self.f)
        dupe.update(id='', image='', locale='en-US')
        data = formset(initial(self.f), dupe, initial_count=1)
        self.client.post(self.url, data)
        assert MonthlyPick.objects.count() == 2
        assert MonthlyPick.objects.all()[1].image == ''
    def test_success_insert_no_locale(self):
        # Omitting the locale stores an empty string.
        dupe = initial(self.f)
        del dupe['id']
        del dupe['locale']
        data = formset(initial(self.f), dupe, initial_count=1)
        self.client.post(self.url, data)
        assert MonthlyPick.objects.count() == 2
        assert MonthlyPick.objects.all()[1].locale == ''
    def test_insert_long_blurb(self):
        # The blurb field enforces a 200-character maximum.
        dupe = initial(self.f)
        dupe.update(id='', blurb='x' * 201, locale='en-US')
        data = formset(initial(self.f), dupe, initial_count=1)
        r = self.client.post(self.url, data)
        assert r.context['form'].errors[1]['blurb'][0] == (
            'Ensure this value has at most 200 characters (it has 201).')
    def test_success_update(self):
        d = initial(self.f)
        d.update(locale='fr')
        r = self.client.post(self.url, formset(d, initial_count=1))
        assert r.status_code == 302
        assert MonthlyPick.objects.all()[0].locale == 'fr'
    def test_success_delete(self):
        d = initial(self.f)
        d.update(DELETE=True)
        self.client.post(self.url, formset(d, initial_count=1))
        assert MonthlyPick.objects.count() == 0
    def test_require_login(self):
        self.client.logout()
        r = self.client.get(self.url)
        assert r.status_code == 302
class TestFeatures(TestCase):
    """Admin formset (zadmin.features) managing FeaturedCollection rows."""
    fixtures = ['base/users', 'base/collections', 'base/addon_3615.json']
    def setUp(self):
        super(TestFeatures, self).setUp()
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        self.url = reverse('zadmin.features')
        FeaturedCollection.objects.create(application=amo.FIREFOX.id,
                                          locale='zh-CN', collection_id=80)
        # First initial form of the formset; reused to build POST data below.
        self.f = self.client.get(self.url).context['form'].initial_forms[0]
        self.initial = self.f.initial
    def test_form_initial(self):
        assert self.initial['application'] == amo.FIREFOX.id
        assert self.initial['locale'] == 'zh-CN'
        assert self.initial['collection'] == 80
    def test_form_attrs(self):
        # The rendered table carries the data attributes the JS relies on.
        r = self.client.get(self.url)
        assert r.status_code == 200
        doc = pq(r.content)
        assert doc('#features tr').attr('data-app') == str(amo.FIREFOX.id)
        assert doc('#features td.app').hasClass(amo.FIREFOX.short)
        assert doc('#features td.collection.loading').attr(
            'data-collection') == '80'
        assert doc('#features .collection-ac.js-hidden')
        assert not doc('#features .collection-ac[disabled]')
    def test_disabled_autocomplete_errors(self):
        """If any collection errors, autocomplete field should be enabled."""
        d = dict(application=amo.FIREFOX.id, collection=999)
        data = formset(self.initial, d, initial_count=1)
        r = self.client.post(self.url, data)
        doc = pq(r.content)
        assert not doc('#features .collection-ac[disabled]')
    def test_required_app(self):
        d = dict(locale='zh-CN', collection=80)
        data = formset(self.initial, d, initial_count=1)
        r = self.client.post(self.url, data)
        assert r.status_code == 200
        assert r.context['form'].errors[0]['application'] == (
            ['This field is required.'])
        assert r.context['form'].errors[0]['collection'] == (
            ['Invalid collection for this application.'])
    def test_bad_app(self):
        d = dict(application=999, collection=80)
        data = formset(self.initial, d, initial_count=1)
        r = self.client.post(self.url, data)
        assert r.context['form'].errors[0]['application'] == [
            'Select a valid choice. 999 is not one of the available choices.']
    def test_bad_collection_for_app(self):
        # Collection 80 belongs to Firefox, not Thunderbird.
        d = dict(application=amo.THUNDERBIRD.id, collection=80)
        data = formset(self.initial, d, initial_count=1)
        r = self.client.post(self.url, data)
        assert r.context['form'].errors[0]['collection'] == (
            ['Invalid collection for this application.'])
    def test_optional_locale(self):
        d = dict(application=amo.FIREFOX.id, collection=80)
        data = formset(self.initial, d, initial_count=1)
        r = self.client.post(self.url, data)
        assert r.context['form'].errors == [{}]
    def test_bad_locale(self):
        d = dict(application=amo.FIREFOX.id, locale='klingon', collection=80)
        data = formset(self.initial, d, initial_count=1)
        r = self.client.post(self.url, data)
        assert r.context['form'].errors[0]['locale'] == (
            ['Select a valid choice. klingon is not one of the available '
             'choices.'])
    def test_required_collection(self):
        d = dict(application=amo.FIREFOX.id)
        data = formset(self.initial, d, initial_count=1)
        r = self.client.post(self.url, data)
        assert r.context['form'].errors[0]['collection'] == (
            ['This field is required.'])
    def test_bad_collection(self):
        d = dict(application=amo.FIREFOX.id, collection=999)
        data = formset(self.initial, d, initial_count=1)
        r = self.client.post(self.url, data)
        assert r.context['form'].errors[0]['collection'] == (
            ['Invalid collection for this application.'])
    def test_success_insert(self):
        dupe = initial(self.f)
        del dupe['id']
        dupe.update(locale='fr')
        data = formset(initial(self.f), dupe, initial_count=1)
        self.client.post(self.url, data)
        assert FeaturedCollection.objects.count() == 2
        assert FeaturedCollection.objects.all()[1].locale == 'fr'
    def test_success_update(self):
        d = initial(self.f)
        d.update(locale='fr')
        r = self.client.post(self.url, formset(d, initial_count=1))
        assert r.status_code == 302
        assert FeaturedCollection.objects.all()[0].locale == 'fr'
    def test_success_delete(self):
        d = initial(self.f)
        d.update(DELETE=True)
        self.client.post(self.url, formset(d, initial_count=1))
        assert FeaturedCollection.objects.count() == 0
class TestLookup(TestCase):
    """JSON user lookup endpoint (zadmin.search) used by admin autocomplete."""
    fixtures = ['base/users']
    def setUp(self):
        super(TestLookup, self).setUp()
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        self.user = UserProfile.objects.get(pk=999)
        self.url = reverse('zadmin.search', args=['users', 'userprofile'])
    def test_logged_out(self):
        self.client.logout()
        assert self.client.get('%s?q=admin' % self.url).status_code == 403
    def check_results(self, q, expected):
        # Query the endpoint with `q` and assert every expected
        # (value, label) pair appears in the JSON response.
        res = self.client.get(urlparams(self.url, q=q))
        assert res.status_code == 200
        content = json.loads(res.content)
        assert len(content) == len(expected)
        ids = [int(c['value']) for c in content]
        emails = [u'%s' % c['label'] for c in content]
        for d in expected:
            id = d['value']
            email = u'%s' % d['label']
            assert id in ids, (
                'Expected user ID "%s" not found' % id)
            assert email in emails, (
                'Expected username "%s" not found' % email)
    def test_lookup_wrong_model(self):
        self.url = reverse('zadmin.search', args=['doesnt', 'exist'])
        res = self.client.get(urlparams(self.url, q=''))
        assert res.status_code == 404
    def test_lookup_empty(self):
        # An empty query returns every user.
        users = UserProfile.objects.values('id', 'email')
        self.check_results('', [dict(
            value=u['id'], label=u['email']) for u in users])
    def test_lookup_by_id(self):
        self.check_results(self.user.id, [dict(value=self.user.id,
                                               label=self.user.email)])
    def test_lookup_by_email(self):
        self.check_results(self.user.email, [dict(value=self.user.id,
                                                  label=self.user.email)])
    def test_lookup_by_username(self):
        self.check_results(self.user.username, [dict(value=self.user.id,
                                                     label=self.user.email)])
class TestAddonSearch(amo.tests.ESTestCase):
    """Elasticsearch-backed admin add-on search (zadmin.addon-search)."""
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        super(TestAddonSearch, self).setUp()
        self.reindex(Addon)
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        self.url = reverse('zadmin.addon-search')
    def test_lookup_addon(self):
        res = self.client.get(urlparams(self.url, q='delicious'))
        # There's only one result, so it should just forward us to that page.
        assert res.status_code == 302
class TestAddonAdmin(TestCase):
    """Django admin changelist page for the Addon model."""
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        super(TestAddonAdmin, self).setUp()
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        self.url = reverse('admin:addons_addon_changelist')
    def test_basic(self):
        # Only the fixture add-on is listed, linking to its change page.
        res = self.client.get(self.url)
        doc = pq(res.content)
        rows = doc('#result_list tbody tr')
        assert rows.length == 1
        assert rows.find('a').attr('href') == (
            '/en-US/admin/models/addons/addon/3615/')
class TestAddonManagement(TestCase):
    """Admin view (zadmin.addon_manage) for add-on/file status changes."""
    fixtures = ['base/addon_3615', 'base/users']
    def setUp(self):
        super(TestAddonManagement, self).setUp()
        self.addon = Addon.objects.get(pk=3615)
        self.url = reverse('zadmin.addon_manage', args=[self.addon.slug])
        self.client.login(username='admin@mozilla.com', password='password')
    def test_can_manage_unlisted_addons(self):
        """Unlisted addons can be managed too."""
        self.addon.update(is_listed=False)
        assert self.client.get(self.url).status_code == 200
    def _form_data(self, data=None):
        # Baseline POST payload for the status formset; `data` overrides keys.
        initial_data = {
            'status': '4',
            'form-0-status': '4',
            'form-0-id': '67442',
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '1',
        }
        if data:
            initial_data.update(data)
        return initial_data
    def test_addon_status_change(self):
        data = self._form_data({'status': '3'})
        r = self.client.post(self.url, data, follow=True)
        assert r.status_code == 200
        addon = Addon.objects.get(pk=3615)
        assert addon.status == 3
    def test_addon_file_status_change(self):
        data = self._form_data({'form-0-status': '1'})
        r = self.client.post(self.url, data, follow=True)
        assert r.status_code == 200
        file = File.objects.get(pk=67442)
        assert file.status == 1
    def test_addon_deleted_file_status_change(self):
        # Files on deleted versions cannot have their status changed.
        file = File.objects.get(pk=67442)
        file.version.update(deleted=True)
        data = self._form_data({'form-0-status': '1'})
        r = self.client.post(self.url, data, follow=True)
        # Form errors are silently suppressed.
        assert r.status_code == 200
        # But no change.
        assert file.status == 4
    @mock.patch.object(File, 'file_path',
                       amo.tests.AMOPaths().file_fixture_path(
                           'delicious_bookmarks-2.1.106-fx.xpi'))
    def test_regenerate_hash(self):
        version = Version.objects.create(addon_id=3615)
        file = File.objects.create(
            filename='delicious_bookmarks-2.1.106-fx.xpi', version=version)
        r = self.client.post(reverse('zadmin.recalc_hash', args=[file.id]))
        assert json.loads(r.content)[u'success'] == 1
        file = File.objects.get(pk=file.id)
        assert file.size, 'File size should not be zero'
        assert file.hash, 'File hash should not be empty'
    @mock.patch.object(File, 'file_path',
                       amo.tests.AMOPaths().file_fixture_path(
                           'delicious_bookmarks-2.1.106-fx.xpi'))
    def test_regenerate_hash_get(self):
        """ Don't allow GET """
        version = Version.objects.create(addon_id=3615)
        file = File.objects.create(
            filename='delicious_bookmarks-2.1.106-fx.xpi', version=version)
        r = self.client.get(reverse('zadmin.recalc_hash', args=[file.id]))
        assert r.status_code == 405 # GET out of here
class TestCompat(amo.tests.ESTestCase):
    """Compatibility report dashboard (zadmin.compat)."""
    fixtures = ['base/users']
    def setUp(self):
        super(TestCompat, self).setUp()
        self.url = reverse('zadmin.compat')
        self.client.login(username='admin@mozilla.com', password='password')
        self.app = amo.FIREFOX
        self.app_version = amo.COMPAT[0]['main']
        self.addon = self.populate(guid='xxx')
        self.generate_reports(self.addon, good=0, bad=0, app=self.app,
                              app_version=self.app_version)
    def update(self):
        # Recompute the compatibility report and refresh the ES index.
        compatibility_report()
        self.refresh()
    def populate(self, **kw):
        # Create an add-on with update counts so it appears in the report.
        now = datetime.now()
        name = 'Addon %s' % now
        kw.update(guid=name)
        addon = amo.tests.addon_factory(name=name, **kw)
        UpdateCount.objects.create(addon=addon, count=10, date=now)
        return addon
    def generate_reports(self, addon, good, bad, app, app_version):
        # Create `good` working and `bad` broken compat reports, then
        # regenerate the dashboard data.
        defaults = dict(guid=addon.guid, app_guid=app.guid,
                        app_version=app_version)
        for x in xrange(good):
            CompatReport.objects.create(works_properly=True, **defaults)
        for x in xrange(bad):
            CompatReport.objects.create(works_properly=False, **defaults)
        self.update()
    def get_pq(self, **kw):
        r = self.client.get(self.url, kw)
        assert r.status_code == 200
        return pq(r.content)('#compat-results')
    def test_defaults(self):
        r = self.client.get(self.url)
        assert r.status_code == 200
        assert r.context['app'] == self.app
        assert r.context['version'] == self.app_version
        table = pq(r.content)('#compat-results')
        assert table.length == 1
        assert table.find('.no-results').length == 1
    def check_row(self, tr, addon, good, bad, percentage, app, app_version):
        # Verify a single result row: name/version links, incompatibility
        # counts, and the prefilled CompatOverride admin form.
        assert tr.length == 1
        version = addon.current_version.version
        name = tr.find('.name')
        assert name.find('.version').text() == 'v' + version
        assert name.remove('.version').text() == unicode(addon.name)
        assert name.find('a').attr('href') == addon.get_url_path()
        assert tr.find('.maxver').text() == (
            addon.compatible_apps[app].max.version)
        incompat = tr.find('.incompat')
        assert incompat.find('.bad').text() == str(bad)
        assert incompat.find('.total').text() == str(good + bad)
        percentage += '%'
        assert percentage in incompat.text(), (
            'Expected incompatibility to be %r' % percentage)
        assert tr.find('.version a').attr('href') == (
            reverse('devhub.versions.edit',
                    args=[addon.slug, addon.current_version.id]))
        assert tr.find('.reports a').attr('href') == (
            reverse('compat.reporter_detail', args=[addon.guid]))
        form = tr.find('.overrides form')
        assert form.attr('action') == reverse(
            'admin:addons_compatoverride_add')
        self.check_field(form, '_compat_ranges-TOTAL_FORMS', '1')
        self.check_field(form, '_compat_ranges-INITIAL_FORMS', '0')
        self.check_field(form, '_continue', '1')
        self.check_field(form, '_confirm', '1')
        self.check_field(form, 'addon', str(addon.id))
        self.check_field(form, 'guid', addon.guid)
        compat_field = '_compat_ranges-0-%s'
        self.check_field(form, compat_field % 'min_version', '0')
        self.check_field(form, compat_field % 'max_version', version)
        self.check_field(form, compat_field % 'app', str(app.id))
        self.check_field(form, compat_field % 'min_app_version',
                         app_version + 'a1')
        self.check_field(form, compat_field % 'max_app_version',
                         app_version + '*')
    def check_field(self, form, name, val):
        assert form.find('input[name="%s"]' % name).val() == val
    def test_firefox_hosted(self):
        addon = self.populate()
        self.generate_reports(addon, good=0, bad=11, app=self.app,
                              app_version=self.app_version)
        tr = self.get_pq().find('tr[data-guid="%s"]' % addon.guid)
        self.check_row(tr, addon, good=0, bad=11, percentage='100.0',
                       app=self.app, app_version=self.app_version)
        # Add an override for this current app version.
        compat = CompatOverride.objects.create(addon=addon, guid=addon.guid)
        CompatOverrideRange.objects.create(
            compat=compat,
            app=amo.FIREFOX.id, min_app_version=self.app_version + 'a1',
            max_app_version=self.app_version + '*')
        # Check that there is an override for this current app version.
        tr = self.get_pq().find('tr[data-guid="%s"]' % addon.guid)
        assert tr.find('.overrides a').attr('href') == (
            reverse('admin:addons_compatoverride_change', args=[compat.id]))
    def test_non_default_version(self):
        # Reports for a non-default app version only show up when that
        # version is explicitly requested via `appver`.
        app_version = amo.COMPAT[2]['main']
        addon = self.populate()
        self.generate_reports(addon, good=0, bad=11, app=self.app,
                              app_version=app_version)
        pq = self.get_pq()
        assert pq.find('tr[data-guid="%s"]' % addon.guid).length == 0
        appver = '%s-%s' % (self.app.id, app_version)
        tr = self.get_pq(appver=appver)('tr[data-guid="%s"]' % addon.guid)
        self.check_row(tr, addon, good=0, bad=11, percentage='100.0',
                       app=self.app, app_version=app_version)
    def test_minor_versions(self):
        # Reports against alpha sub-versions are aggregated into the main one.
        addon = self.populate()
        self.generate_reports(addon, good=0, bad=1, app=self.app,
                              app_version=self.app_version)
        self.generate_reports(addon, good=1, bad=2, app=self.app,
                              app_version=self.app_version + 'a2')
        tr = self.get_pq(ratio=0.0, minimum=0).find('tr[data-guid="%s"]' %
                                                    addon.guid)
        self.check_row(tr, addon, good=1, bad=3, percentage='75.0',
                       app=self.app, app_version=self.app_version)
    def test_ratio(self):
        # The `ratio` filter controls the minimum incompatibility percentage.
        addon = self.populate()
        self.generate_reports(addon, good=11, bad=11, app=self.app,
                              app_version=self.app_version)
        # Should not show up for > 80%.
        pq = self.get_pq()
        assert pq.find('tr[data-guid="%s"]' % addon.guid).length == 0
        # Should not show up for > 50%.
        tr = self.get_pq(ratio=.5).find('tr[data-guid="%s"]' % addon.guid)
        assert tr.length == 0
        # Should show up for > 40%.
        tr = self.get_pq(ratio=.4).find('tr[data-guid="%s"]' % addon.guid)
        assert tr.length == 1
    def test_min_incompatible(self):
        # The `minimum` filter controls the minimum number of bad reports.
        addon = self.populate()
        self.generate_reports(addon, good=0, bad=11, app=self.app,
                              app_version=self.app_version)
        # Should show up for >= 10.
        pq = self.get_pq()
        assert pq.find('tr[data-guid="%s"]' % addon.guid).length == 1
        # Should show up for >= 0.
        tr = self.get_pq(minimum=0).find('tr[data-guid="%s"]' % addon.guid)
        assert tr.length == 1
        # Should not show up for >= 20.
        tr = self.get_pq(minimum=20).find('tr[data-guid="%s"]' % addon.guid)
        assert tr.length == 0
class TestMemcache(TestCase):
    """Admin view (zadmin.memcache) for flushing the cache."""
    fixtures = ['base/addon_3615', 'base/users']
    def setUp(self):
        super(TestMemcache, self).setUp()
        self.url = reverse('zadmin.memcache')
        cache.set('foo', 'bar')
        self.client.login(username='admin@mozilla.com', password='password')
    def test_login(self):
        self.client.logout()
        assert self.client.get(self.url).status_code == 302
    def test_can_clear(self):
        # Posting yes=True flushes the whole cache.
        self.client.post(self.url, {'yes': 'True'})
        assert cache.get('foo') is None
    def test_cant_clear(self):
        # Anything other than 'True' leaves the cache intact.
        self.client.post(self.url, {'yes': 'False'})
        assert cache.get('foo') == 'bar'
class TestElastic(amo.tests.ESTestCase):
    """Admin Elasticsearch status page (zadmin.elastic)."""
    fixtures = ['base/addon_3615', 'base/users']
    def setUp(self):
        super(TestElastic, self).setUp()
        self.url = reverse('zadmin.elastic')
        self.client.login(username='admin@mozilla.com', password='password')
    def test_login(self):
        # Anonymous users are redirected to login with a return URL.
        self.client.logout()
        self.assert3xx(
            self.client.get(self.url),
            reverse('users.login') + '?to=/en-US/admin/elastic')
class TestEmailDevs(TestCase):
    """Bulk developer mailer (zadmin.email_devs): audience filters and
    preview mode."""
    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        super(TestEmailDevs, self).setUp()
        self.login('admin')
        self.addon = Addon.objects.get(pk=3615)

    def post(self, recipients='eula', subject='subject', message='msg',
             preview_only=False):
        """Submit the mailer form and return the response."""
        return self.client.post(reverse('zadmin.email_devs'),
                                dict(recipients=recipients, subject=subject,
                                     message=message,
                                     preview_only=preview_only))

    def test_preview(self):
        # Preview mode records the mail in the preview topic only; nothing
        # is actually sent.
        res = self.post(preview_only=True)
        self.assertNoFormErrors(res)
        preview = EmailPreviewTopic(topic='email-devs')
        assert [e.recipient_list for e in preview.filter()] == ['del@icio.us']
        assert len(mail.outbox) == 0

    def test_actual(self):
        subject = 'about eulas'
        message = 'message about eulas'
        res = self.post(subject=subject, message=message)
        self.assertNoFormErrors(res)
        self.assert3xx(res, reverse('zadmin.email_devs'))
        assert len(mail.outbox) == 1
        assert mail.outbox[0].subject == subject
        assert mail.outbox[0].body == message
        assert mail.outbox[0].to == ['del@icio.us']
        assert mail.outbox[0].from_email == settings.DEFAULT_FROM_EMAIL

    def test_only_eulas(self):
        # Add-ons without a EULA are excluded from the default 'eula'
        # audience.
        self.addon.update(eula=None)
        res = self.post()
        self.assertNoFormErrors(res)
        assert len(mail.outbox) == 0

    def test_sdk_devs(self):
        # Jetpack (SDK) add-on developers receive 'sdk' mails.
        (File.objects.filter(version__addon=self.addon)
            .update(jetpack_version='1.5'))
        res = self.post(recipients='sdk')
        self.assertNoFormErrors(res)
        assert len(mail.outbox) == 1
        assert mail.outbox[0].to == ['del@icio.us']

    def test_only_sdk_devs(self):
        # Without jetpack files, nobody matches the 'sdk' audience.
        res = self.post(recipients='sdk')
        self.assertNoFormErrors(res)
        assert len(mail.outbox) == 0

    def test_only_extensions(self):
        self.addon.update(type=amo.ADDON_EXTENSION)
        res = self.post(recipients='all_extensions')
        self.assertNoFormErrors(res)
        assert len(mail.outbox) == 1

    def test_ignore_deleted_always(self):
        # Deleted add-ons are excluded from every audience choice.
        self.addon.update(status=amo.STATUS_DELETED)
        for name, label in DevMailerForm._choices:
            res = self.post(recipients=name)
            self.assertNoFormErrors(res)
            assert len(mail.outbox) == 0

    def test_exclude_pending_for_addons(self):
        # Pending add-ons are excluded except for the app-specific audiences.
        self.addon.update(status=amo.STATUS_PENDING)
        for name, label in DevMailerForm._choices:
            if name in ('payments', 'desktop_apps'):
                continue
            res = self.post(recipients=name)
            self.assertNoFormErrors(res)
            assert len(mail.outbox) == 0

    def test_exclude_fxa_migrated(self):
        # Developers already migrated to FxA are excluded from 'fxa'.
        user = self.addon.authors.get()
        user.update(fxa_id='yup')
        res = self.post(recipients='fxa')
        self.assertNoFormErrors(res)
        assert len(mail.outbox) == 0

    def test_include_fxa_not_migrated(self):
        # Developers whose fxa_id is missing (None) or empty are included.
        # (Removed a dead `user = self.addon.authors.get()` assignment that
        # was immediately overwritten below.)
        res = self.post(recipients='fxa')
        self.assertNoFormErrors(res)
        assert len(mail.outbox) == 1
        user = self.addon.authors.get()
        user.update(fxa_id='')
        res = self.post(recipients='fxa')
        self.assertNoFormErrors(res)
        assert len(mail.outbox) == 2
class TestFileDownload(TestCase):
    """Admin download of FileUpload objects (zadmin.download_file)."""
    fixtures = ['base/users']
    def setUp(self):
        super(TestFileDownload, self).setUp()
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        # Upload a fixture image through devhub to create a FileUpload row.
        self.file = open(get_image_path('animated.png'), 'rb')
        resp = self.client.post(reverse('devhub.upload'),
                                {'upload': self.file})
        assert resp.status_code == 302
        self.upload = FileUpload.objects.get()
        self.url = reverse('zadmin.download_file', args=[self.upload.uuid])
    def test_download(self):
        """Test that downloading file_upload objects works."""
        resp = self.client.get(self.url)
        assert resp.status_code == 200
        assert resp.content == self.file.read()
class TestPerms(TestCase):
    """Access control: expected status code of each admin view per group."""
    fixtures = ['base/users']
    # Arbitrary UUID for download_file; the file never exists, so views
    # that allow access respond 404 and denied ones respond 403.
    FILE_ID = '1234567890abcdef1234567890abcdef'
    def assert_status(self, view, status, **kw):
        """Check that requesting the named view returns the expected status."""
        assert self.client.get(reverse(view, kwargs=kw)).status_code == status
    def test_admin_user(self):
        # Admin should see views with Django's perm decorator and our own.
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        self.assert_status('zadmin.index', 200)
        self.assert_status('zadmin.settings', 200)
        self.assert_status('zadmin.langpacks', 200)
        self.assert_status('zadmin.download_file', 404, uuid=self.FILE_ID)
        self.assert_status('zadmin.addon-search', 200)
        self.assert_status('zadmin.monthly_pick', 200)
        self.assert_status('zadmin.features', 200)
        self.assert_status('discovery.module_admin', 200)
    def test_staff_user(self):
        # Staff users have some privileges.
        user = UserProfile.objects.get(email='regular@mozilla.com')
        group = Group.objects.create(name='Staff', rules='AdminTools:View')
        GroupUser.objects.create(group=group, user=user)
        assert self.client.login(username='regular@mozilla.com',
                                 password='password')
        self.assert_status('zadmin.index', 200)
        self.assert_status('zadmin.settings', 200)
        self.assert_status('zadmin.langpacks', 200)
        self.assert_status('zadmin.download_file', 404, uuid=self.FILE_ID)
        self.assert_status('zadmin.addon-search', 200)
        self.assert_status('zadmin.monthly_pick', 200)
        self.assert_status('zadmin.features', 200)
        self.assert_status('discovery.module_admin', 200)
    def test_sr_reviewers_user(self):
        # Sr Reviewers users have only a few privileges.
        user = UserProfile.objects.get(email='regular@mozilla.com')
        group = Group.objects.create(name='Sr Reviewer',
                                     rules='ReviewerAdminTools:View')
        GroupUser.objects.create(group=group, user=user)
        assert self.client.login(username='regular@mozilla.com',
                                 password='password')
        self.assert_status('zadmin.index', 200)
        self.assert_status('zadmin.langpacks', 200)
        self.assert_status('zadmin.download_file', 404, uuid=self.FILE_ID)
        self.assert_status('zadmin.addon-search', 200)
        self.assert_status('zadmin.settings', 403)
    def test_bulk_compat_user(self):
        # Bulk Compatibility Updaters only have access to /admin/validation/*.
        user = UserProfile.objects.get(email='regular@mozilla.com')
        group = Group.objects.create(name='Bulk Compatibility Updaters',
                                     rules='BulkValidationAdminTools:View')
        GroupUser.objects.create(group=group, user=user)
        assert self.client.login(username='regular@mozilla.com',
                                 password='password')
        self.assert_status('zadmin.index', 200)
        self.assert_status('zadmin.validation', 200)
        self.assert_status('zadmin.langpacks', 403)
        self.assert_status('zadmin.download_file', 403, uuid=self.FILE_ID)
        self.assert_status('zadmin.addon-search', 403)
        self.assert_status('zadmin.settings', 403)
    def test_unprivileged_user(self):
        # Unprivileged user.
        assert self.client.login(username='regular@mozilla.com',
                                 password='password')
        self.assert_status('zadmin.index', 403)
        self.assert_status('zadmin.settings', 403)
        self.assert_status('zadmin.langpacks', 403)
        self.assert_status('zadmin.download_file', 403, uuid=self.FILE_ID)
        self.assert_status('zadmin.addon-search', 403)
        self.assert_status('zadmin.monthly_pick', 403)
        self.assert_status('zadmin.features', 403)
        self.assert_status('discovery.module_admin', 403)
        # Anonymous users should also get a 403.
        self.client.logout()
        self.assert3xx(self.client.get(reverse('zadmin.index')),
                       reverse('users.login') + '?to=/en-US/admin/')
|
andymckay/addons-server
|
src/olympia/zadmin/tests/test_views.py
|
Python
|
bsd-3-clause
| 77,618 | 0.000013 |
import warnings
class DimensionSelection:
    """Selection of members for one dimension, passed to construct_mdx.

    Exactly one of ``elements`` (iterable of element names), ``subset``
    (name of a TM1 subset) or ``expression`` (raw MDX set expression) may
    be supplied; with none given, the whole dimension is selected.
    """
    # Selection type markers, see determine_selection_type().
    SUBSET = 1
    EXPRESSION = 2
    ITERABLE = 3

    def __init__(self, dimension_name, elements=None, subset=None, expression=None):
        # Note: no placeholders, so a plain string (not f-string) is used.
        warnings.warn(
            "class DimensionSelection will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
            DeprecationWarning,
            stacklevel=2)
        self.dimension_name = dimension_name
        self.selection_type = self.determine_selection_type(elements, subset, expression)
        if self.selection_type == self.SUBSET:
            # Resolve a named TM1 subset into a set at query time.
            self.expression = curly_braces(expression="Tm1SubsetToSet([{dimension}], '{subset}')".format(
                dimension=dimension_name,
                subset=subset))
        elif self.selection_type == self.EXPRESSION:
            self.expression = curly_braces(expression=expression)
        elif self.selection_type == self.ITERABLE:
            # Explicit member list: [dim].[elem1],[dim].[elem2],...
            self.expression = curly_braces(expression=",".join(["[{}].[{}]".format(dimension_name, element)
                                                                for element
                                                                in elements]))
        elif not self.selection_type:
            # No selection given: take all elements of the dimension.
            self.expression = curly_braces(expression="TM1SubsetAll([{dimension}])".format(dimension=dimension_name))

    @staticmethod
    def determine_selection_type(elements=None, subset=None, expression=None):
        """Return the selection-type constant for the given arguments.

        :return: ITERABLE, SUBSET, EXPRESSION or None (no selection).
        :raises ValueError: if more than one selection kind is supplied.
        """
        warnings.warn(
            "Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
            DeprecationWarning,
            stacklevel=2)
        if elements is not None and subset is None and expression is None:
            return DimensionSelection.ITERABLE
        elif elements is None and subset is not None and expression is None:
            return DimensionSelection.SUBSET
        elif elements is None and subset is None and expression is not None:
            return DimensionSelection.EXPRESSION
        elif elements is None and subset is None and expression is None:
            return None
        else:
            raise ValueError("DimensionSelection constructor takes one type of selection only: "
                             "elements, subset or expression")
def construct_mdx_axis(dim_selections):
    """Build the MDX for a single axis (rows or columns).

    Stacked dimensions are cross-joined with ``*``.

    :param dim_selections: instances of TM1py.Utils.MDXUtils.DimensionSelection
    :return: a valid MDX fragment for one axis
    """
    warnings.warn(
        f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
        DeprecationWarning,
        stacklevel=2)
    expressions = [selection.expression for selection in dim_selections]
    return "*".join(expressions)
def construct_mdx(cube_name, rows, columns, contexts=None, suppress=None):
    """ Method to construct MDX Query from different dimension selection

    :param cube_name: Name of the Cube
    :param rows: List of DimensionSelections
    :param columns: List of DimensionSelections
    :param contexts: Dictionary of Dimensions and Elements
    :param suppress: "Both", "Rows", "Columns" or None
    :return: Generated MDX Query
    """
    warnings.warn(
        f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
        DeprecationWarning,
        stacklevel=2)
    # NON EMPTY prefixes per axis, driven by the suppress argument.
    suppress_mode = suppress.upper() if suppress else ""
    rows_prefix = "NON EMPTY " if suppress_mode in ("ROWS", "BOTH") else ""
    columns_prefix = "NON EMPTY " if suppress_mode in ("COLUMNS", "BOTH") else ""
    # Optional WHERE clause from the context (title) selections.
    where_clause = ""
    if contexts:
        members = ",".join("[{}].[{}]".format(dimension, element)
                           for dimension, element in contexts.items())
        where_clause = "WHERE ({})".format(members)
    return "SELECT {}{} ON ROWS, {}{} ON COLUMNS FROM [{}] {}".format(
        rows_prefix, construct_mdx_axis(rows),
        columns_prefix, construct_mdx_axis(columns),
        cube_name, where_clause)
def curly_braces(expression):
    """Wrap *expression* in curly braces unless it already has them.

    :param expression: an MDX set expression
    :return: the expression guaranteed to start with '{' and end with '}'
    """
    warnings.warn(
        f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
        DeprecationWarning,
        stacklevel=2)
    prefix = "" if expression.startswith("{") else "{"
    suffix = "" if expression.endswith("}") else "}"
    return prefix + expression + suffix
def read_cube_name_from_mdx(mdx):
    """ Read the cube name from a valid MDX Query

    :param mdx: The MDX Query as String
    :return: String, name of a cube
    """
    warnings.warn(
        f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
        DeprecationWarning,
        stacklevel=2)
    # The cube name is the first element of the parsed composition tuple.
    return read_dimension_composition_from_mdx(mdx)[0]
def read_dimension_composition_from_mdx(mdx):
    """Parse a valid MDX query into its structural parts.

    :param mdx: the MDX query as a string
    :return: tuple of (cube name, row dimensions, column dimensions,
             title/context dimensions)
    """
    warnings.warn(
        f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
        DeprecationWarning,
        stacklevel=2)
    rows_part, columns_part, from_part, where_part = split_mdx(mdx)
    # from_part looks like "[cube]"; strip the brackets.
    cube_name = from_part[1:-1]
    return (cube_name,
            read_dimension_composition_from_mdx_set_or_tuple(rows_part),
            read_dimension_composition_from_mdx_set_or_tuple(columns_part),
            read_dimension_composition_from_mdx_set_or_tuple(where_part))
def read_dimension_composition_from_mdx_set_or_tuple(mdx):
    """Dispatch an MDX fragment to the set- or tuple-parser.

    :param mdx: an MDX set, tuple or empty fragment
    :return: list of dimension names (empty for an empty fragment)
    """
    warnings.warn(
        "Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
        DeprecationWarning,
        stacklevel=2)
    condensed = ''.join(mdx.split())
    # case for mdx statement with no where statement
    if not condensed:
        return []
    # case for tuples mdx statement on rows or columns: "{(...)}"
    # BUG FIX: guard the [1]/[-2] access so one-character input can no
    # longer raise IndexError.
    if len(condensed) >= 2 and condensed[1] == '(' and condensed[-2] == ')':
        return read_dimension_composition_from_mdx_tuple(mdx)
    # case for where mdx statement: "(...)"
    if condensed[0] == '(' and condensed[-1] == ')':
        return read_dimension_composition_from_mdx_tuple(mdx)
    # case for set mdx statement on rows or columns
    return read_dimension_composition_from_mdx_set(mdx)
def read_dimension_composition_from_mdx_set(mdx):
    """Extract the dimension names from an MDX set expression.

    :param mdx: e.g. "{[dim1].[e1]}*{[dim2].[e2]}"
    :return: list of dimension names, one per stacked set
    """
    warnings.warn(
        f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
        DeprecationWarning,
        stacklevel=2)
    condensed = ''.join(mdx.split())
    names = []
    # Each stacked set is separated by "}*{"; the dimension name is the
    # first bracketed token inside each chunk.
    for chunk in condensed.split("}*{"):
        start = chunk.find("[")
        end = chunk.find("]")
        names.append(chunk[start + 1:end])
    return names
def read_dimension_composition_from_mdx_tuple(mdx):
    """Extract the dimension names from an MDX tuple expression.

    Parsing stops as soon as a dimension repeats, so only the first of
    potentially many tuples defines the composition.

    :param mdx: e.g. "([d1].[e1],[d2].[e2])"
    :return: list of dimension names
    """
    warnings.warn(
        f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
        DeprecationWarning,
        stacklevel=2)
    names = []
    for member in mdx.split(","):
        start, end = member.find("["), member.find("]")
        name = member[start + 1:end]
        # only parse through first tuple of potentially many tuples
        if name in names:
            break
        names.append(name)
    return names
def split_mdx(mdx):
    """Split an MDX query into its (rows, columns, from, where) fragments.

    :param mdx: the MDX query as a string
    :return: tuple (mdx_rows, mdx_columns, mdx_from, mdx_where)
    :raises ValueError: if the query cannot be parsed
    """
    warnings.warn(
        "Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
        DeprecationWarning,
        stacklevel=2)
    try:
        mdx_rows, mdx_rest = _find_case_and_space_insensitive_first_occurrence(
            text=mdx,
            pattern_start="{",
            pattern_end="}ONROWS"
        )
        mdx_columns, mdx_rest = _find_case_and_space_insensitive_first_occurrence(
            text=mdx_rest,
            pattern_start="{",
            pattern_end="}ONCOLUMNSFROM"
        )
        mdx_from, mdx_where = _find_case_and_space_insensitive_first_occurrence(
            text=mdx_rest,
            pattern_end="]WHERE"
        )
        return mdx_rows, mdx_columns, mdx_from, mdx_where
    except ValueError as parse_error:
        # BUG FIX: the ValueError was previously instantiated but never
        # raised, so a malformed query silently returned None.
        raise ValueError("Can't parse mdx: {}".format(mdx)) from parse_error
def _find_case_and_space_insensitive_first_occurrence(text, pattern_start=None, pattern_end=None):
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
text_without_spaces = ''.join(text.split())
text_without_spaces_and_uppercase = text_without_spaces.upper()
if pattern_start:
pattern_start = ''.join(pattern_start.split()).upper()
if pattern_end:
pattern_end = ''.join(pattern_end.split()).upper()
if text_without_spaces_and_uppercase.count(pattern_end) > 1:
raise ValueError("Invalid state. {} has more than 1 occurrences in text: {}".format(pattern_end, text))
pos_start = text_without_spaces_and_uppercase.find(pattern_start) if pattern_start else 0
pos_end = text_without_spaces_and_uppercase.find(pattern_end) if pattern_end else -1
# case of mdx statement without where clause
if pos_start == 0 and pos_end == -1:
return text, ""
selection = text_without_spaces[pos_start:pos_end + 1]
text = text_without_spaces[pos_end + len(pattern_end):]
return selection, text
|
OLAPLINE/TM1py
|
TM1py/Utils/MDXUtils.py
|
Python
|
mit
| 10,016 | 0.002895 |
import boto3
import numpy as np
import time
import json
import os
import pandas as pd

# Directory holding the ID-card images to run through Textract.
root_dir = '/document/'

# All candidate image files in the directory.
meine_id_kartes = os.listdir(root_dir)

# Textract client used to extract text from each image.
client = boto3.client(
    service_name='textract',
    region_name='eu-west-1',
    endpoint_url='https://textract.eu-west-1.amazonaws.com',
)

meine_id_karte_card_info = []

# For every card, send the image bytes to Textract and collect every
# detected text line.
for meine_id_karte in meine_id_kartes:
    # Crude rate limiting to stay under the Textract request quota.
    time.sleep(5)
    with open(root_dir + meine_id_karte, 'rb') as file:
        img_test = file.read()
        bytes_test = bytearray(img_test)
        print('Image loaded', root_dir + meine_id_karte)
    try:
        # Process using image bytes
        response = client.analyze_document(Document={'Bytes': bytes_test},
                                           FeatureTypes=['FORMS'])
        # Collect the text of every block that carries a 'Text' key.
        # (Replaces a pointless json.dumps/json.loads round-trip on each
        # block dict and a silent inner bare `except`.)
        blocks = response['Blocks']
        meine_id_karte_text = [block['Text'] for block in blocks
                               if 'Text' in block]
        meine_id_karte_card_info.append((meine_id_karte, meine_id_karte_text))
    except Exception as exc:
        # Best effort: skip images Textract cannot process, but report the
        # reason instead of swallowing every error with a bare `except:`.
        print('Failed to process', meine_id_karte, '-', exc)

df_legible = pd.DataFrame(meine_id_karte_card_info)
df_legible.to_csv('normal-karte.csv')
print(df_legible)
|
fclesio/learning-space
|
Python/textract_extraction.py
|
Python
|
gpl-2.0
| 1,358 | 0.002209 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
from os.path import abspath, join, dirname
from preggy import expect
import mock
# from tornado.concurrent import Future
import tornado.web
from tests.base import PythonTestCase, TestCase
from tornado.concurrent import Future
import thumbor.loaders.http_loader as loader
from thumbor.context import Context
from thumbor.config import Config
from thumbor.loaders import LoaderResult
def fixture_for(filename):
    """Return the absolute path of *filename* inside this module's fixtures dir."""
    fixtures_dir = join(dirname(__file__), 'fixtures')
    return abspath(join(fixtures_dir, filename))
class MainHandler(tornado.web.RequestHandler):
    """Test handler that always responds with the fixed body 'Hello'."""

    def get(self):
        self.write('Hello')
class EchoUserAgentHandler(tornado.web.RequestHandler):
    """Test handler that echoes back the request's User-Agent header."""

    def get(self):
        self.write(self.request.headers['User-Agent'])
class HandlerMock(object):
    """Stand-in for a tornado handler exposing only `.request.headers`."""

    def __init__(self, headers):
        self.request = RequestMock(headers)
class RequestMock(object):
    """Stand-in for a tornado request carrying only a headers mapping."""

    def __init__(self, headers):
        self.headers = headers
class ResponseMock:
    """Minimal stand-in for an HTTP response as consumed by the loader.

    Defaults the Content-Type header to 'image/jpeg' unless an explicit
    content_type is supplied.
    """

    def __init__(self, error=None, content_type=None, body=None, code=None):
        self.error = error
        self.code = code
        self.time_info = None
        self.body = body
        # fall back to the default image content type when none is given
        self.headers = {'Content-Type': content_type or 'image/jpeg'}
class ReturnContentTestCase(PythonTestCase):
    """Unit tests for loader.return_contents: the LoaderResult handed to the
    callback must reflect errors, empty bodies and successful responses."""

    def test_return_none_on_error(self):
        """A response carrying an error yields an unsuccessful, empty result."""
        response_mock = ResponseMock(error='Error', code=599)
        callback_mock = mock.Mock()
        ctx = Context(None, None, None)
        loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
        # the LoaderResult is the first positional argument of the callback
        result = callback_mock.call_args[0][0]
        expect(result).to_be_instance_of(LoaderResult)
        expect(result.buffer).to_be_null()
        expect(result.successful).to_be_false()

    def test_return_body_if_valid(self):
        """A 200 response with a body is passed through as the result buffer."""
        response_mock = ResponseMock(body='body', code=200)
        callback_mock = mock.Mock()
        ctx = Context(None, None, None)
        loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
        result = callback_mock.call_args[0][0]
        expect(result).to_be_instance_of(LoaderResult)
        expect(result.buffer).to_equal('body')

    def test_return_upstream_error_on_body_none(self):
        """A 200 response with body=None is flagged as an upstream error."""
        response_mock = ResponseMock(body=None, code=200)
        callback_mock = mock.Mock()
        ctx = Context(None, None, None)
        loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
        result = callback_mock.call_args[0][0]
        expect(result).to_be_instance_of(LoaderResult)
        expect(result.buffer).to_be_null()
        expect(result.successful).to_be_false()
        expect(result.error).to_equal(LoaderResult.ERROR_UPSTREAM)

    def test_return_upstream_error_on_body_empty(self):
        """A 200 response with an empty body is flagged as an upstream error."""
        response_mock = ResponseMock(body='', code=200)
        callback_mock = mock.Mock()
        ctx = Context(None, None, None)
        loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
        result = callback_mock.call_args[0][0]
        expect(result).to_be_instance_of(LoaderResult)
        expect(result.buffer).to_be_null()
        expect(result.successful).to_be_false()
        expect(result.error).to_equal(LoaderResult.ERROR_UPSTREAM)
class ValidateUrlTestCase(PythonTestCase):
    """Unit tests for loader.validate against the ALLOWED_SOURCES setting."""

    def test_with_allowed_sources(self):
        """Only exact hosts listed in ALLOWED_SOURCES pass validation."""
        config = Config()
        config.ALLOWED_SOURCES = ['s.glbimg.com']
        ctx = Context(None, config, None)
        # host not in the allow-list
        expect(
            loader.validate(
                ctx,
                'http://www.google.com/logo.jpg'
            )
        ).to_be_false()
        # subdomain of an allowed host is still rejected
        expect(
            loader.validate(
                ctx,
                'http://s2.glbimg.com/logo.jpg'
            )
        ).to_be_false()
        # malformed/injection-style URL is rejected
        expect(
            loader.validate(
                ctx,
                '/glob=:sfoir%20%20%3Co-pmb%20%20%20%20_%20%20%20%200%20%20g.-%3E%3Ca%20hplass='
            )
        ).to_be_false()
        expect(
            loader.validate(ctx, 'http://s.glbimg.com/logo.jpg')).to_be_true()

    def test_without_allowed_sources(self):
        """An empty ALLOWED_SOURCES list means every URL is accepted."""
        config = Config()
        config.ALLOWED_SOURCES = []
        ctx = Context(None, config, None)
        is_valid = loader.validate(ctx, 'http://www.google.com/logo.jpg')
        expect(is_valid).to_be_true()
class NormalizeUrlTestCase(PythonTestCase):
    """Unit tests for loader._normalize_url (scheme defaulting and unquoting)."""

    def test_should_normalize_url(self):
        """URLs without a scheme get 'http://' prepended."""
        for url in ['http://some.url', 'some.url']:
            expect(loader._normalize_url(url)).to_equal('http://some.url')

    def test_should_normalize_quoted_url(self):
        """Percent-encoded scheme separators are unquoted before use."""
        url = 'https%3A//www.google.ca/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png'
        expected = 'https://www.google.ca/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png'
        result = loader._normalize_url(url)
        expect(result).to_equal(expected)
class HttpLoaderTestCase(TestCase):
    """Integration tests for loader.load against a local tornado app."""

    def get_app(self):
        """Serve MainHandler at '/' so load() has a real endpoint to hit."""
        application = tornado.web.Application([
            (r"/", MainHandler),
        ])
        return application

    def test_load_with_callback(self):
        """load() invokes the callback with a successful LoaderResult."""
        url = self.get_url('/')
        config = Config()
        ctx = Context(None, config, None)
        loader.load(ctx, url, self.stop)
        result = self.wait()
        expect(result).to_be_instance_of(LoaderResult)
        expect(result.buffer).to_equal('Hello')
        expect(result.successful).to_be_true()

    def test_load_with_curl(self):
        """Same behavior when the curl-based async HTTP client is enabled."""
        url = self.get_url('/')
        config = Config()
        config.HTTP_LOADER_CURL_ASYNC_HTTP_CLIENT = True
        ctx = Context(None, config, None)
        loader.load(ctx, url, self.stop)
        result = self.wait()
        expect(result).to_be_instance_of(LoaderResult)
        expect(result.buffer).to_equal('Hello')
        expect(result.successful).to_be_true()

    def test_should_return_a_future(self):
        """Without a callback, load() returns a tornado Future."""
        url = self.get_url('/')
        config = Config()
        ctx = Context(None, config, None)
        future = loader.load(ctx, url)
        expect(isinstance(future, Future)).to_be_true()
class HttpLoaderWithUserAgentForwardingTestCase(TestCase):
    """Integration tests for the HTTP_LOADER_FORWARD_USER_AGENT option."""

    def get_app(self):
        """Serve EchoUserAgentHandler so the forwarded header is observable."""
        application = tornado.web.Application([
            (r"/", EchoUserAgentHandler),
        ])
        return application

    def test_load_with_user_agent(self):
        """The incoming request's User-Agent is forwarded to the upstream."""
        url = self.get_url('/')
        config = Config()
        config.HTTP_LOADER_FORWARD_USER_AGENT = True
        ctx = Context(None, config, None, HandlerMock({"User-Agent": "test-user-agent"}))
        loader.load(ctx, url, self.stop)
        result = self.wait()
        expect(result).to_be_instance_of(LoaderResult)
        expect(result.buffer).to_equal('test-user-agent')

    def test_load_with_default_user_agent(self):
        """Without an incoming User-Agent, the configured default is sent."""
        url = self.get_url('/')
        config = Config()
        config.HTTP_LOADER_FORWARD_USER_AGENT = True
        config.HTTP_LOADER_DEFAULT_USER_AGENT = "DEFAULT_USER_AGENT"
        ctx = Context(None, config, None, HandlerMock({}))
        loader.load(ctx, url, self.stop)
        result = self.wait()
        expect(result).to_be_instance_of(LoaderResult)
        expect(result.buffer).to_equal('DEFAULT_USER_AGENT')
|
BetterCollective/thumbor
|
tests/loaders/test_http_loader.py
|
Python
|
mit
| 7,414 | 0.00054 |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
""" The main window for GTG, listing tags, and open and closed tasks """
from webbrowser import open as openurl
import threading
from gi.repository import GObject, Gtk, Gdk
from GTG import info
from GTG.backends.backendsignals import BackendSignals
from GTG.core.dirs import ICONS_DIR
from GTG.core.search import parse_search_query, SEARCH_COMMANDS, InvalidQuery
from GTG.core.tag import SEARCH_TAG, ALLTASKS_TAG
from GTG.core.task import Task
from GTG.core.translations import _, ngettext
from GTG.gtk.browser import GnomeConfig
from GTG.gtk.browser.custominfobar import CustomInfoBar
from GTG.gtk.browser.modifytags_dialog import ModifyTagsDialog
from GTG.gtk.browser.tag_context_menu import TagContextMenu
from GTG.gtk.browser.treeview_factory import TreeviewFactory
from GTG.gtk.editor.calendar import GTGCalendar
from GTG.gtk.tag_completion import TagCompletion
from GTG.tools.dates import Date
from GTG.tools.logger import Log
class TaskBrowser(GObject.GObject):
""" The UI for browsing open and closed tasks,
and listing tags in a tree """
__string_signal__ = (GObject.SignalFlags.RUN_FIRST, None, (str, ))
__none_signal__ = (GObject.SignalFlags.RUN_FIRST, None, tuple())
__gsignals__ = {'task-added-via-quick-add': __string_signal__,
'visibility-toggled': __none_signal__,
}
    def __init__(self, requester, vmanager):
        """Build the browser window.

        :param requester: the GTG requester used for all data access
        :param vmanager: the view manager owning this window
        """
        GObject.GObject.__init__(self)
        # Object prime variables
        self.req = requester
        self.vmanager = vmanager
        self.config = self.req.get_config('browser')
        self.tag_active = False
        self.applied_tags = []
        # Treeviews handlers
        self.vtree_panes = {}
        self.tv_factory = TreeviewFactory(self.req, self.config)
        # Active Tasks
        self.activetree = self.req.get_tasks_tree(name='active', refresh=False)
        self.activetree.apply_filter('active', refresh=False)
        self.vtree_panes['active'] = \
            self.tv_factory.active_tasks_treeview(self.activetree)
        # Workview Tasks
        self.workview_tree = \
            self.req.get_tasks_tree(name='workview', refresh=False)
        self.workview_tree.apply_filter('workview', refresh=False)
        self.vtree_panes['workview'] = \
            self.tv_factory.active_tasks_treeview(self.workview_tree)
        # Closed Tasks
        self.closedtree = \
            self.req.get_tasks_tree(name='closed', refresh=False)
        self.closedtree.apply_filter('closed', refresh=False)
        self.vtree_panes['closed'] = \
            self.tv_factory.closed_tasks_treeview(self.closedtree)
        # YOU CAN DEFINE YOUR INTERNAL MECHANICS VARIABLES BELOW
        # Setup GTG icon theme
        self._init_icon_theme()
        # Tags
        self.tagtree = None
        self.tagtreeview = None
        # Load window tree
        self.builder = Gtk.Builder()
        self.builder.add_from_file(GnomeConfig.BROWSER_UI_FILE)
        # Define aliases for specific widgets
        self._init_widget_aliases()
        # Init non-GtkBuilder widgets
        self._init_ui_widget()
        # Initialize tooltip for GtkEntry button
        self._init_toolbar_tooltips()
        # Initialize "About" dialog
        self._init_about_dialog()
        # Create our dictionary and connect it
        self._init_signal_connections()
        # Define accelerator keys
        self._init_accelerators()
        # Initialize search completion
        self._init_search_completion()
        self.restore_state_from_conf()
        self.on_select_tag()
        self.browser_shown = False
        # Update the title when a task change
        self.activetree.register_cllbck('node-added-inview',
                                        self._update_window_title)
        self.activetree.register_cllbck('node-deleted-inview',
                                        self._update_window_title)
        self._update_window_title()
        # periodic refresh of all three task views
        vmanager.timer.connect('refresh', self.refresh_all_views)
# INIT HELPER FUNCTIONS #######################################################
    def _init_icon_theme(self):
        """
        sets the deafault theme for icon and its directory
        """
        # TODO(izidor): Add icon dirs on app level
        Gtk.IconTheme.get_default().prepend_search_path(ICONS_DIR)
        # TODO(izidor): Set it outside browser as it applies to every window
        Gtk.Window.set_default_icon_name("gtg")
    def _init_widget_aliases(self):
        """
        defines aliases for UI elements found in the glide file

        Every attribute below is a widget loaded by Gtk.Builder from the
        browser .ui file; tagpopup is the one widget built in code.
        """
        self.window = self.builder.get_object("MainWindow")
        self.taskpopup = self.builder.get_object("task_context_menu")
        self.defertopopup = self.builder.get_object("defer_to_context_menu")
        self.ctaskpopup = self.builder.get_object("closed_task_context_menu")
        self.about = self.builder.get_object("about_dialog")
        self.main_pane = self.builder.get_object("main_pane")
        self.workview_pane = self.builder.get_object("workview_pane")
        self.closed_pane = self.builder.get_object("closed_pane")
        self.menu_view_workview = self.builder.get_object("view_workview")
        self.toggle_workview = self.builder.get_object("workview_toggle")
        self.quickadd_entry = self.builder.get_object("quickadd_field")
        self.quickadd_pane = self.builder.get_object("quickadd_pane")
        self.sidebar = self.builder.get_object("sidebar_vbox")
        self.sidebar_container = self.builder.get_object("sidebar-scroll")
        self.sidebar_notebook = self.builder.get_object("sidebar_notebook")
        self.main_notebook = self.builder.get_object("main_notebook")
        self.accessory_notebook = self.builder.get_object("accessory_notebook")
        self.vbox_toolbars = self.builder.get_object("vbox_toolbars")
        # context menu for tags is created programmatically, not in the .ui file
        self.tagpopup = TagContextMenu(self.req, self.vmanager)
    def _init_ui_widget(self):
        """ Sets the main pane with three trees for active tasks,
        actionable tasks (workview), closed tasks and creates
        ModifyTagsDialog & Calendar """
        # Tasks treeviews
        self.main_pane.add(self.vtree_panes['active'])
        self.workview_pane.add(self.vtree_panes['workview'])
        self.closed_pane.add(self.vtree_panes['closed'])
        # dialog used by "modify tags" actions, with tag-name autocompletion
        tag_completion = TagCompletion(self.req.get_tag_tree())
        self.modifytags_dialog = ModifyTagsDialog(tag_completion, self.req)
        # shared calendar popup for picking start/due dates
        self.calendar = GTGCalendar()
        self.calendar.set_transient_for(self.window)
        self.calendar.connect("date-changed", self.on_date_changed)
    def init_tags_sidebar(self):
        """
        initializes the tagtree (left area with tags and searches)

        Built lazily: only called the first time the sidebar is shown.
        Also restores the set of tag nodes that were expanded when the
        application last quit.
        """
        # The tags treeview
        self.tagtree = self.req.get_tag_tree()
        self.tagtreeview = self.tv_factory.tags_treeview(self.tagtree)
        # Tags treeview
        self.tagtreeview.get_selection().connect('changed',
                                                 self.on_select_tag)
        self.tagtreeview.connect('button-press-event',
                                 self.on_tag_treeview_button_press_event)
        self.tagtreeview.connect('key-press-event',
                                 self.on_tag_treeview_key_press_event)
        self.tagtreeview.connect('node-expanded',
                                 self.on_tag_expanded)
        self.tagtreeview.connect('node-collapsed',
                                 self.on_tag_collapsed)
        self.sidebar_container.add(self.tagtreeview)
        for path_t in self.config.get("expanded_tags"):
            # the tuple was stored as a string. we have to reconstruct it
            path = ()
            for p in path_t[1:-1].split(","):
                p = p.strip(" '")
                path += (p, )
            # drop the trailing empty element produced by one-item tuples
            # such as "('@tag',)"
            if path[-1] == '':
                path = path[:-1]
            self.tagtreeview.expand_node(path)
        # expanding search tag does not work automatically, request it
        self.expand_search_tag()
    def _init_toolbar_tooltips(self):
        """
        Sets tooltips for widgets which cannot be setup in .ui yet
        """
        # tooltip on the quick-add entry's secondary (clear) icon
        quick_add_icon_tooltip = GnomeConfig.QUICKADD_ICON_TOOLTIP
        self.quickadd_entry.set_icon_tooltip_text(1, quick_add_icon_tooltip)
    def _init_about_dialog(self):
        """
        Fill the About dialog with metadata from the info module.
        (The dialog is only displayed later, by on_about_clicked.)
        """
        self.about.set_website(info.URL)
        self.about.set_website_label(info.URL)
        self.about.set_version(info.VERSION)
        self.about.set_authors(info.AUTHORS)
        self.about.set_artists(info.ARTISTS)
        self.about.set_documenters(info.DOCUMENTERS)
        self.about.set_translator_credits(info.TRANSLATORS)
    def _init_signal_connections(self):
        """
        connects signals on UI elements

        Wires the Gtk.Builder signal table, window lifecycle, the three
        task treeviews, and backend notification signals.
        """
        # handler table consumed by Gtk.Builder.connect_signals below;
        # keys must match the handler names declared in the .ui file
        SIGNAL_CONNECTIONS_DIC = {
            "on_add_task":
            self.on_add_task,
            "on_edit_active_task":
            self.on_edit_active_task,
            "on_edit_done_task":
            self.on_edit_done_task,
            "on_delete_task":
            self.on_delete_tasks,
            "on_modify_tags":
            self.on_modify_tags,
            "on_mark_as_done":
            self.on_mark_as_done,
            "on_mark_as_started":
            self.on_mark_as_started,
            "on_start_for_tomorrow":
            self.on_start_for_tomorrow,
            "on_start_for_next_week":
            self.on_start_for_next_week,
            "on_start_for_next_month":
            self.on_start_for_next_month,
            "on_start_for_next_year":
            self.on_start_for_next_year,
            "on_start_for_specific_date":
            self.on_start_for_specific_date,
            "on_start_clear":
            self.on_start_clear,
            "on_set_due_today":
            self.on_set_due_today,
            "on_set_due_tomorrow":
            self.on_set_due_tomorrow,
            "on_set_due_next_week":
            self.on_set_due_next_week,
            "on_set_due_next_month":
            self.on_set_due_next_month,
            "on_set_due_next_year":
            self.on_set_due_next_year,
            "on_set_due_soon":
            self.on_set_due_soon,
            "on_set_due_someday":
            self.on_set_due_someday,
            "on_set_due_for_specific_date":
            self.on_set_due_for_specific_date,
            "on_set_due_clear":
            self.on_set_due_clear,
            "on_dismiss_task":
            self.on_dismiss_task,
            "on_move":
            self.on_move,
            "on_size_allocate":
            self.on_size_allocate,
            "gtk_main_quit":
            self.on_close,
            "on_add_subtask":
            self.on_add_subtask,
            "on_tagcontext_deactivate":
            self.on_tagcontext_deactivate,
            "on_view_sidebar_toggled":
            self.on_sidebar_toggled,
            "on_quickadd_field_activate":
            self.on_quickadd_activate,
            "on_quickadd_field_icon_press":
            self.on_quickadd_iconpress,
            "on_quickadd_field_changed":
            self.on_quickadd_changed,
            "on_quickadd_entrycompletion_action_activated":
            self.on_entrycompletion_action_activated,
            "on_view_quickadd_toggled":
            self.on_toggle_quickadd,
            "on_about_clicked":
            self.on_about_clicked,
            "on_about_delete":
            self.on_about_close,
            "on_about_close":
            self.on_about_close,
            "on_documentation_clicked":
            lambda w: openurl(info.HELP_URI),
            "on_translate_clicked":
            lambda w: openurl(info.TRANSLATE_URL),
            "on_report_bug_clicked":
            lambda w: openurl(info.REPORT_BUG_URL),
            "on_preferences_activate":
            self.open_preferences,
            "on_edit_plugins_activate":
            self.open_plugins,
            "on_edit_backends_activate":
            self.open_edit_backends,
            # temporary connections to ensure functionality
            "temporary_search":
            self.temporary_search,
        }
        self.builder.connect_signals(SIGNAL_CONNECTIONS_DIC)
        # When destroying this window, quit GTG
        self.window.connect("destroy", self.quit)
        self.window.connect("delete-event", self.quit)
        # Active tasks TreeView
        self.vtree_panes['active'].connect('row-activated',
                                           self.on_edit_active_task)
        tsk_treeview_btn_press = self.on_task_treeview_button_press_event
        self.vtree_panes['active'].connect('button-press-event',
                                           tsk_treeview_btn_press)
        task_treeview_key_press = self.on_task_treeview_key_press_event
        self.vtree_panes['active'].connect('key-press-event',
                                           task_treeview_key_press)
        self.vtree_panes['active'].connect('node-expanded',
                                           self.on_task_expanded)
        self.vtree_panes['active'].connect('node-collapsed',
                                           self.on_task_collapsed)
        # Workview tasks TreeView
        self.vtree_panes['workview'].connect('row-activated',
                                             self.on_edit_active_task)
        tsk_treeview_btn_press = self.on_task_treeview_button_press_event
        self.vtree_panes['workview'].connect('button-press-event',
                                             tsk_treeview_btn_press)
        task_treeview_key_press = self.on_task_treeview_key_press_event
        self.vtree_panes['workview'].connect('key-press-event',
                                             task_treeview_key_press)
        self.vtree_panes['workview'].connect('node-expanded',
                                             self.on_task_expanded)
        self.vtree_panes['workview'].connect('node-collapsed',
                                             self.on_task_collapsed)
        # the start date column is meaningless in the workview
        self.vtree_panes['workview'].set_col_visible('startdate', False)
        # Closed tasks Treeview
        self.vtree_panes['closed'].connect('row-activated',
                                           self.on_edit_done_task)
        # I did not want to break the variable and there was no other
        # option except this name:(Nimit)
        clsd_tsk_btn_prs = self.on_closed_task_treeview_button_press_event
        self.vtree_panes['closed'].connect('button-press-event',
                                           clsd_tsk_btn_prs)
        clsd_tsk_key_prs = self.on_closed_task_treeview_key_press_event
        self.vtree_panes['closed'].connect('key-press-event',
                                           clsd_tsk_key_prs)
        self.closedtree.apply_filter(self.get_selected_tags()[0], refresh=True)
        # backend status notifications shown via the custom infobar
        b_signals = BackendSignals()
        b_signals.connect(b_signals.BACKEND_FAILED, self.on_backend_failed)
        b_signals.connect(b_signals.BACKEND_STATE_TOGGLED,
                          self.remove_backend_infobar)
        b_signals.connect(b_signals.INTERACTION_REQUESTED,
                          self.on_backend_needing_interaction)
        self.selection = self.vtree_panes['active'].get_selection()
    def _add_accelerator_for_widget(self, agr, name, accel):
        """Bind accelerator *accel* (e.g. '<Control>n') to the 'activate'
        signal of the builder widget called *name*."""
        widget = self.builder.get_object(name)
        key, mod = Gtk.accelerator_parse(accel)
        widget.add_accelerator("activate", agr, key, mod,
                               Gtk.AccelFlags.VISIBLE)
    def _init_accelerators(self):
        """
        initialize gtk accelerators for different interface elements
        """
        agr = Gtk.AccelGroup()
        self.builder.get_object("MainWindow").add_accel_group(agr)
        self._add_accelerator_for_widget(agr, "tags", "F9")
        # self._add_accelerator_for_widget(agr, "file_quit", "<Control>q")
        self._add_accelerator_for_widget(agr, "new_task", "<Control>n")
        self._add_accelerator_for_widget(agr, "tcm_add_subtask",
                                         "<Control><Shift>n")
        self._add_accelerator_for_widget(agr, "tcm_edit", "<Control>e")
        self._add_accelerator_for_widget(agr, "tcm_mark_as_done", "<Control>d")
        self._add_accelerator_for_widget(agr, "tcm_dismiss", "<Control>i")
        self._add_accelerator_for_widget(agr, "tcm_modifytags", "<Control>t")
        # TODO(jakubbrindza): We cannot apply this function to closed_pane
        # widget since it yields the following issue:
        # widget `GtkScrolledWindow' has no activatable signal "activate"
        # without arguments. This will be handled before 0.4
        # release and shortcuts for active/workview and closed will be added.
        # self._add_accelerator_for_widget(agr, "closed_pane", "<Control>F9")
        # self._add_accelerator_for_widget(agr, "help_contents", "F1")
        # Ctrl+L focuses the quick-add entry (uses 'grab-focus', not
        # 'activate', so it cannot go through _add_accelerator_for_widget)
        quickadd_field = self.builder.get_object("quickadd_field")
        key, mod = Gtk.accelerator_parse("<Control>l")
        quickadd_field.add_accelerator("grab-focus", agr, key, mod,
                                       Gtk.AccelFlags.VISIBLE)
# HELPER FUNCTIONS ##########################################################
    def temporary_search(self, widget):
        """Focus the quick-add entry, which doubles as the search box."""
        self.quickadd_entry.grab_focus()
    def open_preferences(self, widget):
        """Delegate opening the preferences dialog to the view manager."""
        self.vmanager.open_preferences(self.config)
    def open_plugins(self, widget):
        """Delegate opening the plugin configuration to the view manager."""
        self.vmanager.configure_plugins()
    def open_edit_backends(self, widget):
        """Delegate opening the backend editor to the view manager."""
        self.vmanager.open_edit_backends()
    def quit(self, widget=None, data=None):
        """Close the browser window (connected to 'destroy'/'delete-event')."""
        self.vmanager.close_browser()
    def on_window_state_event(self, widget, event, data=None):
        """ This event checks for the window state: maximized?
        and stores the state in self.config.max
        This is used to check the window state afterwards
        and maximize it if needed """
        mask = Gdk.WindowState.MAXIMIZED
        # bit-flag test: maximized iff all mask bits are set in the state
        is_maximized = widget.get_window().get_state() & mask == mask
        self.config.set("max", is_maximized)
def restore_collapsed_tasks(self):
for path_s in self.config.get("collapsed_tasks"):
# the tuple was stored as a string. we have to reconstruct it
path = ()
for p in path_s[1:-1].split(","):
p = p.strip(" '")
path += (p, )
if path[-1] == '':
path = path[:-1]
try:
self.vtree_panes['active'].collapse_node(path)
except IndexError:
print("Invalid liblarch path {0}".format(path))
    def restore_state_from_conf(self):
        """Restore window geometry, pane positions, sort order, collapsed
        tasks and previously opened task editors from the configuration."""
        # Extract state from configuration dictionary
        # if "browser" not in self.config:
        #     #necessary to have the minimum width of the tag pane
        #     # inferior to the "first run" width
        #     self.builder.get_object("hpaned1").set_position(250)
        #     return
        width = self.config.get('width')
        height = self.config.get('height')
        if width and height:
            self.window.resize(width, height)
        # checks for maximum size of window
        self.window.connect('window-state-event', self.on_window_state_event)
        if self.config.get("max"):
            self.window.maximize()
        xpos = self.config.get("x_pos")
        ypos = self.config.get("y_pos")
        if ypos and xpos:
            self.window.move(xpos, ypos)
        tag_pane = self.config.get("tag_pane")
        if not tag_pane:
            self.builder.get_object("tags").set_active(False)
            self.sidebar.hide()
        else:
            self.builder.get_object("tags").set_active(True)
            # sidebar widgets are built lazily on first show
            if not self.tagtreeview:
                self.init_tags_sidebar()
            self.sidebar.show()
        sidebar_width = self.config.get("sidebar_width")
        self.builder.get_object("hpaned1").set_position(sidebar_width)
        self.builder.get_object("hpaned1").connect('notify::position',
                                                   self.on_sidebar_width)
        botpos = self.config.get("bottom_pane_position")
        self.builder.get_object("vpaned1").set_position(botpos)
        on_bottom_pan_position = self.on_bottom_pane_position
        self.builder.get_object("vpaned1").connect('notify::position',
                                                   on_bottom_pan_position)
        # Callbacks for sorting and restoring previous state
        model = self.vtree_panes['active'].get_model()
        model.connect('sort-column-changed', self.on_sort_column_changed)
        sort_column = self.config.get('tasklist_sort_column')
        sort_order = self.config.get('tasklist_sort_order')
        if sort_column and sort_order:
            sort_column, sort_order = int(sort_column), int(sort_order)
            model.set_sort_column_id(sort_column, sort_order)
        self.restore_collapsed_tasks()

        def open_task(req, t):
            """ Open the task if loaded. Otherwise ask for next iteration """
            if req.has_task(t):
                self.vmanager.open_task(t)
                return False
            else:
                # returning True keeps the idle callback scheduled until
                # the task becomes available
                return True
        for t in self.config.get("opened_tasks"):
            GObject.idle_add(open_task, self.req, t)
def refresh_all_views(self, timer):
active_tree = self.req.get_tasks_tree(name='active', refresh=False)
active_tree.refresh_all()
workview_tree = self.req.get_tasks_tree(name='workview', refresh=False)
workview_tree.refresh_all()
closed_tree = self.req.get_tasks_tree(name='closed', refresh=False)
closed_tree.refresh_all()
def _update_window_title(self, nid=None, path=None, state_id=None):
count = self.activetree.get_n_nodes()
# Set the title of the window:
parenthesis = ""
if count == 0:
parenthesis = _("no active tasks")
else:
parenthesis = ngettext("%(tasks)d active task",
"%(tasks)d active tasks",
count) % {'tasks': count}
self.window.set_title("%s - " % parenthesis + info.NAME)
# SIGNAL CALLBACKS ############################################################
# Typically, reaction to user input & interactions with the GUI
def on_sort_column_changed(self, model):
sort_column, sort_order = model.get_sort_column_id()
if sort_order == Gtk.SortType.ASCENDING:
sort_order = 0
else:
sort_order = 1
self.config.set('tasklist_sort_column', sort_column)
self.config.set('tasklist_sort_order', sort_order)
    def on_move(self, widget=None, data=None):
        """Persist the window position whenever the window is moved."""
        xpos, ypos = self.window.get_position()
        self.config.set('x_pos', xpos)
        self.config.set('y_pos', ypos)
    def on_size_allocate(self, widget=None, data=None):
        """Persist the window size whenever the window is resized."""
        width, height = self.window.get_size()
        self.config.set('width', width)
        self.config.set('height', height)
    def on_bottom_pane_position(self, widget, data=None):
        """Persist the divider position of the bottom (vertical) pane."""
        self.config.set('bottom_pane_position', widget.get_position())
    def on_sidebar_width(self, widget, data=None):
        """Persist the sidebar divider position."""
        self.config.set('sidebar_width', widget.get_position())
    def on_about_clicked(self, widget):
        """
        show the about dialog
        """
        self.about.show()
    def on_about_close(self, widget, response):
        """
        close the about dialog
        """
        self.about.hide()
        # True stops the default 'delete-event' handler from destroying
        # the dialog, so it can be shown again later
        return True
    def on_tagcontext_deactivate(self, menushell):
        """Restore the tag-tree cursor once the tag context menu closes."""
        self.reset_cursor()
    def on_sidebar_toggled(self, widget):
        """Show/hide the tag sidebar, keeping the menu item and the
        persisted 'tag_pane' setting in sync."""
        tags = self.builder.get_object("tags")
        if self.sidebar.get_property("visible"):
            tags.set_active(False)
            self.config.set("tag_pane", False)
            self.sidebar.hide()
        else:
            tags.set_active(True)
            # sidebar widgets are built lazily on first show
            if not self.tagtreeview:
                self.init_tags_sidebar()
            self.sidebar.show()
            self.config.set("tag_pane", True)
def on_toggle_quickadd(self, widget):
if widget.get_active():
self.quickadd_pane.show()
self.config.set('quick_add', True)
else:
self.quickadd_pane.hide()
self.config.set('quick_add', False)
    def _expand_not_collapsed(self, model, path, iter, colt):
        """ Expand all not collapsed nodes

        Workaround around bug in Gtk, see LP #1076909

        :param colt: list of collapsed-task identifiers, each the str() of
                     a tuple of task ids (same format as produced below)
        """
        # Generate tid from treeview
        tid_build = []
        current_iter = iter
        # walk up to the root, collecting task ids from leaf to root
        while current_iter is not None:
            tid = str(model.get_value(current_iter, 0))
            tid_build.insert(0, tid)
            current_iter = model.iter_parent(current_iter)
        # serialize the same way collapsed paths are stored in the config
        tid = str(tuple(tid_build))
        # expand if the node was not stored as collapsed
        if tid not in colt:
            self.vtree_panes['active'].expand_row(path, False)
    def on_task_expanded(self, sender, tid):
        """Forget *tid*'s collapsed state and re-expand its visible subtree."""
        colt = self.config.get("collapsed_tasks")
        if tid in colt:
            colt.remove(tid)
        # restore expanded state of subnodes
        self.vtree_panes['active'].get_model().foreach(
            self._expand_not_collapsed, colt)
        self.config.set("collapsed_tasks", colt)
def on_task_collapsed(self, sender, tid):
colt = self.config.get("collapsed_tasks")
if tid not in colt:
colt.append(str(tid))
self.config.set("collapsed_tasks", colt)
def on_tag_expanded(self, sender, tag):
colt = self.config.get("expanded_tags")
# Directly appending tag to colt causes GTG to forget the state of
# sub-tags (expanded/collapsed) in specific scenarios. Below is an
# updated way which checks if any child of the tag is in colt or not
# If yes, then the tag is inserted before the first child.
# If no, it's appended to colt
if tag not in colt:
tag_has_been_inserted = False
for index, colt_tag in enumerate(colt):
if tag[1:-1] in colt_tag:
colt.insert(index, tag)
tag_has_been_inserted = True
break
if not tag_has_been_inserted:
colt.append(tag)
self.config.set("expanded_tags", colt)
    def on_tag_collapsed(self, sender, tag):
        """Forget the expanded state of *tag* and of all of its sub-tags."""
        colt = self.config.get("expanded_tags")
        # When a tag is collapsed, we should also remove it's children
        # from colt, otherwise when parent tag is expanded, they also get
        # expanded (unwanted situation)
        colt = [colt_tag for colt_tag in colt if tag[1:-1] not in colt_tag]
        self.config.set("expanded_tags", colt)
    def on_quickadd_activate(self, widget):
        """ Add a new task from quickadd toolbar

        With text in the entry: create a task (tagged with the currently
        selected tags) and select it in the browser. With an empty entry:
        open the currently selected task(s) instead.
        """
        text = str(self.quickadd_entry.get_text())
        text = text.strip()
        if text:
            tags = self.get_selected_tags(nospecial=True)
            # We will select quick-added task in browser.
            # This has proven to be quite complex and deserves an explanation.
            # We register a callback on the sorted treemodel that we're
            # displaying, which is a TreeModelSort. When a row gets added,
            # we're notified of it.
            # We have to verify that that row belongs to the task we should
            # select. So, we have to wait for the task to be created, and then
            # wait for its tid to show up (invernizzi)

            def select_next_added_task_in_browser(treemodelsort, path,
                                                  iter, self):
                # copy() is required because boxed structures are not copied
                # when passed in a callback without transfer
                # See https://bugzilla.gnome.org/show_bug.cgi?id=722899
                iter = iter.copy()

                def selecter(treemodelsort, path, iter, self):
                    # block until the new task's tid has been recorded below
                    self.__last_quick_added_tid_event.wait()
                    treeview = self.vtree_panes['active']
                    treemodelsort.disconnect(
                        self.__quick_add_select_handle)
                    selection = treeview.get_selection()
                    selection.unselect_all()
                    # Since we use iter for selection,
                    # the task selected is bound to be correct
                    selection.select_iter(iter)
                # It cannot be another thread than the main gtk thread !
                GObject.idle_add(selecter, treemodelsort, path, iter, self)
            # event that is set when the new task is created
            self.__last_quick_added_tid_event = threading.Event()
            self.__quick_add_select_handle = \
                self.vtree_panes['active'].get_model().connect(
                    "row-inserted", select_next_added_task_in_browser,
                    self)
            task = self.req.new_task(newtask=True)
            self.__last_quick_added_tid = task.get_id()
            self.__last_quick_added_tid_event.set()
            task.set_complex_title(text, tags=tags)
            self.quickadd_entry.set_text('')
            # signal the event for the plugins to catch
            GObject.idle_add(self.emit, "task-added-via-quick-add",
                             task.get_id())
        else:
            # if no text is selected, we open the currently selected task
            nids = self.vtree_panes['active'].get_selected_nodes()
            for nid in nids:
                self.vmanager.open_task(nid)
    def on_quickadd_iconpress(self, widget, icon, event):
        """ Clear the text in quickadd field by clicking on 'clear' icon """
        # only react to the secondary (right-hand, "clear") icon
        if icon == Gtk.EntryIconPosition.SECONDARY:
            self.quickadd_entry.set_text('')
    def on_tag_treeview_button_press_event(self, treeview, event):
        """
        Handle mouse clicks on the tag tree (left sidebar).

        A right click (button 3) moves the cursor to the clicked tag and
        pops up its context menu; the previous cursor position is saved so
        reset_cursor() can restore it once the menu action is done.
        """
        Log.debug("Received button event #%d at %d, %d" % (
            event.button, event.x, event.y))
        if event.button == 3:
            x = int(event.x)
            y = int(event.y)
            time = event.time
            pthinfo = treeview.get_path_at_pos(x, y)
            if pthinfo is not None:
                path, col, cellx, celly = pthinfo
                treeview.grab_focus()
                # The location we want the cursor to return to
                # after we're done.
                self.previous_cursor = treeview.get_cursor()
                # For use in is_task_visible
                self.previous_tag = self.get_selected_tags()
                # Let's us know that we're working on a tag.
                self.tag_active = True
                # This location is stored in case we need to work with it
                # later on (see set_target_cursor()).
                self.target_cursor = path, col
                treeview.set_cursor(path, col, 0)
                # the nospecial=True disables right clicking for special tags
                selected_tags = self.get_selected_tags(nospecial=True)
                selected_search = self.get_selected_search()
                # popup menu for searches
                # FIXME those two branches could be simplified
                # (there is no difference between search and normal tag)
                if selected_search is not None:
                    my_tag = self.req.get_tag(selected_search)
                    self.tagpopup.set_tag(my_tag)
                    self.tagpopup.popup(None, None, None, None, event.button,
                                        time)
                elif len(selected_tags) > 0:
                    # Then we are looking at single, normal tag rather than
                    # the special 'All tags' or 'Tasks without tags'. We only
                    # want to popup the menu for normal tags.
                    my_tag = self.req.get_tag(selected_tags[0])
                    self.tagpopup.set_tag(my_tag)
                    self.tagpopup.popup(None, None, None, None, event.button,
                                        time)
                else:
                    # Special tag clicked: no menu, restore the old cursor
                    self.reset_cursor()
            return True
def on_tag_treeview_key_press_event(self, treeview, event):
keyname = Gdk.keyval_name(event.keyval)
is_shift_f10 = (keyname == "F10" and
event.get_state() & Gdk.ModifierType.SHIFT_MASK)
if is_shift_f10 or keyname == "Menu":
selected_tags = self.get_selected_tags(nospecial=True)
selected_search = self.get_selected_search()
# FIXME thos two branches could be simplified (there is
# no difference betweenn search and normal tag
# popup menu for searches
if selected_search is not None:
self.tagpopup.set_tag(selected_search)
self.tagpopup.popup(None, None, None, None, 0, event.time)
elif len(selected_tags) > 0:
# Then we are looking at single, normal tag rather than
# the special 'All tags' or 'Tasks without tags'. We only
# want to popup the menu for normal tags.
selected_tag = self.req.get_tag(selected_tags[0])
self.tagpopup.set_tag(selected_tag)
self.tagpopup.popup(None, None, None, None, 0, event.time)
else:
self.reset_cursor()
return True
    def on_task_treeview_button_press_event(self, treeview, event):
        """ Pop up context menu on right mouse click in the main
        task tree view """
        Log.debug("Received button event #%d at %d,%d" % (
            event.button, event.x, event.y))
        if event.button == 3:
            x = int(event.x)
            y = int(event.y)
            time = event.time
            pthinfo = treeview.get_path_at_pos(x, y)
            if pthinfo is not None:
                path, col, cellx, celly = pthinfo
                selection = treeview.get_selection()
                # Keep an existing multi-selection when the clicked row is
                # part of it; otherwise move the cursor to the clicked row.
                if selection.count_selected_rows() > 0:
                    if not selection.path_is_selected(path):
                        treeview.set_cursor(path, col, 0)
                else:
                    treeview.set_cursor(path, col, 0)
                treeview.grab_focus()
                self.taskpopup.popup(None, None, None, None, event.button,
                                     time)
            return True
def on_task_treeview_key_press_event(self, treeview, event):
keyname = Gdk.keyval_name(event.keyval)
is_shift_f10 = (keyname == "F10" and
event.get_state() & Gdk.ModifierType.SHIFT_MASK)
if keyname == "Delete":
self.on_delete_tasks()
return True
elif is_shift_f10 or keyname == "Menu":
self.taskpopup.popup(None, None, None, None, 0, event.time)
return True
    def on_closed_task_treeview_button_press_event(self, treeview, event):
        """Pop up the context menu on right mouse click in the closed
        tasks tree view."""
        if event.button == 3:
            x = int(event.x)
            y = int(event.y)
            time = event.time
            pthinfo = treeview.get_path_at_pos(x, y)
            if pthinfo is not None:
                path, col, cellx, celly = pthinfo
                # Focus and move the cursor to the clicked row first
                treeview.grab_focus()
                treeview.set_cursor(path, col, 0)
                self.ctaskpopup.popup(None, None, None, None, event.button,
                                      time)
            return True
def on_closed_task_treeview_key_press_event(self, treeview, event):
keyname = Gdk.keyval_name(event.keyval)
is_shift_f10 = (keyname == "F10" and
event.get_state() & Gdk.ModifierType.SHIFT_MASK)
if keyname == "Delete":
self.on_delete_tasks()
return True
elif is_shift_f10 or keyname == "Menu":
self.ctaskpopup.popup(None, None, None, None, 0, event.time)
return True
def on_add_task(self, widget):
tags = [tag for tag in self.get_selected_tags() if tag.startswith('@')]
task = self.req.new_task(tags=tags, newtask=True)
uid = task.get_id()
self.vmanager.open_task(uid, thisisnew=True)
def on_add_subtask(self, widget):
uid = self.get_selected_task()
if uid:
zetask = self.req.get_task(uid)
tags = [t.get_name() for t in zetask.get_tags()]
task = self.req.new_task(tags=tags, newtask=True)
# task.add_parent(uid)
zetask.add_child(task.get_id())
self.vmanager.open_task(task.get_id(), thisisnew=True)
def on_edit_active_task(self, widget, row=None, col=None):
tid = self.get_selected_task()
if tid:
self.vmanager.open_task(tid)
def on_edit_done_task(self, widget, row=None, col=None):
tid = self.get_selected_task('closed')
if tid:
self.vmanager.open_task(tid)
def on_delete_tasks(self, widget=None, tid=None):
# If we don't have a parameter, then take the selection in the
# treeview
if not tid:
# tid_to_delete is a [project, task] tuple
tids_todelete = self.get_selected_tasks()
if not tids_todelete:
return
else:
tids_todelete = [tid]
Log.debug("going to delete %s" % tids_todelete)
self.vmanager.ask_delete_tasks(tids_todelete)
def update_start_date(self, widget, new_start_date):
tasks = [self.req.get_task(uid)
for uid in self.get_selected_tasks()
if uid is not None]
start_date = Date.parse(new_start_date)
# FIXME:If the task dialog is displayed, refresh its start_date widget
for task in tasks:
task.set_start_date(start_date)
    def on_mark_as_started(self, widget):
        """Set the start date of the selected tasks to today."""
        self.update_start_date(widget, "today")
    def on_start_for_tomorrow(self, widget):
        """Set the start date of the selected tasks to tomorrow."""
        self.update_start_date(widget, "tomorrow")
    def on_start_for_next_week(self, widget):
        """Set the start date of the selected tasks to next week."""
        self.update_start_date(widget, "next week")
    def on_start_for_next_month(self, widget):
        """Set the start date of the selected tasks to next month."""
        self.update_start_date(widget, "next month")
    def on_start_for_next_year(self, widget):
        """Set the start date of the selected tasks to next year."""
        self.update_start_date(widget, "next year")
    def on_start_clear(self, widget):
        """Clear the start date of the selected tasks."""
        self.update_start_date(widget, None)
def update_due_date(self, widget, new_due_date):
tasks = [self.req.get_task(uid)
for uid in self.get_selected_tasks()
if uid is not None]
due_date = Date.parse(new_due_date)
# FIXME: If the task dialog is displayed, refresh its due_date widget
for task in tasks:
task.set_due_date(due_date)
    def on_set_due_today(self, widget):
        """Set the due date of the selected tasks to today."""
        self.update_due_date(widget, "today")
    def on_set_due_tomorrow(self, widget):
        """Set the due date of the selected tasks to tomorrow."""
        self.update_due_date(widget, "tomorrow")
    def on_set_due_next_week(self, widget):
        """Set the due date of the selected tasks to next week."""
        self.update_due_date(widget, "next week")
    def on_set_due_next_month(self, widget):
        """Set the due date of the selected tasks to next month."""
        self.update_due_date(widget, "next month")
    def on_set_due_next_year(self, widget):
        """Set the due date of the selected tasks to next year."""
        self.update_due_date(widget, "next year")
    def on_set_due_soon(self, widget):
        """Set the fuzzy due date 'soon' on the selected tasks."""
        self.update_due_date(widget, "soon")
    def on_set_due_someday(self, widget):
        """Set the fuzzy due date 'someday' on the selected tasks."""
        self.update_due_date(widget, "someday")
    def on_set_due_clear(self, widget):
        """Clear the due date of the selected tasks."""
        self.update_due_date(widget, None)
    def on_start_for_specific_date(self, widget):
        """ Display Calendar to set start date of selected tasks """
        self.calendar.set_title("Set Start Date")
        # Preload the calendar with the first selected task's start date
        task = self.req.get_task(self.get_selected_tasks()[0])
        date = task.get_start_date()
        self.calendar.set_date(date, GTGCalendar.DATE_KIND_START)
        # Shows the calendar just above the mouse on widget's line of symmetry
        rect = widget.get_allocation()
        result, x, y = widget.get_window().get_origin()
        self.calendar.show_at_position(x + rect.x + rect.width,
                                       y + rect.y)
    def on_set_due_for_specific_date(self, widget):
        """ Display Calendar to set due date of selected tasks """
        self.calendar.set_title("Set Due Date")
        # Preload the calendar with the first selected task's due date,
        # falling back to its start date when no due date is set yet.
        task = self.req.get_task(self.get_selected_tasks()[0])
        if not task.get_due_date():
            date = task.get_start_date()
        else:
            date = task.get_due_date()
        self.calendar.set_date(date, GTGCalendar.DATE_KIND_DUE)
        # Shows the calendar just above the mouse on widget's line of symmetry
        rect = widget.get_allocation()
        result, x, y = widget.get_window().get_origin()
        self.calendar.show_at_position(x + rect.x + rect.width,
                                       y + rect.y)
def on_date_changed(self, calendar):
# Get tasks' list from task names' list
tasks = [self.req.get_task(task) for task in self.get_selected_tasks()]
date, date_kind = calendar.get_selected_date()
if date_kind == GTGCalendar.DATE_KIND_DUE:
for task in tasks:
task.set_due_date(date)
elif date_kind == GTGCalendar.DATE_KIND_START:
for task in tasks:
task.set_start_date(date)
def on_modify_tags(self, widget):
""" Run Modify Tags dialog on selected tasks """
tasks = self.get_selected_tasks()
self.modifytags_dialog.modify_tags(tasks)
def close_all_task_editors(self, task_id):
""" Including editors of subtasks """
all_subtasks = []
def trace_subtasks(root):
all_subtasks.append(root)
for i in root.get_subtasks():
if i not in all_subtasks:
trace_subtasks(i)
trace_subtasks(self.req.get_task(task_id))
for task in all_subtasks:
self.vmanager.close_task(task.get_id())
def on_mark_as_done(self, widget):
tasks_uid = [uid for uid in self.get_selected_tasks()
if uid is not None]
if len(tasks_uid) == 0:
return
tasks = [self.req.get_task(uid) for uid in tasks_uid]
tasks_status = [task.get_status() for task in tasks]
for uid, task, status in zip(tasks_uid, tasks, tasks_status):
if status == Task.STA_DONE:
# Marking as undone
task.set_status(Task.STA_ACTIVE)
# Parents of that task must be updated - not to be shown
# in workview, update children count, etc.
for parent_id in task.get_parents():
parent = self.req.get_task(parent_id)
parent.modified()
else:
task.set_status(Task.STA_DONE)
self.close_all_task_editors(uid)
def on_dismiss_task(self, widget):
tasks_uid = [uid for uid in self.get_selected_tasks()
if uid is not None]
if len(tasks_uid) == 0:
return
tasks = [self.req.get_task(uid) for uid in tasks_uid]
tasks_status = [task.get_status() for task in tasks]
for uid, task, status in zip(tasks_uid, tasks, tasks_status):
if status == Task.STA_DISMISSED:
task.set_status(Task.STA_ACTIVE)
else:
task.set_status(Task.STA_DISMISSED)
self.close_all_task_editors(uid)
def apply_filter_on_panes(self, filter_name, refresh=True):
""" Apply filters for every pane: active tasks, closed tasks """
# Reset quickadd_entry if another filter is applied
self.quickadd_entry.set_text("")
for pane in self.vtree_panes:
vtree = self.req.get_tasks_tree(name=pane, refresh=False)
vtree.apply_filter(filter_name, refresh=refresh)
    def unapply_filter_on_panes(self, filter_name, refresh=True):
        """ Remove the named filter from every pane: active tasks,
        closed tasks """
        for pane in self.vtree_panes:
            vtree = self.req.get_tasks_tree(name=pane, refresh=False)
            vtree.unapply_filter(filter_name, refresh=refresh)
    def on_select_tag(self, widget=None, row=None, col=None):
        """
        Callback for when selecting an element of the tagtree (left
        sidebar): un-applies the filters of tags that left the selection
        and applies the filters of newly selected tags.
        """
        # FIXME add support for multiple selection of tags in future
        # When you click on a tag, you want to unselect the tasks
        new_taglist = self.get_selected_tags()
        for tagname in self.applied_tags:
            if tagname not in new_taglist:
                self.unapply_filter_on_panes(tagname, refresh=False)
        for tagname in new_taglist:
            if tagname not in self.applied_tags:
                self.apply_filter_on_panes(tagname)
                # In case of search tag, set query in quickadd for
                # refining search query
                tag = self.req.get_tag(tagname)
                if tag.is_search_tag():
                    self.quickadd_entry.set_text(tag.get_attribute("query"))
        self.applied_tags = new_taglist
def on_close(self, widget=None):
"""Closing the window."""
# Saving is now done in main.py
self.quit()
# PUBLIC METHODS ###########################################################
def get_selected_task(self, tv=None):
"""
Returns the'uid' of the selected task, if any.
If multiple tasks are selected, returns only the first and
takes care of selecting only that (unselecting the others)
@param tv: The tree view to find the selected task in. Defaults to
the task_tview.
"""
ids = self.get_selected_tasks(tv)
if len(ids) > 0:
# FIXME: we should also unselect all the others
return ids[0]
else:
return None
def get_selected_tasks(self, tv=None):
"""
Returns a list of 'uids' of the selected tasks, and the corresponding
iters
@param tv: The tree view to find the selected task in. Defaults to
the task_tview.
"""
# FIXME Why we have active as back case? is that so? Study this code
selected = []
if tv:
selected = self.vtree_panes[tv].get_selected_nodes()
else:
if 'active' in self.vtree_panes:
selected = self.vtree_panes['active'].get_selected_nodes()
for i in self.vtree_panes:
if len(selected) == 0:
selected = self.vtree_panes[i].get_selected_nodes()
return selected
# If nospecial=True, only normal @tag are considered
def get_selected_tags(self, nospecial=False):
"""
Returns the selected nodes from the tagtree
@param nospecial: doesn't return tags that do not stat with
"""
taglist = []
if self.tagtreeview:
taglist = self.tagtreeview.get_selected_nodes()
# If no selection, we display all
if not nospecial and (not taglist or len(taglist) < 0):
taglist = ['gtg-tags-all']
if nospecial:
for t in list(taglist):
if not t.startswith('@'):
taglist.remove(t)
return taglist
def reset_cursor(self):
""" Returns the cursor to the tag that was selected prior
to any right click action. Should be used whenever we're done
working with any tag through a right click menu action.
"""
if self.tag_active:
self.tag_active = False
path, col = self.previous_cursor
if self.tagtreeview:
self.tagtreeview.set_cursor(path, col, 0)
def set_target_cursor(self):
""" Selects the last tag to be right clicked.
We need this because the context menu will deactivate
(and in turn, call reset_cursor()) before, for example, the color
picker dialog begins. Should be used at the beginning of any tag
editing function to remind the user which tag they're working with.
"""
if not self.tag_active:
self.tag_active = True
path, col = self.target_cursor
if self.tagtreeview:
self.tagtreeview.set_cursor(path, col, 0)
def add_page_to_sidebar_notebook(self, icon, page):
"""Adds a new page tab to the left panel. The tab will
be added as the last tab. Also causes the tabs to be
shown if they're not.
@param icon: a Gtk.Image picture to display on the tab
@param page: Gtk.Frame-based panel to be added
"""
return self._add_page(self.sidebar_notebook, icon, page)
def add_page_to_main_notebook(self, title, page):
"""Adds a new page tab to the top right main panel. The tab
will be added as the last tab. Also causes the tabs to be
shown.
@param title: Short text to use for the tab label
@param page: Gtk.Frame-based panel to be added
"""
return self._add_page(self.main_notebook, Gtk.Label(label=title), page)
def remove_page_from_sidebar_notebook(self, page):
"""Removes a new page tab from the left panel. If this leaves
only one tab in the notebook, the tab selector will be hidden.
@param page: Gtk.Frame-based panel to be removed
"""
return self._remove_page(self.sidebar_notebook, page)
def remove_page_from_main_notebook(self, page):
"""Removes a new page tab from the top right main panel. If
this leaves only one tab in the notebook, the tab selector will
be hidden.
@param page: Gtk.Frame-based panel to be removed
"""
return self._remove_page(self.main_notebook, page)
    def hide(self):
        """ Hides the task browser """
        self.browser_shown = False
        self.window.hide()
        GObject.idle_add(self.emit, "visibility-toggled")
    def show(self):
        """ Unhides the TaskBrowser """
        self.browser_shown = True
        # redraws the GDK window, bringing it to front
        self.window.show()
        self.window.present()
        self.window.grab_focus()
        self.quickadd_entry.grab_focus()
        GObject.idle_add(self.emit, "visibility-toggled")
    def iconify(self):
        """ Minimizes the TaskBrowser """
        self.window.iconify()
    def is_visible(self):
        """ Returns true if window is shown or false if hidden. """
        return self.window.get_property("visible")
    def is_active(self):
        """ Returns true if window is the currently active window """
        return self.window.get_property("is-active")
    def get_builder(self):
        """Return the builder object this window was constructed from."""
        return self.builder
    def get_window(self):
        """Return the underlying window widget."""
        return self.window
    def is_shown(self):
        """Return True when the browser is shown (see show()/hide())."""
        return self.browser_shown
# BACKENDS RELATED METHODS ###################################################
def on_backend_failed(self, sender, backend_id, error_code):
"""
Signal callback.
When a backend fails to work, loads a Gtk.Infobar to alert the user
@param sender: not used, only here for signal compatibility
@param backend_id: the id of the failing backend
@param error_code: a backend error code, as specified
in BackendsSignals
"""
infobar = self._new_infobar(backend_id)
infobar.set_error_code(error_code)
def on_backend_needing_interaction(self, sender, backend_id, description,
interaction_type, callback):
'''
Signal callback.
When a backend needs some kind of feedback from the user,
loads a Gtk.Infobar to alert the user.
This is used, for example, to request confirmation after authenticating
via OAuth.
@param sender: not used, only here for signal compatibility
@param backend_id: the id of the failing backend
@param description: a string describing the interaction needed
@param interaction_type: a string describing the type of interaction
(yes/no, only confirm, ok/cancel...)
@param callback: the function to call when the user provides the
feedback
'''
infobar = self._new_infobar(backend_id)
infobar.set_interaction_request(description, interaction_type,
callback)
def __remove_backend_infobar(self, child, backend_id):
'''
Helper function to remove an Gtk.Infobar related to a backend
@param child: a Gtk.Infobar
@param backend_id: the id of the backend which Gtk.Infobar should be
removed.
'''
if isinstance(child, CustomInfoBar) and\
child.get_backend_id() == backend_id:
if self.vbox_toolbars:
self.vbox_toolbars.remove(child)
def remove_backend_infobar(self, sender, backend_id):
'''
Signal callback.
Deletes the Gtk.Infobars related to a backend
@param sender: not used, only here for signal compatibility
@param backend_id: the id of the backend which Gtk.Infobar should be
removed.
'''
backend = self.req.get_backend(backend_id)
if not backend or (backend and backend.is_enabled()):
# remove old infobar related to backend_id, if any
if self.vbox_toolbars:
self.vbox_toolbars.foreach(self.__remove_backend_infobar,
backend_id)
def _new_infobar(self, backend_id):
'''
Helper function to create a new infobar for a backend
@param backend_id: the backend for which we're creating the infobar
@returns Gtk.Infobar: the created infobar
'''
# remove old infobar related to backend_id, if any
if not self.vbox_toolbars:
return
self.vbox_toolbars.foreach(self.__remove_backend_infobar, backend_id)
# add a new one
infobar = CustomInfoBar(self.req, self, self.vmanager, backend_id)
self.vbox_toolbars.pack_start(infobar, True, True, 0)
return infobar
# SEARCH RELATED STUFF ########################################################
def get_selected_search(self):
""" return just one selected view """
if self.tagtreeview:
tags = self.tagtreeview.get_selected_nodes()
if len(tags) > 0:
tag = self.tagtree.get_node(tags[0])
if tag.is_search_tag():
return tags[0]
return None
def _init_search_completion(self):
""" Initialize search completion """
self.search_completion = self.builder.get_object(
"quickadd_entrycompletion")
self.quickadd_entry.set_completion(self.search_completion)
self.search_possible_actions = {
'add': _("Add Task"),
'open': _("Open Task"),
'search': _("Search"),
}
self.search_actions = []
self.search_complete_store = Gtk.ListStore(str)
for tagname in self.req.get_all_tags():
# only for regular tags
if tagname.startswith("@"):
self.search_complete_store.append([tagname])
for command in SEARCH_COMMANDS:
self.search_complete_store.append([command])
self.search_completion.set_model(self.search_complete_store)
self.search_completion.set_text_column(0)
def on_quickadd_changed(self, editable):
""" Decide which actions are allowed with the current query """
# delete old actions
for i in range(len(self.search_actions)):
self.search_completion.delete_action(0)
self.search_actions = []
new_actions = []
query = self.quickadd_entry.get_text()
query = query.strip()
# If the tag pane is hidden, reset search filter when query is empty
if query == '' and not self.config.get("tag_pane"):
tree = self.req.get_tasks_tree(refresh=False)
filters = tree.list_applied_filters()
for tag_id in self.req.get_all_tags():
tag = self.req.get_tag(tag_id)
if tag.is_search_tag() and tag_id in filters:
self.req.remove_tag(tag_id)
self.apply_filter_on_panes(ALLTASKS_TAG)
return
if query:
if self.req.get_task_id(query) is not None:
new_actions.append('open')
else:
new_actions.append('add')
# Is query parsable?
try:
parse_search_query(query)
new_actions.append('search')
except InvalidQuery:
pass
# Add new order of actions
for aid, name in enumerate(new_actions):
action = self.search_possible_actions[name]
self.search_completion.insert_action_markup(aid, action)
self.search_actions.append(name)
else:
tree = self.req.get_tasks_tree(refresh=False)
filters = tree.list_applied_filters()
for tag_id in self.req.get_all_tags():
tag = self.req.get_tag(tag_id)
if tag.is_search_tag() and tag_id in filters:
self.req.remove_tag(tag_id)
self.apply_filter_on_panes(ALLTASKS_TAG)
def expand_search_tag(self):
""" For some unknown reason, search tag is not expanded correctly and
it must be done manually """
if self.tagtreeview is not None:
model = self.tagtreeview.get_model()
search_iter = model.my_get_iter((SEARCH_TAG, ))
search_path = model.get_path(search_iter)
self.tagtreeview.expand_row(search_path, False)
    def on_entrycompletion_action_activated(self, completion, index):
        """ Executes the chosen completion action of the quickadd toolbar:
        'add' creates a task, 'open' opens the named task, 'search'
        creates (or reuses) a search tag for the query and applies it. """
        action = self.search_actions[index]
        if action == 'add':
            self.on_quickadd_activate(None)
        elif action == 'open':
            task_title = self.quickadd_entry.get_text()
            task_id = self.req.get_task_id(task_title)
            self.vmanager.open_task(task_id)
            self.quickadd_entry.set_text('')
        elif action == 'search':
            query = self.quickadd_entry.get_text()
            # ! at the beginning is reserved keyword for liblarch
            if query.startswith('!'):
                label = '_' + query
            else:
                label = query
            # find possible name collisions: keep appending a counter until
            # the name is free or an identical search tag already exists
            name, number = label, 1
            already_search = False
            while True:
                tag = self.req.get_tag(name)
                if tag is None:
                    break
                if tag.is_search_tag() and tag.get_attribute("query") == query:
                    already_search = True
                    break
                # this name is used, adding number
                number += 1
                name = label + ' ' + str(number)
            if not already_search:
                tag = self.req.new_search_tag(name, query)
            # Apply new search right now
            if self.tagtreeview is not None:
                # Select new search in tagsidebar and apply it
                # Make sure search tag parent is expanded
                # (otherwise selection does not work)
                self.expand_search_tag()
                # Get iterator for new search tag
                model = self.tagtreeview.get_model()
                path = self.tagtree.get_paths_for_node(tag.get_id())[0]
                tag_iter = model.my_get_iter(path)
                # Select only it and apply filters on top of that
                selection = self.tagtreeview.get_selection()
                selection.unselect_all()
                selection.select_iter(tag_iter)
                self.on_select_tag()
            else:
                self.apply_filter_on_panes(name)
|
jakubbrindza/gtg
|
GTG/gtk/browser/browser.py
|
Python
|
gpl-3.0
| 61,395 | 0.000016 |
#-*- coding: utf-8 -*-
###########################################################################
## ##
## Copyrights Frederic Rodrigo 2011 ##
## ##
## This program is free software: you can redistribute it and/or modify ##
## it under the terms of the GNU General Public License as published by ##
## the Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ##
## GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with this program. If not, see <http://www.gnu.org/licenses/>. ##
## ##
###########################################################################
from .Analyser import Analyser
import os
import psycopg2
import psycopg2.extensions
import re
from modules import DictCursorUnicode
from collections import defaultdict
from inspect import getframeinfo, stack
class Analyser_Osmosis(Analyser):
    """Base class for analysers that run SQL checks against an osmosis
    PostgreSQL snapshot of OSM data.

    Subclasses declare their error classes in self.classs (full-data
    checks) and self.classs_change (diff-mode checks) and implement
    analyser_osmosis_common(), analyser_osmosis_full() and/or
    analyser_osmosis_diff().
    """
    # SQL template building a `{0}.highways` helper table: one row per
    # highway way, with the raw linestring plus a copy reprojected to
    # SRID {1}, precomputed boolean flags (link, roundabout, oneway,
    # area, construction) and a numeric importance level derived from
    # the highway tag.
    sql_create_highways = """
CREATE UNLOGGED TABLE {0}.highways AS
SELECT
    id,
    nodes,
    tags,
    tags->'highway' AS highway,
    linestring,
    ST_Transform(linestring, {1}) AS linestring_proj,
    is_polygon,
    tags->'highway' LIKE '%_link' AS is_link,
    (tags?'junction' AND tags->'junction' = 'roundabout') AS is_roundabout,
    (tags?'oneway' AND tags->'oneway' IN ('yes', 'true', '1', '-1')) AS is_oneway,
    (tags?'area' AND tags->'area' != 'no') AS is_area,
    tags->'highway' IN ('planned', 'proposed', 'construction') AS is_construction,
    CASE tags->'highway'
        WHEN 'motorway' THEN 1
        WHEN 'primary' THEN 1
        WHEN 'trunk' THEN 1
        WHEN 'motorway_link' THEN 2
        WHEN 'primary_link' THEN 2
        WHEN 'trunk_link' THEN 2
        WHEN 'secondary' THEN 2
        WHEN 'secondary_link' THEN 2
        WHEN 'tertiary' THEN 3
        WHEN 'tertiary_link' THEN 3
        WHEN 'unclassified' THEN 4
        WHEN 'unclassified_link' THEN 4
        WHEN 'residential' THEN 4
        WHEN 'residential_link' THEN 4
        WHEN 'living_street' THEN 5
        WHEN 'track' THEN 5
        WHEN 'cycleway' THEN 5
        WHEN 'service' THEN 5
        WHEN 'road' THEN 5
        ELSE NULL
    END AS level
FROM
    ways
WHERE
    tags != ''::hstore AND
    tags?'highway' AND
    tags->'highway' NOT IN ('services', 'rest_area', 'razed', 'no') AND
    ST_NPoints(linestring) >= 2
;
CREATE INDEX idx_highways_linestring ON {0}.highways USING gist(linestring);
CREATE INDEX idx_highways_linestring_proj ON {0}.highways USING gist(linestring_proj);
CREATE INDEX idx_highways_id ON {0}.highways(id);
CREATE INDEX idx_highways_highway ON {0}.highways(highway);
ANALYZE {0}.highways;
"""
    # SQL template building `{0}.highway_ends` from the highways table:
    # one row per way end point (via ends_geom), excluding area and
    # construction highways.
    sql_create_highway_ends = """
CREATE UNLOGGED TABLE {0}.highway_ends AS
SELECT
    id,
    nodes,
    linestring,
    highway,
    is_link,
    is_roundabout,
    (ends_geom(nodes, linestring)).id AS nid,
    (ends_geom(nodes, linestring)).geom AS geom,
    level
FROM
    highways
WHERE
    NOT is_area AND
    NOT is_construction
;
ANALYZE {0}.highway_ends;
"""
    # SQL template building `{0}.buildings`: closed building ways with a
    # polygon reprojected to SRID {1} (NULL when invalid), a `wall` flag,
    # the computed area for walled buildings, and whether the way belongs
    # to a relation.
    sql_create_buildings = """
CREATE UNLOGGED TABLE {0}.buildings AS
SELECT
    *,
    CASE WHEN polygon_proj IS NOT NULL AND wall THEN ST_Area(polygon_proj) ELSE NULL END AS area
FROM (
    SELECT DISTINCT ON (id)
        id,
        tags,
        linestring,
        CASE WHEN ST_IsValid(linestring) = 't' AND ST_IsSimple(linestring) = 't' AND ST_IsValid(ST_MakePolygon(ST_Transform(linestring, {1}))) THEN ST_MakePolygon(ST_Transform(linestring, {1})) ELSE NULL END AS polygon_proj,
        (NOT tags?'wall' OR tags->'wall' != 'no') AND tags->'building' != 'roof' AS wall,
        tags?'layer' AS layer,
        ST_NPoints(linestring) AS npoints,
        relation_members.relation_id IS NOT NULL AS relation
    FROM
        ways
        LEFT JOIN relation_members ON
            relation_members.member_type = 'W' AND
            relation_members.member_id = ways.id
    WHERE
        tags != ''::hstore AND
        tags?'building' AND
        tags->'building' != 'no' AND
        is_polygon
) AS t
;
CREATE INDEX idx_buildings_linestring ON {0}.buildings USING GIST(linestring);
CREATE INDEX idx_buildings_linestring_wall ON {0}.buildings USING GIST(linestring) WHERE wall;
CREATE INDEX idx_buildings_polygon_proj ON {0}.buildings USING gist(polygon_proj);
ANALYZE {0}.buildings;
"""
    def __init__(self, config, logger = None):
        """Initialize analyser state; the database connection itself is
        only opened in __enter__()."""
        Analyser.__init__(self, config, logger)
        # Error class declarations: full-data checks and diff-mode checks
        self.classs = {}
        self.classs_change = {}
        self.explain_sql = False
        # Map position-callback methods to the OSM object type they address
        self.FixTypeTable = {
            self.node:"node", self.node_full:"node", self.node_new:"node", self.node_position:"node",
            self.way:"way", self.way_full:"way",
            self.relation:"relation", self.relation_full:"relation",
        }
        # Map member-type letters to the corresponding full-position callbacks
        self.typeMapping = {'N': self.node_full, 'W': self.way_full, 'R': self.relation_full}
        # Bookkeeping for resumed runs (see analyser_resume)
        self.resume_from_timestamp = None
        self.already_issued_objects = None
        if hasattr(config, "verbose") and config.verbose:
            self.explain_sql = True
    def __enter__(self):
        """Open the osmosis database connection and register unicode text
        handling; returns self for use as a context manager."""
        Analyser.__enter__(self)
        # make psycopg2 return text columns as unicode strings
        psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
        psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
        # open database connections + output file
        self.apiconn = self.config.osmosis_manager.osmosis()
        self.gisconn = self.apiconn.conn()
        self.giscurs = self.gisconn.cursor(cursor_factory=DictCursorUnicode.DictCursorUnicode50)
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        """Close the database connection, then run base-class teardown."""
        # close database connections + output file
        self.config.osmosis_manager.osmosis_close()
        Analyser.__exit__(self, exc_type, exc_value, traceback)
    def timestamp(self):
        """Return the timestamp of the osmosis database snapshot."""
        return self.apiconn.timestamp()
    def analyser(self):
        """Run the analysis over the full data set (no diff)."""
        self.init_analyser()
        # nothing to do when the subclass declared no error classes
        if self.classs != {} or self.classs_change != {}:
            self.logger.log(u"run osmosis all analyser {0}".format(self.__class__.__name__))
            self.error_file.analyser(self.timestamp(), self.analyser_version())
            # build the helper tables the subclass declared it needs
            if hasattr(self, 'requires_tables_common'):
                self.requires_tables_build(self.requires_tables_common)
            if hasattr(self, 'requires_tables_full'):
                self.requires_tables_build(self.requires_tables_full)
            self.dump_class(self.classs)
            self.dump_class(self.classs_change)
            try:
                self.analyser_osmosis_common()
                self.analyser_osmosis_full()
            finally:
                # always close the analyser section, even on failure
                self.error_file.analyser_end()
def analyser_deferred_clean(self):
if hasattr(self, 'requires_tables_common'):
self.requires_tables_clean(self.requires_tables_common)
if hasattr(self, 'requires_tables_full'):
self.requires_tables_clean(self.requires_tables_full)
    def analyser_change(self):
        """Run the analysis in diff mode: plain classes are checked over
        the full data, change classes via the diff-specific checks."""
        self.init_analyser()
        if self.classs != {}:
            self.logger.log(u"run osmosis base analyser {0}".format(self.__class__.__name__))
            self.error_file.analyser(self.timestamp(), self.analyser_version())
            if hasattr(self, 'requires_tables_common'):
                self.requires_tables_build(self.requires_tables_common)
            self.dump_class(self.classs)
            try:
                self.analyser_osmosis_common()
            finally:
                # always close the analyser section, even on failure
                self.error_file.analyser_end()
        if self.classs_change != {}:
            self.logger.log(u"run osmosis change analyser {0}".format(self.__class__.__name__))
            self.error_file.analyser(self.timestamp(), self.analyser_version(), change=True)
            try:
                if hasattr(self, 'requires_tables_diff'):
                    self.requires_tables_build(self.requires_tables_diff)
                self.dump_class(self.classs_change)
                self.dump_delete()
                self.analyser_osmosis_diff()
            finally:
                self.error_file.analyser_end()
def analyser_change_deferred_clean(self):
if self.classs != {}:
if hasattr(self, 'requires_tables_common'):
self.requires_tables_clean(self.requires_tables_common)
if self.classs_change != {}:
if hasattr(self, 'requires_tables_diff'):
self.requires_tables_clean(self.requires_tables_diff)
def analyser_resume(self, timestamp, already_issued_objects):
if not self.resume_from_timestamp or self.resume_from_timestamp != timestamp:
self.db_setup_resume_from_timestamp(timestamp)
self.resume_from_timestamp = timestamp
self.already_issued_objects = already_issued_objects
self.analyser_change()
    def analyser_resume_deferred_clean(self):
        """Deferred cleanup for a resumed run: same tables as change mode."""
        self.analyser_change_deferred_clean()
def requires_tables_build(self, tables):
for table in tables:
self.giscurs.execute("SELECT 1 FROM pg_tables WHERE schemaname = '{0}' AND tablename = '{1}'".format(self.config.db_schema.split(',')[0], table))
if not self.giscurs.fetchone():
self.logger.log(u"requires table {0}".format(table))
if table == 'highways':
self.giscurs.execute(self.sql_create_highways.format(self.config.db_schema.split(',')[0], self.config.options.get("proj")))
elif table == 'touched_highways':
self.requires_tables_build(["highways"])
self.create_view_touched('highways', 'W')
elif table == 'not_touched_highways':
self.requires_tables_build(["highways"])
self.create_view_not_touched('highways', 'W')
elif table == 'highway_ends':
self.requires_tables_build(["highways"])
self.giscurs.execute(self.sql_create_highway_ends.format(self.config.db_schema.split(',')[0]))
elif table == 'touched_highway_ends':
self.requires_tables_build(["highway_ends"])
self.create_view_touched('highway_ends', 'W')
elif table == 'buildings':
self.giscurs.execute(self.sql_create_buildings.format(self.config.db_schema.split(',')[0], self.config.options.get("proj")))
elif table == 'touched_buildings':
self.requires_tables_build(["buildings"])
self.create_view_touched('buildings', 'W')
elif table == 'not_touched_buildings':
self.requires_tables_build(["buildings"])
self.create_view_not_touched('buildings', 'W')
else:
raise Exception('Unknow table name {0}'.format(table))
self.giscurs.execute('COMMIT')
self.giscurs.execute('BEGIN')
def requires_tables_clean(self, tables):
for table in tables:
self.logger.log(u"requires table clean {0}".format(table))
self.giscurs.execute('DROP TABLE IF EXISTS {0}.{1} CASCADE'.format(self.config.db_schema.split(',')[0], table))
self.giscurs.execute('COMMIT')
self.giscurs.execute('BEGIN')
def db_setup_resume_from_timestamp(self, timestamp):
self.giscurs.execute("SELECT tstamp_action FROM metainfo")
tstamp_action = self.giscurs.fetchone()[0]
if tstamp_action != timestamp:
self.logger.log("osmosis resume post scripts")
osmosis_resume_post_scripts = [ # Scripts to run each time the database is updated
"/osmosis/ActionFromTimestamp.sql",
"/osmosis/CreateTouched.sql",
]
for script in osmosis_resume_post_scripts: # self.config.analyser_conf.osmosis_resume_post_scripts:
self.giscurs.execute(open('./' + script, 'r').read().replace(':timestamp', str(timestamp)))
self.giscurs.execute('COMMIT')
self.giscurs.execute('BEGIN')
def init_analyser(self):
if len(set(self.classs.keys()) & set(self.classs_change.keys())) > 0:
self.logger.log(u"Warning: duplicate class in {0}".format(self.__class__.__name__))
self.giscurs.execute("SET search_path TO {0},public;".format(self.config.db_schema_path or self.config.db_schema))
def dump_class(self, classs):
for id_ in classs:
data = classs[id_]
self.error_file.classs(
id = id_,
item = data['item'],
level = data['level'],
tags = data.get('tags'),
title = data.get('title'),
detail = data.get('detail'),
fix = data.get('fix'),
trap = data.get('trap'),
example = data.get('example'),
source = data.get('source'),
resource = data.get('resource'),
)
    def analyser_osmosis_common(self):
        """
        Run check not supporting diff mode.
        Default implementation does nothing.
        """
        pass
    def analyser_osmosis_full(self):
        """
        Run check supporting diff mode. Full data check.
        Alternative method of analyser_osmosis_diff().
        Default implementation does nothing.
        """
        pass
    def analyser_osmosis_diff(self):
        """
        Run check supporting diff mode. Checks only on data changed from last run.
        Alternative method of analyser_osmosis_full().
        Default implementation does nothing.
        """
        pass
    def dump_delete(self):
        """Emit delete markers for previously reported objects that are now gone.

        In resume mode, checks which of the already issued object ids no longer
        exist in the live tables; in diff mode, uses the osmosis actions and
        touched_* tables to report deleted or touched objects that were not
        (re)created.
        """
        if self.already_issued_objects:
            # Resume
            types = {'N': 'node', 'W': 'way', 'R': 'relation'}
            for t, ids in self.already_issued_objects.items():
                if ids:
                    # Anti-join: ids with no matching row in the live table were deleted.
                    sql = "SELECT v.id, l.id FROM (VALUES ({0})) AS v(id) LEFT JOIN {1}s AS l ON v.id = l.id WHERE l.id IS NULL;".format('),('.join(map(str, ids)), types[t])
                    self.giscurs.execute(sql)
                    for res in self.giscurs.fetchall():
                        self.error_file.delete(types[t], res[0])
        else:
            # Change
            for t in ["node", "way", "relation"]:
                # Deleted ('D') or touched objects, minus those created ('C') in this diff.
                sql = "(SELECT id FROM actions WHERE data_type='{0}' AND action='D') UNION ALL (SELECT id FROM touched_{1}s) EXCEPT (SELECT id FROM actions WHERE data_type='{0}' AND action='C')".format(t[0].upper(), t)
                self.giscurs.execute(sql)
                for res in self.giscurs.fetchall():
                    self.error_file.delete(t, res[0])
    def create_view_touched(self, table, type, id = 'id'):
        """Create a temporary view `touched_<table>` with the rows of `table`
        whose objects were touched by the current diff.

        @param table base table (or view) name
        @param type in 'N', 'W', 'R'
        @param id name of the id column of `table` joined against transitive_touched
        """
        sql = """
CREATE OR REPLACE TEMPORARY VIEW touched_{0} AS
SELECT
    {0}.*
FROM
    {0}
    JOIN transitive_touched ON
        transitive_touched.data_type = '{1}' AND
        {0}.{2} = transitive_touched.id
"""
        self.giscurs.execute(sql.format(table, type, id))
    def create_view_not_touched(self, table, type, id = 'id'):
        """Create a temporary view `not_touched_<table>` with the rows of `table`
        whose objects were NOT touched by the current diff.

        @param table base table (or view) name
        @param type in 'N', 'W', 'R'
        @param id name of the id column of `table` joined against transitive_touched
        """
        sql = """
CREATE OR REPLACE TEMPORARY VIEW not_touched_{0} AS
SELECT
    {0}.*
FROM
    {0}
    LEFT JOIN transitive_touched ON
        transitive_touched.data_type = '{1}' AND
        {0}.{2} = transitive_touched.id
WHERE
    transitive_touched.id IS NULL
"""
        self.giscurs.execute(sql.format(table, type, id))
    def run00(self, sql, callback = None):
        """Execute `sql` on the gis cursor, optionally streaming each row to `callback`.

        When explain_sql is set, the statement is logged and, for single-statement
        SELECT / CREATE UNLOGGED TABLE ... AS queries, its EXPLAIN plan is logged
        too. Rows are fetched in batches of 1000 to bound memory usage. Failures
        of the statement or of the callback are logged with their context before
        being re-raised.
        """
        if self.explain_sql:
            self.logger.log(sql.strip())
        # Only EXPLAIN single statements (no ';' except possibly the trailing
        # one) that look like they produce a result (" AS " present).
        if self.explain_sql and (sql.strip().startswith("SELECT") or sql.strip().startswith("CREATE UNLOGGED TABLE")) and not ';' in sql[:-1] and " AS " in sql:
            sql_explain = "EXPLAIN " + sql.split(";")[0]
            self.giscurs.execute(sql_explain)
            for res in self.giscurs.fetchall():
                self.logger.log(res[0])
        try:
            self.giscurs.execute(sql)
        except:
            # Log the failing statement before propagating, to ease debugging.
            self.logger.err("sql={0}".format(sql))
            raise
        if callback:
            while True:
                many = self.giscurs.fetchmany(1000)
                if not many:
                    break
                for res in many:
                    ret = None
                    try:
                        ret = callback(res)
                    except:
                        # Log the offending row and partial callback result so
                        # the failing analyser rule can be identified.
                        self.logger.err("res={0}".format(res))
                        self.logger.err("ret={0}".format(ret))
                        raise
def run0(self, sql, callback = None):
caller = getframeinfo(stack()[1][0])
self.logger.log("{0}:{1} sql".format(os.path.basename(caller.filename), caller.lineno))
self.run00(sql, callback)
    def run(self, sql, callback = None):
        """Execute `sql`, turning each returned row into an issue via `callback`.

        `callback(res)` may return a dict describing the issue:
          - "class"/"subclass"/"text"/"fix": issue metadata for the output file,
          - "data": one extractor per row column (self.node, self.way, ...)
            used to build the issue geometry; None entries are skipped,
          - "self": optional row pre-processor applied before extraction.
        Rows for which the callback returns anything other than a dict are
        ignored.
        """
        def callback_package(res):
            ret = callback(res)
            if ret and ret.__class__ == dict:
                if "self" in ret:
                    # Let the rule rewrite the row before geometry extraction.
                    res = ret["self"](res)
                if "data" in ret:
                    # Reset the geometry accumulator; extractors append into it.
                    self.geom = defaultdict(list)
                    for (i, d) in enumerate(ret["data"]):
                        if d is not None:
                            d(res[i])
                    # Map each extractor to its fix type, when declared.
                    ret["fixType"] = list(map(lambda datai: self.FixTypeTable[datai] if datai is not None and datai in self.FixTypeTable else None, ret["data"]))
                self.error_file.error(
                    ret["class"],
                    ret.get("subclass"),
                    ret.get("text"),
                    res,
                    ret.get("fixType"),
                    ret.get("fix"),
                    self.geom)
        caller = getframeinfo(stack()[1][0])
        if callback:
            self.logger.log("{0}:{1} xml generation".format(os.path.basename(caller.filename), caller.lineno))
            self.run00(sql, callback_package)
        else:
            self.logger.log("{0}:{1} sql".format(os.path.basename(caller.filename), caller.lineno))
            self.run00(sql)
def node(self, res):
self.geom["node"].append({"id":res, "tag":{}})
def node_full(self, res):
self.geom["node"].append(self.apiconn.NodeGet(res))
def node_position(self, res):
node = self.apiconn.NodeGet(res)
if node:
self.geom["position"].append({'lat': str(node['lat']), 'lon': str(node['lon'])})
    def node_new(self, res):
        # Intentionally a no-op — presumably there is nothing to fetch for a
        # new node; TODO confirm against callers.
        pass
def way(self, res):
self.geom["way"].append({"id":res, "nd":[], "tag":{}})
def way_full(self, res):
self.geom["way"].append(self.apiconn.WayGet(res))
def relation(self, res):
self.geom["relation"].append({"id":res, "member":[], "tag":{}})
def relation_full(self, res):
self.geom["relation"].append(self.apiconn.RelationGet(res))
def any_full(self, res):
self.typeMapping[res[0]](int(res[1:]))
def array_full(self, res):
for type, id in map(lambda r: (r[0], r[1:]), res):
self.typeMapping[type](int(id))
re_points = re.compile(r"[\(,][^\(,\)]*[\),]")
def get_points(self, text):
pts = []
for r in self.re_points.findall(text):
lon, lat = r[1:-1].split(" ")
pts.append({"lat":lat, "lon":lon})
return pts
def positionAsText(self, res):
if res is None:
self.logger.err("NULL location provided")
return []
for loc in self.get_points(res):
self.geom["position"].append(loc)
# def positionWay(self, res):
# self.geom["position"].append()
# def positionRelation(self, res):
# self.geom["position"].append()
###########################################################################
from .Analyser import TestAnalyser
from modules import IssuesFileOsmose
class TestAnalyserOsmosis(TestAnalyser):
    """Base class for analyser tests that need an osmosis-loaded PostgreSQL database."""

    @classmethod
    def teardown_class(cls):
        # Drop the test schema and the result file once all tests of the class ran.
        cls.clean()

    @classmethod
    def load_osm(cls, osm_file, dst, analyser_options=None, skip_db=False):
        """Import `osm_file` into the test database and prepare result directories.

        Skips the whole test class when the database is not reachable (unless
        `skip_db` is set, in which case no database work is done at all).
        Returns the analyser configuration to use for the tests.
        """
        import modules.OsmOsisManager
        (conf, analyser_conf) = cls.init_config(osm_file, dst, analyser_options)
        # Fix: osmosis_manager was previously unbound when skip_db=True,
        # causing a NameError at the assignment below.
        osmosis_manager = None
        if not skip_db:
            import pytest
            osmosis_manager = modules.OsmOsisManager.OsmOsisManager(conf, conf.db_host, conf.db_user, conf.db_password, conf.db_base, conf.db_schema or conf.country, conf.db_persistent, cls.logger)
            if not osmosis_manager.check_database():
                pytest.skip("database not present")
            osmosis_manager.init_database(conf)

        # create directories for results of the three analysis modes
        import os
        for i in ["normal", "diff_empty", "diff_full"]:
            dirname = os.path.join(os.path.dirname(dst), i)
            try:
                os.makedirs(dirname)
            except OSError:
                # Already existing directory is fine; anything else is re-raised.
                if os.path.isdir(dirname):
                    pass
                else:
                    raise

        cls.conf = conf
        cls.xml_res_file = dst

        analyser_conf.osmosis_manager = osmosis_manager
        analyser_conf.db_schema = conf.db_schema
        analyser_conf.db_schema_path = conf.db_schema_path
        return analyser_conf

    @classmethod
    def clean(cls):
        """Drop the test database schema and remove the result file."""
        # clean database
        import modules.OsmOsisManager
        osmosis_manager = modules.OsmOsisManager.OsmOsisManager(cls.conf, cls.conf.db_host, cls.conf.db_user, cls.conf.db_password, cls.conf.db_base, cls.conf.db_schema or cls.conf.country, cls.conf.db_persistent, cls.logger)
        osmosis_manager.clean_database(cls.conf, False)

        # clean results file; it may legitimately not exist
        import os
        try:
            os.remove(cls.xml_res_file)
        except OSError:
            pass
class Test(TestAnalyserOsmosis):
    """Smoke-tests every osmosis analyser in normal, empty-diff and full-diff mode.

    The four test methods previously duplicated the analyser discovery loop;
    it is now factored into _list_analysers() / _run_all_analysers().
    """

    from modules import config
    default_xml_res_path = config.dir_tmp + "/tests/osmosis/"

    @classmethod
    def setup_class(cls):
        """Load the shared test OSM extract into the database once for the class."""
        TestAnalyserOsmosis.setup_class()
        cls.analyser_conf = cls.load_osm("tests/osmosis.test.osm",
                                         cls.default_xml_res_path + "osmosis.test.xml",
                                         {"test": True,
                                          "addr:city-admin_level": "8,9",
                                          "driving_side": "left",
                                          "proj": 2969})
        import modules.OsmOsisManager
        cls.conf.osmosis_manager = modules.OsmOsisManager.OsmOsisManager(cls.conf, cls.conf.db_host, cls.conf.db_user, cls.conf.db_password, cls.conf.db_base, cls.conf.db_schema or cls.conf.country, cls.conf.db_persistent, cls.logger)

    @staticmethod
    def _list_analysers():
        """Yield (name, class) for every analyser class in analysers/analyser_osmosis_*.py."""
        import importlib
        import inspect
        import os
        for fn in os.listdir("analysers/"):
            if not fn.startswith("analyser_osmosis_") or not fn.endswith(".py"):
                continue
            analyser = importlib.import_module("analysers." + fn[:-3], package=".")
            for name, obj in inspect.getmembers(analyser):
                if (inspect.isclass(obj) and obj.__module__ == ("analysers." + fn[:-3]) and
                    (name.startswith("Analyser") or name.startswith("analyser"))):
                    yield name, obj

    def _run_all_analysers(self, subdir, analyser_method):
        """Run `analyser_method` ('analyser' or 'analyser_change') of every
        discovered analyser, writing and checking one result file per analyser
        under `subdir` (normal / diff_empty / diff_full)."""
        for name, obj in self._list_analysers():
            self.xml_res_file = self.default_xml_res_path + "{0}/{1}.xml".format(subdir, name)
            self.analyser_conf.error_file = IssuesFileOsmose.IssuesFileOsmose(self.xml_res_file)
            with obj(self.analyser_conf, self.logger) as analyser_obj:
                getattr(analyser_obj, analyser_method)()
            self.root_err = self.load_errors()
            self.check_num_err(min=0, max=5)

    def test(self):
        # run all available osmosis analysers, for basic SQL check
        self._run_all_analysers("normal", "analyser")

    def test_change_empty(self):
        # run all osmosis analysers in diff mode, on an empty diff
        self.conf.osmosis_manager.set_pgsql_schema()
        for script in self.conf.osmosis_change_init_post_scripts:
            self.conf.osmosis_manager.psql_f(script)
        self.conf.osmosis_manager.psql_c("TRUNCATE TABLE actions")
        for script in self.conf.osmosis_change_post_scripts:
            self.conf.osmosis_manager.psql_f(script)
        self._run_all_analysers("diff_empty", "analyser_change")

    def test_change_full(self):
        # run all osmosis analysers in diff mode, after marking every element as new
        self.conf.osmosis_manager.set_pgsql_schema()
        for script in self.conf.osmosis_change_init_post_scripts:
            self.conf.osmosis_manager.psql_f(script)
        self.conf.osmosis_manager.psql_c("TRUNCATE TABLE actions;"
                                         "INSERT INTO actions (SELECT 'R', 'C', id FROM relations);"
                                         "INSERT INTO actions (SELECT 'W', 'C', id FROM ways);"
                                         "INSERT INTO actions (SELECT 'N', 'C', id FROM nodes);")
        for script in self.conf.osmosis_change_post_scripts:
            self.conf.osmosis_manager.psql_f(script)
        self._run_all_analysers("diff_full", "analyser_change")

    def test_cmp_normal_change(self):
        # compare results between normal and change_full
        # must be run after both test() and test_change_full()
        for name, obj in self._list_analysers():
            normal_xml = (self.default_xml_res_path +
                          "normal/{0}.xml".format(name))
            change_xml = (self.default_xml_res_path +
                          "diff_full/{0}.xml".format(name))
            print(normal_xml, change_xml)
            self.compare_results(normal_xml, change_xml, convert_checked_to_normal=True)
|
tkasp/osmose-backend
|
analysers/Analyser_Osmosis.py
|
Python
|
gpl-3.0
| 28,447 | 0.005765 |
import sys
import os
import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler, FileModifiedEvent
class LessCompiler(FileSystemEventHandler):
    """Watchdog handler that recompiles a LESS source file to CSS on change."""

    def __init__(self, source):
        # Absolute path of the .less entry point to compile.
        self.source = source
        FileSystemEventHandler.__init__(self)

    def compile_css(self):
        """Run `lessc` on the watched source, writing compressed (-x) CSS."""
        if len(sys.argv) < 3:
            # Default destination: source path with 'less' replaced by 'css'.
            destination = self.source.replace('less', 'css')
        else:
            destination = sys.argv[2]
        # Fix: previously interpolated the module-level global `source`
        # instead of the instance attribute, breaking any use of this class
        # outside the __main__ script below.
        cmd = 'lessc %s > %s -x' % (self.source, os.path.abspath(destination))
        print(cmd)
        # NOTE(review): building a shell command from paths is fragile if they
        # contain spaces/metacharacters; subprocess.run would be safer.
        os.system(cmd)

    def on_any_event(self, event):
        """Recompile on file modification events, skipping '__'-named temp files."""
        if '__' not in event.src_path and isinstance(event, FileModifiedEvent):
            self.compile_css()
if __name__ == "__main__":
    # Usage: watch_less.py source [destination]; with no destination the CSS
    # path is derived from the source path (see LessCompiler.compile_css).
    if len(sys.argv) < 2:
        sys.stderr.write(
            'Usage: %s source [destination=../css/$1.css]\n' % sys.argv[0])
        sys.exit(1)

    source = os.path.abspath(sys.argv[1])
    event_handler = LessCompiler(source)

    # Run once at startup
    event_handler.compile_css()

    # Then watch the source directory recursively and recompile on each change.
    observer = Observer()
    observer.schedule(event_handler, os.path.dirname(source), recursive=True)
    observer.start()
    try:
        # Observer runs in its own thread; idle here until Ctrl-C.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
|
hzlf/openbroadcast
|
website/tools/suit/watch_less.py
|
Python
|
gpl-3.0
| 1,306 | 0 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""RNN helpers for TensorFlow models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.util import nest
# pylint: disable=protected-access
_concat = rnn_cell_impl._concat
_like_rnncell = rnn_cell_impl._like_rnncell
# pylint: enable=protected-access
def _transpose_batch_time(x):
  """Transpose the batch and time dimensions of a Tensor.

  Retains as much of the static shape information as possible.

  Args:
    x: A tensor of rank 2 or higher.

  Returns:
    x transposed along the first two dimensions.

  Raises:
    ValueError: if `x` is rank 1 or lower.
  """
  x_static_shape = x.get_shape()
  if x_static_shape.ndims is not None and x_static_shape.ndims < 2:
    raise ValueError(
        "Expected input tensor %s to have rank at least 2, but saw shape: %s" %
        (x, x_static_shape))
  x_rank = array_ops.rank(x)
  # Build the permutation [1, 0, 2, 3, ...] dynamically so tensors of
  # statically-unknown rank are supported.
  x_t = array_ops.transpose(
      x, array_ops.concat(
          ([1, 0], math_ops.range(2, x_rank)), axis=0))
  # Re-attach the known static shape, with the first two dimensions swapped
  # (transpose loses static shape info when the permutation is dynamic).
  x_t.set_shape(
      tensor_shape.TensorShape([
          x_static_shape[1].value, x_static_shape[0].value
      ]).concatenate(x_static_shape[2:]))
  return x_t
def _infer_state_dtype(explicit_dtype, state):
  """Infer the dtype of an RNN state.

  Args:
    explicit_dtype: explicitly declared dtype or None.
    state: RNN's hidden state. Must be a Tensor or a nested iterable containing
      Tensors.

  Returns:
    dtype: inferred dtype of hidden state.

  Raises:
    ValueError: if `state` has heterogeneous dtypes or is empty.
  """
  if explicit_dtype is not None:
    return explicit_dtype
  elif nest.is_sequence(state):
    inferred_dtypes = [element.dtype for element in nest.flatten(state)]
    if not inferred_dtypes:
      raise ValueError("Unable to infer dtype from empty state.")
    # Generator form avoids materializing an intermediate list of booleans.
    all_same = all(x == inferred_dtypes[0] for x in inferred_dtypes)
    if not all_same:
      raise ValueError(
          "State has tensors of different inferred_dtypes. Unable to infer a "
          "single representative dtype.")
    return inferred_dtypes[0]
  else:
    return state.dtype
def _on_device(fn, device):
  """Build the subgraph defined by lambda `fn` on `device` if it's not None."""
  if not device:
    return fn()
  with ops.device(device):
    return fn()
# pylint: disable=unused-argument
def _rnn_step(
    time, sequence_length, min_sequence_length, max_sequence_length,
    zero_output, state, call_cell, state_size, skip_conditionals=False):
  """Calculate one step of a dynamic RNN minibatch.

  Returns an (output, state) pair conditioned on the sequence_lengths.
  When skip_conditionals=False, the pseudocode is something like:

  if t >= max_sequence_length:
    return (zero_output, state)
  if t < min_sequence_length:
    return call_cell()

  # Selectively output zeros or output, old state or new state depending
  # on if we've finished calculating each row.
  new_output, new_state = call_cell()
  final_output = np.vstack([
    zero_output if time >= sequence_lengths[r] else new_output_r
    for r, new_output_r in enumerate(new_output)
  ])
  final_state = np.vstack([
    state[r] if time >= sequence_lengths[r] else new_state_r
    for r, new_state_r in enumerate(new_state)
  ])
  return (final_output, final_state)

  Args:
    time: Python int, the current time step
    sequence_length: int32 `Tensor` vector of size [batch_size]
    min_sequence_length: int32 `Tensor` scalar, min of sequence_length
    max_sequence_length: int32 `Tensor` scalar, max of sequence_length
    zero_output: `Tensor` vector of shape [output_size]
    state: Either a single `Tensor` matrix of shape `[batch_size, state_size]`,
      or a list/tuple of such tensors.
    call_cell: lambda returning tuple of (new_output, new_state) where
      new_output is a `Tensor` matrix of shape `[batch_size, output_size]`.
      new_state is a `Tensor` matrix of shape `[batch_size, state_size]`.
    state_size: The `cell.state_size` associated with the state.
    skip_conditionals: Python bool, whether to skip using the conditional
      calculations.  This is useful for `dynamic_rnn`, where the input tensor
      matches `max_sequence_length`, and using conditionals just slows
      everything down.

  Returns:
    A tuple of (`final_output`, `final_state`) as given by the pseudocode above:
      final_output is a `Tensor` matrix of shape [batch_size, output_size]
      final_state is either a single `Tensor` matrix, or a tuple of such
        matrices (matching length and shapes of input `state`).

  Raises:
    ValueError: If the cell returns a state tuple whose length does not match
      that returned by `state_size`.
  """

  # Convert state to a list for ease of use
  flat_state = nest.flatten(state)
  flat_zero_output = nest.flatten(zero_output)

  def _copy_one_through(output, new_output):
    # Rows whose sequence has already ended keep their previous value;
    # the others take the freshly computed one.
    copy_cond = (time >= sequence_length)
    return _on_device(
        lambda: array_ops.where(copy_cond, output, new_output),
        device=new_output.op.device)

  def _copy_some_through(flat_new_output, flat_new_state):
    # Use broadcasting select to determine which values should get
    # the previous state & zero output, and which values should get
    # a calculated state & output.
    flat_new_output = [
        _copy_one_through(zero_output, new_output)
        for zero_output, new_output in zip(flat_zero_output, flat_new_output)]
    flat_new_state = [
        _copy_one_through(state, new_state)
        for state, new_state in zip(flat_state, flat_new_state)]
    return flat_new_output + flat_new_state

  def _maybe_copy_some_through():
    """Run RNN step.  Pass through either no or some past state."""
    new_output, new_state = call_cell()

    nest.assert_same_structure(state, new_state)

    flat_new_state = nest.flatten(new_state)
    flat_new_output = nest.flatten(new_output)
    return control_flow_ops.cond(
        # if t < min_seq_len: calculate and return everything
        time < min_sequence_length, lambda: flat_new_output + flat_new_state,
        # else copy some of it through
        lambda: _copy_some_through(flat_new_output, flat_new_state))

  # TODO(ebrevdo): skipping these conditionals may cause a slowdown,
  # but benefits from removing cond() and its gradient.  We should
  # profile with and without this switch here.
  if skip_conditionals:
    # Instead of using conditionals, perform the selective copy at all time
    # steps.  This is faster when max_seq_len is equal to the number of unrolls
    # (which is typical for dynamic_rnn).
    new_output, new_state = call_cell()
    nest.assert_same_structure(state, new_state)
    new_state = nest.flatten(new_state)
    new_output = nest.flatten(new_output)
    final_output_and_state = _copy_some_through(new_output, new_state)
  else:
    empty_update = lambda: flat_zero_output + flat_state
    final_output_and_state = control_flow_ops.cond(
        # if t >= max_seq_len: copy all state through, output zeros
        time >= max_sequence_length, empty_update,
        # otherwise calculation is required: copy some or all of it through
        _maybe_copy_some_through)

  if len(final_output_and_state) != len(flat_zero_output) + len(flat_state):
    raise ValueError("Internal error: state and output were not concatenated "
                     "correctly.")
  # The cond() branches returned outputs and states as one flat list; split
  # them back apart.
  final_output = final_output_and_state[:len(flat_zero_output)]
  final_state = final_output_and_state[len(flat_zero_output):]

  # Restore the static shape information lost through cond()/where().
  for output, flat_output in zip(final_output, flat_zero_output):
    output.set_shape(flat_output.get_shape())
  for substate, flat_substate in zip(final_state, flat_state):
    substate.set_shape(flat_substate.get_shape())

  final_output = nest.pack_sequence_as(
      structure=zero_output, flat_sequence=final_output)
  final_state = nest.pack_sequence_as(
      structure=state, flat_sequence=final_state)

  return final_output, final_state
def _reverse_seq(input_seq, lengths):
  """Reverse a list of Tensors up to specified lengths.

  Args:
    input_seq: Sequence of seq_len tensors of dimension (batch_size, n_features)
               or nested tuples of tensors.
    lengths:   A `Tensor` of dimension batch_size, containing lengths for each
               sequence in the batch. If "None" is specified, simply reverses
               the list.

  Returns:
    time-reversed sequence
  """
  if lengths is None:
    return list(reversed(input_seq))

  flat_input_seq = tuple(nest.flatten(input_) for input_ in input_seq)

  # One result list per time step; filled component-wise below.
  flat_results = [[] for _ in range(len(input_seq))]
  # Iterate over each flattened component across all time steps.
  for sequence in zip(*flat_input_seq):
    input_shape = tensor_shape.unknown_shape(
        ndims=sequence[0].get_shape().ndims)
    for input_ in sequence:
      # Every time step of one component must agree on its static shape.
      input_shape.merge_with(input_.get_shape())
      input_.set_shape(input_shape)

    # Join into (time, batch_size, depth)
    s_joined = array_ops.stack(sequence)

    # TODO(schuster, ebrevdo): Remove cast when reverse_sequence takes int32
    if lengths is not None:
      lengths = math_ops.to_int64(lengths)

    # Reverse along dimension 0
    s_reversed = array_ops.reverse_sequence(s_joined, lengths, 0, 1)
    # Split again into list
    result = array_ops.unstack(s_reversed)
    for r, flat_result in zip(result, flat_results):
      r.set_shape(input_shape)
      flat_result.append(r)

  # Re-nest each time step to match the structure of the original inputs.
  results = [nest.pack_sequence_as(structure=input_, flat_sequence=flat_result)
             for input_, flat_result in zip(input_seq, flat_results)]
  return results
def bidirectional_dynamic_rnn(cell_fw, cell_bw, inputs, sequence_length=None,
                              initial_state_fw=None, initial_state_bw=None,
                              dtype=None, parallel_iterations=None,
                              swap_memory=False, time_major=False, scope=None):
  """Creates a dynamic version of bidirectional recurrent neural network.

  Takes input and builds independent forward and backward RNNs. The input_size
  of forward and backward cell must match. The initial state for both directions
  is zero by default (but can be set optionally) and no intermediate states are
  ever returned -- the network is fully unrolled for the given (passed in)
  length(s) of the sequence(s) or completely unrolled if length(s) is not
  given.

  Args:
    cell_fw: An instance of RNNCell, to be used for forward direction.
    cell_bw: An instance of RNNCell, to be used for backward direction.
    inputs: The RNN inputs.
      If time_major == False (default), this must be a tensor of shape:
        `[batch_size, max_time, ...]`, or a nested tuple of such elements.
      If time_major == True, this must be a tensor of shape:
        `[max_time, batch_size, ...]`, or a nested tuple of such elements.
    sequence_length: (optional) An int32/int64 vector, size `[batch_size]`,
      containing the actual lengths for each of the sequences in the batch.
      If not provided, all batch entries are assumed to be full sequences; and
      time reversal is applied from time `0` to `max_time` for each sequence.
    initial_state_fw: (optional) An initial state for the forward RNN.
      This must be a tensor of appropriate type and shape
      `[batch_size, cell_fw.state_size]`.
      If `cell_fw.state_size` is a tuple, this should be a tuple of
      tensors having shapes `[batch_size, s] for s in cell_fw.state_size`.
    initial_state_bw: (optional) Same as for `initial_state_fw`, but using
      the corresponding properties of `cell_bw`.
    dtype: (optional) The data type for the initial states and expected output.
      Required if initial_states are not provided or RNN states have a
      heterogeneous dtype.
    parallel_iterations: (Default: 32).  The number of iterations to run in
      parallel.  Those operations which do not have any temporal dependency
      and can be run in parallel, will be.  This parameter trades off
      time for space.  Values >> 1 use more memory but take less time,
      while smaller values use less memory but computations take longer.
    swap_memory: Transparently swap the tensors produced in forward inference
      but needed for back prop from GPU to CPU.  This allows training RNNs
      which would typically not fit on a single GPU, with very minimal (or no)
      performance penalty.
    time_major: The shape format of the `inputs` and `outputs` Tensors.
      If true, these `Tensors` must be shaped `[max_time, batch_size, depth]`.
      If false, these `Tensors` must be shaped `[batch_size, max_time, depth]`.
      Using `time_major = True` is a bit more efficient because it avoids
      transposes at the beginning and end of the RNN calculation.  However,
      most TensorFlow data is batch-major, so by default this function
      accepts input and emits output in batch-major form.
    scope: VariableScope for the created subgraph; defaults to
      "bidirectional_rnn"

  Returns:
    A tuple (outputs, output_states) where:
      outputs: A tuple (output_fw, output_bw) containing the forward and
        the backward rnn output `Tensor`.
        If time_major == False (default),
          output_fw will be a `Tensor` shaped:
          `[batch_size, max_time, cell_fw.output_size]`
          and output_bw will be a `Tensor` shaped:
          `[batch_size, max_time, cell_bw.output_size]`.
        If time_major == True,
          output_fw will be a `Tensor` shaped:
          `[max_time, batch_size, cell_fw.output_size]`
          and output_bw will be a `Tensor` shaped:
          `[max_time, batch_size, cell_bw.output_size]`.
        It returns a tuple instead of a single concatenated `Tensor`, unlike
        in the `bidirectional_rnn`. If the concatenated one is preferred,
        the forward and backward outputs can be concatenated as
        `tf.concat(outputs, 2)`.
      output_states: A tuple (output_state_fw, output_state_bw) containing
        the forward and the backward final states of bidirectional rnn.

  Raises:
    TypeError: If `cell_fw` or `cell_bw` is not an instance of `RNNCell`.
  """

  if not _like_rnncell(cell_fw):
    raise TypeError("cell_fw must be an instance of RNNCell")
  if not _like_rnncell(cell_bw):
    raise TypeError("cell_bw must be an instance of RNNCell")

  with vs.variable_scope(scope or "bidirectional_rnn"):
    # Forward direction
    with vs.variable_scope("fw") as fw_scope:
      output_fw, output_state_fw = dynamic_rnn(
          cell=cell_fw, inputs=inputs, sequence_length=sequence_length,
          initial_state=initial_state_fw, dtype=dtype,
          parallel_iterations=parallel_iterations, swap_memory=swap_memory,
          time_major=time_major, scope=fw_scope)

    # Backward direction: which axis is time depends on time_major.
    if not time_major:
      time_dim = 1
      batch_dim = 0
    else:
      time_dim = 0
      batch_dim = 1

    def _reverse(input_, seq_lengths, seq_dim, batch_dim):
      # Per-row reversal up to each sequence length when lengths are known;
      # plain whole-axis reversal otherwise.
      if seq_lengths is not None:
        return array_ops.reverse_sequence(
            input=input_, seq_lengths=seq_lengths,
            seq_dim=seq_dim, batch_dim=batch_dim)
      else:
        return array_ops.reverse(input_, axis=[seq_dim])

    with vs.variable_scope("bw") as bw_scope:
      # Reverse the input, run a forward RNN over it, then reverse the
      # output back so it lines up with the forward direction in time.
      inputs_reverse = _reverse(
          inputs, seq_lengths=sequence_length,
          seq_dim=time_dim, batch_dim=batch_dim)
      tmp, output_state_bw = dynamic_rnn(
          cell=cell_bw, inputs=inputs_reverse, sequence_length=sequence_length,
          initial_state=initial_state_bw, dtype=dtype,
          parallel_iterations=parallel_iterations, swap_memory=swap_memory,
          time_major=time_major, scope=bw_scope)

  output_bw = _reverse(
      tmp, seq_lengths=sequence_length,
      seq_dim=time_dim, batch_dim=batch_dim)

  outputs = (output_fw, output_bw)
  output_states = (output_state_fw, output_state_bw)

  return (outputs, output_states)
def dynamic_rnn(cell, inputs, sequence_length=None, initial_state=None,
                dtype=None, parallel_iterations=None, swap_memory=False,
                time_major=False, scope=None):
  """Creates a recurrent neural network specified by RNNCell `cell`.
  Performs fully dynamic unrolling of `inputs`.
  `Inputs` may be a single `Tensor` where the maximum time is either the first
  or second dimension (see the parameter
  `time_major`). Alternatively, it may be a (possibly nested) tuple of
  Tensors, each of them having matching batch and time dimensions.
  The corresponding output is either a single `Tensor` having the same number
  of time steps and batch size, or a (possibly nested) tuple of such tensors,
  matching the nested structure of `cell.output_size`.
  The parameter `sequence_length` is optional and is used to copy-through state
  and zero-out outputs when past a batch element's sequence length. So it's more
  for correctness than performance.
  Args:
    cell: An instance of RNNCell.
    inputs: The RNN inputs.
      If `time_major == False` (default), this must be a `Tensor` of shape:
        `[batch_size, max_time, ...]`, or a nested tuple of such
        elements.
      If `time_major == True`, this must be a `Tensor` of shape:
        `[max_time, batch_size, ...]`, or a nested tuple of such
        elements.
      This may also be a (possibly nested) tuple of Tensors satisfying
      this property. The first two dimensions must match across all the inputs,
      but otherwise the ranks and other shape components may differ.
      In this case, input to `cell` at each time-step will replicate the
      structure of these tuples, except for the time dimension (from which the
      time is taken).
      The input to `cell` at each time step will be a `Tensor` or (possibly
      nested) tuple of Tensors each with dimensions `[batch_size, ...]`.
    sequence_length: (optional) An int32/int64 vector sized `[batch_size]`.
    initial_state: (optional) An initial state for the RNN.
      If `cell.state_size` is an integer, this must be
      a `Tensor` of appropriate type and shape `[batch_size, cell.state_size]`.
      If `cell.state_size` is a tuple, this should be a tuple of
      tensors having shapes `[batch_size, s] for s in cell.state_size`.
    dtype: (optional) The data type for the initial state and expected output.
      Required if initial_state is not provided or RNN state has a heterogeneous
      dtype.
    parallel_iterations: (Default: 32). The number of iterations to run in
      parallel. Those operations which do not have any temporal dependency
      and can be run in parallel, will be. This parameter trades off
      time for space. Values >> 1 use more memory but take less time,
      while smaller values use less memory but computations take longer.
    swap_memory: Transparently swap the tensors produced in forward inference
      but needed for back prop from GPU to CPU. This allows training RNNs
      which would typically not fit on a single GPU, with very minimal (or no)
      performance penalty.
    time_major: The shape format of the `inputs` and `outputs` Tensors.
      If true, these `Tensors` must be shaped `[max_time, batch_size, depth]`.
      If false, these `Tensors` must be shaped `[batch_size, max_time, depth]`.
      Using `time_major = True` is a bit more efficient because it avoids
      transposes at the beginning and end of the RNN calculation. However,
      most TensorFlow data is batch-major, so by default this function
      accepts input and emits output in batch-major form.
    scope: VariableScope for the created subgraph; defaults to "rnn".
  Returns:
    A pair (outputs, state) where:
    outputs: The RNN output `Tensor`.
      If time_major == False (default), this will be a `Tensor` shaped:
        `[batch_size, max_time, cell.output_size]`.
      If time_major == True, this will be a `Tensor` shaped:
        `[max_time, batch_size, cell.output_size]`.
      Note, if `cell.output_size` is a (possibly nested) tuple of integers
      or `TensorShape` objects, then `outputs` will be a tuple having the
      same structure as `cell.output_size`, containing Tensors having shapes
      corresponding to the shape data in `cell.output_size`.
    state: The final state. If `cell.state_size` is an int, this
      will be shaped `[batch_size, cell.state_size]`. If it is a
      `TensorShape`, this will be shaped `[batch_size] + cell.state_size`.
      If it is a (possibly nested) tuple of ints or `TensorShape`, this will
      be a tuple having the corresponding shapes.
  Raises:
    TypeError: If `cell` is not an instance of RNNCell.
    ValueError: If inputs is None or an empty list.
  """
  if not _like_rnncell(cell):
    raise TypeError("cell must be an instance of RNNCell")
  # By default, time_major==False and inputs are batch-major: shaped
  #   [batch, time, depth]
  # For internal calculations, we transpose to [time, batch, depth]
  flat_input = nest.flatten(inputs)
  if not time_major:
    # (B,T,D) => (T,B,D)
    flat_input = [ops.convert_to_tensor(input_) for input_ in flat_input]
    flat_input = tuple(_transpose_batch_time(input_) for input_ in flat_input)
  # Fall back to 32 parallel while_loop iterations when unspecified.
  parallel_iterations = parallel_iterations or 32
  if sequence_length is not None:
    sequence_length = math_ops.to_int32(sequence_length)
    if sequence_length.get_shape().ndims not in (None, 1):
      raise ValueError(
          "sequence_length must be a vector of length batch_size, "
          "but saw shape: %s" % sequence_length.get_shape())
    sequence_length = array_ops.identity(  # Just to find it in the graph.
        sequence_length, name="sequence_length")
  # Create a new scope in which the caching device is either
  # determined by the parent scope, or is set to place the cached
  # Variable using the same placement as for the rest of the RNN.
  with vs.variable_scope(scope or "rnn") as varscope:
    if varscope.caching_device is None:
      varscope.set_caching_device(lambda op: op.device)
    # flat_input is time-major at this point, so dimension 1 of every
    # input is the batch size.
    input_shape = tuple(array_ops.shape(input_) for input_ in flat_input)
    batch_size = input_shape[0][1]
    for input_ in input_shape:
      if input_[1].get_shape() != batch_size.get_shape():
        raise ValueError("All inputs should have the same batch size")
    if initial_state is not None:
      state = initial_state
    else:
      if not dtype:
        raise ValueError("If there is no initial_state, you must give a dtype.")
      state = cell.zero_state(batch_size, dtype)
    # Builds a graph-level (runtime) assertion that `x` has shape `shape`.
    def _assert_has_shape(x, shape):
      x_shape = array_ops.shape(x)
      packed_shape = array_ops.stack(shape)
      return control_flow_ops.Assert(
          math_ops.reduce_all(math_ops.equal(x_shape, packed_shape)),
          ["Expected shape for Tensor %s is " % x.name,
           packed_shape, " but saw shape: ", x_shape])
    if sequence_length is not None:
      # Perform some shape validation
      with ops.control_dependencies(
          [_assert_has_shape(sequence_length, [batch_size])]):
        sequence_length = array_ops.identity(
            sequence_length, name="CheckSeqLen")
    # Re-pack the (possibly transposed) flat inputs into the caller's
    # original nested structure before handing them to the loop.
    inputs = nest.pack_sequence_as(structure=inputs, flat_sequence=flat_input)
    (outputs, final_state) = _dynamic_rnn_loop(
        cell,
        inputs,
        state,
        parallel_iterations=parallel_iterations,
        swap_memory=swap_memory,
        sequence_length=sequence_length,
        dtype=dtype)
    # Outputs of _dynamic_rnn_loop are always shaped [time, batch, depth].
    # If we are performing batch-major calculations, transpose output back
    # to shape [batch, time, depth]
    if not time_major:
      # (T,B,D) => (B,T,D)
      outputs = nest.map_structure(_transpose_batch_time, outputs)
    return (outputs, final_state)
def _dynamic_rnn_loop(cell,
                      inputs,
                      initial_state,
                      parallel_iterations,
                      swap_memory,
                      sequence_length=None,
                      dtype=None):
  """Internal implementation of Dynamic RNN.
  Args:
    cell: An instance of RNNCell.
    inputs: A `Tensor` of shape [time, batch_size, input_size], or a nested
      tuple of such elements.
    initial_state: A `Tensor` of shape `[batch_size, state_size]`, or if
      `cell.state_size` is a tuple, then this should be a tuple of
      tensors having shapes `[batch_size, s] for s in cell.state_size`.
    parallel_iterations: Positive Python int.
    swap_memory: A Python boolean
    sequence_length: (optional) An `int32` `Tensor` of shape [batch_size].
    dtype: (optional) Expected dtype of output. If not specified, inferred from
      initial_state.
  Returns:
    Tuple `(final_outputs, final_state)`.
    final_outputs:
      A `Tensor` of shape `[time, batch_size, cell.output_size]`. If
      `cell.output_size` is a (possibly nested) tuple of ints or `TensorShape`
      objects, then this returns a (possibly nested) tuple of Tensors matching
      the corresponding shapes.
    final_state:
      A `Tensor`, or possibly nested tuple of Tensors, matching in length
      and shapes to `initial_state`.
  Raises:
    ValueError: If the input depth cannot be inferred via shape inference
      from the inputs.
  """
  state = initial_state
  assert isinstance(parallel_iterations, int), "parallel_iterations must be int"
  state_size = cell.state_size
  flat_input = nest.flatten(inputs)
  flat_output_size = nest.flatten(cell.output_size)
  # Construct an initial output
  input_shape = array_ops.shape(flat_input[0])
  time_steps = input_shape[0]
  batch_size = input_shape[1]
  # Static shape checks: every input must have rank >= 3 and all inputs
  # must agree on the first two (time, batch) dimensions.
  inputs_got_shape = tuple(input_.get_shape().with_rank_at_least(3)
                           for input_ in flat_input)
  const_time_steps, const_batch_size = inputs_got_shape[0].as_list()[:2]
  for shape in inputs_got_shape:
    if not shape[2:].is_fully_defined():
      raise ValueError(
          "Input size (depth of inputs) must be accessible via shape inference,"
          " but saw value None.")
    got_time_steps = shape[0].value
    got_batch_size = shape[1].value
    if const_time_steps != got_time_steps:
      raise ValueError(
          "Time steps is not the same for all the elements in the input in a "
          "batch.")
    if const_batch_size != got_batch_size:
      raise ValueError(
          "Batch_size is not the same for all the elements in the input.")
  # Prepare dynamic conditional copying of state & output
  def _create_zero_arrays(size):
    size = _concat(batch_size, size)
    return array_ops.zeros(
        array_ops.stack(size), _infer_state_dtype(dtype, state))
  flat_zero_output = tuple(_create_zero_arrays(output)
                           for output in flat_output_size)
  zero_output = nest.pack_sequence_as(structure=cell.output_size,
                                      flat_sequence=flat_zero_output)
  if sequence_length is not None:
    min_sequence_length = math_ops.reduce_min(sequence_length)
    max_sequence_length = math_ops.reduce_max(sequence_length)
  time = array_ops.constant(0, dtype=dtypes.int32, name="time")
  # Capture the fully-qualified name scope once so every TensorArray
  # created below shares the same prefix.
  with ops.name_scope("dynamic_rnn") as scope:
    base_name = scope
  def _create_ta(name, dtype):
    return tensor_array_ops.TensorArray(dtype=dtype,
                                        size=time_steps,
                                        tensor_array_name=base_name + name)
  output_ta = tuple(_create_ta("output_%d" % i,
                               _infer_state_dtype(dtype, state))
                    for i in range(len(flat_output_size)))
  input_ta = tuple(_create_ta("input_%d" % i, flat_input[i].dtype)
                   for i in range(len(flat_input)))
  # Unstack along dimension 0 (time) so ta.read(time) yields one step's input.
  input_ta = tuple(ta.unstack(input_)
                   for ta, input_ in zip(input_ta, flat_input))
  def _time_step(time, output_ta_t, state):
    """Take a time step of the dynamic RNN.
    Args:
      time: int32 scalar Tensor.
      output_ta_t: List of `TensorArray`s that represent the output.
      state: nested tuple of vector tensors that represent the state.
    Returns:
      The tuple (time + 1, output_ta_t with updated flow, new_state).
    """
    input_t = tuple(ta.read(time) for ta in input_ta)
    # Restore some shape information
    for input_, shape in zip(input_t, inputs_got_shape):
      input_.set_shape(shape[1:])
    input_t = nest.pack_sequence_as(structure=inputs, flat_sequence=input_t)
    call_cell = lambda: cell(input_t, state)
    if sequence_length is not None:
      # _rnn_step zeroes outputs / copies state through for batch entries
      # that are already past their sequence length.
      (output, new_state) = _rnn_step(
          time=time,
          sequence_length=sequence_length,
          min_sequence_length=min_sequence_length,
          max_sequence_length=max_sequence_length,
          zero_output=zero_output,
          state=state,
          call_cell=call_cell,
          state_size=state_size,
          skip_conditionals=True)
    else:
      (output, new_state) = call_cell()
    # Pack state if using state tuples
    output = nest.flatten(output)
    output_ta_t = tuple(
        ta.write(time, out) for ta, out in zip(output_ta_t, output))
    return (time + 1, output_ta_t, new_state)
  # Run _time_step until every time position [0, time_steps) is processed.
  _, output_final_ta, final_state = control_flow_ops.while_loop(
      cond=lambda time, *_: time < time_steps,
      body=_time_step,
      loop_vars=(time, output_ta, state),
      parallel_iterations=parallel_iterations,
      swap_memory=swap_memory)
  # Unpack final output if not using output tuples.
  final_outputs = tuple(ta.stack() for ta in output_final_ta)
  # Restore some shape information
  for output, output_size in zip(final_outputs, flat_output_size):
    shape = _concat(
        [const_time_steps, const_batch_size], output_size, static=True)
    output.set_shape(shape)
  final_outputs = nest.pack_sequence_as(
      structure=cell.output_size, flat_sequence=final_outputs)
  return (final_outputs, final_state)
def raw_rnn(cell, loop_fn,
            parallel_iterations=None, swap_memory=False, scope=None):
  """Creates an `RNN` specified by RNNCell `cell` and loop function `loop_fn`.
  **NOTE: This method is still in testing, and the API may change.**
  This function is a more primitive version of `dynamic_rnn` that provides
  more direct access to the inputs each iteration. It also provides more
  control over when to start and finish reading the sequence, and
  what to emit for the output.
  For example, it can be used to implement the dynamic decoder of a seq2seq
  model.
  Instead of working with `Tensor` objects, most operations work with
  `TensorArray` objects directly.
  The operation of `raw_rnn`, in pseudo-code, is basically the following:
  ```python
  time = tf.constant(0, dtype=tf.int32)
  (finished, next_input, initial_state, _, loop_state) = loop_fn(
      time=time, cell_output=None, cell_state=None, loop_state=None)
  emit_ta = TensorArray(dynamic_size=True, dtype=initial_state.dtype)
  state = initial_state
  while not all(finished):
    (output, cell_state) = cell(next_input, state)
    (next_finished, next_input, next_state, emit, loop_state) = loop_fn(
        time=time + 1, cell_output=output, cell_state=cell_state,
        loop_state=loop_state)
    # Emit zeros and copy forward state for minibatch entries that are finished.
    state = tf.where(finished, state, next_state)
    emit = tf.where(finished, tf.zeros_like(emit), emit)
    emit_ta = emit_ta.write(time, emit)
    # If any new minibatch entries are marked as finished, mark these.
    finished = tf.logical_or(finished, next_finished)
    time += 1
  return (emit_ta, state, loop_state)
  ```
  with the additional properties that output and state may be (possibly nested)
  tuples, as determined by `cell.output_size` and `cell.state_size`, and
  as a result the final `state` and `emit_ta` may themselves be tuples.
  A simple implementation of `dynamic_rnn` via `raw_rnn` looks like this:
  ```python
  inputs = tf.placeholder(shape=(max_time, batch_size, input_depth),
                          dtype=tf.float32)
  sequence_length = tf.placeholder(shape=(batch_size,), dtype=tf.int32)
  inputs_ta = tf.TensorArray(dtype=tf.float32, size=max_time)
  inputs_ta = inputs_ta.unstack(inputs)
  cell = tf.contrib.rnn.LSTMCell(num_units)
  def loop_fn(time, cell_output, cell_state, loop_state):
    emit_output = cell_output  # == None for time == 0
    if cell_output is None:  # time == 0
      next_cell_state = cell.zero_state(batch_size, tf.float32)
    else:
      next_cell_state = cell_state
    elements_finished = (time >= sequence_length)
    finished = tf.reduce_all(elements_finished)
    next_input = tf.cond(
        finished,
        lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
        lambda: inputs_ta.read(time))
    next_loop_state = None
    return (elements_finished, next_input, next_cell_state,
            emit_output, next_loop_state)
  outputs_ta, final_state, _ = raw_rnn(cell, loop_fn)
  outputs = outputs_ta.stack()
  ```
  Args:
    cell: An instance of RNNCell.
    loop_fn: A callable that takes inputs
      `(time, cell_output, cell_state, loop_state)`
      and returns the tuple
      `(finished, next_input, next_cell_state, emit_output, next_loop_state)`.
      Here `time` is an int32 scalar `Tensor`, `cell_output` is a
      `Tensor` or (possibly nested) tuple of tensors as determined by
      `cell.output_size`, and `cell_state` is a `Tensor`
      or (possibly nested) tuple of tensors, as determined by the `loop_fn`
      on its first call (and should match `cell.state_size`).
      The outputs are: `finished`, a boolean `Tensor` of
      shape `[batch_size]`, `next_input`: the next input to feed to `cell`,
      `next_cell_state`: the next state to feed to `cell`,
      and `emit_output`: the output to store for this iteration.
      Note that `emit_output` should be a `Tensor` or (possibly nested)
      tuple of tensors with shapes and structure matching `cell.output_size`
      and `cell_output` above. The parameter `cell_state` and output
      `next_cell_state` may be either a single or (possibly nested) tuple
      of tensors. The parameter `loop_state` and
      output `next_loop_state` may be either a single or (possibly nested) tuple
      of `Tensor` and `TensorArray` objects. This last parameter
      may be ignored by `loop_fn` and the return value may be `None`. If it
      is not `None`, then the `loop_state` will be propagated through the RNN
      loop, for use purely by `loop_fn` to keep track of its own state.
      The `next_loop_state` parameter returned may be `None`.
      The first call to `loop_fn` will be `time = 0`, `cell_output = None`,
      `cell_state = None`, and `loop_state = None`. For this call:
      The `next_cell_state` value should be the value with which to initialize
      the cell's state. It may be a final state from a previous RNN or it
      may be the output of `cell.zero_state()`. It should be a
      (possibly nested) tuple structure of tensors.
      If `cell.state_size` is an integer, this must be
      a `Tensor` of appropriate type and shape `[batch_size, cell.state_size]`.
      If `cell.state_size` is a `TensorShape`, this must be a `Tensor` of
      appropriate type and shape `[batch_size] + cell.state_size`.
      If `cell.state_size` is a (possibly nested) tuple of ints or
      `TensorShape`, this will be a tuple having the corresponding shapes.
      The `emit_output` value may be either `None` or a (possibly nested)
      tuple structure of tensors, e.g.,
      `(tf.zeros(shape_0, dtype=dtype_0), tf.zeros(shape_1, dtype=dtype_1))`.
      If this first `emit_output` return value is `None`,
      then the `emit_ta` result of `raw_rnn` will have the same structure and
      dtypes as `cell.output_size`. Otherwise `emit_ta` will have the same
      structure, shapes (prepended with a `batch_size` dimension), and dtypes
      as `emit_output`. The actual values returned for `emit_output` at this
      initializing call are ignored. Note, this emit structure must be
      consistent across all time steps.
    parallel_iterations: (Default: 32). The number of iterations to run in
      parallel. Those operations which do not have any temporal dependency
      and can be run in parallel, will be. This parameter trades off
      time for space. Values >> 1 use more memory but take less time,
      while smaller values use less memory but computations take longer.
    swap_memory: Transparently swap the tensors produced in forward inference
      but needed for back prop from GPU to CPU. This allows training RNNs
      which would typically not fit on a single GPU, with very minimal (or no)
      performance penalty.
    scope: VariableScope for the created subgraph; defaults to "rnn".
  Returns:
    A tuple `(emit_ta, final_state, final_loop_state)` where:
    `emit_ta`: The RNN output `TensorArray`.
       If `loop_fn` returns a (possibly nested) set of Tensors for
       `emit_output` during initialization, (inputs `time = 0`,
       `cell_output = None`, and `loop_state = None`), then `emit_ta` will
       have the same structure, dtypes, and shapes as `emit_output` instead.
       If `loop_fn` returns `emit_output = None` during this call,
       the structure of `cell.output_size` is used:
       If `cell.output_size` is a (possibly nested) tuple of integers
       or `TensorShape` objects, then `emit_ta` will be a tuple having the
       same structure as `cell.output_size`, containing TensorArrays whose
       elements' shapes correspond to the shape data in `cell.output_size`.
    `final_state`: The final cell state. If `cell.state_size` is an int, this
      will be shaped `[batch_size, cell.state_size]`. If it is a
      `TensorShape`, this will be shaped `[batch_size] + cell.state_size`.
      If it is a (possibly nested) tuple of ints or `TensorShape`, this will
      be a tuple having the corresponding shapes.
    `final_loop_state`: The final loop state as returned by `loop_fn`.
  Raises:
    TypeError: If `cell` is not an instance of RNNCell, or `loop_fn` is not
      a `callable`.
  """
  if not _like_rnncell(cell):
    raise TypeError("cell must be an instance of RNNCell")
  if not callable(loop_fn):
    raise TypeError("loop_fn must be a callable")
  parallel_iterations = parallel_iterations or 32
  # Create a new scope in which the caching device is either
  # determined by the parent scope, or is set to place the cached
  # Variable using the same placement as for the rest of the RNN.
  with vs.variable_scope(scope or "rnn") as varscope:
    if varscope.caching_device is None:
      varscope.set_caching_device(lambda op: op.device)
    # Initial call to loop_fn (time=0) obtains the first input, the
    # initial state, the emit structure and the optional loop state.
    time = constant_op.constant(0, dtype=dtypes.int32)
    (elements_finished, next_input, initial_state, emit_structure,
     init_loop_state) = loop_fn(
         time, None, None, None)  # time, cell_output, cell_state, loop_state
    flat_input = nest.flatten(next_input)
    # Need a surrogate loop state for the while_loop if none is available.
    loop_state = (init_loop_state if init_loop_state is not None
                  else constant_op.constant(0, dtype=dtypes.int32))
    input_shape = [input_.get_shape() for input_ in flat_input]
    static_batch_size = input_shape[0][0]
    for input_shape_i in input_shape:
      # Static verification that batch sizes all match
      static_batch_size.merge_with(input_shape_i[0])
    batch_size = static_batch_size.value
    if batch_size is None:
      # Batch size unknown statically; read it from the graph at run time.
      batch_size = array_ops.shape(flat_input[0])[0]
    nest.assert_same_structure(initial_state, cell.state_size)
    state = initial_state
    flat_state = nest.flatten(state)
    flat_state = [ops.convert_to_tensor(s) for s in flat_state]
    state = nest.pack_sequence_as(structure=state,
                                  flat_sequence=flat_state)
    if emit_structure is not None:
      # Caller-provided emit structure: mirror its shapes and dtypes.
      flat_emit_structure = nest.flatten(emit_structure)
      flat_emit_size = [emit.get_shape() for emit in flat_emit_structure]
      flat_emit_dtypes = [emit.dtype for emit in flat_emit_structure]
    else:
      # Fall back to the cell's declared output structure.
      emit_structure = cell.output_size
      flat_emit_size = nest.flatten(emit_structure)
      flat_emit_dtypes = [flat_state[0].dtype] * len(flat_emit_size)
    flat_emit_ta = [
        tensor_array_ops.TensorArray(
            dtype=dtype_i, dynamic_size=True, size=0, name="rnn_output_%d" % i)
        for i, dtype_i in enumerate(flat_emit_dtypes)]
    emit_ta = nest.pack_sequence_as(structure=emit_structure,
                                    flat_sequence=flat_emit_ta)
    # Zero emits are written for batch entries that have already finished.
    flat_zero_emit = [
        array_ops.zeros(_concat(batch_size, size_i), dtype_i)
        for size_i, dtype_i in zip(flat_emit_size, flat_emit_dtypes)]
    zero_emit = nest.pack_sequence_as(structure=emit_structure,
                                      flat_sequence=flat_zero_emit)
    def condition(unused_time, elements_finished, *_):
      return math_ops.logical_not(math_ops.reduce_all(elements_finished))
    def body(time, elements_finished, current_input,
             emit_ta, state, loop_state):
      """Internal while loop body for raw_rnn.
      Args:
        time: time scalar.
        elements_finished: batch-size vector.
        current_input: possibly nested tuple of input tensors.
        emit_ta: possibly nested tuple of output TensorArrays.
        state: possibly nested tuple of state tensors.
        loop_state: possibly nested tuple of loop state tensors.
      Returns:
        Tuple having the same size as Args but with updated values.
      """
      (next_output, cell_state) = cell(current_input, state)
      nest.assert_same_structure(state, cell_state)
      nest.assert_same_structure(cell.output_size, next_output)
      next_time = time + 1
      (next_finished, next_input, next_state, emit_output,
       next_loop_state) = loop_fn(
           next_time, next_output, cell_state, loop_state)
      nest.assert_same_structure(state, next_state)
      nest.assert_same_structure(current_input, next_input)
      nest.assert_same_structure(emit_ta, emit_output)
      # If loop_fn returns None for next_loop_state, just reuse the
      # previous one.
      loop_state = loop_state if next_loop_state is None else next_loop_state
      def _copy_some_through(current, candidate):
        """Copy some tensors through via array_ops.where."""
        def copy_fn(cur_i, cand_i):
          return _on_device(
              lambda: array_ops.where(elements_finished, cur_i, cand_i),
              device=cand_i.op.device)
        return nest.map_structure(copy_fn, current, candidate)
      # For already-finished entries, emit zeros and carry the old state.
      emit_output = _copy_some_through(zero_emit, emit_output)
      next_state = _copy_some_through(state, next_state)
      emit_ta = nest.map_structure(
          lambda ta, emit: ta.write(time, emit), emit_ta, emit_output)
      elements_finished = math_ops.logical_or(elements_finished, next_finished)
      return (next_time, elements_finished, next_input,
              emit_ta, next_state, loop_state)
    returned = control_flow_ops.while_loop(
        condition, body, loop_vars=[
            time, elements_finished, next_input,
            emit_ta, state, loop_state],
        parallel_iterations=parallel_iterations,
        swap_memory=swap_memory)
    # Only the last three loop vars (emit_ta, state, loop_state) are of
    # interest to the caller.
    (emit_ta, final_state, final_loop_state) = returned[-3:]
    # Hide the surrogate loop state if the caller never supplied one.
    if init_loop_state is None:
      final_loop_state = None
    return (emit_ta, final_state, final_loop_state)
|
mortada/tensorflow
|
tensorflow/python/ops/rnn.py
|
Python
|
apache-2.0
| 44,560 | 0.004129 |
# coding: utf8
from pygrim import Server as WebServer
from routes import Routes
from test_iface import Test
from uwsgidecorators import postfork as postfork_decorator
# from pygrim.components.session import FileSessionStorage
# to create custom session handler, view, etc:
"""
class MySessionClass(SessionStorage):
pass
from pygrim import register_session_handler
register_session_handler("myhandler", MySessionClass)
"""
# Bases for the dynamically-built Server type; order defines the MRO
# (WebServer first, then the interface mixins).
inheritance = (
    WebServer,
    Test,
    Routes
)
def __init__(self, *args, **kwargs):
    # Delegate construction to the framework server only.
    WebServer.__init__(self, *args, **kwargs)
def postfork(self):
    # For all interfaces call postfork, to ensure every base class's hook
    # runs even if it does not cooperate via super() chaining.
    for cls in inheritance:
        pfork = getattr(cls, "postfork", None)
        if pfork:
            pfork(self)
# Dynamically creating the type.
# It allows the trick with iterating `inheritance` inside postfork above
# without resorting to the inspect module.
Server = type("Server", inheritance, {
    "__init__": __init__,
    "postfork": postfork
})
# Naming the instance of Server "application":
# it can be seen in the config file in section uwsgi->module=server:application
# ("server" is the filename and "application" is the attribute; uwsgi will do
# __call__ on this object on every request).
application = Server()
@postfork_decorator
def postfork():
    # uwsgi post-fork hook.  NOTE(review): presumably do_postfork() (defined
    # in pygrim's WebServer) dispatches to the postfork() method bound into
    # the Server type above -- confirm in pygrim.Server.
    application.do_postfork()
|
ondrejkajinek/pyGrim
|
example/server.py
|
Python
|
mit
| 1,308 | 0.000765 |
import parser
import logging
def test(code):
    """Parse *code* with the project grammar, with line tracking enabled.

    Errors are reported by the parser itself; this helper only drives it.
    """
    # The previous version created an unused root logger here
    # (log = logging.getLogger()); it had no effect and was removed.
    parser.parser.parse(code, tracking=True)
# Smoke tests for the grammar (Python 2 script).  Each case prints a label
# ("bien" = input expected to parse, "mal" = input expected to trigger a
# syntax error), feeds the program text to the parser, and echoes the input.
print "Programa con 1 var y 1 asignacion bien: "
s = "program id; var beto: int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con 1 var mal: "
s = "program ; var beto: int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa sin vars bien: "
s = "program id; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con var mal: "
s = "program id; var beto int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con var mal: "
s = "program id; var beto: int { id = 1234; }"
test(s);
print "Original: \n{0}".format(s)
print "\n"
print "Programa con var mal: "
s = "program id; beto: int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con bloque vacio bien: "
s = "program id; var beto: int; { }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con bloque lleno y estatuto mal: "
s = "program id; var beto: int; { id = 1234; id2 = 12345 }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
# NOTE(review): this last case prints the blank line BEFORE the original
# input (the reverse of every case above) -- kept as-is.
print "Programa con bloque lleno y condicion mal: "
s = "program id; var beto: int; { id = 1234; if ( 8 > 3 ) { id3 = 34234; } else { } }"
test(s)
print "\n"
print "Original: \n{0}".format(s)
betoesquivel/PLYpractice
|
testingParser.py
|
Python
|
mit
| 1,412 | 0.003541 |
from django.conf.urls.defaults import *
# URL routes for the member/"passport" app.  View callables are given as
# strings and resolved inside the 'member.views' module (the prefix argument
# to patterns()).  NOTE(review): patterns() comes from
# django.conf.urls.defaults, i.e. a very old Django; it does not exist in
# modern Django releases.
urlpatterns = patterns('member.views',
    url(r'^$', 'login', name='passport_index'),          # default: login page
    url(r'^register/$', 'register', name='passport_register'),
    url(r'^login/$', 'login', name='passport_login'),
    url(r'^logout/$', 'logout', name='passport_logout'),
    url(r'^active/$', 'active', name='passport_active'),
    url(r'^forget/$', 'forget', name='passport_forget'),
    url(r'^profile/$', 'profile', name='passport_profile'),
)
|
masiqi/douquan
|
member/urls.py
|
Python
|
mit
| 478 | 0.002092 |
from __future__ import absolute_import
from sentry.testutils import TestCase
from .util import invalid_schema
from sentry.api.validators.sentry_apps.schema import validate_component
class TestImageSchemaValidation(TestCase):
    """Schema validation for the ``image`` UI component.

    ``url`` is required and must look like a URI; ``alt`` is optional but,
    when present, must be a string.
    """

    def setUp(self):
        # A minimal, fully-valid image component that each test mutates.
        self.schema = dict(
            type="image",
            url="https://example.com/image.gif",
            alt="example video",
        )

    def test_valid_schema(self):
        # The untouched fixture validates as-is.
        validate_component(self.schema)

    @invalid_schema
    def test_missing_url(self):
        self.schema.pop("url")
        validate_component(self.schema)

    @invalid_schema
    def test_invalid_url(self):
        self.schema["url"] = "not-a-url"
        validate_component(self.schema)

    def test_missing_alt(self):
        # ``alt`` is optional, so removing it must still validate.
        self.schema.pop("alt")
        validate_component(self.schema)

    @invalid_schema
    def test_invalid_alt_type(self):
        self.schema["alt"] = 1
        validate_component(self.schema)
|
mvaled/sentry
|
tests/sentry/api/validators/sentry_apps/test_image.py
|
Python
|
bsd-3-clause
| 966 | 0 |
from cog.models import *
from django.forms import ModelForm, ModelMultipleChoiceField, NullBooleanSelect
from django.db import models
from django.contrib.admin.widgets import FilteredSelectMultiple
from django import forms
from django.forms import ModelForm, Textarea, TextInput, Select, SelectMultiple, FileInput, CheckboxSelectMultiple
from django.core.exceptions import ObjectDoesNotExist
from os.path import basename
import re
from cog.utils import *
from django.db.models import Q
from cog.forms.forms_image import ImageForm
from cog.utils import hasText
#note parent and peer formatting is in forms_other.py
class ProjectForm(ModelForm):
# define the widget for parent/peer selection so we can set the styling. The class is set to .selectfilter and its
# styles are controlled in cogstyle.css
parents = forms.ModelMultipleChoiceField("parents", required=False,
widget=forms.SelectMultiple(attrs={'size': '20',
'class': 'selectprojects'}))
peers = forms.ModelMultipleChoiceField("peers", required=False,
widget=forms.SelectMultiple(attrs={'size': '20',
'class': 'selectprojects'}))
# filtering of what is see in the form is done down below.
# ERROR: FilteredSelectMultiple does not exist in the module but choosing widget=SelectMultiple throws an error.
# FilteredSelectMultiple throws an error in IE.
# extra field not present in model, used for deletion of previously uploaded logo
delete_logo = forms.BooleanField(required=False)
# specify size of logo_url text field
logo_url = forms.CharField(required=False, widget=TextInput(attrs={'size': '80'}))
# extra fields to manage folder state
#folders = ModelMultipleChoiceField(queryset=Folder.objects.all(), required=False, widget=CheckboxSelectMultiple)
# override __init__ method to change the querysets for 'parent' and 'peers'
def __init__(self, *args, **kwargs):
super(ProjectForm, self).__init__(*args, **kwargs)
current_site = Site.objects.get_current()
queryset2 = Q(site__id=current_site.id) | Q(site__peersite__enabled=True)
if 'instance' in kwargs:
# peer and parent query-set options: exclude the project itself, projects from disabled peer nodes
instance = kwargs.get('instance')
queryset1 = ~Q(id=instance.id)
self.fields['parents'].queryset = \
Project.objects.filter(queryset1).filter(queryset2).distinct().\
extra(select={'snl': 'lower(short_name)'}, order_by=['snl'])
self.fields['peers'].queryset = \
Project.objects.filter(queryset1).filter(queryset2).distinct().\
extra(select={'snl': 'lower(short_name)'}, order_by=['snl'])
else:
# peer and parent query-set options: exclude projects from disabled peer nodes
self.fields['parents'].queryset = \
Project.objects.filter(queryset2).distinct().extra(select={'snl': 'lower(short_name)'},
order_by=['snl'])
self.fields['peers'].queryset = \
Project.objects.filter(queryset2).distinct().extra(select={'snl': 'lower(short_name)'},
order_by=['snl'])
# overridden validation method for project short name
def clean_short_name(self):
short_name = self.cleaned_data['short_name']
# must not start with any of the URL matching patterns
if short_name in ('admin', 'project', 'news', 'post', 'doc', 'signal'):
raise forms.ValidationError("Sorry, '%s' "
"is a reserved URL keyword - it cannot be used as project short name"
% short_name)
# only allows letters, numbers, '-' and '_'
if re.search("[^a-zA-Z0-9_\-]", short_name):
raise forms.ValidationError("Project short name contains invalid characters")
# do not allow new projects to have the same short name as existing ones, regardless to case
if self.instance.id is None: # new projects only
try:
p = Project.objects.get(short_name__iexact=short_name)
raise forms.ValidationError("The new project short name conflicts with an existing project: %s"
% p.short_name)
except Project.DoesNotExist:
pass
return short_name
    def clean_long_name(self):
        """Validate the project long name: no quotation marks, ASCII only."""
        long_name = self.cleaned_data['long_name']
        # do not allow quotation characters in long name (causes problems in browser widget)
        if '\"' in long_name:
            raise forms.ValidationError("Quotation characters are not allowed in project long name")
        # check for non-ascii characters
        # NOTE(review): str.decode implies this runs on Python 2 byte strings;
        # on Python 3, str has no .decode and this would raise AttributeError — confirm
        try:
            long_name.decode('ascii')
        except (UnicodeDecodeError, UnicodeEncodeError):
            raise forms.ValidationError("Project long name contains invalid non-ASCII characters")
        return long_name
    class Meta:
        # form-backing model and the subset of fields exposed for editing
        model = Project
        fields = ('short_name', 'long_name', 'author', 'description',
                  'parents', 'peers', 'logo', 'logo_url', 'active', 'private', 'shared',
                  'dataSearchEnabled', 'nodesWidgetEnabled',
                  'site', 'maxUploadSize')
class ContactusForm(ModelForm):
    """Form for editing a project's contact / support information."""

    # overridden validation method for the 'projectContacts' field
    def clean_projectContacts(self):
        value = self.cleaned_data['projectContacts']
        # hasText() guards against empty or whitespace-only values
        if not hasText(value):
            raise forms.ValidationError("Project Contacts cannot be empty")
        return value

    class Meta:
        model = Project
        fields = ('projectContacts', 'technicalSupport', 'meetingSupport', 'getInvolved')
        widgets = {'projectContacts': Textarea(attrs={'rows': 4}),
                   'technicalSupport': Textarea(attrs={'rows': 4}),
                   'meetingSupport': Textarea(attrs={'rows': 4}),
                   'getInvolved': Textarea(attrs={'rows': 4}), }
class DevelopmentOverviewForm(ModelForm):
    """Form for editing a project's development overview text."""

    class Meta:
        model = Project
        widgets = {'developmentOverview': Textarea(attrs={'rows': 8})}
        fields = ('developmentOverview',)
class SoftwareForm(ModelForm):
    """Form for editing a project's software metadata (features,
    requirements, license, languages, platforms, dependencies)."""

    class Meta:
        model = Project
        widgets = {'software_features': Textarea(attrs={'rows': 8}),
                   'system_requirements': Textarea(attrs={'rows': 8}),
                   'license': Textarea(attrs={'rows': 1}),
                   'implementationLanguage': Textarea(attrs={'rows': 1}),
                   'bindingLanguage': Textarea(attrs={'rows': 1}),
                   'supportedPlatforms': Textarea(attrs={'rows': 8}),
                   'externalDependencies': Textarea(attrs={'rows': 8}),
                   }
        fields = ('software_features', 'system_requirements', 'license',
                  'implementationLanguage', 'bindingLanguage', 'supportedPlatforms', 'externalDependencies')

    def clean(self):
        """Require a non-blank 'software_features' value.

        Records a field error (rather than raising) so all form errors are
        reported together. A leftover debug ``print 'error'`` statement was
        removed here — it was Python-2-only syntax and wrote to stdout.
        """
        features = self.cleaned_data.get('software_features')
        if not hasText(features):
            self._errors["software_features"] = self.error_class(["'SoftwareFeatures' must not be empty."])
        return self.cleaned_data
class UsersForm(ModelForm):
    """Form for editing the 'getting started' instructions shown to users."""

    class Meta:
        model = Project
        widgets = {'getting_started': Textarea(attrs={'rows': 12}), }
        fields = ('getting_started', )
class ProjectTagForm(ModelForm):
    """Form for creating/editing a project tag and selecting existing tags."""

    # since this is the base form, we don't have access to the project's specific tags. The form is initialized in the
    # form constructor in views_project.py
    # field['tags'] is the list of preexisting tags
    tags = forms.ModelMultipleChoiceField("tags", required=False,
                                          widget=forms.SelectMultiple(attrs={'size': '7'}))

    # override __init__ method to change the queryset for 'tags'
    def __init__(self, *args, **kwargs):
        super(ProjectTagForm, self).__init__(*args, **kwargs)
        # offer every existing tag, alphabetically by name
        self.fields['tags'].queryset = ProjectTag.objects.all().order_by('name')

    class Meta:
        model = ProjectTag
        fields = ('tags', 'name')
        widgets = {'name': TextInput, }

    # override clean function
    def clean(self):
        """Validate the new tag name: unique (case-insensitive), legal
        characters only, and within the maximum length.

        NOTE(review): assumes 'name' passed field validation — a missing key
        here would raise KeyError; confirm field is required.
        """
        name = self.cleaned_data['name']
        try:
            tag = ProjectTag.objects.get(name__iexact=name)
            # check tag with same name (independently of case) does not exist already
            if tag is not None and tag.id != self.instance.id:  # not this tag
                self._errors["name"] = self.error_class(["Tag with this name already exist: %s" % tag.name])
        except ObjectDoesNotExist:
            # capitalize the tag name - NOT ANY MORE SINCE WE WANT TO CONSERVE CASE
            #self.cleaned_data['name'] = self.cleaned_data['name'].capitalize()
            # only allow letters, numbers, '-' and '_'
            if re.search("[^a-zA-Z0-9_\-\s]", name):
                self._errors["name"] = self.error_class(["Tag name contains invalid characters"])
            # impose maximum length
            if len(name) > MAX_PROJECT_TAG_LENGTH:
                self._errors["name"] = self.error_class(["Tag name must contain at most %s characters"
                                                         % MAX_PROJECT_TAG_LENGTH])
        return self.cleaned_data
|
sashakames/COG
|
cog/forms/forms_project.py
|
Python
|
bsd-3-clause
| 9,756 | 0.005638 |
"""Connectors"""
__copyright__ = "Copyright (C) 2014 Ivan D Vasin"
__docformat__ = "restructuredtext"
import abc as _abc
import re as _re
from ... import plain as _plain
from .. import _std as _std_http
_BASIC_USER_TOKENS = ('user', 'password')
class HttpBasicClerk(_std_http.HttpStandardClerk):

    """An authentication clerk for HTTP Basic authentication"""

    __metaclass__ = _abc.ABCMeta

    # re-export the module-level token pair as a class attribute
    _BASIC_USER_TOKENS = _BASIC_USER_TOKENS

    def _inputs(self, upstream_affordances, downstream_affordances):
        # Basic auth requires no input tokens from downstream
        return ((),)

    def _append_response_auth_challenge(self, realm, input=None,
                                        affordances=None):
        # emit the WWW-Authenticate challenge for the given realm
        self._append_response_auth_challenge_header('Basic realm="{}"'
                                                    .format(realm))

    def _outputs(self, upstream_affordances, downstream_affordances):
        # produces a (user, password) token pair
        # NOTE(review): reads the module-level _BASIC_USER_TOKENS, not
        # self._BASIC_USER_TOKENS — same object here, but subclass overrides
        # of the class attribute would be ignored; confirm intent
        return (_BASIC_USER_TOKENS,)

    def _provisionsets(self, upstream_affordances, downstream_affordances):
        return (_plain.PlainAuth.PROVISIONS,)
class HttpBasicScanner(_std_http.HttpStandardScanner):

    """An authentication scanner for HTTP Basic authentication"""

    __metaclass__ = _abc.ABCMeta

    # matches an Authorization header value and captures the base64-encoded
    # "user:password" credentials
    _AUTHORIZATION_HEADER_RE = \
        _re.compile(r'\s*Basic\s*(?P<creds_base64>[^\s]*)')

    _BASIC_USER_TOKENS = _BASIC_USER_TOKENS

    def _outputs(self, upstream_affordances, downstream_affordances):
        # scanning yields a (user, password) token pair
        return (self._BASIC_USER_TOKENS,)

    def _provisionsets(self, upstream_affordances, downstream_affordances):
        return (_plain.PlainAuth.PROVISIONS,)
|
nisavid/bedframe
|
bedframe/auth/http/_basic/_connectors.py
|
Python
|
lgpl-3.0
| 1,567 | 0.000638 |
import re
# noinspection PyPackageRequirements
import wx
import gui.fitCommands as cmd
import gui.mainFrame
from gui.contextMenu import ContextMenuSingle
from service.fit import Fit
_t = wx.GetTranslation
class DroneSplitStack(ContextMenuSingle):
    """Context-menu entry that splits a stack of drones in the current fit."""

    def __init__(self, mainItem):
        self.mainFrame = gui.mainFrame.MainFrame.getInstance()

    def display(self, callingWindow, srcContext, mainItem):
        """Show the entry only for drone items with more than one unit."""
        if srcContext != "droneItem":
            return False

        if mainItem is None:
            return False

        return mainItem.amount > 1

    def getText(self, callingWindow, itmContext, mainItem):
        return _t("Split {} Stack").format(itmContext)

    def activate(self, callingWindow, fullContext, mainItem, i):
        """Prompt for the new amount and submit the split command."""
        with DroneStackSplit(self.mainFrame, mainItem.amount) as dlg:
            if dlg.ShowModal() == wx.ID_OK:
                if dlg.input.GetLineText(0).strip() == '':
                    return
                fitID = self.mainFrame.getActiveFit()
                fit = Fit.getInstance().getFit(fitID)
                # strip everything but digits and dots
                # NOTE(review): a '.' survives the sub but int() below would
                # raise on e.g. "1.5" — the dialog's onChar filter presumably
                # prevents dots from being typed; confirm paste path
                cleanInput = re.sub(r'[^0-9.]', '', dlg.input.GetLineText(0).strip())
                if mainItem in fit.drones:
                    position = fit.drones.index(mainItem)
                    self.mainFrame.command.Submit(cmd.GuiSplitLocalDroneStackCommand(
                        fitID=fitID, position=position, amount=int(cleanInput)))
DroneSplitStack.register()
class DroneStackSplit(wx.Dialog):
    """Small modal dialog asking for the new amount when splitting a stack."""

    def __init__(self, parent, value):
        """Build the dialog with *value* pre-filled and selected."""
        super().__init__(parent, title="Split Drone Stack", style=wx.DEFAULT_DIALOG_STYLE)
        self.SetMinSize((346, 156))
        bSizer1 = wx.BoxSizer(wx.VERTICAL)
        bSizer2 = wx.BoxSizer(wx.VERTICAL)
        text = wx.StaticText(self, wx.ID_ANY, "New Amount:")
        bSizer2.Add(text, 0)
        bSizer1.Add(bSizer2, 0, wx.ALL, 10)
        # TE_PROCESS_ENTER so pressing Enter triggers processEnter below
        self.input = wx.TextCtrl(self, wx.ID_ANY, style=wx.TE_PROCESS_ENTER)
        self.input.SetValue(str(value))
        self.input.SelectAll()
        bSizer1.Add(self.input, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, 15)
        bSizer3 = wx.BoxSizer(wx.VERTICAL)
        bSizer3.Add(wx.StaticLine(self, wx.ID_ANY), 0, wx.BOTTOM | wx.EXPAND, 15)
        bSizer3.Add(self.CreateStdDialogButtonSizer(wx.OK | wx.CANCEL), 0, wx.EXPAND)
        bSizer1.Add(bSizer3, 0, wx.ALL | wx.EXPAND, 10)
        self.input.SetFocus()
        self.input.Bind(wx.EVT_CHAR, self.onChar)
        self.input.Bind(wx.EVT_TEXT_ENTER, self.processEnter)
        self.SetSizer(bSizer1)
        self.CenterOnParent()
        self.Fit()

    def processEnter(self, evt):
        # Enter confirms the dialog, same as clicking OK
        self.EndModal(wx.ID_OK)

    # checks to make sure it's valid number
    @staticmethod
    def onChar(event):
        """Key filter: allow digits, edit/clipboard keys, and special keys."""
        key = event.GetKeyCode()
        acceptable_characters = "1234567890"
        acceptable_keycode = [3, 22, 13, 8, 127]  # modifiers like delete, copy, paste
        if key in acceptable_keycode or key >= 255 or (key < 255 and chr(key) in acceptable_characters):
            event.Skip()  # let the control handle the key
            return
        else:
            # swallow the key (not skipping the event blocks the character)
            return False
|
pyfa-org/Pyfa
|
gui/builtinContextMenus/droneSplitStack.py
|
Python
|
gpl-3.0
| 3,081 | 0.002597 |
import numpy as np
### Digitised data for HL-1 i_Kr channel.
# I-V curves.
def IV_Toyoda():
    """Data points in the IV curve for i_Kr.

    Digitised from Figure 1E of Toyoda 2010; reported as mean +/- SEM
    from 10 cells. Returns (voltages, means, standard deviations).
    """
    voltages = [-80, -70, -60, -50, -40, -30, -20, -10, 0, 10, 20, 30, 40]
    means = np.asarray([0.015652640252213246, 0.017761353533590096, 0.12218171120781562,
                        0.26069293359766377, 1.0809824000541504, 4.458125770428815,
                        9.301137273329594, 14.007824367613809, 16.191748422695483,
                        15.307494653370496, 11.423322889562002, 6.891346595664643,
                        4.0639525877947165])
    n_cells = 10
    sem_trace = np.asarray([0.2201587783000427, 0.22230654182737197, 0.32668784925564154,
                            0.6016015807539574, 1.6605271002198556, 5.753539579376788,
                            11.142043967973596, 16.666872765186966, 19.498562300111555,
                            18.989347092904445, 14.321046390390517, 8.561798966730493,
                            5.666184179183357])
    # digitised SEM points are absolute values; take the offset from the mean,
    # then scale SEM up to SD via sd = sem * sqrt(N)
    sem_offsets = np.abs(means - sem_trace)
    std_devs = np.sqrt(n_cells) * sem_offsets
    return voltages, means.tolist(), std_devs.tolist()
### Activation curves.
def Act_Toyoda():
    """Data points from activation curve for i_Kr.

    Data from Figure 2B in Toyoda 2010. Reported as mean \pm SEM for
    10 cells. Returns (voltages in mV, means, standard deviations).
    """
    x = [-80, -70, -60, -50, -40, -30, -20, -10, 0, 10, 20, 30, 40, 50]
    y = np.asarray([9.784726905999186E-4, 0.002037953817909388, 0.006032853017018169,
                    0.01100804532103461, 0.0649050517407257, 0.24600350793837167,
                    0.5190802174666932, 0.7735641254593133, 0.9331361824637671,
                    0.9860547161928584, 1.0057060886487157, 1.0018732063230271,
                    0.9970627615138931, 0.9991016255389565])
    N = 10
    sem = np.asarray([0.008805344008242733, 0.007907879754353253, 0.010926126677170744,
                      0.016877971257478475, 0.08251847037867321, 0.2802500521093595,
                      0.5631114885436777, 0.8058537242491018, 0.952704726068606,
                      0.9929031148199019, 1.0135338701735128, 1.0087225151572248,
                      1.005869925936444, 1.0069294070637538])
    # digitised SEM points are absolute values; convert to offsets from the mean
    sem = np.abs(y-sem)
    # scale SEM up to SD: sd = sem * sqrt(N)
    sd = np.sqrt(N) * sem
    return x, y.tolist(), sd.tolist()
### Activation kinetics.
def ActKin_Toyoda():
    """Data points for activation time constants for i_Kr.

    Data from Figure 3C in Toyoda 2010. Reported as mean \pm SEM for
    10 cells. Returns (voltages in mV, means, standard deviations).
    """
    x = [-30, -20, -10, 0, 10, 20, 30, 40]
    y = np.asarray([457.75075987841944, 259.87841945288756, 184.1945288753799,
                    116.71732522796344, 77.50759878419456, 48.32826747720367,
                    49.24012158054711, 36.474164133738554])
    N = 10
    sem = np.asarray([350.15197568389056, 234.34650455927044, 164.13373860182367,
                      99.39209726443767, 64.741641337386, 39.209726443769,
                      40.121580547112444, 27.355623100303887])
    # digitised SEM points are absolute values; convert to offsets from the mean
    sem = np.abs(y-sem)
    # scale SEM up to SD: sd = sem * sqrt(N)
    sd = np.sqrt(N) * sem
    return x, y.tolist(), sd.tolist()
### Deactivation kinetics.
def DeactKinFast_Toyoda():
    """Data points for fast deactivation time constant for i_Kr.

    Data from Figure 3C in Toyoda 2010. Reported as mean \pm SEM for
    10 cells. Returns (voltages in mV, means, standard deviations).
    """
    x = [-120, -110, -100, -90, -80, -70, -60, -50, -40, -30]
    y = np.asarray([4.430675707271462, 6.125789104512478, 7.817005689346161,
                    3.1291403631829553, 13.027043878107747, 15.63011456628476,
                    30.095082222741894, 45.460213545320016, 64.47665809367948,
                    69.81918790429427])
    N = 10
    sem = np.asarray([15.372924947393017, 17.068038344634147, 18.759254929467716,
                      14.979346894240507, 23.05743901488586, 26.572363806406315,
                      40.121580547112444, 56.40246278544157, 79.0663237471748,
                      88.0562699711636])
    # digitised SEM points are absolute values; convert to offsets from the mean
    sem = np.abs(y-sem)
    # scale SEM up to SD: sd = sem * sqrt(N)
    sd = np.sqrt(N) * sem
    return x, y.tolist(), sd.tolist()
def DeactKinSlow_Toyoda():
    """Data points for slow deactivation time constant for i_Kr.

    Data from Figure 3C in Toyoda 2010. Reported as mean \pm SEM for
    10 cells. Returns (voltages in mV, means, standard deviations).
    """
    x = [-120, -110, -100, -90, -80, -70, -60, -50, -40, -30]
    y = np.asarray([55.49840230691302, 57.18961889174648, 60.700646870859714,
                    47.80999142701285, 55.88418673525064, 86.75473462707498,
                    246.20060790273556, 438.46933208635335, 551.4067492790898,
                    565.8678201231394])
    N = 10
    sem = np.asarray([45.46411035772735, 47.15922375496848, 44.291169823084715,
                      30.48086665107951, 35.819499649286854, 61.22281973345798,
                      165.04169589275978, 310.8097576182683, 474.81490141064614,
                      467.3875769620451])
    # digitised SEM points are absolute values; convert to offsets from the mean
    sem = np.abs(y-sem)
    # scale SEM up to SD: sd = sem * sqrt(N)
    sd = np.sqrt(N) * sem
    return x, y.tolist(), sd.tolist()
def DeactKinRelAmp_Toyoda():
    """Data points for relative amplitude of fast to slow component.

    Data from Figure 3D in Toyoda 2010. Reported as mean \pm SEM for 10 cells.
    Returns (voltages in mV, means, standard deviations).
    """
    x = [-120, -110, -100, -90, -80, -70, -60, -50, -40, -30]
    y = np.asarray([0.9269882659713168, 0.908735332464146, 0.8370273794002607,
                    0.7131681877444589, 0.7001303780964798, 0.7561929595827901,
                    0.740547588005215, 0.7053455019556714, 0.6453715775749673,
                    0.5567144719687093])
    N = 10
    sem = np.asarray([0.9113428943937418, 0.894393741851369, 0.8161668839634941,
                      0.681877444589309, 0.620599739243807, 0.741851368970013,
                      0.7014341590612776, 0.6792698826597132, 0.6062581486310299,
                      0.49543676662320724])
    # digitised SEM points are absolute values; convert to offsets from the mean
    sem = np.abs(y-sem)
    # scale SEM up to SD: sd = sem * sqrt(N)
    sd = np.sqrt(N) * sem
    return x, y.tolist(), sd.tolist()
### Inactivation.
def Inact_Toyoda():
    """Data points for steady-state inactivation of i_Kr.

    Digitised from Figure 6B of Toyoda 2010, a single-cell recording, so no
    spread information is available (third return value is None).
    """
    voltages = [-120, -110, -100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0, 10]
    rel_current = [1.0021276595744677, 0.9654255319148934, 0.9670212765957444,
                   0.9877659574468083, 0.9382978723404253, 0.8393617021276594,
                   0.7452127659574466, 0.5920212765957444, 0.4835106382978722,
                   0.3829787234042552, 0.27606382978723376, 0.17074468085106376,
                   0.13563829787234027, 0.0494680851063829]
    return voltages, rel_current, None
def InactKin_Toyoda():
    """Data points for inactivation kinetics.

    Data from Figure 5B in Toyoda 2010. Reported as mean \pm SEM for 4 cells.
    Returns (voltages in mV, means, standard deviations).
    """
    x = [-120, -110, -100, -90, -80, -70, -60, -50, -40, -30, -20, -10]
    y = np.asarray([1.0140488976949733, 1.0888568124153863, 1.235334725970283,
                    1.310268157326135, 2.318292256520138, 3.972601511578908,
                    5.626910766637675, 6.599037108096718, 8.253346363155487,
                    9.692269071804478, 12.136328997032429, 10.59573781367952])
    N = 4
    sem = np.asarray([0.7710716681472896, 0.8342368149101667, 1.0050945482894313,
                      1.0683322153126547, 2.0360788295229995, 3.5638631542687254,
                      5.3285711695705, 6.037891836031692, 7.630364233007597,
                      8.770418985804161, 10.29823957068006, 9.736787715067898])
    # digitised SEM points are absolute values; convert to offsets from the mean
    sem = np.abs(y-sem)
    # scale SEM up to SD: sd = sem * sqrt(N)
    sd = np.sqrt(N) * sem
    return x, y.tolist(), sd.tolist()
|
c22n/ion-channel-ABC
|
docs/examples/hl1/data/ikr/data_ikr.py
|
Python
|
gpl-3.0
| 6,863 | 0.009617 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Crazyflie ablock is used to receive characters sent using ablock
from the firmware.
"""
__author__ = 'Bitcraze AB'
__all__ = ['Ablock']
import time
import struct
from cflib.utils.callbacks import Caller
from cflib.crtp.crtpstack import CRTPPacket, CRTPPort
from PyQt4.QtCore import pyqtSlot, pyqtSignal
import urllib2
import datetime
import math
class Ablock:
"""
Crazyflie ablock is used to receive characters sent using ablock
from the firmware.
"""
receivedChar = Caller()
    def __init__(self, crazyflie):
        """
        Initialize the ablock and register it to receive data from the copter.
        """
        self.cf = crazyflie
        # route all ABLOCK-port packets from the copter to self.incoming
        self.cf.add_port_callback(CRTPPort.ABLOCK, self.incoming)
        self.init_gps = 0      # state-machine step of the GPS init sequence
        self.active = False    # True while an AssistNow transfer is running
        self.taccs = 60.0      # assumed time accuracy seed, seconds
        # default seed position for AssistNow
        # NOTE(review): hard-coded lat/lon/alt — presumably the author's test
        # location; confirm before relying on these defaults elsewhere
        self.lat = 33.767440
        self.lon = -117.500734
        self.altmsl = 382.0    # altitude above mean sea level, metres
        self.gh = -32.5        # geoid height correction, metres
        self.pacc = 150.0      # assumed position accuracy seed, metres
def incoming(self, packet):
"""
Callback for data received from the firmware
<S> Begin AssistNow input
<n> Ready for Next Line input
<r> Repeat Last Line input
<R> Repeat Last Block input
<E> End of AssistNow input
<G> Go Ahead output
<E> End of Data output
<X> Abort AssistNow output
"""
# This might be done prettier ;-)
ablock_text = "%s" % struct.unpack("%is" % len(packet.data),
packet.data)
self.receivedChar.call(ablock_text)
# size = len(ablock_text)
# print ("insize %d" % size)
print ("indata %s" % ablock_text)
"""Begin AssistNow Transfers"""
if ablock_text == "<S>\n":
if self.init_gps == 0 :
self.receivedChar.call("Reset Gps\n")
self.rst_hex(False)
if self.init_gps == 1 :
self.receivedChar.call("Nav_Pvt Enable\n")
self.pvt_hex(False)
if self.init_gps == 2 :
self.receivedChar.call("NMEA Disable\n")
self.ebx_hex(False)
if self.init_gps == 3 :
self.receivedChar.call("Time_UTC\n")
self.utc_hex(self.taccs, False)
if self.init_gps == 4 :
self.receivedChar.call("Pos_LLH\n")
self.llh_hex(self.lat, self.lon, self.altmsl, self.gh, self.pacc, False)
if self.init_gps == 5 :
self.receivedChar.call("Alm\n")
msg = "B5620611020008002191"
self.fileFormat(msg, False)
self.sv_alm_hex(True)
if self.init_gps > 5 :
if self.init_gps == 99 :
self.receivedChar.call("AssistNow Aborted\n")
else:
self.receivedChar.call("Finished\n")
self.outgoing("<E>\n")
self.init_gps = 0
else:
self.init_gps += 1
self.loadlines()
self.endBlock = True
self.lineNbr = -1
self.active = True
self.outgoing("<G>\n")
elif (self.active):
if (ablock_text == "<n>\n"):
self.lineNbr +=1
line = self.lines[self.lineNbr]
if (self.endBlock):
self.blockNbr = self.lineNbr
endBlock = False
size = len(line)
if (size == 0):
self.active = False
self.outgoing("<E>\n")
self.receivedChar.call("EOM\n")
elif (size > 29):
self.active = False
self.init_gps = 99
self.outgoing("<X>\n")
self.receivedChar.call("EOM\n")
elif ((line[size-2] == "<") and (line[size-1] == ">")):
self.endBlock = True
self.outgoing("%s\n" % line)
else:
print ("line %d" %self.lineNbr)
self.outgoing("%s\n" % line)
elif (ablock_text == "<r>\n"):
print "<r>\n"
line = self.lines[self.lineNbr]
self.outgoing("%s\n" % line)
elif (ablock_text == "<R>\n"):
print "<R>\n"
endBlock = False
self.lineNbr = self.blockNbr
line = self.lines[self.lineNbr]
self.outgoing("%s\n" % line)
elif (ablock_text == "<X>\n"):
print "<X>\n"
self.active = False
self.receivedChar.call("EOM\n")
self.init_gps = 99
elif (ablock_text == "<E>\n"):
self.active = False
self.receivedChar.call("EOM\n")
else:
self.active = False
self.init_gps = 99
self.outgoing("<X>\n")
def loadlines(self):
with open("a-block.txt", "r") as mfile:
data = mfile.read()
mfile.closed
self.lines = data.splitlines()
self.lines.append("");
def outgoing(self, p):
time.sleep(100.0 / 1000.0)
pk = CRTPPacket()
pk.port = CRTPPort.ABLOCK
pk.data = p
self.cf.send_packet(pk)
def putFile(self, data, add):
if add :
with open("a-block.txt", "a") as mfile:
mfile.write(data)
mfile.closed
else:
with open("a-block.txt", "w") as mfile:
mfile.write(data)
mfile.closed
def fileFormat(self, data, add):
block = ""
lineLen = 28
dataLen = len(data)
nbrFull = dataLen / lineLen
lenLast = dataLen % lineLen
if lenLast > lineLen - 2 :
nbrFull -= 1
iData = 0
while nbrFull > 0 :
i = 0
while i < lineLen :
block = block + data[iData]
iData += 1
i += 1
block = block + "\n"
nbrFull -= 1
lenNext = 0
if lenLast > lineLen - 2 :
lenNext = lineLen - 2
i = 0
while i < lenNext :
block = block + data[iData]
iData += 1
i += 1
if lenNext > 0 :
block = block + "\n"
lenLast -= lenNext
i = 0
while i < lenLast :
block = block + data[iData]
iData += 1
i += 1
block = block + "<>\n"
print len(block)
self.putFile(block, add)
def get_int_len(self, val, base):
value = val
if value < 0:
value = - value
l = 1
while value > base - 1:
l += 1
value /= base
return l
def itoa(self, decimal, base, precision):
digits = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
data = []
i = 1
len = 0
n = decimal
numLen = self.get_int_len(decimal, base)
fillZero = 0
minusPrecision = 0
if (decimal < 0 and base == 10):
n = -decimal
data.append("-")
else:
if decimal < 0 :
minusPrecision = precision
while base**minusPrecision + decimal <= 0 :
minusPrecision += 1
n = base**minusPrecision + decimal
numLen = self.get_int_len(n, base)
# print n
if (numLen < precision):
fillZero = precision - numLen
while fillZero > 0:
data.append("0")
fillZero -= 1
while n / i:
i *= base
if n == 0:
i = base
while i / base :
i /= base
data.append(digits[(n / i) % base])
len += 1
return data
def listToString(self, list1):
return str(list1).replace('[','').replace(']','').replace(',','').replace("'",'').replace(' ','')
    def cksum(self, msg, len):
        """Compute the two-byte UBX-style (8-bit Fletcher) checksum over the
        hex-character string *msg*, skipping the 4-character sync header.
        Returns (CK_A, CK_B), each reduced modulo 256.

        NOTE(review): int(msg[i], 16) already yields 0-15, so the -48/-7
        adjustments below look like they were written for ord(); the final
        % 256 appears to cancel the constant offset so the result still
        matches — confirm against known-good UBX frames.
        """
        cka = 0
        ckb = 0
        if len > 0 :
            i = 4  # skip the 4 sync-header characters
            while i < len :
                # high nibble of the current byte
                vala = int(msg[i], 16)
                vala -= 48
                if vala > 9 :
                    vala -= 7
                # low nibble of the current byte
                valb = int(msg[i+1], 16)
                valb -= 48
                if valb > 9 :
                    valb -= 7
                cka = cka + (vala * 16) + (valb & 15)
                ckb += cka
                i += 2
        cka = cka % 256
        ckb = ckb % 256
        return cka, ckb
def append_0(self, msg, nbr):
msg.append(self.itoa(0, 16, nbr))
return msg
def append_2(self, msg, int_val):
msg.append(self.itoa(int_val, 16, 2))
return msg
def append_4(self, msg, int_val):
x = self.itoa(int_val, 16, 4)
msg.append(x[2:])
msg.append(x[:-2])
return msg
def append_8(self, msg, int_val):
x = self.itoa(int_val, 16, 8)
msg.append(x[6:])
msg.append(x[4:-2])
msg.append(x[2:-4])
msg.append(x[:-6])
return msg
def ini_utc(self, hdr, taccs):
msg = []
today = datetime.datetime.utcnow()
msg.append(hdr)
msg = self.append_4(msg, int(today.year))
msg = self.append_2(msg, int(today.month))
msg = self.append_2(msg, int(today.day))
msg = self.append_2(msg, int(today.hour))
msg = self.append_2(msg, int(today.minute))
msg = self.append_2(msg, int(today.second))
msg = self.append_0(msg, 10)
msg = self.append_4(msg, int(taccs))
msg = self.append_0(msg, 12)
# print msg
return msg
def ini_llh(self, hdr, lat, lon, alt, gh, pacc):
msg = []
msg.append(hdr)
msg = self.append_8(msg, int(round(lat * 10000000.0)))
msg = self.append_8(msg, int(round(lon * 10000000.0)))
msg = self.append_8(msg, int(round((alt + gh) * 100.0)))
msg = self.append_8(msg, int(round(pacc * 100.0)))
# print msg
return msg
def alm_load(self):
today = datetime.datetime.now()
dayNbr = (today - datetime.datetime(today.year, 1, 1)).days + 1
# print dayNbr
# print
data = urllib2.urlopen("http://www.navcen.uscg.gov/?Do=getAlmanac&almanac=%d" % dayNbr).read(20000)
data = data.split("\n")
return data
    def isfloat(self, value):
        """Parse *value* as a float; return 0.0 (and print a warning to
        stdout) when the string cannot be parsed."""
        try:
            val = float(value)
            return val
        except ValueError:
            print("Bad Value")
            return 0.0
def isIndex(self, data, i):
try:
line = data[i]
return line
except IndexError:
line = "*********************************************\n"
return line
def ini_alm(self, data, i, hdr):
msg = []
eod = 0
mI = 55.0 * math.pi / 180.0
line = self.isIndex(data, i)
if (line[18:-21] == "almanac"):
i += 1
line = data[i]
svId = line[25:-1]
svId = int(round(self.isfloat(svId)))
# print(svId)
i += 1
line = data[i]
svHealth = line[25:-1]
svHealth = int(round(self.isfloat(svHealth)))
# print(svHealth)
i += 1
line = data[i]
e = line[25:-1]
# print(e)
e = int(round(self.isfloat(e) * 2**21))
# print(e)
i += 1
line = data[i]
toa = line[25:-1]
# print(toa)
toa = int(round(self.isfloat(toa) * 2**-12))
# print(toa)
i += 1
line = data[i]
deltaI = line[25:-1]
# print(deltaI)
deltaI = int(round((self.isfloat(deltaI) - mI) / math.pi * 2**19))
# print(deltaI)
i += 1
line = data[i]
omegaDot = line[25:-1]
omegaDot = int(round(self.isfloat(omegaDot) / math.pi * 2**38))
# print(omegaDot)
i += 1
line = data[i]
sqrtA = line[25:-1]
sqrtA = int(round(self.isfloat(sqrtA) * 2**11))
# print(sqrtA)
i += 1
line = data[i]
omega0 = line[25:-1]
omega0 = int(round(self.isfloat(omega0) / math.pi * 2**23))
# print(omega0)
i += 1
line = data[i]
omega = line[25:-1]
omega = int(round(self.isfloat(omega) / math.pi * 2**23))
# print(omega)
i += 1
line = data[i]
mo = line[25:-1]
mo = int(round(self.isfloat(mo) / math.pi * 2**23))
# print(mo)
i += 1
line = data[i]
af0 = line[25:-1]
af0 = int(round(self.isfloat(af0) * 2**20))
# print(af0)
i += 1
line = data[i]
af1 = line[25:-1]
af1 = int(round(self.isfloat(af1) * 2**38))
# print(af1)
i += 1
line = data[i]
almWNa = line[25:-1]
almWNa = int(round(self.isfloat(almWNa)))
almWNa = almWNa % 256
# print(almWNa)
i += 2
else:
print("End of SV Alm Data")
eod = 1
if eod == 0 :
msg.append(hdr)
msg = self.append_2(msg, svId)
msg = self.append_2(msg, svHealth)
msg = self.append_4(msg, e)
msg = self.append_2(msg, almWNa)
msg = self.append_2(msg, toa)
msg = self.append_4(msg, deltaI)
msg = self.append_4(msg, omegaDot)
msg = self.append_8(msg, sqrtA)
msg = self.append_8(msg, omega0)
msg = self.append_8(msg, omega)
msg = self.append_8(msg, mo)
msg = self.append_4(msg, af0)
msg = self.append_4(msg, af1)
msg = self.append_0(msg, 8)
print "G", svId
return eod, i, msg
def rst_hex(self, add):
msg = "B56206040400FFB90100C78D"
self.fileFormat(msg, add)
def pvt_hex(self, add):
msg = "B56206010800010700010000000018E1"
self.fileFormat(msg, add)
def ebx_hex(self, add):
msg = "B5620600140001000000D0080000802500000700010000000000A0A9"
self.fileFormat(msg, add)
def out_hex(self, msg_list, add):
msg_str = self.listToString(msg_list)
len_str = len(msg_str)
cka, ckb = self.cksum(msg_str, len_str)
msg = msg_str + self.listToString(self.itoa(cka, 16, 2) + self.itoa(ckb, 16, 2))
# print len(msg)
# for z in msg:
# print z,
# print
self.fileFormat(msg, add)
def utc_hex(self, taccs, add):
utc_Hdr = "B5621340180010000080"
msg_list = self.ini_utc(utc_Hdr, taccs)
self.out_hex(msg_list, add)
def llh_hex(self, lat, lon, alt, gh, pacc, add):
llh_Hdr = "B5621340140001000000"
msg_list = self.ini_llh(llh_Hdr, lat, lon, alt, gh, pacc)
self.out_hex(msg_list, add)
def alm_hex(self, indx_sv_list, add):
sv_Hdr = "B562130024000200"
data = self.alm_load()
eod, i_list, msg_list = self.ini_alm(data,indx_sv_list,sv_Hdr)
if eod == 0:
# print msg_list
# print len(msg_list)
self.out_hex(msg_list, add)
return eod, i_list
def sv_alm_hex(self, add):
indx_sv_list = 0
j = 0
while j < 32 :
j += 1
eod, indx_sv_list = self.alm_hex(indx_sv_list, add)
if eod != 0 :
break
# msg = "B5620611020008002191"
# self.fileFormat(msg, True)
|
jackemoore/cfclient-gps-2-ebx-io
|
lib/cflib/crazyflie/ablock.py
|
Python
|
gpl-2.0
| 17,198 | 0.005815 |
#!/usr/bin/env python
# Sample data: one (old_salary, new_salary) pair per employee.
old_new_salaries = [
    # (old_salary, new_salary)
    (2401, 2507), (2172, 2883), (2463, 2867), (2462, 3325), (2949, 2974),
    (2713, 3109), (2778, 3771), (2596, 3045), (2819, 2848), (2974, 3322),
    (2539, 2790), (2440, 3051), (2526, 3240), (2869, 3635), (2341, 2495),
    (2197, 2897), (2706, 2782), (2712, 3056), (2666, 2959), (2149, 2377)
]


def is_high_raise(r):
    # a raise counts as "high" when it exceeds 500
    return r > 500

# per-employee raise amount (Python 2: map/filter return lists here)
raises = map( lambda ss: ss[1] - ss[0] , old_new_salaries)
high_raises = filter(is_high_raise, raises)
# sum of all high raises (Python 2 built-in reduce; moved to functools in 3)
total_high_raises = reduce(lambda a,b: a + b, high_raises)

print "total high raises: %s" % total_high_raises
|
OmniaGM/spark-training
|
quiz/quiz1/quiz.py
|
Python
|
mit
| 628 | 0.007962 |
"""
prepare prediction:
filtered pws -> filtered pws
Uses:
PROCESSED_DATA_DIR/neural_networks/training_data_filtered.csv
"""
import os
import random
import logging
import platform
import pandas
from filter_weather_data.filters import StationRepository
from filter_weather_data import get_repository_parameters
from filter_weather_data import RepositoryParameter
from filter_weather_data import PROCESSED_DATA_DIR
from interpolation.interpolator.prepare.neural_network_single_group import load_eddh
from interpolation.interpolator.prepare.neural_network_single_group import fill_missing_eddh_values
if platform.uname()[1].startswith("ccblade"): # the output files can turn several gigabyte so better not store them
# on a network drive
PROCESSED_DATA_DIR = "/export/scratch/1kastner"
def join_to_big_vector(output_csv_file, station_dicts, eddh_df):
    """
    Join every station's measurements with the EDDH reference data and write
    the combined, time-sorted table to CSV.

    :param station_dicts: The stations to use.
        NOTE(review): the list is consumed destructively (pop in a loop) —
        the caller's list is empty afterwards; confirm callers don't reuse it.
    :param output_csv_file: Where to save the joined data to
    :param eddh_df: EDDH (airport reference) data frame joined onto each station
    :return:
    """
    joined_stations = []
    while len(station_dicts):
        station_dict = station_dicts.pop()
        logging.debug("work on %s" % station_dict["name"])
        station_df = station_dict["data_frame"]
        # keep only the measurement columns of interest
        for attribute in station_df.columns:
            if attribute not in ["temperature", "humidity", "dewpoint"]:
                station_df.drop(attribute, axis=1, inplace=True)
        # attach the station's coordinates as constant columns
        position = station_dict["meta_data"]["position"]
        station_df['lat'] = position["lat"]
        station_df['lon'] = position["lon"]
        # left join on the timestamp index: keep every station row
        joined_stations.append(station_df.join(eddh_df, how="left"))
    common_df = pandas.concat(joined_stations)
    common_df.sort_index(inplace=True)
    common_df = fill_missing_eddh_values(common_df)
    common_df.to_csv(output_csv_file)
def run():
    """Load all filtered stations for 2016, split them 70/30 into training
    and evaluation sets, and write one joined CSV per set."""
    start_date = "2016-01-01T00:00"
    end_date = "2016-12-31T23:59"
    eddh_df = load_eddh(start_date, end_date)

    station_repository = StationRepository(*get_repository_parameters(
        RepositoryParameter.ONLY_OUTDOOR_AND_SHADED
    ))
    station_dicts = station_repository.load_all_stations(
        start_date,
        end_date,
        # limit=5,  # for testing purposes
        limit_to_temperature=False
    )
    # random 70/30 train/evaluation split of whole stations
    # NOTE(review): shuffle is unseeded, so the split differs between runs
    random.shuffle(station_dicts)
    split_point = int(len(station_dicts) * .7)
    training_dicts, evaluation_dicts = station_dicts[:split_point], station_dicts[split_point:]
    logging.info("training stations: %s" % [station["name"] for station in training_dicts])
    logging.info("evaluation stations: %s" % [station["name"] for station in evaluation_dicts])
    training_csv_file = os.path.join(
        PROCESSED_DATA_DIR,
        "neural_networks",
        "training_data_filtered.csv"
    )
    join_to_big_vector(training_csv_file, training_dicts, eddh_df)
    evaluation_csv_file = os.path.join(
        PROCESSED_DATA_DIR,
        "neural_networks",
        "evaluation_data_filtered.csv"
    )
    join_to_big_vector(evaluation_csv_file, evaluation_dicts, eddh_df)
|
1kastner/analyse_weather_data
|
interpolation/interpolator/prepare/neural_network_single_group_filtered.py
|
Python
|
agpl-3.0
| 3,132 | 0.002554 |
''' Module '''
import re
import logging
class CurrentCost:
    """Parser for CurrentCost energy-monitor XML messages.

    Extracts the meter id, wattage and timestamp from a raw payload such as
    ``<id>1</id><time>12:00:00</time><watts>42</watts>``.

    Fix: ``__init__`` was accidentally wrapped in a stray triple-quoted
    string literal (dead code), so instances never received ``_data`` or
    ``logger`` and ``parse_data`` could not work; the constructor is restored.
    """

    def __init__(self, data=None, logger=None):
        """Store the raw payload and set up logging.

        :param data: raw XML message from the device (string).
        :param logger: optional logger; defaults to a module-named logger.
        """
        self._data = data
        self.logger = logger or logging.getLogger(__name__)
        self.time = None   # timestamp from the <time> element
        self.uid = None    # device id from the <id> element
        self.value = None  # power reading (watts) from the <watts> element

    def parse_data(self):
        """Populate uid/time/value from the stored payload via regexes.

        On any failure the error is logged and the attributes keep their
        previous values (best-effort parsing, no exception escapes).
        """
        try:
            # http://www.marcus-povey.co.uk - USED REGEX REGEX!
            uidregex = re.compile(r'<id>([0-9]+)</id>')
            valueregex = re.compile(r'<watts>([0-9]+)</watts>')
            timeregex = re.compile(r'<time>([0-9\.\:]+)</time>')

            self.value = str(int(valueregex.findall(self._data)[0]))
            self.time = timeregex.findall(self._data)[0]
            self.uid = uidregex.findall(self._data)[0]

            self.logger.info('Parsed data sucessfully!')
        except Exception:
            self.logger.error('Could not get details from device',
                              exc_info=True)
|
gljohn/meterd
|
meterd/parser/currentcost.py
|
Python
|
gpl-3.0
| 1,012 | 0.003953 |
from django.db import models
class Foo(models.Model):
    # minimal model: a single short name field
    name = models.CharField(max_length=5)

    class Meta:
        # model is declared outside the app's default models module,
        # so the owning app must be set explicitly
        app_label = 'complex_app'
|
openhatch/new-mini-tasks
|
vendor/packages/Django/tests/regressiontests/admin_scripts/complex_app/models/foo.py
|
Python
|
apache-2.0
| 148 | 0.006757 |
#!/usr/bin/env python3
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2017 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Various small code checkers."""
import os
import re
import sys
import os.path
import argparse
import subprocess
import tokenize
import traceback
import collections
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir))
from scripts import utils
def _get_files(only_py=False):
"""Iterate over all python files and yield filenames."""
for (dirpath, _dirnames, filenames) in os.walk('.'):
parts = dirpath.split(os.sep)
if len(parts) >= 2:
rootdir = parts[1]
if rootdir.startswith('.') or rootdir == 'htmlcov':
# ignore hidden dirs and htmlcov
continue
if only_py:
endings = {'.py'}
else:
endings = {'.py', '.asciidoc', '.js', '.feature'}
files = (e for e in filenames if os.path.splitext(e)[1] in endings)
for name in files:
yield os.path.join(dirpath, name)
def check_git():
    """Check for uncommitted git files.

    Returns True when the tree is clean, False when there are untracked
    files or no .git directory exists at all.
    """
    if not os.path.isdir(".git"):
        print("No .git dir, ignoring")
        print()
        return False
    output = subprocess.check_output(['git', 'status', '--porcelain'])
    untracked = []
    for line in output.decode('UTF-8').strip().splitlines():
        state, name = line.split(maxsplit=1)
        # '??' marks untracked entries; the virtualenv dir is tolerated.
        if state == '??' and name != '.venv/':
            untracked.append(name)
    if not untracked:
        return True
    utils.print_col("Untracked files:", 'red')
    print('\n'.join(untracked))
    print()
    return False
def check_spelling():
    """Check commonly misspelled words.

    Returns True when nothing was found, False on findings, None on an
    unexpected error.
    """
    # Words which I often misspell
    # BUG FIX: '[Ll]ikelyhood' was missing its opening bracket, so the
    # pattern only ever matched the literal text "Ll]ikelyhood".
    words = {'[Bb]ehaviour', '[Qq]uitted', '[Ll]ikelyhood', '[Ss]ucessfully',
             '[Oo]ccur[^rs .]', '[Ss]eperator', '[Ee]xplicitely',
             '[Aa]uxillary', '[Aa]ccidentaly', '[Aa]mbigious', '[Ll]oosly',
             '[Ii]nitialis', '[Cc]onvienence', '[Ss]imiliar', '[Uu]ncommited',
             '[Rr]eproducable', '[Aa]n [Uu]ser', '[Cc]onvienience',
             '[Ww]ether', '[Pp]rogramatically', '[Ss]plitted', '[Ee]xitted',
             '[Mm]ininum', '[Rr]esett?ed', '[Rr]ecieved', '[Rr]egularily',
             '[Uu]nderlaying', '[Ii]nexistant', '[Ee]lipsis', 'commiting',
             'existant', '[Rr]esetted'}

    # Words which look better when splitted, but might need some fine tuning.
    words |= {'[Ww]ebelements', '[Mm]ouseevent', '[Kk]eysequence',
              '[Nn]ormalmode', '[Ee]ventloops', '[Ss]izehint',
              '[Ss]tatemachine', '[Mm]etaobject', '[Ll]ogrecord',
              '[Ff]iletype'}

    # Files which should be ignored, e.g. because they come from another
    # package
    ignored = [
        os.path.join('.', 'scripts', 'dev', 'misc_checks.py'),
        os.path.join('.', 'qutebrowser', '3rdparty', 'pdfjs'),
        os.path.join('.', 'tests', 'end2end', 'data', 'hints', 'ace',
                     'ace.js'),
    ]

    seen = collections.defaultdict(list)
    try:
        ok = True
        for fn in _get_files():
            # BUG FIX: skip ignored files *before* opening them -- third
            # party files (e.g. ace.js) may not even be decodable by
            # tokenize.open().
            if any(fn.startswith(i) for i in ignored):
                continue
            with tokenize.open(fn) as f:
                for line in f:
                    for w in words:
                        # Report each pattern at most once per file; a
                        # line can opt out with the no-spellcheck pragma.
                        if (re.search(w, line) and
                                fn not in seen[w] and
                                '# pragma: no spellcheck' not in line):
                            print('Found "{}" in {}!'.format(w, fn))
                            seen[w].append(fn)
                            ok = False
        print()
        return ok
    except Exception:
        traceback.print_exc()
        return None
def check_vcs_conflict():
    """Check VCS conflict markers.

    Returns True when no marker was found, False on findings, None on an
    unexpected error.
    """
    # A conflict marker is seven repetitions of one of these characters at
    # the start of a line.
    markers = tuple(char * 7 for char in '<>=|')
    try:
        ok = True
        for filename in _get_files(only_py=True):
            with tokenize.open(filename) as f:
                for line in f:
                    if line.startswith(markers):
                        print("Found conflict marker in {}".format(filename))
                        ok = False
        print()
        return ok
    except Exception:
        traceback.print_exc()
        return None
def main():
    """Run the checker named on the command line; exit status 0 on success."""
    parser = argparse.ArgumentParser()
    parser.add_argument('checker', choices=('git', 'vcs', 'spelling'),
                        help="Which checker to run.")
    args = parser.parse_args()
    # argparse's `choices` guarantees the key is present.
    checkers = {
        'git': check_git,
        'vcs': check_vcs_conflict,
        'spelling': check_spelling,
    }
    ok = checkers[args.checker]()
    return 0 if ok else 1
if __name__ == '__main__':
sys.exit(main())
|
lahwaacz/qutebrowser
|
scripts/dev/misc_checks.py
|
Python
|
gpl-3.0
| 5,504 | 0.000182 |
from django.test import TestCase
from django.test.client import RequestFactory
from myuw.dao.canvas import get_indexed_data_for_regid
from myuw.dao.canvas import get_indexed_by_decrosslisted
from myuw.dao.schedule import _get_schedule
from myuw.dao.term import get_current_quarter
FDAO_SWS = 'restclients.dao_implementation.sws.File'
class TestCanvas(TestCase):
    def test_crosslinks(self):
        """Canvas data is keyed by *Canvas* section ids.

        TRAIN 100 A is cross-listed into section B on the Canvas side, so
        only the B key may appear in the raw index.
        """
        with self.settings(RESTCLIENTS_SWS_DAO_CLASS=FDAO_SWS):
            data = get_indexed_data_for_regid(
                "12345678901234567890123456789012")

            physics = data['2013,spring,PHYS,121/A']
            self.assertEquals(physics.course_url,
                              'https://canvas.uw.edu/courses/149650')

            has_section_a = '2013,spring,TRAIN,100/A' in data
            self.assertFalse(has_section_a)

            train = data['2013,spring,TRAIN,100/B']
            self.assertEquals(train.course_url,
                              'https://canvas.uw.edu/courses/249650')

    def test_crosslinks_lookup(self):
        """The decrosslisted index maps *schedule* section ids (TRAIN A)
        back to their Canvas course data."""
        with self.settings(RESTCLIENTS_SWS_DAO_CLASS=FDAO_SWS):
            data = get_indexed_data_for_regid(
                "12345678901234567890123456789012")
            now_request = RequestFactory().get("/")
            now_request.session = {}
            term = get_current_quarter(now_request)
            schedule = _get_schedule("12345678901234567890123456789012", term)

            canvas_data_by_course_id = get_indexed_by_decrosslisted(
                data, schedule.sections)

            physics = data['2013,spring,PHYS,121/A']
            self.assertEquals(physics.course_url,
                              'https://canvas.uw.edu/courses/149650')

            # BUG FIX: look TRAIN 100/A up in the decrosslisted index.
            # test_crosslinks above shows the raw `data` index has no A
            # key, so the original `data[...]` lookup raised KeyError and
            # the decrosslisted mapping was never actually exercised.
            train = canvas_data_by_course_id['2013,spring,TRAIN,100/A']
            self.assertEquals(train.course_url,
                              'https://canvas.uw.edu/courses/249650')
|
fanglinfang/myuw
|
myuw/test/dao/canvas.py
|
Python
|
apache-2.0
| 1,890 | 0 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: Red Hat Inc. 2014
# Author: Ruda Moura <rmoura@redhat.com>
"""
Module to help extract and create compressed archives.
"""
import logging
import os
import platform
import stat
import tarfile
import zipfile
LOG = logging.getLogger(__name__)
try:
import lzma
LZMA_CAPABLE = True
except ImportError:
LZMA_CAPABLE = False
class ArchiveException(Exception):

    """Base class for every error raised by this archive module."""
class _WrapLZMA(object):

    """ wraps tar.xz for python 2.7's tarfile """

    def __init__(self, filename, mode):
        """
        Creates an instance of :class:`_WrapLZMA`.

        :param filename: the archive file name.
        :param mode: file mode, `r` read, `w` write.
        """
        # tarfile on Python 2.7 has no xz mode, so hand it an already
        # decompressing/compressing LZMAFile object instead of a path.
        self._engine = tarfile.open(fileobj=lzma.LZMAFile(filename, mode),
                                    mode=mode)
        # Delegate every tarfile method this wrapper does not define itself
        # by copying the engine's bound attributes onto the instance.
        methods = dir(self._engine)
        for meth in dir(self):
            try:
                # Drop names both objects share (e.g. __init__, open) so
                # the wrapper's own attributes are not clobbered.
                methods.remove(meth)
            except ValueError:
                pass
        for method in methods:
            setattr(self, method, getattr(self._engine, method))

    @classmethod
    def open(cls, filename, mode='r'):
        """
        Creates an instance of :class:`_WrapLZMA`.

        :param filename: the archive file name.
        :param mode: file mode, `r` read, `w` write.
        """
        return cls(filename, mode)
class ArchiveFile(object):

    """
    Class that represents an Archive file.

    Archives are ZIP files or Tarballs.
    """

    # extension info: is_zip, is_tar, zipfile|tarfile, +mode
    _extension_table = {
        '.zip': (True, False, zipfile.ZipFile, ''),
        '.tar': (False, True, tarfile.open, ''),
        '.tar.gz': (False, True, tarfile.open, ':gz'),
        '.tgz': (False, True, tarfile.open, ':gz'),
        '.tar.bz2': (False, True, tarfile.open, ':bz2'),
        '.tbz2': (False, True, tarfile.open, ':bz2')}

    # xz support is optional (the lzma module may be missing on 2.x).
    if LZMA_CAPABLE:
        _extension_table['.xz'] = (False, True, _WrapLZMA.open, '')

    def __init__(self, filename, mode='r'):
        """
        Creates an instance of :class:`ArchiveFile`.

        :param filename: the archive file name.
        :param mode: file mode, `r` read, `w` write.
        :raises ArchiveException: if no known extension matches `filename`.
        """
        self.filename = filename
        self.mode = mode
        engine = None

        # NOTE(review): if several extensions matched, the last one seen
        # during dict iteration would win -- the current table has no such
        # overlap, but keep that in mind when extending it.
        for ext in ArchiveFile._extension_table:
            if filename.endswith(ext):
                (self.is_zip,
                 self.is_tar,
                 engine,
                 extra_mode) = ArchiveFile._extension_table[ext]

        if engine is not None:
            self.mode += extra_mode
            self._engine = engine(self.filename, self.mode)
        else:
            raise ArchiveException('file is not an archive')

    def __repr__(self):
        return "ArchiveFile('%s', '%s')" % (self.filename, self.mode)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        if self._engine is not None:
            self.close()

    @classmethod
    def open(cls, filename, mode='r'):
        """
        Creates an instance of :class:`ArchiveFile`.

        :param filename: the archive file name.
        :param mode: file mode, `r` read, `w` write.
        """
        return cls(filename, mode)

    def add(self, filename, arcname=None):
        """
        Add file to the archive.

        :param filename: file to archive.
        :param arcname: alternative name for the file in the archive.
        """
        if self.is_zip:
            self._engine.write(filename, arcname, zipfile.ZIP_DEFLATED)
        else:
            self._engine.add(filename, arcname)

    def list(self):
        """
        List files to the standard output.
        """
        if self.is_zip:
            self._engine.printdir()
        else:
            self._engine.list()

    def extract(self, path='.'):
        """
        Extract all files from the archive.

        :param path: destination path.
        """
        self._engine.extractall(path)
        # ZIP stores Unix permissions/symlinks in extra attributes that
        # extractall() ignores; restore them manually.
        if self.is_zip:
            self._update_zip_extra_attrs(path)

    def _update_zip_extra_attrs(self, dst_dir):
        # Restore symlinks and permission bits encoded in the ZIP entries'
        # external attributes (Unix mode lives in the upper 16 bits).
        if platform.system() != "Linux":
            LOG.warn("Attr handling in zip files only supported on Linux.")
            return
        # Walk all files and re-create files as symlinks
        for path, info in self._engine.NameToInfo.iteritems():
            dst = os.path.join(dst_dir, path)

            if not os.path.exists(dst):
                LOG.warn("One or more files in the ZIP archive '%s' could "
                         "not be found after extraction. Their paths are "
                         "probably stored in unsupported format and their "
                         "attributes are not going to be updated",
                         self.filename)
                return
            attr = info.external_attr >> 16
            if attr & stat.S_IFLNK == stat.S_IFLNK:
                # Entry is a symlink: the file content is the link target.
                dst = os.path.join(dst_dir, path)
                src = open(dst, 'r').read()
                os.remove(dst)
                os.symlink(src, dst)
                continue    # Don't override any other attributes on links
            mode = attr & 511   # Mask only permissions (511 == 0o777)
            if mode and mode != 436:    # If mode is stored and is not default (436 == 0o664)
                os.chmod(dst, mode)

    def close(self):
        """
        Close archive.
        """
        self._engine.close()
def is_archive(filename):
    """
    Test if a given file is an archive.

    :param filename: file to test.
    :return: `True` if it is an archive.
    """
    if zipfile.is_zipfile(filename):
        return True
    return tarfile.is_tarfile(filename)
def compress(filename, path):
    """
    Compress files in an archive.

    :param filename: archive file name.
    :param path: origin directory path to files to compress. No
                 individual files allowed.
    """
    with ArchiveFile.open(filename, 'w') as archive:
        if os.path.isdir(path):
            for root, _, files in os.walk(path):
                # Store entries relative to `path` inside the archive.
                relative_root = root.replace(path, '')
                for name in files:
                    archive.add(os.path.join(root, name),
                                os.path.join(relative_root, name))
        elif os.path.isfile(path):
            archive.add(path, os.path.basename(path))
def uncompress(filename, path):
    """
    Extract files from an archive.

    :param filename: archive file name.
    :param path: destination path to extract to.
    """
    with ArchiveFile.open(filename) as archive:
        archive.extract(path)
# Some aliases
# `create`/`extract` expose the compress/uncompress API under shorter names.
create = compress
extract = uncompress
|
adereis/avocado
|
avocado/utils/archive.py
|
Python
|
gpl-2.0
| 7,118 | 0.00014 |
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio BibCatalog HTML generator."""
from invenio.legacy.bibcatalog.api import bibcatalog_system
from invenio.base.i18n import wash_language, gettext_set_language
from invenio.config import CFG_SITE_LANG
from invenio.legacy.webstyle.templates import Template as DefaultTemplate
class Template(DefaultTemplate):
    """ HTML generators for BibCatalog """
    # Page size for the ticket listing.
    SHOW_MAX_TICKETS = 25

    def tmpl_your_tickets(self, uid, ln=CFG_SITE_LANG, start=1):
        """ make a pretty html body of tickets that belong to the user given as param

        :param uid: user whose tickets are listed (also the ticket owner filter).
        :param ln: interface language code.
        :param start: 1-based index of the first ticket to show (pagination).
        """
        ln = wash_language(ln)
        _ = gettext_set_language(ln)
        if bibcatalog_system is None:
            return _("Error: No BibCatalog system configured.")
        #errors? tell what happened and get out
        bibcat_probs = bibcatalog_system.check_system(uid)
        if bibcat_probs:
            return _("Error")+" "+bibcat_probs
        tickets = bibcatalog_system.ticket_search(uid, owner=uid) #get ticket id's
        lines = "" #put result here
        i = 1
        lines += (_("You have %(x_num)i tickets.", x_num=len(tickets))) + "<br/>"
        #make a prev link if needed
        if (start > 1):
            newstart = start - self.SHOW_MAX_TICKETS
            if (newstart < 1):
                newstart = 1
            lines += '<a href="/yourtickets/display?start='+str(newstart)+'">'+_("Previous")+'</a>'
        lines += """<table border="1">"""
        lastshown = len(tickets) #what was the number of the last shown ticket?
        # NOTE(review): subject/text from the ticket system are inserted
        # into the HTML without escaping -- potential XSS if ticket content
        # is user-controlled; confirm upstream sanitisation.
        for ticket in tickets:
            #get info and show only for those that within the show range
            if (i >= start) and (i < start+self.SHOW_MAX_TICKETS):
                ticket_info = bibcatalog_system.ticket_get_info(uid, ticket)
                subject = ticket_info['subject']
                status = ticket_info['status']
                text = ""
                if 'text' in ticket_info:
                    text = ticket_info['text']
                display = '<a href="'+ticket_info['url_display']+'">'+_("show")+'</a>'
                close = '<a href="'+ticket_info['url_close']+'">'+_("close")+'</a>'
                lines += "<tr><td>"+str(ticket)+"</td><td>"+subject+" "+text+"</td><td>"+status+"</td><td>"+display+"</td><td>"+close+"</td></tr>\n"
                lastshown = i
            i = i+1
        lines += "</table>"
        #make next link if needed
        if (len(tickets) > lastshown):
            newstart = lastshown+1
            lines += '<a href="/yourtickets/display?start='+str(newstart)+'">'+_("Next")+'</a>'
        return lines
|
PXke/invenio
|
invenio/legacy/bibcatalog/templates.py
|
Python
|
gpl-2.0
| 3,365 | 0.009807 |
# Copyright 2011 The greplin-twisted-utils Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""DNS resolver that uses a short lived local cache to improve performance."""
from greplin.defer import lazymap
from twisted.internet import defer, interfaces
from twisted.python.failure import Failure
from zope.interface import implements
import collections
import time
class CachingDNS(object):
  """DNS resolver that uses a short lived local cache to improve performance.

  Wraps another IResolverSimple; successful lookups are cached for
  `timeout` seconds.  With `useFallback`, the last known-good address is
  returned when a refresh fails.
  """

  implements(interfaces.IResolverSimple)

  def __init__(self, original, timeout = 60, useFallback = True):
    self._original = original
    self._timeout = timeout
    # Maps key -> last good address, or None when fallback is disabled.
    self._fallback = {} if useFallback else None
    # DeferredMap lazily resolves missing keys via __fetchHost.
    self._cache = lazymap.DeferredMap(self.__fetchHost)
    self._stats = {
        'miss': collections.defaultdict(int),
        'hit': collections.defaultdict(int),
        'error': collections.defaultdict(int),
        'fallback': collections.defaultdict(int),
    }

  def __fetchHost(self, args):
    """Actually fetches the host name.

    Returns a deferred firing with (address, fetch_time) so expiry can be
    checked later.
    """
    return self._original.getHostByName(*args).addCallback(lambda x: (x, time.time()))

  def __fallback(self, err, key):
    """Returns the fallback for the given key.

    Used as an errback: yields the last known-good address if one was
    stored, otherwise passes the failure through.
    """
    try:
      result = self._fallback[key]
      self._stats['fallback'][str(key)] += 1
      return result
    except KeyError:
      self._stats['error'][str(key)] += 1
      return err

  def getStats(self):
    """Gets stats about hits / misses / failures."""
    return self._stats

  def getHostByName(self, name, *args):
    """Gets a host by name.

    Always returns a deferred; cache hits fire immediately.
    """
    key = (name,) + args
    if key in self._cache:
      # If we failed last time, try again
      if isinstance(self._cache[key], Failure):
        del self._cache[key]

      # Check for a cache hit.
      elif time.time() >= self._cache[key][1] + self._timeout:
        # Ensure the item hasn't expired.
        # Expired: remember the stale address as fallback, then refetch.
        if self._fallback is not None:
          self._fallback[key] = self._cache[key][0]
        del self._cache[key]
      else:
        # If the item is in cache and not expired, return it immediately.
        self._stats['hit'][str(key)] += 1
        return defer.succeed(self._cache[key][0])

    # If it wasn't already in the cache, this always returns a deferred.
    result = self._cache[key].addCallback(lambda x: x[0]).addErrback(self.__fallback, key)
    self._stats['miss'][str(key)] += 1
    return result
|
Cue/greplin-twisted-utils
|
src/greplin/net/dnsCache.py
|
Python
|
apache-2.0
| 2,900 | 0.01069 |
from django.contrib import admin
from django.contrib.auth.models import Group as AuthGroup
from sigma_core.models.user import User
from sigma_core.models.group import Group
from sigma_core.models.group_member import GroupMember
from sigma_core.models.group_field import GroupField
from sigma_core.models.group_field_value import GroupFieldValue
from sigma_core.models.group_invitation import GroupInvitation
from sigma_core.models.participation import Participation
from sigma_core.models.publication import Publication
from sigma_core.models.event import Event
from sigma_core.models.shared_publication import SharedPublication
# Hide Django's built-in auth Group admin; sigma registers its own Group
# model below.
admin.site.unregister(AuthGroup)
from sigma_core.models.acknowledgment import Acknowledgment
from sigma_core.models.acknowledgment_invitation import AcknowledgmentInvitation
# Models exposed with the default ModelAdmin.
admin.site.register(Acknowledgment)
admin.site.register(AcknowledgmentInvitation)
admin.site.register(GroupMember)
# Presumably managed through the inline admins declared further down --
# standalone registrations kept disabled on purpose.
#admin.site.register(GroupInvitation)
#admin.site.register(SharedPublication)
#admin.site.register(Participation)
admin.site.register(GroupField)
admin.site.register(GroupFieldValue)
class ParticipationInline(admin.TabularInline):
    # Participations are edited inline on the Event page.
    model = Participation
    extra = 0

class EventAdmin(admin.ModelAdmin):
    # Event list view: columns, sidebar filters and search configuration.
    list_display = ['name', 'date_start', 'date_end', 'place_name']
    list_filter = ['date_start', 'date_end']
    search_fields = ['name', 'place_name']
    inlines = [ParticipationInline]

admin.site.register(Event, EventAdmin)

class SharedInline(admin.TabularInline):
    # Shares of a publication are edited inline on the Publication page.
    model = SharedPublication
    extra = 0

class PublicationAdmin(admin.ModelAdmin):
    inlines = [SharedInline]
    list_display = ['title', 'group', 'author', 'related_event', 'internal']
    list_filter = ['group', 'author', 'internal']

admin.site.register(Publication, PublicationAdmin)
class GroupsInline(admin.TabularInline):
    # Group memberships shown inline on the User page.
    model = GroupMember
    extra = 0

class InvitationsInline(admin.TabularInline):
    # Pending group invitations; reused by UserAdmin and GroupAdmin below.
    model = GroupInvitation
    extra = 0

class UserAdmin(admin.ModelAdmin):
    list_display = ['firstname', 'lastname', 'email', 'is_active', 'is_superuser']
    list_filter = ['is_active', 'is_superuser']
    search_fields = ['firstname', 'lastname', 'email']
    inlines = [GroupsInline, InvitationsInline]

admin.site.register(User, UserAdmin)
class MembersInline(admin.TabularInline):
    # Members shown inline on the Group page.
    model = GroupMember
    extra = 0

class ParentsInline(admin.TabularInline):
    # Acknowledgments where this group is the one being acknowledged;
    # fk_name disambiguates Acknowledgment's two FKs to Group.
    model = Acknowledgment
    extra = 0
    fk_name = "acknowledged"

class ChildrenInline(admin.TabularInline):
    # Acknowledgments granted *by* this group.
    model = Acknowledgment
    extra = 0
    fk_name = "acknowledged_by"

class GroupAdmin(admin.ModelAdmin):
    list_display = ['name', 'is_protected', 'can_anyone_ask', 'need_validation_to_join', 'members_visibility', 'group_visibility']
    list_filter = ['is_protected', 'can_anyone_ask', 'need_validation_to_join']
    search_fields = ['name', 'description']
    inlines = [MembersInline, InvitationsInline, ParentsInline, ChildrenInline]

admin.site.register(Group, GroupAdmin)

from sigma_core.models.tag import Tag
from sigma_core.models.like import Like
from sigma_core.models.comment import Comment
# Simple content models with default admin pages.
admin.site.register(Tag)
admin.site.register(Like)
admin.site.register(Comment)
|
SRLKilling/sigma-backend
|
data-server/django_app/sigma_core/admin.py
|
Python
|
agpl-3.0
| 3,213 | 0.008092 |
import pytz
from datetime import datetime
from decimal import Decimal
from furs_fiscal.api import FURSInvoiceAPI
# Path to our .p12 cert file
P12_CERT_PATH = 'demo_podjetje.p12'
# Password for out .p12 cert file
P12_CERT_PASS = 'Geslo123#'
class InvoiceDemo():
    def demo_zoi(self):
        """
        Obtaining Invoice ZOI - Protective Mark of the Invoice Issuer

        Our Invoice Number on the Receipt is:

        11/BP101/B1

        Where:

         * 11 - Invoice Number
         * BP101 - Business premise ID
         * B1 - Electronic Register ID
        """
        # First we'll need to initialize FURSInvoice APi - so that it loads all the certs
        # production=False presumably targets the FURS test environment
        # (demo cert/password above) -- confirm before adapting for live use.
        api = FURSInvoiceAPI(p12_path=P12_CERT_PATH,
                             p12_password=P12_CERT_PASS,
                             production=False,
                             request_timeout=1.0)

        # ZOI input requires a timezone-aware issue timestamp.
        date_issued = datetime.now(tz=pytz.UTC)

        # let's get that ZOI
        zoi = api.calculate_zoi(tax_number=10039856,  # Issuer Tax Number
                                issued_date=date_issued,  # DateTime of the Invoice
                                invoice_number='11',  # Invoice Number - Sequential
                                business_premise_id='BP101',  # Business premise ID
                                electronic_device_id='B1',  # Electronic Device ID
                                invoice_amount=Decimal('19.15'))  # Invoice Amount

        print("ZOI: " + zoi)

        # Let's obtain data for Code128/QR/PDF417 that should be placed at the bottom of the Invoice
        print_data = api.prepare_printable(tax_number=10039856,
                                           zoi=zoi,
                                           issued_date=date_issued)

        print("QR/Code128/PDF417 Data: " + print_data)
if __name__ == "__main__":
    # Run the demo when executed directly.
    demo = InvoiceDemo()
    demo.demo_zoi()
|
boris-savic/python-furs-fiscal
|
demos/invoice_demo.py
|
Python
|
mit
| 1,878 | 0.004792 |
#!/usr/bin/python
import xml.dom.minidom
import sys
from optparse import OptionParser
import random
from hadoop_conf import *
# HDFS chunk size in MB.  Placeholder until conf_t.__init__ overwrites it
# (via `global chunk_size`) with the <unit_size> value from the XML config.
chunk_size = []
def xml_children(node, children_name):
    """Return the DOM NodeList of elements named `children_name` under `node`.

    NOTE(review): getElementsByTagName matches all *descendants*, not only
    direct children, and returns a NodeList rather than a plain list.
    """
    return node.getElementsByTagName(children_name)
def xml_text(node):
    """Return the text stored in *node*'s first child (its text node)."""
    first_child = node.childNodes[0]
    return first_child.nodeValue
def xml_child_text(node, child_name):
    """Return the text of the first descendant named *child_name*.

    probably encoded in utf-8, be careful.
    """
    first_match = xml_children(node, child_name)[0]
    return xml_text(first_match)
class empty_t:
    """Attribute bag: instances receive ad-hoc attributes (disk, cpu, ...)."""
    pass
class hnode_t:
"""HDFS node for a HDFS tree.
5-level hierarchy: rack_group (multiple identical racks), rack, node_group
(multiple identical nodes), node, and disk.
disk should be initiated with a capacity. Other nodes' capacity are
calculated by summing up children's capacity."""
def __init__(self, parent, capacity=None, num=1):
self.parent = parent
self._capacity = capacity
self._num = num
self._children = []
self.used = 0
self.end = None
self.reserved = None
if parent <> None:
self.index_stack = parent.index_stack[:] + [len(parent.children())]
parent.children().append(self)
if parent._capacity <> None:
parent._capacity = None
else:
self.index_stack = []
def clone(self, parent=None):
'''clone a node from self, and append it to parent's children'''
if parent == None:
parent = self.parent
node = hnode_t(parent, self._capacity)
node._children = []
if self._children <> []:
for child in self._children:
#print self, self.parent, self._children
child.clone(node)
#node._children.append(child.clone(node)) ## wrong!!!
node.used = 0
node.reserved = self.reserved
return node
def capacity(self):
if self._capacity <> None:
return self._capacity
else :
assert self._children <> []
self._capacity = 0
for child in self._children:
self._capacity += child.capacity()
return self._capacity
def children(self):
return self._children;
def add_chunk(self):
if self.used >= self.capacity():
print 'error: node full' + self.index_stack
self.used += chunk_size
parent = self.parent
if parent != None:
parent.add_chunk()
def name(self):
if len(self.index_stack) == 5: #disk
return 'd_rg%d_%d_ng%d_%d_disk%d' % tuple(self.index_stack)
elif len(self.index_stack) == 4: #node
return 'n_rg%d_%d_ng%d_%d' % tuple(self.index_stack)
elif len(self.index_stack) == 3: #node group template
return 'n_rg%d_%d_ng%d' % tuple(self.index_stack)
elif len(self.index_stack) == 2: #rack
return 'r_rg%d_%d' % tuple(self.index_stack)
elif len(self.index_stack) == 1: #rack_group
return 'rg_rg%d' % tuple(self.index_stack)
else:
print 'error: request name for unknown node type. (' \
+ self.index_stack + ')'
def dump(self, level=0):
if options.verbose == False:
return
print self.index_stack, self.used, self._capacity, len(self.children())
node = self
if node.children() <> []:
for child in node.children():
child.dump()
def prev_node(self):
if self.index_stack == []:
return None
myindex = self.index_stack[-1]
if myindex == 0:
return self.parent.prev_node()
siblings = self.parent.children()
return siblings[myindex-1]
def global_end(self):
'''global index at the end of a node'''
if self.end <> None:
return self.end
# end should be previous node's end + self.capacity()
prev = self.prev_node()
if prev <> None:
self.end = prev.global_end() + self.capacity()
else:
# Otherwise, this is a first node
self.end = self.capacity()
return self.end
def choose_disk(self):
'''when a node is chosen for replication, it needs to choose a disk to put the data.'''
if self.used >= self.capacity():
return None
disk_id = random.randrange(len(self.children()))
disk = self.children()[disk_id]
if disk.used < disk.capacity():
return disk
else:
return self.choose_disk()
class machine_type_t:
    """Hardware profile parsed from one <machine_type> XML element."""
    def __init__(self, mt):
        """Read the disk/cpu/mem sub-elements of *mt* into attribute bags."""
        disk_elem = xml_children(mt, u'disk')[0]
        self.disk = empty_t()
        self.disk.type = str(xml_child_text(disk_elem, u'type'))
        # capacity kept in MB (the x1024 presumably converts from GB -- confirm)
        self.disk.capacity = int(xml_child_text(disk_elem, u'capa'))*1024 # in MB
        self.disk.num = int(xml_child_text(disk_elem, u'num'))
        cpu_elem = xml_children(mt, u'cpu')[0]
        self.cpu = empty_t()
        self.cpu.type = str(xml_child_text(cpu_elem, u'type'))
        self.cpu.cores = int(xml_child_text(cpu_elem, u'number_of_cores'))
        self.cpu.num = int(xml_child_text(cpu_elem, u'num'))
        mem_elem = xml_children(mt, u'mem')[0]
        self.mem = empty_t()
        self.mem.type = str(xml_child_text(mem_elem, u'type'))
        self.mem.capacity = str(xml_child_text(mem_elem, u'capa')) # in MB
        # TODO: other parts of machine_type
class topology_t:
    """Cluster topology parsed from <topo> XML.

    Builds the hnode_t tree (rack groups / racks / node groups / nodes /
    disks), collects router connectivity, and emits ns-2 Tcl setup code
    via totcl()/totcl2().
    """
    def __init__(self, topo_xml):
        """Parse *topo_xml* (a file path) into the hnode tree and metadata."""
        root = xml.dom.minidom.parse(topo_xml)
        self.htree = hnode_t(None)
        self.dmt = {} # dict of machine type
        topo = root.getElementsByTagName(u"topo")[0]
        # populate dict of machine type
        list_machine_type = topo.getElementsByTagName(u'machine_type')
        for mt_node in list_machine_type:
            name = str(xml_child_text(mt_node, u'name'))
            self.dmt[name] = machine_type_t(mt_node)
        # topology
        # Build one template rack per rack group, then clone it; same
        # trick one level down for nodes inside a node group.
        for rack_group in xml_children(topo, u"rack_group"):
            rg_node = hnode_t(self.htree)
            # rgname not in use currently. maybe a name-node map is needed.
            rg_node.rgname = str(xml_child_text(rack_group, u'name'))
            num_rack = len(xml_children(rack_group, u"rack_index"))
            self.racks = num_rack
            rack_node = hnode_t(rg_node)
            # populate the first rack_node
            for node_group in xml_children(rack_group, u"compute_node_group"):
                ng_node = hnode_t(rack_node)
                # machine type and disk
                mt_name = str(xml_child_text(node_group, u'machine_type_name'))
                mt = self.dmt[mt_name]
                ng_node.reserved = mt
                num_node = len(xml_children(node_group, u'node_index'))
                self.nodes = num_node
                node_node = hnode_t(ng_node)
                # populate the first node_node
                for i in range(mt.disk.num):
                    disk_node = hnode_t(node_node, mt.disk.capacity)
                #self.htree.dump()
                # clone other node_nodes
                for i in range(num_node-1):
                    new_node_node = node_node.clone()
                #self.htree.dump()
            # clone other rack_nodes
            for i in range(num_rack-1):
                new_rack_node = rack_node.clone()
            #self.htree.dump()
        self.routers = []
        for router in xml_children(topo, u'router'):
            rt = empty_t()
            rt.connect_to_groups = []
            for connect_to_group in xml_children(router, u'connect_to_group'):
                # rgname[5:] assumes names of the form "rack_<index>"-style
                # prefix of length 5 followed by the numeric index.
                rgname = str(xml_child_text(connect_to_group, u'rack_group_name'))
                switch = empty_t()
                switch.rg = self.find_hnode(tuple([int(rgname[5:])]))
                switch.index = int(xml_child_text(connect_to_group, u'switch_index'))
                rt.connect_to_groups.append(switch)
            rt.name = str(xml_child_text(router, u'name'))
            self.routers.append(rt)
        self.data_nodes = int(xml_child_text(topo, u'data_nodes'))
        self.job_tracker = str(xml_child_text(topo, u'job_tracker'))
        topology = xml_children(topo, u'topology')
        if len(topology) > 0 :
            self.topology = str(xml_text(topology[0]))
        else:
            self.topology = None
    def find_hnode(self, index_stack):
        """Walk the hnode tree following *index_stack* (child indices)."""
        if len(index_stack) > 5:
            # NOTE(review): str + tuple concatenation here would raise
            # TypeError instead of printing -- latent bug, left unchanged.
            print 'Wrong index stack' + index_stack
            return None
        node = self.htree
        for i in index_stack:
            children = node.children()
            node = children[i]
        return node
    def totcl(self, topo_tcl):
        """Write the ns-2 Tcl topology script to file *topo_tcl*.

        NOTE(review): the emitted Tcl references $n30 -- presumably a
        variable set by the `newnode` Tcl proc; confirm against the Tcl
        templates.
        """
        f = open(topo_tcl, 'w')
        f.write('set int_bw %s\n' % (int_bw))
        f.write('set int_latency %s\n' % (int_latency))
        num_of_nodes = 0
        if self.topology == 'dcell':
            # special case, assume everything is symmetric
            # take the first ng to get mt (machine type)
            # number of nodes in a rack matters,
            # number of racks does not matter.
            rg = self.htree.children()[0]
            racks = len(rg.children())
            r = rg.children()[0]
            ng = r.children()[0]
            nodes = len(ng.children())
            mt = ng.reserved
            f.write('set cpu_freq %f\n' % (freq_table[mt.cpu.type]))
            f.write('set cpu_cores %d\n' % (mt.cpu.cores * mt.cpu.num))
            f.write('set rbw %f\n' % (read_bw_table[mt.disk.type]))
            f.write('set wbw %f\n' % (write_bw_table[mt.disk.type]))
            f.write("\nset num_of_nodes %d\n" % (self.data_nodes))
            f.write('setup_2level_dcell %d\n' % (nodes))
            f.write('\n')
            f.write('set jt $%s\n' % (self.job_tracker))
            f.write('set racks %d\n' % (racks))
            f.write('set nodes %d\n' % (nodes))
            f.write('set data_nodes %d\n' % (self.data_nodes))
            f.write('set_mapnodes %d %d %d\n' % (racks, nodes, self.data_nodes))
            f.write('\n')
            f.close()
            return
        # Generic topology: one Tcl node per rack, then a Tcl loop creating
        # the compute nodes of each node group with their disks.
        for rg in self.htree.children():
            self.racks = len(rg.children())
            for r in rg.children():
                f.write('set %s [$ns node]\n' % (r.name()))
                for ng in r.children():
                    self.nodes = len(ng.children())
                    mt = ng.reserved
                    # cpu information for all nodes in a node group
                    freq = freq_table[mt.cpu.type]
                    cores = mt.cpu.cores * mt.cpu.num
                    # disk read and write bandwidths
                    rbw = read_bw_table[mt.disk.type]
                    wbw = write_bw_table[mt.disk.type]
                    f.write('for {set i 0} {$i < %d} {incr i} {\n' \
                            % (len(ng.children())))
                    f.write('\tnewnode "%s_$i" $%s\n' % (ng.name(), r.name()))
                    num_of_nodes += len(ng.children())
                    #f.write('\t$n30 set freq %f\n' % (freq))
                    f.write('\t$n30 set tasklist [new MRPerf/TaskList %f %d]\n' % (freq, cores))
                    f.write('\tfor {set j 0} {$j < %d} {incr j} {\n' \
                            % (mt.disk.num))
                    f.write('\t\t$n30 newdisk %f %f\n' % (rbw, wbw))
                    f.write('\t}\n')
                    f.write('}\n')
                    f.write('\n')
        if True:
            # Guanying 2009.3.10: add a dedicated jobtracker
            # it does not count into num_of_nodes
            rg = self.htree.children()[0]
            r = rg.children()[0]
            ng = r.children()[0]
            mt = ng.reserved
            # cpu information for all nodes in a node group
            freq = freq_table[mt.cpu.type]
            cores = mt.cpu.cores * mt.cpu.num
            # disk read and write bandwidths
            rbw = read_bw_table[mt.disk.type]
            wbw = write_bw_table[mt.disk.type]
            '''
            jt = ng.name()+'_jobtracker'
            f.write('\nnewnode "%s" $%s\n' % (jt, r.name()))
            f.write('set jt $%s\n' % (jt))
            f.write('$jt set tasklist [new MRPerf/TaskList %f %d]\n' % (freq, cores))
            f.write('for {set j 0} {$j < %d} {incr j} {\n' \
                    % (mt.disk.num))
            f.write('\t$jt newdisk %f %f\n' % (rbw, wbw))
            f.write('}\n')'''
            #f.write("\nset num_of_nodes %d\n" % (num_of_nodes))
            f.write("\nset num_of_nodes %d\n" % (self.data_nodes))
        # Router Tcl nodes plus duplex links to every rack of the rack
        # groups they connect to.
        for rt in self.routers:
            f.write('set %s [$ns node]\n' % (rt.name))
            f.write('$%s shape hexagon\n' % (rt.name))
            f.write('\n')
            for switch in rt.connect_to_groups:
                for r in switch.rg.children():
                    f.write('$ns duplex-link $%s $%s %s %s DropTail\n' \
                            % (r.name(), rt.name, ext_bw, ext_latency))
            f.write('\n')
        f.write('set jt $%s\n' % (self.job_tracker))
        f.write('set racks %d\n' % (self.racks))
        f.write('set nodes %d\n' % (self.nodes))
        f.write('set data_nodes %d\n' % (self.data_nodes))
        f.write('set_mapnodes %d %d %d\n' % (self.racks, self.nodes, self.data_nodes))
        f.write('\n')
        f.close()
    def totcl2(self, mapnodes_tcl):
        """Write the Tcl that connects every map node to the jobtracker.

        Emits nested Tcl loops; each node gets a NodeApp that sends an
        initial heartbeat at t=0.05.
        """
        f = open(mapnodes_tcl, 'w')
        for rg_id in range(len(self.htree.children())):
            rg = self.htree.children()[rg_id]
            racks = len(rg.children())
            f.write('for {set i 0} {$i < %d} {incr i} {\n' % (racks))
            r = rg.children()[0]
            for ng_id in range(len(r.children())):
                ng = r.children()[ng_id]
                nodes = len(ng.children())
                f.write('\tfor {set j 0} {$j < %d} {incr j} {\n' % (nodes))
                n = ng.children()[0]
                '''
                set mn [format "%s%s%s%s" "\$n_rg0_" $i "_ng0_" $j]
                set tcp0 [new Agent/TCP/FullTcp]
                set dummy [new MRPerf/NodeApp $tcp0]
                eval "$dummy set hnode $mn"
                set app11 [$dummy new-connection $jt]
                $ns at 0.05 "$app11 snd {heartbeat}"
                '''
                f.write('\t\tset mn [format "%%s%%s%%s%%s" "\\$n_rg%d_" $i "_ng%d_" $j]\n' % (rg_id, ng_id))
                f.write('\t\tset tcp0 [new Agent/TCP/FullTcp]\n')
                f.write('\t\tset dummy [new MRPerf/NodeApp $tcp0]\n')
                f.write('\t\teval "$dummy set hnode $mn"\n')
                f.write('\t\tset app11 [$dummy new-connection $jt]\n')
                f.write('\t\t$ns at 0.05 "$app11 send_heartbeat"\n')
            f.write('\t}\n')
        f.write('}\n')
        f.write('\n')
        f.close()
class conf_t:
    """Metadata-generation configuration parsed from the <conf> element of
    the generation xml: output path, file-count range, file-size range,
    replication level, distribution method and name node."""
    def __init__(self, gen_xml):
        root = xml.dom.minidom.parse(gen_xml)
        conf = xml_children(root, u'conf')[0]
        self.path = str(xml_child_text(conf, u'path'))
        # How many files to generate (uniform random in [min, max]).
        files_node = xml_children(conf, u'number_files')[0]
        self.files = empty_t()
        self.files.min = int(xml_child_text(files_node, u'min_files'))
        self.files.max = int(xml_child_text(files_node, u'max_files'))
        # File sizes, expressed as a number of fixed-size units.
        size_node = xml_children(conf, u'file_size')[0]
        self.size = empty_t()
        self.size.unit_size = int(xml_child_text(size_node, u'unit_size')) #MB
        # The unit size also defines the module-global chunk size used by gen().
        global chunk_size
        chunk_size = self.size.unit_size
        self.size.min_unit = int(xml_child_text(size_node, u'min_unit'))
        self.size.max_unit = int(xml_child_text(size_node, u'max_unit'))
        self.replicas = int(xml_child_text(conf, u'replication_level'))
        # One of 'linear', 'RR', 'random', 'Hadoop' (see method_dict in gen()).
        self.method = str(xml_child_text(conf, u'gen_method'))
        self.name_node = str(xml_child_text(conf, u'name_node'))
        #TODO: move into xml
        # Warn if requested data would exceed this fraction of total capacity.
        self.factor = 0.5
class job_t:
    """Job configuration parsed from the <job> element of the job xml.

    Builds self.tcl, a tcl fragment setting cycles_per_byte, the map-output
    filter-ratio random variable, average record size and the job tracker
    handle; also records the input/output directories."""
    def __init__(self, job_xml):
        root = xml.dom.minidom.parse(job_xml)
        job_node = xml_children(root, u'job')[0]
        self.tcl = 'set cycles_per_byte ' + \
            str(xml_child_text(job_node, u'cycles_per_byte')) + \
            '\n\t# in cycles per byte, 1G cycles per 1GB\n\n'
        # filter_ratio is described by exactly one distribution element;
        # its tag name selects the ns-2 RandomVariable subclass.
        filter_ratio_node = xml_children(job_node, u'filter_ratio')[0]
        distr_node = [node for node in filter_ratio_node.childNodes \
            if node.nodeType == node.ELEMENT_NODE][0]
        s = str(distr_node.nodeName)
        self.tcl += 'set filter_ratio [new RandomVariable/%s]\n' % \
            (s.capitalize())
        if (distr_node.nodeName == u'constant'):
            self.tcl += '$filter_ratio set val_ %s \n' % (xml_text(distr_node))
        elif (distr_node.nodeName == u'uniform'):
            self.tcl += '$filter_ratio set min_ ' + \
                str(xml_child_text(distr_node, u'uniform_min')) + '\n'
            self.tcl += '$filter_ratio set max_ ' + \
                str(xml_child_text(distr_node, u'uniform_max')) + '\n'
        elif (distr_node.nodeName == u'pareto'):
            self.tcl += '$filter_ratio set avg_ %s\n' % \
                (xml_child_text(distr_node, u'pareto_scale'))
            self.tcl += '$filter_ratio set shape_ %s\n' % \
                (xml_child_text(distr_node, u'pareto_shape'))
        elif (distr_node.nodeName == u'exponential'):
            # ns-2 expects the mean, i.e. 1/lambda.
            self.tcl += '$filter_ratio set avg_ %f\n' % \
                (1/float(xml_child_text(distr_node, u'exp_lambda')))
        elif (distr_node.nodeName == u'normal'):
            self.tcl += '$filter_ratio set avg_ %s\n' % \
                (xml_child_text(distr_node, u'normal_average'))
            self.tcl += '$filter_ratio set std_ %s\n' % \
                (xml_child_text(distr_node, u'normal_variance'))
        else:
            print 'warning: unknown distribution method'
        self.tcl += '\n'
        self.tcl += 'set avg_record_size %s\n\t# in byte\n' % \
            (xml_child_text(job_node, u'average_record_size'))
        self.tcl += 'set jt $%s\n' % (xml_child_text(job_node, u'job_tracker'))
        self.input = str(xml_child_text(job_node, u'input_dir'))
        self.output = str(xml_child_text(job_node, u'output_dir'))
        self.tcl += '\n'
# NOTE(review): the samplers below read self.constant / self.uniform /
# self.pareto / self.gauss / self.expo, so they are methods of some class
# whose header is not visible in this chunk (indentation was lost in the
# dump) -- confirm the enclosing class before relying on them.
def _const(self):
    # Degenerate distribution: always the configured constant.
    return self.constant
def _uniform(self):
    # Uniform sample in [min, max].
    return random.uniform(self.uniform.min, self.uniform.max)
def _pareto(self):
    # Pareto sample with shape parameter alpha.
    return random.paretovariate(self.pareto.alpha)
def _gauss(self):
    # Normal sample with mean mu and std-dev sigma.
    return random.gauss(self.gauss.mu, self.gauss.sigma)
def _expo(self):
    # Exponential sample with rate lambd (mean 1/lambd).
    return random.expovariate(self.expo.lambd)
# Monotonically increasing counter backing get_new_filename().
global_i = 0

def get_new_filename():
    """Return the next sequential synthetic file name.

    Names are 'file_' followed by an 8-digit zero-padded counter, e.g.
    'file_00000000', 'file_00000001', ...
    """
    global global_i
    name = 'file_%08d' % global_i
    global_i += 1
    return name
class disk_t:
    """Identifies one disk by its position in the topology tree:
    (rack-group, rack, node-group, node, disk) indices."""
    def __init__(self, rg, rack, ng, node, disk):
        self.rg = rg
        self.rack = rack
        self.ng = ng
        self.node = node
        self.disk = disk
    def name(self):
        """Return the ns-2 node name for this disk, e.g. 'n_rg0_1_ng0_2_disk0'.

        Bug fix: the original formatted the bare names (rg, rack, ...),
        which are not in scope inside name() and raised NameError at every
        call; the instance attributes are what was intended.
        """
        return 'n_rg%d_%d_ng%d_%d_disk%d' % (
            self.rg, self.rack, self.ng, self.node, self.disk)
# Module-level placement bookkeeping.
# NOTE(review): neither global appears to be referenced in the visible part
# of this file -- possibly dead state left over from an earlier version.
global_last_chunk_on_disk = None
global_linear_chunk_index = 0
class distribute_linear:
    """Chunk placement: fill disks in depth-first tree order (linear).
    Always produces a single replica per chunk (see the docstring below)."""
    def __init__(self, topo):
        self.topo = topo
    def distribute_chunk(self, replicas=1):
        '''search the tree to find a node with used < capacity().'''
        '''wanggy 2008.7.9:
        Note that the function always generate only 1 replica for each chunk.
        That is because of the nature of linear distribution. It's not easy to
        define a linear way to generate multiple copies for each chunk, on different
        disks. Maybe a possible option is to divide the storage space into several
        parts, and put 1 replica in each part.
        parameter replicas here can be more than 1, but the function will not
        produce more than 1 replicas for that.'''
        node = self.topo.htree
        if node.used >= node.capacity():
            print 'error: HDFS full'
            # Error is signalled in-band as a string inside the list.
            return ['error: HDFS full']
        # Descend to the first child with free capacity at each level until
        # a leaf (disk) is reached.
        while node.children() <> []:
            for child in node.children():
                if child.used < child.capacity():
                    break
            node = child
        node.add_chunk()
        return [node]
class distribute_RR:
    """Chunk placement: round-robin over disks in depth-first tree order."""
    def __init__(self, topo):
        # self.order: all leaf disks in depth-first order (stack-based walk;
        # children are pushed reversed so they pop in original order).
        self.order = []
        stack = []
        self.last_disk = -1
        self.topo = topo
        stack.append(topo.htree)
        while stack <> []:
            node = stack.pop()
            if node.children() <> []:
                temp = node.children()[:]
                temp.reverse()
                for child in temp:
                    stack.append(child)
            else:
                # reach a disk
                # assert len(node.index_stack) == 5
                self.order.append(node)
    def distribute_chunk(self, replicas=1):
        '''This is a round robin algorithm on disk level, not on node level.'''
        #cycle = len(self.order)
        # Guanying 2009.4.7: workaround for data_nodes
        # NOTE(review): cycle is the configured data-node count, not
        # len(self.order); after full disks are deleted from self.order the
        # two can disagree -- confirm this interplay is intended.
        cycle = self.topo.data_nodes
        i = (self.last_disk + 1) % cycle
        #print i, self.last_disk
        disklist = []
        while len(disklist) < replicas:
            disk = self.order[i]
            if disk.used < disk.capacity():
                disk.add_chunk()
                # Seeing the same disk again means we have wrapped around
                # every disk without finding enough distinct ones.
                if disk in disklist:
                    print 'warning: round robin for multiple replicas has ' \
                        'iterated around all disks.'
                    break
                disklist.append(disk)
                i = (i + 1) % cycle
            else :
                # Disk is full: drop it from the rotation entirely.
                del self.order[i]
                if self.order == []:
                    print "error: HDFS full"
                    return ["error: HDFS full"]
                i = i % cycle
        self.last_disk = (i-1) % cycle
        #print i, self.last_disk
        return disklist
class distribute_random:
    """Chunk placement: pick disks uniformly at random, weighted by free
    capacity (descend the tree choosing a child proportionally to its
    remaining space)."""
    def __init__(self, topo):
        self.topo = topo
    def distribute_chunk(self, replicas=1):
        return self.distribute_chunk_random(replicas)
    def distribute_chunk_random(self, replicas=1):
        '''random at disk level. replicas guaranteed to be on different disks,
        but maybe on disks on the same node. Then when viewed at node level, two or
        more replicas can be on the same node. I don't consider that as an error.
        If it is preferred to have replicas on different nodes, I can change that
        later'''
        # TODO: comments above
        disklist = []
        while len(disklist) < replicas:
            disk = self.distribute_one_chunk()
            if disk == None:
                # HDFS full; return however many replicas we placed.
                break
            if not disk in disklist:
                disklist.append(disk)
        # Usage counters are only updated once the full set is chosen.
        for disk in disklist:
            disk.add_chunk()
        return disklist
    def distribute_one_chunk(self):
        """Pick one disk at random, weighted by free capacity; returns the
        leaf node or None when the tree is full."""
        while True:
            node = self.topo.htree
            if node.capacity() <= node.used:
                print 'error: HDFS full\n'
                return None
            # index is a random offset into the tree's total free space;
            # the descent below locates the child containing that offset.
            index = random.randrange(node.capacity()-node.used)
            while node.children() <> []:
                temp = node
                for child in node.children():
                    if child.capacity() - child.used > index:
                        node = child
                        break
                    else:
                        index -= (child.capacity() - child.used)
                # Defensive check: no child was selected -> inconsistent tree.
                if temp.index_stack == node.index_stack:
                    print 'error\n'
                    print node.index_stack
                    print node.capacity(), index, node.global_end()
                    print node.global_end(), node.used, index
                    print child.global_end(), child.used, index
                    sys.exit()
            # Guanying 2009.4.7: workaround for data_nodes
            # Reject disks on nodes beyond the configured data-node count
            # and retry from the top.
            r = node.index_stack[1]
            n = node.index_stack[3]
            #print r, n
            if r*self.topo.nodes + n >= self.topo.data_nodes:
                continue
            if node.used < node.capacity():
                break
        return node
    def is_invalid_datanode(self, node):
        '''Guanying 2009.4.7: workaround for data_nodes'''
        r = node.index_stack[1]
        n = node.index_stack[3]
        #print r, n
        return (r*self.topo.nodes + n >= self.topo.data_nodes)
    def choose_hnode(self, exclude_nodes=None, level=4, subtree=None):
        """Randomly choose a tree node at depth `level` (4 = host node,
        2 = rack) under `subtree`, skipping anything in exclude_nodes.
        Returns (node, exclude_nodes) or None when full."""
        if exclude_nodes == None:
            exclude_nodes = []
        if subtree == None:
            subtree = self.topo.htree
        while True:
            node = subtree
            if node.capacity() <= node.used:
                print 'error: HDFS full at %s\n' % (node.index_stack)
                return None
            index = random.randrange(node.capacity()-node.used)
            # Same capacity-weighted descent as distribute_one_chunk, but
            # stopping at the requested tree depth rather than at a leaf.
            while len(node.index_stack) < level:
                temp = node
                for child in node.children():
                    if child.capacity() - child.used > index:
                        node = child
                        break
                    else:
                        index -= (child.capacity() - child.used)
                if temp.index_stack == node.index_stack:
                    print 'error\n'
                    print node.index_stack
                    print node.capacity(), index, node.global_end()
                    print node.global_end(), node.used, index
                    print child.global_end(), child.used, index
                    sys.exit()
            if node.used < node.capacity() and not node in exclude_nodes:
                break
            exclude_nodes.append(node)
        return (node, exclude_nodes)
class distribute_Hadoop(distribute_random):
    """Chunk placement mimicking Hadoop's default policy: one replica on a
    'local' node, one on another node in the same rack, one on a different
    rack. Falls back to pure random placement on single-rack topologies."""
    def __init__(self, topo):
        self.topo = topo
        racks = 0
        for rg in topo.htree.children():
            racks += len(rg.children())
        if racks < 2:
            self.one_rack = True
        else:
            self.one_rack = False
    def distribute_chunk(self, replicas=1):
        if self.one_rack:
            return self.distribute_chunk_random(replicas)
        else:
            # Pick a random valid data node to act as the 'local' writer.
            local_node, ex = self.choose_node()
            while self.is_invalid_datanode(local_node):
                local_node, ex = self.choose_node()
            #print local_node.index_stack
            return self.distribute_chunk_Hadoop(local_node, replicas)
    def distribute_chunk_Hadoop(self, local_node, replicas=1):
        '''Always on more than one racks. First chunk on local node, second
        in same rack, third on another rack. Problem is: which node is local?
        Answer 1: Randomly choose a local node.'''
        # NOTE(review): the 'replicas' argument is ignored here -- exactly
        # three replicas are always produced.
        ex = [local_node]
        # index_stack layout implies node.parent.parent is the rack.
        rack = local_node.parent.parent
        another_node, ex1 = self.choose_node(rack, ex)
        while self.is_invalid_datanode(another_node):
            another_node, ex1 = self.choose_node(rack, ex)
        #print another_node.index_stack
        ex = ex1
        another_rack, ex = self.choose_rack([rack])
        remote_node, ex1 = self.choose_node(another_rack, [])
        while self.is_invalid_datanode(remote_node):
            remote_node, ex1 = self.choose_node(another_rack, [])
        #print remote_node.index_stack
        disklist = [node.choose_disk() for node in [local_node, another_node, remote_node]]
        for disk in disklist:
            if disk <> None:
                disk.add_chunk()
        #print [disk.index_stack for disk in disklist]
        return disklist
    def choose_node(self, subtree=None, exclude_nodes=None):
        # Depth 4 in the index stack is a host node.
        return self.choose_hnode(exclude_nodes, 4, subtree)
    def choose_rack(self, exclude_racks=None):
        # Depth 2 in the index stack is a rack.
        return self.choose_hnode(exclude_racks, 2, self.topo.htree)
def gen(topo, conf):
    """Generate the HDFS metadata DOM: random files with random chunk counts,
    each chunk's replicas placed by the strategy named in conf.method.
    Returns the xml.dom.minidom document."""
    method_dict = {'linear': distribute_linear,
                   'RR': distribute_RR,
                   'random': distribute_random,
                   'Hadoop': distribute_Hadoop}
    method = method_dict[conf.method](topo)
    # Skeleton document; the placeholder <file>/<chunk>/<rep> nodes below are
    # cloned for each generated entity and the template <file> is removed at
    # the end.
    xml_template = """<?xml version="1.0" encoding="UTF-8"?>
<root xsi:noNamespaceSchemaLocation="metadata.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<chunk_size>16</chunk_size>
<name_node>1</name_node>
<dir name="data">
<file name="file">
<chunk id="0">
<rep>n1</rep>
</chunk>
</file>
</dir>
</root>
"""
    meta = xml.dom.minidom.parseString(xml_template)
    root = xml_children(meta, u'root')[0]
    chunk_size_node = xml_children(root, u'chunk_size')[0].childNodes[0]
    chunk_size_node.nodeValue = unicode(str(chunk_size), 'utf-8')
    name_node_text_node = xml_children(root, u'name_node')[0].childNodes[0]
    name_node_text_node.nodeValue = unicode(conf.name_node, 'utf-8')
    dir = xml_children(root, u'dir')[0]
    dir.setAttribute(u'name', unicode(conf.path, 'utf-8'))
    file_node = xml_children(dir, u'file')[0]
    chunk_node = xml_children(file_node, u'chunk')[0]
    replica_node = xml_children(chunk_node, u'rep')[0]
    # Sanity check: expected total data vs. a fraction of total capacity.
    if (conf.files.min+conf.files.max)/2 \
            * (conf.size.min_unit+conf.size.max_unit)/2 \
            * conf.size.unit_size * conf.replicas \
            >= conf.factor * topo.htree.capacity():
        print "warning: too much data to be stored in DFS\n"
    files = int(random.uniform(conf.files.min, conf.files.max))
    for i in range(files):
        new_file = file_node.cloneNode(False) # the param is 'deep',
        #means to clone all child nodes as well.
        name = get_new_filename()
        new_file.setAttribute(u'name', unicode(name, 'utf-8'))
        dir.appendChild(new_file)
        chunks = int(random.uniform(conf.size.min_unit, conf.size.max_unit) \
            * conf.size.unit_size / chunk_size)
        # if i % 10 == 0:
        # print i
        for j in range(chunks):
            new_chunk = chunk_node.cloneNode(False)
            new_chunk.setAttribute(u'id', unicode(str(j), 'utf-8'))
            new_file.appendChild(new_chunk)
            # One <rep> element per replica disk chosen by the strategy.
            for node in method.distribute_chunk(conf.replicas):
                new_replica = replica_node.cloneNode(True)
                text_node = new_replica.childNodes[0]
                text_node.nodeValue = unicode(node.name(), 'utf-8')
                new_chunk.appendChild(new_replica)
    #TODO: make xml prettier. see results from the following two commented lines
    #print file_node.childNodes
    #print new_file.childNodes
    # Drop the template <file> now that the real files are in place.
    dir.removeChild(file_node)
    file_node.unlink()
    #verify(dir)
    return meta
def verify(dir_node):
for file_node in xml_children(dir_node, u'file'):
filename = str(file_node.getAttribute(u'name'))
for chunk_node in xml_children(file_node, u'chunk'):
chunk_id = int(chunk_node.getAttribute(u'id'))
if len(xml_children(chunk_node, u'rep')) != 3:
print chunk_node
list = []
for replica_node in xml_children(chunk_node, u'rep'):
replica = str(replica_node.childNodes[0].nodeValue)
if replica in list:
print 'error!!!'
list.append(replica)
def main():
    """Parse command-line options, build the topology and configuration,
    generate metadata, and write it to the output xml file."""
    global options
    usage = "usage: %prog options"
    parser = OptionParser(usage)
    parser.add_option("-v", "--verbose", default=False,
        action="store_true", dest="verbose")
    parser.add_option("-t", "--topology", dest="topo_xml",
        help="topology configuration xml")
    parser.add_option("-g", "--gen", dest="gen_xml",
        help="metadata generation configuration xml")
    parser.add_option("-m", "--metadata", dest="meta_xml",
        help="metadata configuration xml")
    (options, args) = parser.parse_args()
    # All three xml paths are mandatory.
    if None in (options.topo_xml, options.meta_xml, options.gen_xml):
        print 'xmls not defined'
        parser.print_help()
        sys.exit()
    random.seed()
    topo = topology_t(options.topo_xml)
    conf = conf_t(options.gen_xml)
    meta = gen(topo, conf)
    f = open(options.meta_xml, 'w')
    f.write(meta.toxml())
    f.close()
    # f = open(options.meta_xml)
    # print f.read()
    # f.close()
# Script entry point.
if __name__ == '__main__':
    main()
|
toomanyjoes/mrperfcs386m
|
test/gen.py
|
Python
|
mit
| 26,543 | 0.029951 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art

# Generated pyaf test case: one artificial series of 128 daily points with a
# Lag1Trend trend, cycle length 5, no transform, zero noise, 100 exogenous
# variables and AR order 0.
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 5, transform = "None", sigma = 0.0, exog_count = 100, ar_order = 0);
|
antoinecarme/pyaf
|
tests/artificial/transf_None/trend_Lag1Trend/cycle_5/ar_/test_artificial_128_None_Lag1Trend_5__100.py
|
Python
|
bsd-3-clause
| 260 | 0.088462 |
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for the User related pages."""
import os
from django.conf.urls import url as django_url
from soc.logic import accounts
from soc.logic import cleaning
from soc.models.user import User
from soc.views import base
from soc.views.forms import ModelForm
class UserCreateForm(ModelForm):
  """Django form for creating a User profile (link_id and name).
  """
  class Meta:
    model = User
    fields = ['link_id', 'name']
  # Reject link_ids that already belong to an existing User entity.
  clean_link_id = cleaning.clean_user_not_exist('link_id')
  def templatePath(self):
    # TODO: This needs a more generic form.
    return 'modules/gsoc/_form.html'
class UserEditForm(ModelForm):
  """Django form to edit a User profile (name only; link_id is immutable).
  """
  class Meta:
    model = User
    fields = ['name']
  def templatePath(self):
    # TODO: This needs a more generic form.
    return 'modules/gsoc/_form.html'
class CreateUserPage(base.RequestHandler):
  """View for creating the user profile.
  """
  def djangoURLPatterns(self):
    """URL patterns served by this handler."""
    return [
        django_url(r'^user/create$', self, name='create_user'),
    ]
  def checkAccess(self, data, check, mutator):
    """Ensures that the user is logged in and does not have a User profile."""
    check.isNotUser()
  def templatePath(self):
    # TODO: make this specific to the current active program
    return 'soc/user/base.html'
  def context(self, data, check, mutator):
    """Template context: an (unbound or POST-bound) UserCreateForm."""
    # TODO: program specific in core module, needs to be avoided
    from soc.modules.gsoc.views.forms import GSoCBoundField
    form = UserCreateForm(GSoCBoundField, data=data.POST or None)
    return {
        'base_layout': 'modules/gsoc/base.html',
        'app_version': os.environ.get('CURRENT_VERSION_ID', '').split('.')[0],
        'page_name': 'Create User profile',
        'forms': [form],
    }
  def post(self, data, check, mutator):
    """Handler for HTTP POST request."""
    from soc.modules.gsoc.views.forms import GSoCBoundField
    form = UserCreateForm(GSoCBoundField, data=data.POST)
    if not form.is_valid():
      # TODO(nathaniel): problematic self-call.
      return self.get(data, check, mutator)
    cleaned_data = form.cleaned_data
    # Attach the normalized Google account and its stable id before saving.
    norm_account = accounts.normalizeAccount(data.gae_user)
    cleaned_data['account'] = norm_account
    cleaned_data['account_id'] = data.gae_user.user_id()
    # The entity's key name is the (validated, unique) link_id.
    form.create(key_name=cleaned_data['link_id'])
    return data.redirect.to('edit_user', validated=True)
class EditUserPage(base.RequestHandler):
  """View to edit the user profile."""
  def djangoURLPatterns(self):
    """URL patterns served by this handler."""
    return [
        django_url(r'^user/edit', self, name='edit_user'),
    ]
  def checkAccess(self, data, check, mutator):
    # Only existing users may edit their profile.
    check.isUser()
  def templatePath(self):
    # TODO: make this specific to the current active program
    return 'soc/user/base.html'
  def context(self, data, check, mutator):
    """Template context: a UserEditForm bound to the current user."""
    # TODO: program specific in core module
    from soc.modules.gsoc.views.forms import GSoCBoundField
    form = UserEditForm(
        GSoCBoundField, data=data.POST or None, instance=data.user)
    return {
        'base_layout': 'modules/gsoc/base.html',
        'app_version': os.environ.get('CURRENT_VERSION_ID', '').split('.')[0],
        'page_name': 'Edit User profile',
        'forms': [form],
    }
  def post(self, data, check, mutator):
    """Handler for HTTP POST request."""
    from soc.modules.gsoc.views.forms import GSoCBoundField
    form = UserEditForm(
        GSoCBoundField, data=data.POST, instance=data.user)
    if not form.is_valid():
      # TODO(nathaniel): problematic self-call.
      return self.get(data, check, mutator)
    form.save()
    # TODO(nathaniel): redirection to same page.
    return data.redirect.to('edit_user', validated=True)
|
rhyolight/nupic.son
|
app/soc/views/user.py
|
Python
|
apache-2.0
| 4,262 | 0.005397 |
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
import json
import KVSHandler as handler

# Load the listen address from config.json (expects "ip" and "port" keys).
with open('config.json') as d:
    config = json.load(d)
ip = config['ip']
port = int(config['port'])
def write(key, value):
    """Store *value* under *key* via the backing KVS handler.

    The original declared ``global handler``; that is only needed for
    rebinding, not for reading a module-level name, so it was dropped.
    """
    return handler.write(key, value)
def delete(key):
    """Remove *key* from the backing KVS handler.

    ``global handler`` was dropped: it is only required for rebinding,
    not for reading the module-level name.
    """
    return handler.delete(key)
def list():
    """Return all keys from the backing KVS handler.

    The name shadows the ``list`` builtin, but it is kept because the
    function object is registered by reference below
    (``server.register_function(list, "list")``).  ``global handler`` was
    dropped: only rebinding needs it.
    """
    return handler.list()
def read(key):
    """Return the value stored under *key* in the backing KVS handler.

    ``global handler`` was dropped: it is only required for rebinding,
    not for reading the module-level name.
    """
    return handler.read(key)
# Expose the four KVS operations over XML-RPC and block serving requests.
# allow_none lets handlers return Python None over the wire.
server = SimpleXMLRPCServer((ip, port), allow_none=True)
print "Listening on port " + str(port) + "..."
server.register_function(write, "write")
server.register_function(delete, "delete")
server.register_function(list, "list")
server.register_function(read, "read")
server.serve_forever()
|
f-apolinario/BFTStorageService
|
server/StorageService.py
|
Python
|
mit
| 750 | 0.025333 |
import plotly.express as px
import plotly.graph_objects as go
from numpy.testing import assert_array_equal
import numpy as np
import pandas as pd
import pytest
def _compare_figures(go_trace, px_fig):
    """Compare a figure created with a go trace and a figure created with
    a px function call. Check that all values inside the go Figure are the
    same in the px figure (which sets more parameters).
    """
    go_json = go.Figure(go_trace).to_plotly_json()
    px_json = px_fig.to_plotly_json()
    # The default template differs between the two construction paths and
    # is not part of the comparison.
    del go_json["layout"]["template"]
    del px_json["layout"]["template"]
    for key, value in go_json["data"][0].items():
        assert_array_equal(value, px_json["data"][0][key])
    for key, value in go_json["layout"].items():
        assert value == px_json["layout"][key]
def test_pie_like_px():
    """Each 'pie-like' px function builds the same figure as the raw go trace."""
    # Pie
    labels = ["Oxygen", "Hydrogen", "Carbon_Dioxide", "Nitrogen"]
    values = [4500, 2500, 1053, 500]
    fig = px.pie(names=labels, values=values)
    trace = go.Pie(labels=labels, values=values)
    _compare_figures(trace, fig)
    labels = ["Eve", "Cain", "Seth", "Enos", "Noam", "Abel", "Awan", "Enoch", "Azura"]
    parents = ["", "Eve", "Eve", "Seth", "Seth", "Eve", "Eve", "Awan", "Eve"]
    values = [10, 14, 12, 10, 2, 6, 6, 4, 4]
    # Sunburst
    fig = px.sunburst(names=labels, parents=parents, values=values)
    trace = go.Sunburst(labels=labels, parents=parents, values=values)
    _compare_figures(trace, fig)
    # Treemap
    fig = px.treemap(names=labels, parents=parents, values=values)
    trace = go.Treemap(labels=labels, parents=parents, values=values)
    _compare_figures(trace, fig)
    # Funnel
    x = ["A", "B", "C"]
    y = [3, 2, 1]
    fig = px.funnel(y=y, x=x)
    trace = go.Funnel(y=y, x=x)
    _compare_figures(trace, fig)
    # Funnelarea
    fig = px.funnel_area(values=y, names=x)
    trace = go.Funnelarea(values=y, labels=x)
    _compare_figures(trace, fig)
def test_sunburst_treemap_colorscales():
    """Continuous vs. discrete color handling for px.sunburst and px.treemap."""
    labels = ["Eve", "Cain", "Seth", "Enos", "Noam", "Abel", "Awan", "Enoch", "Azura"]
    parents = ["", "Eve", "Eve", "Seth", "Seth", "Eve", "Eve", "Awan", "Eve"]
    values = [10, 14, 12, 10, 2, 6, 6, 4, 4]
    for func, colorway in zip(
        [px.sunburst, px.treemap], ["sunburstcolorway", "treemapcolorway"]
    ):
        # Continuous colorscale
        fig = func(
            names=labels,
            parents=parents,
            values=values,
            color=values,
            color_continuous_scale="Viridis",
            range_color=(5, 15),
        )
        assert fig.layout.coloraxis.cmin, fig.layout.coloraxis.cmax == (5, 15)
        # Discrete colorscale, color arg passed
        color_seq = px.colors.sequential.Reds
        fig = func(
            names=labels,
            parents=parents,
            values=values,
            color=labels,
            color_discrete_sequence=color_seq,
        )
        assert np.all([col in color_seq for col in fig.data[0].marker.colors])
        # Numerical color arg passed, fall back to continuous
        fig = func(names=labels, parents=parents, values=values, color=values,)
        assert [
            el[0] == px.colors.sequential.Viridis
            for i, el in enumerate(fig.layout.coloraxis.colorscale)
        ]
        # Numerical color arg passed, continuous colorscale
        # even if color_discrete_sequence if passed
        fig = func(
            names=labels,
            parents=parents,
            values=values,
            color=values,
            color_discrete_sequence=color_seq,
        )
        assert [
            el[0] == px.colors.sequential.Viridis
            for i, el in enumerate(fig.layout.coloraxis.colorscale)
        ]
        # Discrete colorscale, no color arg passed
        color_seq = px.colors.sequential.Reds
        fig = func(
            names=labels,
            parents=parents,
            values=values,
            color_discrete_sequence=color_seq,
        )
        assert list(fig.layout[colorway]) == color_seq
def test_sunburst_treemap_with_path():
    """px.sunburst with a `path` argument: branchvalues, totals, continuous
    colors, non-numeric `values` rejection, and mixed column/array paths.

    Fix: the original repeated the '# Values passed' check as an exact
    duplicate block; the duplicate was removed.
    """
    vendors = ["A", "B", "C", "D", "E", "F", "G", "H"]
    sectors = [
        "Tech",
        "Tech",
        "Finance",
        "Finance",
        "Tech",
        "Tech",
        "Finance",
        "Finance",
    ]
    regions = ["North", "North", "North", "North", "South", "South", "South", "South"]
    values = [1, 3, 2, 4, 2, 2, 1, 4]
    total = ["total",] * 8
    df = pd.DataFrame(
        dict(
            vendors=vendors,
            sectors=sectors,
            regions=regions,
            values=values,
            total=total,
        )
    )
    path = ["total", "regions", "sectors", "vendors"]
    # No values
    fig = px.sunburst(df, path=path)
    assert fig.data[0].branchvalues == "total"
    # Values passed: root aggregates to the sum of all leaves
    fig = px.sunburst(df, path=path, values="values")
    assert fig.data[0].branchvalues == "total"
    assert fig.data[0].values[-1] == np.sum(values)
    # Continuous colorscale
    fig = px.sunburst(df, path=path, values="values", color="values")
    assert "coloraxis" in fig.data[0].marker
    assert np.all(np.array(fig.data[0].marker.colors) == np.array(fig.data[0].values))
    # Error when values cannot be converted to numerical data type
    df["values"] = ["1 000", "3 000", "2", "4", "2", "2", "1 000", "4 000"]
    msg = "Column `values` of `df` could not be converted to a numerical data type."
    with pytest.raises(ValueError, match=msg):
        fig = px.sunburst(df, path=path, values="values")
    # path is a mixture of column names and array-like
    path = [df.total, "regions", df.sectors, "vendors"]
    fig = px.sunburst(df, path=path)
    assert fig.data[0].branchvalues == "total"
def test_sunburst_treemap_with_path_and_hover():
    """Columns passed via hover_data must appear in the hovertemplate."""
    df = px.data.tips()
    fig = px.sunburst(
        df, path=["sex", "day", "time", "smoker"], color="smoker", hover_data=["smoker"]
    )
    assert "smoker" in fig.data[0].hovertemplate
def test_sunburst_treemap_with_path_color():
    """Color handling when `path` is used: continuous, discrete,
    color_discrete_map, hover customdata, and numeric path columns."""
    vendors = ["A", "B", "C", "D", "E", "F", "G", "H"]
    sectors = [
        "Tech",
        "Tech",
        "Finance",
        "Finance",
        "Tech",
        "Tech",
        "Finance",
        "Finance",
    ]
    regions = ["North", "North", "North", "North", "South", "South", "South", "South"]
    values = [1, 3, 2, 4, 2, 2, 1, 4]
    calls = [8, 2, 1, 3, 2, 2, 4, 1]
    total = ["total",] * 8
    df = pd.DataFrame(
        dict(
            vendors=vendors,
            sectors=sectors,
            regions=regions,
            values=values,
            total=total,
            calls=calls,
        )
    )
    path = ["total", "regions", "sectors", "vendors"]
    # Leaf colors (first 8 sectors) carry the per-row 'calls' values.
    fig = px.sunburst(df, path=path, values="values", color="calls")
    colors = fig.data[0].marker.colors
    assert np.all(np.array(colors[:8]) == np.array(calls))
    fig = px.sunburst(df, path=path, color="calls")
    colors = fig.data[0].marker.colors
    assert np.all(np.array(colors[:8]) == np.array(calls))
    # Hover info
    df["hover"] = [el.lower() for el in vendors]
    fig = px.sunburst(df, path=path, color="calls", hover_data=["hover"])
    custom = fig.data[0].customdata
    assert np.all(custom[:8, 0] == df["hover"])
    # Aggregated (non-leaf) sectors get the "(?)" placeholder.
    assert np.all(custom[8:, 0] == "(?)")
    assert np.all(custom[:8, 1] == df["calls"])
    # Discrete color
    fig = px.sunburst(df, path=path, color="vendors")
    assert len(np.unique(fig.data[0].marker.colors)) == 9
    # Discrete color and color_discrete_map
    cmap = {"Tech": "yellow", "Finance": "magenta", "(?)": "black"}
    fig = px.sunburst(df, path=path, color="sectors", color_discrete_map=cmap)
    assert np.all(np.in1d(fig.data[0].marker.colors, list(cmap.values())))
    # Numerical column in path
    df["regions"] = df["regions"].map({"North": 1, "South": 2})
    path = ["total", "regions", "sectors", "vendors"]
    fig = px.sunburst(df, path=path, values="values", color="calls")
    colors = fig.data[0].marker.colors
    assert np.all(np.array(colors[:8]) == np.array(calls))
def test_sunburst_treemap_with_path_non_rectangular():
    """A path with None entries at non-leaf positions must raise; filling the
    missing level makes the hierarchy valid again."""
    vendors = ["A", "B", "C", "D", None, "E", "F", "G", "H", None]
    sectors = [
        "Tech",
        "Tech",
        "Finance",
        "Finance",
        None,
        "Tech",
        "Tech",
        "Finance",
        "Finance",
        "Finance",
    ]
    regions = [
        "North",
        "North",
        "North",
        "North",
        "North",
        "South",
        "South",
        "South",
        "South",
        "South",
    ]
    values = [1, 3, 2, 4, 1, 2, 2, 1, 4, 1]
    total = ["total",] * 10
    df = pd.DataFrame(
        dict(
            vendors=vendors,
            sectors=sectors,
            regions=regions,
            values=values,
            total=total,
        )
    )
    path = ["total", "regions", "sectors", "vendors"]
    msg = "Non-leaves rows are not permitted in the dataframe"
    with pytest.raises(ValueError, match=msg):
        fig = px.sunburst(df, path=path, values="values")
    # Fill the missing sector so every None is a leaf -> valid hierarchy.
    df.loc[df["vendors"].isnull(), "sectors"] = "Other"
    fig = px.sunburst(df, path=path, values="values")
    assert fig.data[0].values[-1] == np.sum(values)
def test_pie_funnelarea_colorscale():
    # NOTE(review): despite the name, this exercises px.sunburst and
    # px.treemap (same pair as test_sunburst_treemap_colorscales) -- possibly
    # a copy-paste leftover; confirm whether px.pie / px.funnel_area (with
    # piecolorway / funnelareacolorway) were intended. Behavior kept as-is.
    labels = ["A", "B", "C", "D"]
    values = [3, 2, 1, 4]
    for func, colorway in zip(
        [px.sunburst, px.treemap], ["sunburstcolorway", "treemapcolorway"]
    ):
        # Discrete colorscale, no color arg passed
        color_seq = px.colors.sequential.Reds
        fig = func(names=labels, values=values, color_discrete_sequence=color_seq,)
        assert list(fig.layout[colorway]) == color_seq
        # Discrete colorscale, color arg passed
        color_seq = px.colors.sequential.Reds
        fig = func(
            names=labels,
            values=values,
            color=labels,
            color_discrete_sequence=color_seq,
        )
        assert np.all([col in color_seq for col in fig.data[0].marker.colors])
def test_funnel():
    """A funnel split by a two-level color column yields one trace per level."""
    stage_values = [5, 4, 3, 3, 2, 1]
    stage_labels = ["A", "B", "C", "A", "B", "C"]
    groups = ["0", "0", "0", "1", "1", "1"]
    fig = px.funnel(x=stage_values, y=stage_labels, color=groups)
    assert len(fig.data) == 2
def test_parcats_dimensions_max():
    """dimensions / dimensions_max_cardinality interplay in
    px.parallel_categories."""
    df = px.data.tips()
    # default behaviour
    fig = px.parallel_categories(df)
    assert [d.label for d in fig.data[0].dimensions] == [
        "sex",
        "smoker",
        "day",
        "time",
        "size",
    ]
    # explicit subset of default
    fig = px.parallel_categories(df, dimensions=["sex", "smoker", "day"])
    assert [d.label for d in fig.data[0].dimensions] == ["sex", "smoker", "day"]
    # shrinking max: high-cardinality columns ('size') are dropped
    fig = px.parallel_categories(df, dimensions_max_cardinality=4)
    assert [d.label for d in fig.data[0].dimensions] == [
        "sex",
        "smoker",
        "day",
        "time",
    ]
    # explicit superset of default, violating the max: explicit wins
    fig = px.parallel_categories(
        df, dimensions=["sex", "smoker", "day", "size"], dimensions_max_cardinality=4
    )
    assert [d.label for d in fig.data[0].dimensions] == ["sex", "smoker", "day", "size"]
|
plotly/python-api
|
packages/python/plotly/plotly/tests/test_core/test_px/test_px_functions.py
|
Python
|
mit
| 11,286 | 0.00124 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 9.3 from Kane 1985."""
from __future__ import division
from sympy import cos, diff, expand, pi, solve, symbols
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import dot, dynamicsymbols
from util import msprint, subs, partial_velocities
from util import generalized_active_forces, potential_energy
# Symbols: gravity g, mass m, contact-force components Px/Py/Pz, disk radius
# R, and time t; q1..q5 are generalized coordinates, u1..u5 generalized speeds.
g, m, Px, Py, Pz, R, t = symbols('g m Px Py Pz R t')
q = dynamicsymbols('q1:6')
qd = dynamicsymbols('q1:6', level=1)
u = dynamicsymbols('u1:6')
## --- Define ReferenceFrames ---
A = ReferenceFrame('A')
B_prime = A.orientnew('B_prime', 'Axis', [q[0], A.z])
B = B_prime.orientnew('B', 'Axis', [pi/2 - q[1], B_prime.x])
C = B.orientnew('C', 'Axis', [q[2], B.z])
## --- Define Points and their velocities ---
pO = Point('O')
pO.set_vel(A, 0)
# R is the point in plane H that comes into contact with disk C.
pR = pO.locatenew('R', q[3]*A.x + q[4]*A.y)
pR.set_vel(A, pR.pos_from(pO).diff(t, A))
pR.set_vel(B, 0)
# C^ is the point in disk C that comes into contact with plane H.
pC_hat = pR.locatenew('C^', 0)
pC_hat.set_vel(C, 0)
# C* is the point at the center of disk C.
pCs = pC_hat.locatenew('C*', R*B.y)
pCs.set_vel(C, 0)
pCs.set_vel(B, 0)
# calculate velocities in A
pCs.v2pt_theory(pR, A, B)
pC_hat.v2pt_theory(pCs, A, C)
## --- Expressions for generalized speeds u1, u2, u3, u4, u5 ---
# NOTE(review): 'map' results are used as lists (+=), so this script relies
# on Python 2 semantics.
u_expr = map(lambda x: dot(C.ang_vel_in(A), x), B)
u_expr += qd[3:]
kde = [u_i - u_ex for u_i, u_ex in zip(u, u_expr)]
kde_map = solve(kde, qd)
## --- Define forces on each point in the system ---
R_C_hat = Px*A.x + Py*A.y + Pz*A.z
R_Cs = -m*g*A.z
forces = [(pC_hat, R_C_hat), (pCs, R_Cs)]
## --- Calculate generalized active forces ---
partials = partial_velocities([pC_hat, pCs], u, A, kde_map)
Fr, _ = generalized_active_forces(partials, forces)
# Impose the condition that disk C is rolling without slipping
u_indep = u[:3]
u_dep = u[3:]
vc = map(lambda x: dot(pC_hat.vel(A), x), [A.x, A.y])
vc_map = solve(subs(vc, kde_map), u_dep)
partials_tilde = partial_velocities([pC_hat, pCs], u_indep, A, kde_map, vc_map)
Fr_tilde, _ = generalized_active_forces(partials_tilde, forces)
Fr_tilde = map(expand, Fr_tilde)
# solve for ∂V/∂qs using 5.1.9
V_gamma = m * g * R * cos(q[1])
print(('\nVerify V_γ = {0} is a potential energy '.format(V_gamma) +
       'contribution of γ for C.'))
V_gamma_dot = -sum(fr * ur for fr, ur in
                   zip(*generalized_active_forces(partials_tilde,
                                                  forces[1:])))
if V_gamma_dot == V_gamma.diff(t).subs(kde_map):
    print('d/dt(V_γ) == -sum(Fr_γ * ur).')
else:
    print('d/dt(V_γ) != -sum(Fr_γ * ur).')
    print('d/dt(V_γ) = {0}'.format(msprint(V_gamma.diff(t))))
    print('-sum(Fr_γ * ur) = {0}'.format(msprint(V_gamma_dot)))
#print('\nFinding a potential energy function V while C is rolling '
#      'without slip.')
#V = potential_energy(Fr_tilde, q, u_indep, kde_map, vc_map)
#if V is not None:
#    print('V = {0}'.format(V))
print('\nFinding a potential energy function V while C is rolling with slip.')
V = potential_energy(Fr, q, u, kde_map)
if V is not None:
    print('V = {0}'.format(V))
print('\nFinding a potential energy function V while C is rolling with slip '
      'without friction.')
V = potential_energy(subs(Fr, {Px: 0, Py: 0}), q, u, kde_map)
if V is not None:
    print('Define a2, C as functions of t such that the respective '
          'contributing potential terms go to zero.')
    print('V = {0}'.format(V.subs(dict(zip(symbols('C α2'), [0, pi/2])))))
nouiz/pydy
|
examples/Kane1985/Chapter5/Ex9.3.py
|
Python
|
bsd-3-clause
| 3,590 | 0.002237 |
"""
Util class
"""
from django.forms import ModelForm, CharField, URLField, BooleanField
from django.db import models
from models import Entry
def getForm(user):
    """ If no form is passed in to new/edit views, use this one """
    # Chooses one of three Entry ModelForms by the caller's role:
    # staff get a 'published' checkbox, authenticated users the plain form,
    # and anonymous users must supply a name (and optional URL).
    # NOTE(review): the bleach-sanitizing save() is duplicated verbatim in
    # _Form and _AnonForm, and super(ModelForm, self).save skips ModelForm
    # itself in the MRO -- both look like candidates for cleanup; confirm
    # before changing.
    class _Form(ModelForm):
        class Meta:
            model = Entry
            fields = ('title', 'body',)
        def save(self, force_insert=False, force_update=False, commit=True):
            m = super(ModelForm, self).save(commit=False)
            import bleach
            # Whitelist of tags allowed in the body; title is stripped bare.
            TAGS = ['b', 'em', 'i', 'strong', 'br', 'li', 'ul', 'ol', 'p', 'span']
            m.title = bleach.clean(self.cleaned_data['title'])
            m.body = bleach.clean(self.cleaned_data['body'], tags=TAGS)
            if commit:
                m.save()
            return m
    class _AdminForm(ModelForm):
        published = BooleanField(required = False, initial = False)
        class Meta:
            model = Entry
            fields = ('title', 'body', 'published')
    class _AnonForm(ModelForm):
        owner_if_anonymous = CharField(max_length = 150, label="Name")
        url_if_anonymous = URLField(max_length=1000, label="URL", required=False)
        class Meta:
            model = Entry
            fields = ('title', 'owner_if_anonymous', 'url_if_anonymous', 'body')
        def save(self, force_insert=False, force_update=False, commit=True):
            m = super(ModelForm, self).save(commit=False)
            import bleach
            TAGS = ['b', 'em', 'i', 'strong', 'br', 'li', 'ul', 'ol', 'p', 'span']
            m.title = bleach.clean(self.cleaned_data['title'])
            m.body = bleach.clean(self.cleaned_data['body'], tags=TAGS)
            if commit:
                m.save()
            return m
    if user.is_staff:
        return _AdminForm
    if user.is_authenticated():
        return _Form
    return _AnonForm
def getConvo(entry):
    """Return the sorted conversation thread for *entry*, without the title."""
    thread, _title = getConvoWithTitle(entry)
    return thread
def getConvoWithTitle(entry):
    """Return a (thread, title) pair for the conversation containing *entry*.

    The thread is a depth-first list of published entries rooted at the
    original post; ``(None, None)`` is returned when the root itself is
    unpublished.
    """
    root = entry.getOriginal()
    thread = []
    if root:
        if root.published != True:
            # Unpublished root: the whole conversation is hidden.
            return None, None
        thread.append(root)
    replies = Entry.objects.filter(published=True).filter(parent=root)
    thread.extend(__sortConvo(replies))
    return thread, root.title
def __sortConvo(children):
    """Private helper: depth-first flatten *children* and their published replies."""
    flat = []
    for child in children:
        flat.append(child)
        replies = Entry.objects.filter(published=True).filter(parent=child)
        flat.extend(__sortConvo(replies))
    return flat
|
manfredmacx/django-convo
|
convo/Convo.py
|
Python
|
mit
| 2,261 | 0.037152 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Deepin, Inc.
# 2011 Hou Shaohui
#
# Author: Hou Shaohui <houshao55@gmail.com>
# Maintainer: Hou ShaoHui <houshao55@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from nls import _

# Name of the on-disk configuration file.
CONFIG_FILENAME = "config"

# Program identity strings; the long/short names are translatable.
PROGRAM_NAME = "deepin-music-player"
PROGRAM_VERSION = "2.0"
PROGRAM_NAME_LONG = _("Deepin Music")
PROGRAM_NAME_SHORT = _("DMusic")

# Generic timeout value -- presumably seconds; TODO confirm at call sites.
DEFAULT_TIMEOUT = 10
# Default UI font size (points).
DEFAULT_FONT_SIZE = 9
AUTOSAVE_TIMEOUT = 1000 * 60 * 5 # 5min

# Lyric mode
LRC_DESKTOP_MODE = 1
LRC_WINDOW_MODE = 2

# Predefined lyric colour themes: six hex colours per theme
# (presumably two three-colour gradients for inactive/active text --
# confirm against the lyric renderer).
PREDEFINE_COLORS = {
    "fresh_green" : ["#e4dcb9", "#ffea93", "#ffd631", "#efede6", "#b3fc9c", "#77d035"],
    "playful_pink" : ["#ffffff", "#70b8e5", "#3788c0", "#ffe0ee", "#ffa1ca", "#ff2586"],
    "cool_blue" : ["#f8f8f8", "#dadada", "#bdbdbd", "#ffffff", "#60c0ff", "#19a1ff"],
    "vitality_yellow" : ["#e4dcb9", "#ffea93", "#ffd631", "#f7f4ea", "#77d1ff", "#4199d5"],
    }

# Window geometry defaults, in pixels.
FULL_DEFAULT_WIDTH = 886
FULL_DEFAULT_HEIGHT = 625
SIMPLE_DEFAULT_WIDTH = 322
SIMPLE_DEFAULT_HEIGHT = 625
HIDE_PLAYLIST_WIDTH = 210

# Playlist pane widths, in pixels.
LIST_WIDTH = 312
CATEGROYLIST_WIDTH = 104  # (sic) typo kept for compatibility with callers
PLAYLIST_WIDTH = 198

# Notebook tab identifiers.
TAB_LOCAL = 1
TAB_WEBCAST = 2
TAB_RADIO = 3

# Placeholder item kinds shown in empty lists.
EMPTY_WEBCAST_ITEM = 1
EMPTY_RADIO_ITEM = 2
|
dragondjf/musicplayertest
|
constant.py
|
Python
|
gpl-2.0
| 1,831 | 0.004369 |
"""
WSGI config for mords_backend project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Point Django at the project settings module; setdefault() respects a
# DJANGO_SETTINGS_MODULE already exported in the environment.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mords_backend.settings")

# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) import.
application = get_wsgi_application()
|
TeppieC/M-ords
|
mords_backend/mords_backend/wsgi.py
|
Python
|
mit
| 404 | 0 |
#!/usr/bin/python3
import os, sys, random
pandoraPath = os.getenv('PANDORAPATH', '/usr/local/pandora')
sys.path.append(pandoraPath+'/bin')
sys.path.append(pandoraPath+'/lib')
from pyPandora import Config, World, Agent, SizeInt
class MyAgent(Agent):
    """Agent that takes one random step per tick and gathers the
    resources found in its current cell."""

    def __init__(self, id):
        Agent.__init__( self, id)
        print('constructing agent: ',self.id)
        # Per-instance counter (the original class attribute was shadowed
        # on first assignment anyway): total resources gathered so far.
        self.gatheredResources = 0

    def updateState(self):
        """Attempt one random unit step, then harvest the current cell."""
        print('updating state of: ',self.id)
        # NOTE(review): assumes self.position returns a copy, so mutating
        # newPosition does not move the agent directly -- confirm.
        newPosition = self.position
        newPosition._x += random.randint(-1, 1)
        newPosition._y += random.randint(-1, 1)
        if self.getWorld().checkPosition(newPosition):
            self.position = newPosition
        # Harvest everything in the (possibly unchanged) current cell
        # and empty it; gathering happens whether or not the move succeeded.
        self.gatheredResources += self.getWorld().getValue('resources', self.position)
        self.getWorld().setValue('resources', self.position, 0)

    def registerAttributes(self):
        # Expose the counter to Pandora's serialization layer.
        self.registerIntAttribute('resources')

    def serialize(self):
        print('serializing MyAgent: ',self.id)
        self.serializeIntAttribute('resources', self.gatheredResources)
class MyWorld(World):
    """World holding a single dynamic 'resources' raster and ten agents."""

    def __init__(self, config):
        World.__init__(self, config)
        print('constructing MyWorld')

    def createRasters(self):
        """Register the 'resources' raster (values initialised 0, max 10)."""
        print('creating rasters')
        self.registerDynamicRaster("resources", 1)
        self.getDynamicRaster("resources").setInitValues(0, 10, 0)

    def createAgents(self):
        """Create ten MyAgent instances scattered at random positions."""
        print('creating agents')
        for index in range(10):
            agent = MyAgent('MyAgent_' + str(index))
            self.addAgent(agent)
            agent.setRandomPosition()
def main():
    """Build a 64x64 world, run it for ten time steps, and report completion."""
    print('getting started with pyPandora')
    steps = 10
    size = SizeInt(64, 64)
    config = Config(size, steps)
    world = MyWorld(config)
    world.initialize()
    world.run()
    print('simulation finished')


if __name__ == "__main__":
    main()
|
montanier/pandora
|
docs/tutorials/01_src/tutorial_pyPandora.py
|
Python
|
lgpl-3.0
| 2,012 | 0.011928 |
#!/usr/bin/env python
try:
    from pyGCMMA import GCMMA
    __all__ = ['GCMMA']
except ImportError:
    # Compiled GCMMA extension not built/available: expose an empty
    # public API. Narrowed from a bare `except:` which also swallowed
    # unrelated errors (KeyboardInterrupt, SystemExit, syntax errors in
    # the extension wrapper, ...).
    __all__ = []
#end
|
svn2github/pyopt
|
pyOpt/pyGCMMA/__init__.py
|
Python
|
gpl-3.0
| 112 | 0.017857 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import json
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.conf import settings
from guardian.shortcuts import get_anonymous_user
from geonode.groups.models import GroupProfile, GroupInvitation, GroupCategory
from geonode.documents.models import Document
from geonode.layers.models import Layer
from geonode.maps.models import Map
from geonode.base.populate_test_data import create_models
from geonode.security.views import _perms_info_json
class SmokeTest(TestCase):

    """
    Basic checks to make sure pages load, etc.

    Exercises group-based permissions, group CRUD views, and the group
    activity pages against the fixture data (groups 'bar', users
    'norman', 'test_user', 'admin').
    """

    fixtures = ['initial_data.json', "group_test_data"]

    def setUp(self):
        # Populate the test database with sample layers/maps/documents
        # and place the two ordinary users in the 'anonymous' group.
        create_models(type='layer')
        create_models(type='map')
        create_models(type='document')
        self.norman = get_user_model().objects.get(username="norman")
        self.norman.groups.add(Group.objects.get(name='anonymous'))
        self.test_user = get_user_model().objects.get(username='test_user')
        self.test_user.groups.add(Group.objects.get(name='anonymous'))
        self.bar = GroupProfile.objects.get(slug='bar')
        self.anonymous_user = get_anonymous_user()

    def test_group_permissions_extend_to_user(self):
        """
        Ensures that when a user is in a group, the group permissions
        extend to the user.
        """
        layer = Layer.objects.all()[0]
        # Set the default permissions
        layer.set_default_permissions()
        # Test that the anonymous user can read
        self.assertTrue(
            self.anonymous_user.has_perm(
                'view_resourcebase',
                layer.get_self_resource()))
        # Test that the default perms give Norman view permissions but not
        # write permissions
        self.assertTrue(
            self.norman.has_perm(
                'view_resourcebase',
                layer.get_self_resource()))
        self.assertFalse(
            self.norman.has_perm(
                'change_resourcebase',
                layer.get_self_resource()))
        # Make sure Norman is not in the bar group.
        self.assertFalse(self.bar.user_is_member(self.norman))
        # Add norman to the bar group.
        self.bar.join(self.norman)
        # Ensure Norman is in the bar group.
        self.assertTrue(self.bar.user_is_member(self.norman))
        # Give the bar group permissions to change the layer.
        permissions = {
            'groups': {
                'bar': [
                    'view_resourcebase',
                    'change_resourcebase']}}
        layer.set_permissions(permissions)
        self.assertTrue(
            self.norman.has_perm(
                'view_resourcebase',
                layer.get_self_resource()))
        # check that now norman can change the layer
        self.assertTrue(
            self.norman.has_perm(
                'change_resourcebase',
                layer.get_self_resource()))
        # Test adding a new user to the group after setting permissions on the layer.
        # Make sure Test User is not in the bar group.
        self.assertFalse(self.bar.user_is_member(self.test_user))
        self.assertFalse(
            self.test_user.has_perm(
                'change_resourcebase',
                layer.get_self_resource()))
        # Group permissions must apply to members who join later.
        self.bar.join(self.test_user)
        self.assertTrue(
            self.test_user.has_perm(
                'change_resourcebase',
                layer.get_self_resource()))

    def test_group_resource(self):
        """
        Tests the resources method on a Group object.
        """
        layer = Layer.objects.all()[0]
        map = Map.objects.all()[0]
        perm_spec = {'groups': {'bar': ['change_resourcebase']}}
        # Give the self.bar group write perms on the layer
        layer.set_permissions(perm_spec)
        map.set_permissions(perm_spec)
        # Ensure the layer is returned in the group's resources
        self.assertTrue(layer.get_self_resource() in self.bar.resources())
        self.assertTrue(map.get_self_resource() in self.bar.resources())
        # Test the resource filter
        self.assertTrue(
            layer.get_self_resource() in self.bar.resources(
                resource_type='layer'))
        self.assertTrue(
            map.get_self_resource() not in self.bar.resources(
                resource_type='layer'))
        # Revoke permissions on the layer from the self.bar group
        layer.set_permissions("{}")
        # Ensure the layer is no longer returned in the groups resources
        self.assertFalse(layer.get_self_resource() in self.bar.resources())

    def test_perms_info(self):
        """
        Tests the perms_info function (which passes permissions to the response context).
        """
        # Add test to test perms being sent to the front end.
        layer = Layer.objects.all()[0]
        layer.set_default_permissions()
        perms_info = layer.get_all_level_info()
        # Ensure there is only one group 'anonymous' by default
        self.assertEqual(len(perms_info['groups'].keys()), 1)
        # Add the foo group to the layer object groups
        layer.set_permissions({'groups': {'bar': ['view_resourcebase']}})
        perms_info = _perms_info_json(layer)
        # Ensure foo is in the perms_info output
        self.assertItemsEqual(
            json.loads(perms_info)['groups'], {
                'bar': ['view_resourcebase']})

    def test_resource_permissions(self):
        """
        Tests that the client can get and set group permissions through the test_resource_permissions view.

        Runs the same get/set/revoke cycle for a layer, a document and a map.
        """
        self.assertTrue(self.client.login(username="admin", password="admin"))
        layer = Layer.objects.all()[0]
        document = Document.objects.all()[0]
        map_obj = Map.objects.all()[0]
        layer.set_default_permissions()
        document.set_default_permissions()
        map_obj.set_default_permissions()
        objects = layer, document, map_obj
        for obj in objects:
            response = self.client.get(
                reverse(
                    'resource_permissions',
                    kwargs=dict(
                        resource_id=obj.id)))
            self.assertEqual(response.status_code, 200)
            js = json.loads(response.content)
            # The view may return permissions either inline or as a
            # JSON-encoded string; normalise to a dict.
            permissions = js.get('permissions', dict())
            if isinstance(permissions, unicode) or isinstance(
                    permissions, str):
                permissions = json.loads(permissions)
            # Ensure the groups value is empty by default
            expected_permissions = {}
            if settings.DEFAULT_ANONYMOUS_DOWNLOAD_PERMISSION:
                expected_permissions.setdefault(
                    u'anonymous', []).append(u'download_resourcebase')
            if settings.DEFAULT_ANONYMOUS_VIEW_PERMISSION:
                expected_permissions.setdefault(
                    u'anonymous', []).append(u'view_resourcebase')
            self.assertItemsEqual(
                permissions.get('groups'),
                expected_permissions)
            permissions = {
                'groups': {
                    'bar': ['change_resourcebase']
                },
                'users': {
                    'admin': ['change_resourcebase']
                }
            }
            # Give the bar group permissions
            response = self.client.post(
                reverse(
                    'resource_permissions',
                    kwargs=dict(resource_id=obj.id)),
                data=json.dumps(permissions),
                content_type="application/json")
            self.assertEqual(response.status_code, 200)
            response = self.client.get(
                reverse(
                    'resource_permissions',
                    kwargs=dict(
                        resource_id=obj.id)))
            js = json.loads(response.content)
            permissions = js.get('permissions', dict())
            if isinstance(permissions, unicode) or isinstance(
                    permissions, str):
                permissions = json.loads(permissions)
            # Make sure the bar group now has write permissions
            self.assertItemsEqual(
                permissions['groups'], {
                    'bar': ['change_resourcebase']})
            # Remove group permissions
            permissions = {"users": {"admin": ['change_resourcebase']}}
            # Update the object's permissions to remove the bar group
            response = self.client.post(
                reverse(
                    'resource_permissions',
                    kwargs=dict(resource_id=obj.id)),
                data=json.dumps(permissions),
                content_type="application/json")
            self.assertEqual(response.status_code, 200)
            response = self.client.get(
                reverse(
                    'resource_permissions',
                    kwargs=dict(
                        resource_id=obj.id)))
            js = json.loads(response.content)
            permissions = js.get('permissions', dict())
            if isinstance(permissions, unicode) or isinstance(
                    permissions, str):
                permissions = json.loads(permissions)
            # Assert the bar group no longer has permissions
            self.assertItemsEqual(permissions['groups'], {})

    def test_create_new_group(self):
        """
        Tests creating a group through the group_create route.
        """
        d = dict(title='TestGroup',
                 description='This is a test group.',
                 access='public',
                 keywords='testing, groups')
        self.client.login(username="admin", password="admin")
        response = self.client.post(reverse('group_create'), data=d)
        # successful POSTS will redirect to the group's detail view.
        self.assertEqual(response.status_code, 302)
        self.assertTrue(GroupProfile.objects.get(title='TestGroup'))

    def test_delete_group_view(self):
        """
        Tests deleting a group through the group_delete route.
        """
        # Ensure the group exists
        self.assertTrue(GroupProfile.objects.get(id=self.bar.id))
        self.client.login(username="admin", password="admin")
        # Delete the group
        response = self.client.post(
            reverse(
                'group_remove', args=[
                    self.bar.slug]))
        # successful POSTS will redirect to the group list view.
        self.assertEqual(response.status_code, 302)
        self.assertFalse(
            GroupProfile.objects.filter(
                id=self.bar.id).count() > 0)

    def test_delete_group_view_no_perms(self):
        """
        Tests deleting a group through the group_delete with a non-manager.
        """
        # Ensure the group exists
        self.assertTrue(GroupProfile.objects.get(id=self.bar.id))
        self.client.login(username="norman", password="norman")
        # Delete the group
        response = self.client.post(
            reverse(
                'group_remove', args=[
                    self.bar.slug]))
        # A non-manager must be rejected with 403 Forbidden.
        self.assertEqual(response.status_code, 403)
        # Ensure the group still exists
        self.assertTrue(GroupProfile.objects.get(id=self.bar.id))

    def test_groupmember_manager(self):
        """
        Tests the get_managers method.
        """
        norman = get_user_model().objects.get(username="norman")
        admin = get_user_model().objects.get(username='admin')
        # Make sure norman is not a user
        self.assertFalse(self.bar.user_is_member(norman))
        # Add norman to the self.bar group
        self.bar.join(norman)
        # Ensure norman is now a member
        self.assertTrue(self.bar.user_is_member(norman))
        # Ensure norman is not in the managers queryset
        self.assertTrue(norman not in self.bar.get_managers())
        # Ensure admin is in the managers queryset
        self.assertTrue(admin in self.bar.get_managers())

    def test_public_pages_render(self):
        """
        Verify pages that do not require login load without internal error
        """
        response = self.client.get("/groups/")
        self.assertEqual(200, response.status_code)
        response = self.client.get("/groups/group/bar/")
        self.assertEqual(200, response.status_code)
        response = self.client.get("/groups/group/bar/members/")
        self.assertEqual(200, response.status_code)
        # 302 for auth failure since we redirect to login page
        response = self.client.get("/groups/create/")
        self.assertEqual(302, response.status_code)
        response = self.client.get("/groups/group/bar/update/")
        self.assertEqual(302, response.status_code)
        # 405 - json endpoint, doesn't support GET
        response = self.client.get("/groups/group/bar/invite/")
        self.assertEqual(405, response.status_code)

    def test_protected_pages_render(self):
        """
        Verify pages that require login load without internal error
        """
        self.assertTrue(self.client.login(username="admin", password="admin"))
        response = self.client.get("/groups/")
        self.assertEqual(200, response.status_code)
        response = self.client.get("/groups/group/bar/")
        self.assertEqual(200, response.status_code)
        response = self.client.get("/groups/group/bar/members/")
        self.assertEqual(200, response.status_code)
        response = self.client.get("/groups/create/")
        self.assertEqual(200, response.status_code)
        response = self.client.get("/groups/group/bar/update/")
        self.assertEqual(200, response.status_code)
        # 405 - json endpoint, doesn't support GET
        response = self.client.get("/groups/group/bar/invite/")
        self.assertEqual(405, response.status_code)

    def test_group_activity_pages_render(self):
        """
        Verify Activity List pages

        The activity page should list a layer only once the layer is
        assigned to the group.
        """
        self.assertTrue(self.client.login(username="admin", password="admin"))
        response = self.client.get("/groups/")
        self.assertEqual(200, response.status_code)
        response = self.client.get("/groups/group/bar/activity/")
        self.assertEqual(200, response.status_code)
        self.assertContains(response,
                            '<a href="/layers/geonode:CA">CA</a>',
                            count=0,
                            status_code=200,
                            msg_prefix='',
                            html=False)
        self.assertContains(response,
                            'uploaded',
                            count=0,
                            status_code=200,
                            msg_prefix='',
                            html=False)
        try:
            # Add test to test perms being sent to the front end.
            layer = Layer.objects.all()[0]
            layer.set_default_permissions()
            perms_info = layer.get_all_level_info()
            # Ensure there is only one group 'anonymous' by default
            self.assertEqual(len(perms_info['groups'].keys()), 1)
            # Add the foo group to the layer object groups
            layer.set_permissions({'groups': {'bar': ['view_resourcebase']}})
            perms_info = _perms_info_json(layer)
            # Ensure foo is in the perms_info output
            self.assertItemsEqual(
                json.loads(perms_info)['groups'], {
                    'bar': ['view_resourcebase']})
            layer.group = self.bar.group
            layer.save()
            response = self.client.get("/groups/group/bar/activity/")
            self.assertEqual(200, response.status_code)
            self.assertContains(response,
                                '<a href="/layers/geonode:CA">CA</a>',
                                count=2,
                                status_code=200,
                                msg_prefix='',
                                html=False)
            self.assertContains(response,
                                'uploaded',
                                count=2,
                                status_code=200,
                                msg_prefix='',
                                html=False)
        finally:
            # Always restore the layer so other tests see pristine state.
            layer.set_default_permissions()
            layer.group = None
            layer.save()
class MembershipTest(TestCase):

    """
    Tests membership logic in the geonode.groups models
    """

    fixtures = ["group_test_data"]

    def test_group_is_member(self):
        """
        Tests checking group membership
        """
        anonymous = get_anonymous_user()
        norman = get_user_model().objects.get(username="norman")
        bar = GroupProfile.objects.get(slug="bar")
        # Neither the anonymous user nor norman belongs to 'bar' initially.
        self.assertFalse(bar.user_is_member(anonymous))
        self.assertFalse(bar.user_is_member(norman))

    def test_group_add_member(self):
        """
        Tests adding a user to a group
        """
        anonymous = get_anonymous_user()
        norman = get_user_model().objects.get(username="norman")
        bar = GroupProfile.objects.get(slug="bar")
        bar.join(norman)
        self.assertTrue(bar.user_is_member(norman))
        # Anonymous users may never join a group.
        self.assertRaises(ValueError, bar.join, anonymous)
class InvitationTest(TestCase):

    """
    Tests invitation logic in geonode.groups models

    Covers sending, accepting and declining GroupInvitation objects.
    """

    fixtures = ["group_test_data"]

    def test_invite_user(self):
        """
        Tests inviting a registered user
        """
        normal = get_user_model().objects.get(username="norman")
        admin = get_user_model().objects.get(username="admin")
        group = GroupProfile.objects.get(slug="bar")
        # send=False: create the invitation record without emailing.
        group.invite(normal, admin, role="member", send=False)
        self.assert_(
            GroupInvitation.objects.filter(
                user=normal,
                from_user=admin,
                group=group).exists())
        invite = GroupInvitation.objects.get(
            user=normal, from_user=admin, group=group)
        # Test that the user can access the token url.
        self.client.login(username="norman", password="norman")
        response = self.client.get(
            "/groups/group/{group}/invite/{token}/".format(group=group, token=invite.token))
        self.assertEqual(200, response.status_code)

    def test_accept_invitation(self):
        """
        Tests accepting an invitation
        """
        anon = get_anonymous_user()
        normal = get_user_model().objects.get(username="norman")
        admin = get_user_model().objects.get(username="admin")
        group = GroupProfile.objects.get(slug="bar")
        group.invite(normal, admin, role="member", send=False)
        invitation = GroupInvitation.objects.get(
            user=normal, from_user=admin, group=group)
        # Only the invited user may accept the invitation.
        self.assertRaises(ValueError, lambda: invitation.accept(anon))
        self.assertRaises(ValueError, lambda: invitation.accept(admin))
        invitation.accept(normal)
        self.assert_(group.user_is_member(normal))
        self.assert_(invitation.state == "accepted")

    def test_decline_invitation(self):
        """
        Tests declining an invitation
        """
        anon = get_anonymous_user()
        normal = get_user_model().objects.get(username="norman")
        admin = get_user_model().objects.get(username="admin")
        group = GroupProfile.objects.get(slug="bar")
        group.invite(normal, admin, role="member", send=False)
        invitation = GroupInvitation.objects.get(
            user=normal, from_user=admin, group=group)
        # Only the invited user may decline the invitation.
        self.assertRaises(ValueError, lambda: invitation.decline(anon))
        self.assertRaises(ValueError, lambda: invitation.decline(admin))
        invitation.decline(normal)
        self.assert_(not group.user_is_member(normal))
        self.assert_(invitation.state == "declined")
class GroupCategoriesTestCase(TestCase):

    """
    Group Categories tests

    Exercises the group-category REST endpoint and its list/create views.
    """

    def setUp(self):
        # One category attached to one group, plus a login-capable user.
        c1 = GroupCategory.objects.create(name='test #1 category')
        g = GroupProfile.objects.create(title='test')
        g.categories.add(c1)
        g.save()
        User = get_user_model()
        u = User.objects.create(username='test')
        u.set_password('test')
        u.save()

    def test_api(self):
        api_url = '/api/groupcategory/'
        r = self.client.get(api_url)
        self.assertEqual(r.status_code, 200)
        data = json.loads(r.content)
        self.assertEqual(
            data['meta']['total_count'],
            GroupCategory.objects.all().count())
        # check if we have non-empty group category
        self.assertTrue(
            GroupCategory.objects.filter(
                groups__isnull=False).exists())
        # Every serialized category must exist exactly once and report
        # the correct member count.
        for item in data['objects']:
            self.assertTrue(
                GroupCategory.objects.filter(
                    slug=item['slug']).count() == 1)
            g = GroupCategory.objects.get(slug=item['slug'])
            self.assertEqual(item['member_count'], g.groups.all().count())

    def test_group_categories_list(self):
        view_url = reverse('group_category_list')
        r = self.client.get(view_url)
        self.assertEqual(r.status_code, 200)

    def test_group_categories_add(self):
        view_url = reverse('group_category_create')
        r = self.client.get(view_url)
        self.assertEqual(r.status_code, 200)
        # Anonymous POST re-renders the form (200) rather than creating.
        r = self.client.post(view_url)
        self.assertEqual(r.status_code, 200)
        self.client.login(username='test', password='test')
        category = 'test #3 category'
        # A logged-in POST redirects (302) and creates the category.
        r = self.client.post(view_url, {'name': category})
        self.assertEqual(r.status_code, 302)
        q = GroupCategory.objects.filter(name=category)
        self.assertEqual(q.count(), 1)
        self.assertTrue(q.get().slug)
|
ingenieroariel/geonode
|
geonode/groups/tests.py
|
Python
|
gpl-3.0
| 22,844 | 0.000175 |
########################################################################
#
# File Name: HTMLTextAreaElement
#
#
### This file is automatically generated by GenerateHtml.py.
### DO NOT EDIT!
"""
WWW: http://4suite.com/4DOM e-mail: support@4suite.com
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
import string
from xml.dom import Node
from xml.dom.html.HTMLElement import HTMLElement
class HTMLTextAreaElement(HTMLElement):
    """DOM Level 1 HTMLTextAreaElement (the <TEXTAREA> element).

    NOTE: this file is produced by GenerateHtml.py (see header); fixes
    belong in the generator, not here. Attribute accessors map DOM
    properties onto the element's HTML attributes; ``defaultValue`` and
    ``value`` are stored as the element's text content.
    """

    def __init__(self, ownerDocument, nodeName="TEXTAREA"):
        HTMLElement.__init__(self, ownerDocument, nodeName)

    ### Attribute Methods ###

    def _get_accessKey(self):
        return self.getAttribute("ACCESSKEY")

    def _set_accessKey(self, value):
        self.setAttribute("ACCESSKEY", value)

    def _get_cols(self):
        # Missing/empty attribute is reported as 0.
        value = self.getAttribute("COLS")
        if value:
            return int(value)
        return 0

    def _set_cols(self, value):
        self.setAttribute("COLS", str(value))

    def _get_defaultValue(self):
        # No children -> None (implicit return).
        if not self.firstChild:
            return
        if self.firstChild == self.lastChild:
            return self.firstChild.data
        # Merge adjacent text nodes, then return the first text child's data.
        self.normalize()
        text = filter(lambda x: x.nodeType == Node.TEXT_NODE, self.childNodes)
        return text[0].data

    def _set_defaultValue(self, value):
        # Keep the first text child (if any), remove everything else.
        # NOTE(review): removes nodes while iterating childNodes --
        # relies on the implementation returning a stable list; confirm.
        text = None
        for node in self.childNodes:
            if not text and node.nodeType == Node.TEXT_NODE:
                text = node
            else:
                self.removeChild(node)
        if text:
            text.data = value
        else:
            text = self.ownerDocument.createTextNode(value)
            self.appendChild(text)

    def _get_disabled(self):
        return self.hasAttribute("DISABLED")

    def _set_disabled(self, value):
        # Boolean HTML attribute: present when true, absent when false.
        if value:
            self.setAttribute("DISABLED", "DISABLED")
        else:
            self.removeAttribute("DISABLED")

    def _get_form(self):
        # Walk up the tree to the enclosing FORM element, or None.
        parent = self.parentNode
        while parent:
            if parent.nodeName == "FORM":
                return parent
            parent = parent.parentNode
        return None

    def _get_name(self):
        return self.getAttribute("NAME")

    def _set_name(self, value):
        self.setAttribute("NAME", value)

    def _get_readonly(self):
        return self.hasAttribute("READONLY")

    def _set_readonly(self, value):
        # Boolean HTML attribute: present when true, absent when false.
        if value:
            self.setAttribute("READONLY", "READONLY")
        else:
            self.removeAttribute("READONLY")

    def _get_rows(self):
        # Missing/empty attribute is reported as 0.
        value = self.getAttribute("ROWS")
        if value:
            return int(value)
        return 0

    def _set_rows(self, value):
        self.setAttribute("ROWS", str(value))

    def _get_tabIndex(self):
        value = self.getAttribute("TABINDEX")
        if value:
            return int(value)
        return 0

    def _set_tabIndex(self, value):
        self.setAttribute("TABINDEX", str(value))

    def _get_type(self):
        # Fixed per the DOM spec for this element.
        return "textarea"

    def _get_value(self):
        # Same storage as defaultValue: the element's text content.
        if not self.firstChild:
            return
        if self.firstChild == self.lastChild:
            return self.firstChild.data
        self.normalize()
        text = filter(lambda x: x.nodeType == Node.TEXT_NODE, self.childNodes)
        return text[0].data

    def _set_value(self, value):
        text = None
        for node in self.childNodes:
            if not text and node.nodeType == Node.TEXT_NODE:
                text = node
            else:
                self.removeChild(node)
        if text:
            text.data = value
        else:
            text = self.ownerDocument.createTextNode(value)
            self.appendChild(text)

    ### Methods ###

    # UI-focused DOM methods; no-ops in this non-rendering implementation.

    def blur(self):
        pass

    def focus(self):
        pass

    def select(self):
        pass

    ### Attribute Access Mappings ###

    # Property-name -> accessor tables consumed by HTMLElement's
    # __getattr__/__setattr__ machinery.

    _readComputedAttrs = HTMLElement._readComputedAttrs.copy()
    _readComputedAttrs.update({
        "accessKey" : _get_accessKey,
        "cols" : _get_cols,
        "defaultValue" : _get_defaultValue,
        "disabled" : _get_disabled,
        "form" : _get_form,
        "name" : _get_name,
        "readonly" : _get_readonly,
        "rows" : _get_rows,
        "tabIndex" : _get_tabIndex,
        "type" : _get_type,
        "value" : _get_value
        })

    _writeComputedAttrs = HTMLElement._writeComputedAttrs.copy()
    _writeComputedAttrs.update({
        "accessKey" : _set_accessKey,
        "cols" : _set_cols,
        "defaultValue" : _set_defaultValue,
        "disabled" : _set_disabled,
        "name" : _set_name,
        "readonly" : _set_readonly,
        "rows" : _set_rows,
        "tabIndex" : _set_tabIndex,
        "value" : _set_value
        })

    # Read-only attrs = inherited read-only + computed readers that have
    # no matching writer (Python 2 filter/has_key idiom, kept as generated).
    _readOnlyAttrs = filter(lambda k,m=_writeComputedAttrs: not m.has_key(k),
                            HTMLElement._readOnlyAttrs + _readComputedAttrs.keys())
|
alanjw/GreenOpenERP-Win-X86
|
python/Lib/site-packages/_xmlplus/dom/html/HTMLTextAreaElement.py
|
Python
|
agpl-3.0
| 4,989 | 0.005813 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2017-2018 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
LaTeX Genealogy Tree ancestor report
"""
#------------------------------------------------------------------------
#
# python modules
#
#------------------------------------------------------------------------
from functools import partial
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
LOG = logging.getLogger(".Tree")
#------------------------------------------------------------------------
#
# Gramps module
#
#------------------------------------------------------------------------
from gramps.gen.errors import ReportError
from gramps.gen.plug.report import Report
from gramps.gen.plug.report import MenuReportOptions
from gramps.gen.plug.report import stdoptions
from gramps.gen.plug.menu import PersonOption, NumberOption, BooleanOption
#------------------------------------------------------------------------
#
# Internationalisation
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
try:
_trans = glocale.get_addon_translator(__file__)
except ValueError:
_trans = glocale.translation
_ = _trans.gettext
#------------------------------------------------------------------------
#
# AncestorTree
#
#------------------------------------------------------------------------
class AncestorTree(Report):
    """ Ancestor Tree report """

    def __init__(self, database, options, user):
        """
        Create LaTeX Genealogy Tree ancestor report.
        """
        Report.__init__(self, database, options, user)
        menu = options.menu
        # Single accessor for option values (the original mixed a dead
        # `get_option_by_name` alias with direct menu lookups).
        get_value = lambda name: menu.get_option_by_name(name).get_value()
        self._db = self.database
        self._pid = get_value('pid')
        self.max_generations = get_value('maxgen')
        self.include_images = get_value('images')
        self.set_locale(get_value('trans'))

    def write_report(self):
        """
        Inherited method; called by report() in _ReportDialog.py
        """
        if self._pid:
            person = self._db.get_person_from_gramps_id(self._pid)
            if person is None:
                raise ReportError(_("Person %s is not in the Database") %
                                  self._pid)
            family_handle = person.get_main_parents_family_handle()
            if family_handle:
                options = ['pref code={\\underline{#1}}',
                           'list separators hang',
                           'place text={\\newline}{}']
                if self.include_images:
                    # Overlay a 25mm-wide clipped portrait on the right
                    # side of each node when the person has an image.
                    images = ('if image defined={'
                              'add to width=25mm,right=25mm,\n'
                              'underlay={\\begin{tcbclipinterior}'
                              '\\path[fill overzoom image=\\gtrDBimage]\n'
                              '([xshift=-24mm]interior.south east) '
                              'rectangle (interior.north east);\n'
                              '\\end{tcbclipinterior}},\n'
                              '}{},')
                    box = 'box={halign=left,\\gtrDBsex,%s\n}' % images
                else:
                    box = 'box={halign=left,\\gtrDBsex}'
                options.append(box)
                self.doc.start_tree(options)
                self.write_subgraph(0, 'parent', family_handle, person.handle)
                self.doc.end_tree()

    def write_subgraph(self, level, subgraph_type, family_handle, ghandle):
        """Recursively emit *family_handle* and its ancestors.

        :param level: current generation depth (recursion stops past
            ``self.max_generations``)
        :param subgraph_type: genealogytree subgraph kind ('parent')
        :param family_handle: handle of the family to emit
        :param ghandle: handle of the child on the direct ancestor line
        """
        if level > self.max_generations:
            return
        family = self._db.get_family_from_handle(family_handle)
        self.doc.start_subgraph(level, subgraph_type, family)
        for handle in (family.get_father_handle(), family.get_mother_handle()):
            if handle:
                parent = self._db.get_person_from_handle(handle)
                family_handle = parent.get_main_parents_family_handle()
                if family_handle:
                    # Parent has parents of their own: recurse.
                    self.write_subgraph(level + 1, 'parent', family_handle,
                                        handle)
                else:
                    self.doc.write_node(self._db, level + 1, 'p', parent, True)
        for childref in family.get_child_ref_list():
            child = self._db.get_person_from_handle(childref.ref)
            if childref.ref == ghandle:
                # The child on the direct ancestor line ('g' node).
                self.doc.write_node(self._db, level + 1, 'g', child, True)
            else:
                self.doc.write_node(self._db, level + 1, 'c', child, False)
        self.doc.end_subgraph(level)
#------------------------------------------------------------------------
#
# AncestorTreeOptions
#
#------------------------------------------------------------------------
class AncestorTreeOptions(MenuReportOptions):
    """
    Defines all of the controls necessary
    to configure the Ancestor Tree report.
    """

    def __init__(self, name, dbase):
        # PersonOption instance; built lazily in add_menu_options().
        self.__pid = None
        MenuReportOptions.__init__(self, name, dbase)

    def add_menu_options(self, menu):
        """Populate the options menu with the report's controls."""
        category = _("Report Options")

        self.__pid = PersonOption(_("Center Person"))
        self.__pid.set_help(_("The center person for the report"))
        menu.add_option(category, "pid", self.__pid)

        generations = NumberOption(_("Generations"), 10, 1, 100)
        generations.set_help(_("The number of generations to include in the tree"))
        menu.add_option(category, "maxgen", generations)

        include_images = BooleanOption(_("Include images"), False)
        include_images.set_help(_("Include images of people in the nodes."))
        menu.add_option(category, "images", include_images)

        # Registers the translation/locale selector for the report output.
        stdoptions.add_localization_option(menu, category)
|
gramps-project/addons-source
|
GenealogyTree/gt_ancestor.py
|
Python
|
gpl-2.0
| 6,685 | 0.003141 |
"""
train supervised classifier with what's cooking recipe data
objective - determine recipe type categorical value from 20
"""
import time
from features_bow import *
from features_word2vec import *
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.cross_validation import cross_val_score
""" main entry method """
def main(use_idf=False, random_state=None, std=False, n_jobs=-1, verbose=2):
wc_idf_map = None
if use_idf:
# ingredients inverse document frequencies
wc_components = build_tfidf_wc(verbose=(verbose > 0))
wc_idf = wc_components['model'].idf_
wc_idf_words = wc_components['model'].get_feature_names()
wc_idf_map = dict(zip(wc_idf_words, wc_idf))
# word2vec recipe feature vectors
wc_components = build_word2vec_wc(feature_vec_size=120, avg=True, idf=wc_idf_map, verbose=(verbose > 0))
y_train = wc_components['train']['df']['cuisine_code'].as_matrix()
X_train = wc_components['train']['features_matrix']
# standardize features aka mean ~ 0, std ~ 1
if std:
scaler = StandardScaler()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
# random forest supervised classifier
time_0 = time.time()
clf = RandomForestClassifier(n_estimators=100, max_depth=None,
n_jobs=n_jobs, random_state=random_state, verbose=verbose)
# perform cross validation
cv_n_fold = 8
print 'cross validating %s ways...' % cv_n_fold
scores_cv = cross_val_score(clf, X_train, y_train, cv=cv_n_fold, n_jobs=-1)
print 'accuracy: %0.5f (+/- %0.5f)' % (scores_cv.mean(), scores_cv.std() * 2)
time_1 = time.time()
elapsed_time = time_1 - time_0
print 'cross validation took %.3f seconds' % elapsed_time
if __name__ == '__main__':
main()
|
eifuentes/kaggle_whats_cooking
|
train_word2vec_rf.py
|
Python
|
mit
| 1,909 | 0.002095 |
from gludb.simple import DBObject, Field
@DBObject(table_name='TopData')
class TopData(object):
    """Minimal gludb-persisted record declaring a single 'name' field."""
    # Field descriptor: gludb persists this attribute in the 'TopData' table.
    name = Field('name')
|
memphis-iis/GLUDB
|
tests/testpkg/module.py
|
Python
|
apache-2.0
| 123 | 0 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs the RocksDB Shenango experiments.
This script runs the RocksDB Shenango experiments on ghOSt and on CFS. In these
experiments, RocksDB is co-located with an Antagonist. Specifically, the
dispatcher and worker threads are co-located with the Antagonist threads while
the load generator is isolated on its own CPU (to ensure that the load we think
we are generating is the load we are actually generating). For ghOSt, the
Antagonist threads are preempted to allow RocksDB threads to run. For CFS, this
preemption is left to CFS to figure out. Furthermore, for the CFS experiments,
the worker threads sleep on a futex when they do not have work rather than spin
so that CFS gives the Antagonist threads a chance to run.
"""
from typing import Sequence
from absl import app
from experiments.scripts.options import CfsWaitType
from experiments.scripts.options import CheckSchedulers
from experiments.scripts.options import GetAntagonistOptions
from experiments.scripts.options import GetGhostOptions
from experiments.scripts.options import GetRocksDBOptions
from experiments.scripts.options import Scheduler
from experiments.scripts.run import Experiment
from experiments.scripts.run import Run
# Total CPUs used by each experiment.
_NUM_CPUS = 8
# CFS workers: presumably reserves one CPU for the load generator and one for
# the dispatcher — TODO confirm against the experiment binaries.
_NUM_CFS_WORKERS = _NUM_CPUS - 2
# ghOSt can oversubscribe workers since they are scheduled cooperatively.
_NUM_GHOST_WORKERS = 11
# Subtract 1 for the Antagonist since the Antagonist does not run a thread on
# the same CPU as the load generator.
_NUM_ANTAGONIST_CPUS = _NUM_CPUS - 1
def RunCfs():
  """Runs the CFS (Linux Completely Fair Scheduler) experiment.

  Sweeps the offered load coarsely (10k steps) up to 60,000 req/s, then
  finely (1k steps) from 70,000 to 120,000 req/s near saturation.
  """
  e: Experiment = Experiment()
  # Run throughputs 10000, 20000, 30000, ..., 60000.
  # (The previous upper bound of 600000 contradicted this comment and the
  # fine-grained 70000-120000 sweep below, which it duplicated and
  # overshot; RunGhost uses the same coarse-then-fine split consistently.)
  e.throughputs = list(range(10000, 70000, 10000))
  # Toward the end, run throughputs 70000, 71000, 72000, ..., 120000.
  e.throughputs.extend(range(70000, 121000, 1000))
  e.rocksdb = GetRocksDBOptions(Scheduler.CFS, _NUM_CPUS, _NUM_CFS_WORKERS)
  # Workers sleep on a futex when idle so CFS gives the Antagonist a chance
  # to run (see module docstring).
  e.rocksdb.cfs_wait_type = CfsWaitType.FUTEX
  e.rocksdb.get_exponential_mean = '1us'
  e.antagonist = GetAntagonistOptions(Scheduler.CFS, _NUM_ANTAGONIST_CPUS)
  # CFS experiment does not involve ghOSt.
  e.ghost = None
  Run(e)
def RunGhost():
  """Runs the ghOSt experiment."""
  exp: Experiment = Experiment()
  # Coarse sweep: 10000, 20000, ..., 380000 req/s.
  coarse = range(10000, 381000, 10000)
  # Fine sweep near saturation: 390000, 391000, ..., 450000 req/s.
  fine = range(390000, 451000, 1000)
  exp.throughputs = list(coarse) + list(fine)
  exp.rocksdb = GetRocksDBOptions(Scheduler.GHOST, _NUM_CPUS,
                                  _NUM_GHOST_WORKERS)
  exp.rocksdb.get_exponential_mean = '1us'
  exp.rocksdb.ghost_qos = 2
  exp.antagonist = GetAntagonistOptions(Scheduler.GHOST, _NUM_ANTAGONIST_CPUS)
  exp.antagonist.ghost_qos = 1
  exp.ghost = GetGhostOptions(_NUM_CPUS)
  # There is no time-based preemption for Shenango, so set the preemption time
  # slice to infinity.
  exp.ghost.preemption_time_slice = 'inf'
  Run(exp)
def main(argv: Sequence[str]):
  """Entry point: run the experiment(s) named on the command line."""
  if len(argv) > 3:
    raise app.UsageError('Too many command-line arguments.')
  elif len(argv) == 1:
    raise app.UsageError(
        'No experiment specified. Pass `cfs` and/or `ghost` as arguments.')

  # First check that all of the command line arguments are valid.
  if not CheckSchedulers(argv[1:]):
    raise ValueError('Invalid scheduler specified.')

  # Run the experiments.
  for arg in argv[1:]:
    scheduler = Scheduler(arg)
    if scheduler == Scheduler.CFS:
      RunCfs()
    elif scheduler == Scheduler.GHOST:
      RunGhost()
    else:
      raise ValueError(f'Unknown scheduler {scheduler}.')


if __name__ == '__main__':
  app.run(main)
|
google/ghost-userspace
|
experiments/scripts/shenango.py
|
Python
|
apache-2.0
| 4,128 | 0.009205 |
# -*- coding: utf-8 -*-
from flask import Flask, request
from fbmq import Page, QuickReply, Attachment, Template
import requests, records, re, json
from flask_restful import Resource, Api
token = '<auth token here>'  # Facebook Page access token (placeholder).
metricsData = {}  # Latest savings metrics, filled by doer(), served by deviceMetrics.
macid = 111111111111  # Default device MAC id; rebound per-user in doer().
pg = Page(token)  # fbmq Messenger Page client.
import time
db = records.Database('mysql://<user>:<password>@<url>:3306/db')  # shared DB handle
app = Flask(__name__)
api = Api(app)  # flask-restful wrapper exposing /device/metrics
class deviceMetrics(Resource):
    """REST resource returning the most recently computed savings metrics."""
    def get(self):
        # NOTE(review): raises KeyError until doer() has populated
        # metricsData at least once; charging_status is hard-coded.
        return {"energy": metricsData["energy"], "money_saved": metricsData["savings"], "days": metricsData["days"], "charging_status": "charging"}
@app.route('/')
def index():
    """Trivial health-check endpoint."""
    # return str(macid)
    return '^_^'
def date_handler(obj):
    """``json.dumps`` default hook: serialize date/datetime via isoformat().

    Raises TypeError for any object without an ``isoformat`` method, as
    the json module expects from a default handler.
    """
    try:
        to_iso = obj.isoformat
    except AttributeError:
        raise TypeError
    return to_iso()
@app.route('/device/dbase')
def dbaser():
    """Dump the raw client rows for the current module-level ``macid`` as JSON."""
    data = db.query('select * from client where mac = %s LIMIT 1' % macid)
    time.sleep(1)  # NOTE(review): purpose unclear — presumably lets the lazy query settle; confirm
    return json.dumps(data.as_dict(), default=date_handler)
@app.route('/hook', methods=['POST'])
def hook():
    """Facebook Messenger webhook: dispatch the raw event to fbmq handlers."""
    # NOTE(review): this local ``db`` is created on every request and never
    # used (the handlers use the module-level connection); it is also never
    # closed.
    db = records.Database('mysql://<user>:<password>@<url>:3306/db')
    pg.greeting("Welcome, get started below!")
    # pg.show_starting_button("startpay")
    pg.handle_webhook(request.get_data(as_text=True))
    return 'ok'
@pg.handle_message
def mhandle(event):
    """Handle a free-text Messenger message.

    Unknown senders are treated as registering: the message is reduced to
    its digits and stored as a 12-digit MAC id. Known senders get the
    quick-reply menu.
    """
    sender_id = event.sender_id.decode('utf-8')
    pg.typing_on(sender_id)
    # print event.message # debugging
    quick_replies = [
        {'title': 'Charging status', 'payload': 'charge_stat'},
        {'title': 'Last saved', 'payload': 'l_saved'},
        {'title': 'Total saving', 'payload': 'c_saved'},
    ]
    message = event.message_text
    # SECURITY(review): SQL is built with %-interpolation from request data;
    # prefer records' parameterized form, e.g.
    # db.query('select * from user where fbid = :fbid', fbid=sender_id).
    rec = db.query("select * from user where fbid = %s" % sender_id)
    time.sleep(.5)
    if len(rec.as_dict()) == 0:
        # First contact: expect a MAC id.
        user = pg.get_user_profile(sender_id)[u'first_name']  # NOTE(review): unused here
        message = ''.join(re.findall('\d+', message))
        if (len(str(message)) != 12):
            pg.send(sender_id, "Kindly enter your 12 digit MAC ID")
        else:
            db.query("insert into user values(DEFAULT, %s, %s)" % (sender_id, str(message)))
            pg.send(sender_id, "Registration successful!")
    else:
        pg.send(sender_id, "What do you want to know?", quick_replies=quick_replies)
@pg.callback(['startpay'])
def start_callback(payload, event):
    """Handle the 'Get Started' postback: prompt for a MAC id or show the menu."""
    sender_id = event.sender_id.decode('utf-8')
    pg.typing_on(sender_id)
    # NOTE(review): opens a fresh, never-closed DB connection per callback,
    # shadowing the module-level ``db`` within this function.
    db = records.Database('mysql://<user>:<password>@<url>:3306/db')
    rec = db.query("select * from user where fbid = %s" % sender_id)
    if len(rec.as_dict()) == 0:
        # Not registered yet: ask for the device MAC id.
        user = pg.get_user_profile(sender_id)[u'first_name']
        pg.send(sender_id, "Hey %s, please send me your MAC ID" % user)
    else:
        pg.typing_on(sender_id)
        quick_replies = [
            {'title': 'Charging status', 'payload': 'charge_stat'},
            {'title': 'Last saved', 'payload': 'l_saved'},
            {'title': 'Total saving', 'payload': 'c_saved'},
        ]
        pg.send(sender_id, "What do you want to know?", quick_replies=quick_replies)
@pg.callback(['charge_stat', 'l_saved', 'c_saved'])
def doer(payl, event):
    """Quick-reply payload handler: charging status / last saved / total saving.

    For 'c_saved' it scans the device's status log, pairs up start/end
    timestamps of status==0 runs, and turns the accumulated minutes into
    energy and money estimates published via ``metricsData``.
    """
    global macid
    global metricsData
    sender_id = event.sender_id
    pg.typing_on(sender_id)
    quick_replies = [
        {'title': 'Charging status', 'payload': 'charge_stat'},
        {'title': 'Last saved', 'payload': 'l_saved'},
        {'title': 'Total saving', 'payload': 'c_saved'},
    ]
    if payl == 'charge_stat':
        # NOTE(review): hard-coded response; no live device lookup here.
        pg.send(sender_id, "Charging status: Charging", quick_replies=quick_replies)
    elif payl == 'l_saved':
        # NOTE(review): hard-coded demo value.
        pg.send(sender_id, "Last savings: ₹ 131!", quick_replies=quick_replies)
    elif payl == 'c_saved':
        # Rebinds the module-level macid to this sender's registered device.
        macid = db.query("select mac from user where fbid = %s" % sender_id)
        macid = macid[0].as_dict()["mac"]
        data = db.query('select * from client where mac = %s' % macid)
        # Reverse the query order; presumably yields chronological rows — confirm.
        row = data.as_dict()[::-1]
        # fav_rows = {}
        fav_rows = []
        maxi = 1
        start = 0
        total_hrs = list()
        # Pair the first (sTime) and last (eTime) timestamps of each run of
        # status==0 rows; on the run's end, keep its duration when the
        # closing row's signal strength exceeds 96.
        for r in row:
            if (r['status'] == 0):
                maxi += 1
                if start == 0:
                    sTime = r['timestamp']
                    start += 1
                else:
                    eTime = r['timestamp']
                    # print eTime
            else:
                if r['strength']>96:
                    # fav_rows[maxi] = [sTime, eTime, r['strength']]
                    # fav_rows[maxi] = [sTime, eTime]
                    # NOTE(review): sTime/eTime may be unbound if the log
                    # starts with a status!=0 row — confirm data guarantees.
                    fav_rows.append(sTime - eTime)
                maxi = 0
                start = 0
        days = sum([x.days for x in fav_rows])
        # fav_rows becomes total minutes across all kept intervals.
        fav_rows = sum([x.total_seconds()/60 for x in fav_rows])
        # total_hrs = sum(total_hrs)
        power = .5 # in watt
        price = 5 # per KWh
        # NOTE(review): multiplies minutes*watts by days as well — verify units.
        energy = ((fav_rows*power)/1000)*days
        # print fav_rows, days
        pg.send(sender_id, "You've saved total %d KWh of energy so a total of"\
            " %d ₹ of savings in last %d days!" % (energy, energy*price,\
            days), quick_replies=quick_replies)
        metricsData["energy"] = energy
        metricsData["savings"] = energy*price
        metricsData["days"] = days
api.add_resource(deviceMetrics, '/device/metrics')
# Development entry point; debug/reloader should be disabled in production.
if __name__ == '__main__':
    app.run(debug=True, use_reloader=True)
|
Knapsacks/power-pi-v2
|
facebook-messenger-bot/app.py
|
Python
|
mit
| 5,321 | 0.004326 |
# coding: utf-8
from __future__ import unicode_literals
import unittest
import io
from lxml import isoschematron, etree
from packtools.catalogs import SCHEMAS
SCH = etree.parse(SCHEMAS['sps-1.3'])
def TestPhase(phase_name, cache):
    """Return a parsed Schematron validator for *phase_name*.

    Validators are built lazily and memoized in *cache* (a mapping), so
    each phase is compiled at most once per test run.

    :param phase_name: the phase name
    :param cache: mapping type
    """
    try:
        return cache[phase_name]
    except KeyError:
        validator = isoschematron.Schematron(SCH, phase=phase_name)
        cache[phase_name] = validator
        return validator
class PhaseBasedTestCase(unittest.TestCase):
    """Base class for tests validating XML samples against one Schematron phase.

    Subclasses set ``sch_phase`` to the phase name they exercise.
    """
    # Class-level memo of compiled Schematron phases, shared via TestPhase.
    cache = {}
    def _run_validation(self, sample):
        """Validate the file-like XML *sample*; return True when it passes."""
        schematron = TestPhase(self.sch_phase, self.cache)
        return schematron.validate(etree.parse(sample))
class JournalIdTests(PhaseBasedTestCase):
    """Tests for article/front/journal-meta/journal-id elements.

    Ticket #14 makes @journal-id-type="publisher-id" mandatory.
    Ref: https://github.com/scieloorg/scielo_publishing_schema/issues/14
    """
    sch_phase = 'phase.journal-id'
    def test_case1(self):
        """
        presence(@nlm-ta) is True
        presence(@publisher-id) is True
        """
        sample = u"""<article>
                      <front>
                        <journal-meta>
                          <journal-id journal-id-type="nlm-ta">
                            Rev Saude Publica
                          </journal-id>
                          <journal-id journal-id-type="publisher-id">
                            RSP
                          </journal-id>
                        </journal-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_case2(self):
        """
        presence(@nlm-ta) is True
        presence(@publisher-id) is False
        """
        # Must fail: publisher-id is mandatory even when nlm-ta is present.
        sample = u"""<article>
                      <front>
                        <journal-meta>
                          <journal-id journal-id-type="nlm-ta">
                            Rev Saude Publica
                          </journal-id>
                        </journal-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_case3(self):
        """
        presence(@nlm-ta) is False
        presence(@publisher-id) is True
        """
        sample = u"""<article>
                      <front>
                        <journal-meta>
                          <journal-id journal-id-type="publisher-id">
                            RSP
                          </journal-id>
                        </journal-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_case4(self):
        """
        presence(@nlm-ta) is False
        presence(@publisher-id) is False
        """
        # A doi-typed journal-id alone does not satisfy the rule.
        sample = u"""<article>
                      <front>
                        <journal-meta>
                          <journal-id journal-id-type='doi'>
                            123.plin
                          </journal-id>
                        </journal-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_publisher_id_cannot_be_empty(self):
        """An empty publisher-id element must be rejected."""
        sample = u"""<article>
                      <front>
                        <journal-meta>
                          <journal-id journal-id-type="publisher-id"></journal-id>
                        </journal-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class JournalTitleGroupTests(PhaseBasedTestCase):
"""Tests for article/front/journal-meta/journal-title-group elements.
"""
sch_phase = 'phase.journal-title-group'
def test_journal_title_group_is_absent(self):
sample = u"""<article>
<front>
<journal-meta>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case1(self):
"""
A: presence(journal-title) is True
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is True
A ^ B is True
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title>
Revista de Saude Publica
</journal-title>
<abbrev-journal-title abbrev-type='publisher'>
Rev. Saude Publica
</abbrev-journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case2(self):
"""
A: presence(journal-title) is True
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is False
A ^ B is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title>
Revista de Saude Publica
</journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case3(self):
"""
A: presence(journal-title) is False
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is True
A ^ B is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<abbrev-journal-title abbrev-type='publisher'>
Rev. Saude Publica
</abbrev-journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case4(self):
"""
A: presence(journal-title) is False
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is False
A ^ B is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_empty_journal_title(self):
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title></journal-title>
<abbrev-journal-title abbrev-type='publisher'>Rev. Saude Publica</abbrev-journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_empty_abbrev_journal_title(self):
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title>Revista de Saude Publica</journal-title>
<abbrev-journal-title abbrev-type='publisher'></abbrev-journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class PublisherTests(PhaseBasedTestCase):
    """Tests for article/front/journal-meta/publisher elements.

    The publisher element and a non-empty publisher-name are required.
    """
    sch_phase = 'phase.publisher'
    def test_publisher_is_present(self):
        """A publisher with a non-empty name validates."""
        sample = u"""<article>
                      <front>
                        <journal-meta>
                          <publisher>
                            <publisher-name>British Medical Journal</publisher-name>
                          </publisher>
                        </journal-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_publisher_is_absent(self):
        """A journal-meta without a publisher element must be rejected."""
        sample = u"""<article>
                      <front>
                        <journal-meta>
                        </journal-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_publisher_is_empty(self):
        """An empty publisher-name must be rejected."""
        sample = u"""<article>
                      <front>
                        <journal-meta>
                          <publisher>
                            <publisher-name></publisher-name>
                          </publisher>
                        </journal-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class ArticleCategoriesTests(PhaseBasedTestCase):
    """Tests for article/front/article-meta/article-categories elements.

    article-categories must be present inside article-meta.
    """
    sch_phase = 'phase.article-categories'
    def test_article_categories_is_present(self):
        """article-meta with article-categories validates."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <article-categories>
                            <subj-group>
                              <subject>ISO/TC 108</subject>
                              <subject>
                                SC 2, Measurement and evaluation of...
                              </subject>
                            </subj-group>
                          </article-categories>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_article_categories_is_absent(self):
        """article-meta without article-categories must be rejected."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class fpage_OR_elocationTests(PhaseBasedTestCase):
"""Tests for article/front/article-meta/fpage or elocation-id elements.
"""
sch_phase = 'phase.fpage_or_elocation-id'
def test_case1(self):
"""
fpage is True
elocation-id is True
fpage v elocation-id is True
"""
sample = u"""<article>
<front>
<article-meta>
<fpage>01</fpage>
<elocation-id>E27</elocation-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case2(self):
"""
fpage is True
elocation-id is False
fpage v elocation-id is True
"""
sample = u"""<article>
<front>
<article-meta>
<fpage>01</fpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case3(self):
"""
fpage is False
elocation-id is True
fpage v elocation-id is True
"""
sample = u"""<article>
<front>
<article-meta>
<elocation-id>E27</elocation-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case4(self):
"""
fpage is False
elocation-id is False
fpage v elocation-id is False
"""
sample = u"""<article>
<front>
<article-meta>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_empty_fpage(self):
sample = u"""<article>
<front>
<article-meta>
<fpage></fpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_empty_elocationid(self):
sample = u"""<article>
<front>
<article-meta>
<elocation-id></elocation-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class ISSNTests(PhaseBasedTestCase):
"""Tests for article/front/journal-meta/issn elements.
"""
sch_phase = 'phase.issn'
def test_case1(self):
"""
A: @pub-type='epub' is True
B: @pub-type='ppub' is True
A v B is True
"""
sample = u"""<article>
<front>
<journal-meta>
<issn pub-type="epub">
0959-8138
</issn>
<issn pub-type="ppub">
0959-813X
</issn>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case2(self):
"""
A: @pub-type='epub' is True
B: @pub-type='ppub' is False
A v B is True
"""
sample = u"""<article>
<front>
<journal-meta>
<issn pub-type="epub">
0959-8138
</issn>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case3(self):
"""
A: @pub-type='epub' is False
B: @pub-type='ppub' is True
A v B is True
"""
sample = u"""<article>
<front>
<journal-meta>
<issn pub-type="ppub">
0959-813X
</issn>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case4(self):
"""
A: @pub-type='epub' is False
B: @pub-type='ppub' is False
A v B is False
"""
sample = u"""<article>
<front>
<journal-meta>
<issn>
0959-813X
</issn>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_empty_issn(self):
sample = u"""<article>
<front>
<journal-meta>
<issn pub-type="epub"></issn>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class ArticleIdTests(PhaseBasedTestCase):
"""Tests for article/front/article-meta/article-id elements.
"""
sch_phase = 'phase.article-id'
def test_article_id_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_pub_id_type_doi_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
<article-id>
10.1590/1414-431X20143434
</article-id>
<article-id pub-id-type='other'>
10.1590/1414-431X20143435
</article-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_pub_id_type_doi(self):
sample = u"""<article>
<front>
<article-meta>
<article-id pub-id-type='doi'>
10.1590/1414-431X20143434
</article-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_pub_id_type_doi_is_empty(self):
sample = u"""<article>
<front>
<article-meta>
<article-id pub-id-type='doi'/>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_invalid_pub_id_type(self):
sample = u"""<article>
<front>
<article-meta>
<article-id pub-id-type='unknown'>
10.1590/1414-431X20143434
</article-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_invalid_pub_id_type_case2(self):
sample = u"""<article>
<front>
<article-meta>
<article-id pub-id-type='unknown'>
10.1590/1414-431X20143434
</article-id>
<article-id pub-id-type='doi'>
10.1590/1414-431X20143434
</article-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_valid_pub_id_type_values(self):
for typ in ['doi', 'publisher-id', 'other']:
sample = u"""<article>
<front>
<article-meta>
<article-id pub-id-type='%s'>
10.1590/1414-431X20143433
</article-id>
<article-id pub-id-type='doi'>
10.1590/1414-431X20143434
</article-id>
</article-meta>
</front>
</article>
""" % typ
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
class SubjGroupTests(PhaseBasedTestCase):
"""Tests for article/front/article-meta/article-categories/subj-group elements.
"""
sch_phase = 'phase.subj-group'
def test_subj_group_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
<article-categories>
</article-categories>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_without_heading_type(self):
sample = u"""<article>
<front>
<article-meta>
<article-categories>
<subj-group subj-group-type="kwd">
<subject content-type="neurosci">
Cellular and Molecular Biology
</subject>
<subj-group>
<subject content-type="neurosci">
Blood and brain barrier
</subject>
</subj-group>
</subj-group>
</article-categories>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_with_heading_type(self):
sample = u"""<article>
<front>
<article-meta>
<article-categories>
<subj-group subj-group-type="heading">
<subject>
Cellular and Molecular Biology
</subject>
<subj-group>
<subject content-type="neurosci">
Blood and brain barrier
</subject>
</subj-group>
</subj-group>
</article-categories>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_with_heading_in_subarticle_pt(self):
sample = u"""<article>
<front>
<article-meta>
<article-categories>
<subj-group subj-group-type="heading">
<subject>
Original Article
</subject>
<subj-group>
<subject content-type="neurosci">
Blood and brain barrier
</subject>
</subj-group>
</subj-group>
</article-categories>
</article-meta>
</front>
<sub-article xml:lang="pt" article-type="translation" id="S01">
<front-stub>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Artigos Originais</subject>
</subj-group>
</article-categories>
</front-stub>
</sub-article>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_with_many_heading_in_subarticle_pt(self):
sample = u"""<article>
<front>
<article-meta>
<article-categories>
<subj-group subj-group-type="heading">
<subject>
Original Article
</subject>
<subj-group>
<subject content-type="neurosci">
Blood and brain barrier
</subject>
</subj-group>
</subj-group>
</article-categories>
</article-meta>
</front>
<sub-article xml:lang="pt" article-type="translation" id="S01">
<front-stub>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Artigos Originais</subject>
</subj-group>
<subj-group subj-group-type="heading">
<subject>Artigos Piratas</subject>
</subj-group>
</article-categories>
</front-stub>
</sub-article>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_with_heading_type_in_the_deep(self):
sample = u"""<article>
<front>
<article-meta>
<article-categories>
<subj-group>
<subject>
Cellular and Molecular Biology
</subject>
<subj-group subj-group-type="heading">
<subject>
Blood and brain barrier
</subject>
</subj-group>
</subj-group>
</article-categories>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_with_many_heading_type(self):
sample = u"""<article>
<front>
<article-meta>
<article-categories>
<subj-group subj-group-type="heading">
<subject>
Cellular and Molecular Biology
</subject>
</subj-group>
<subj-group subj-group-type="heading">
<subject>
Blood and brain barrier
</subject>
</subj-group>
</article-categories>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class AbstractLangTests(PhaseBasedTestCase):
"""Tests for article/front/article-meta/abstract elements.
"""
sch_phase = 'phase.abstract_lang'
def test_is_present(self):
sample = u"""<article>
<front>
<article-meta>
<abstract>
<p>Differing socioeconomic positions in...</p>
</abstract>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_is_present_with_lang(self):
sample = u"""<?xml version="1.0" encoding="UTF-8"?>
<article>
<front>
<article-meta>
<abstract xml:lang="en">
<p>Differing socioeconomic positions in...</p>
</abstract>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_missing_for_research_article(self):
sample = u"""<?xml version="1.0" encoding="UTF-8"?>
<article article-type="research-article">
<front>
<article-meta>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_research_article(self):
sample = u"""<?xml version="1.0" encoding="UTF-8"?>
<article article-type="research-article">
<front>
<article-meta>
<abstract>
<p>Differing socioeconomic positions in...</p>
</abstract>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_research_article_only_with_transabstract(self):
sample = u"""<?xml version="1.0" encoding="UTF-8"?>
<article article-type="research-article">
<front>
<article-meta>
<trans-abstract xml:lang="en">
<p>Differing socioeconomic positions in...</p>
</trans-abstract>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_missing_for_review_article(self):
sample = u"""<?xml version="1.0" encoding="UTF-8"?>
<article article-type="review-article">
<front>
<article-meta>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_review_article(self):
sample = u"""<?xml version="1.0" encoding="UTF-8"?>
<article article-type="review-article">
<front>
<article-meta>
<abstract>
<p>Differing socioeconomic positions in...</p>
</abstract>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
    def test_review_article_only_with_transabstract(self):
        # A <trans-abstract> alone also satisfies the abstract requirement
        # for review-article.
        sample = u"""<?xml version="1.0" encoding="UTF-8"?>
                     <article article-type="review-article">
                      <front>
                        <article-meta>
                          <trans-abstract xml:lang="en">
                            <p>Differing socioeconomic positions in...</p>
                          </trans-abstract>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))
class ArticleTitleLangTests(PhaseBasedTestCase):
"""Tests for article/front/article-meta/title-group/article-title elements.
"""
sch_phase = 'phase.article-title_lang'
def test_is_present(self):
sample = u"""<article>
<front>
<article-meta>
<title-group>
<article-title>
Systematic review of day hospital care...
</article-title>
</title-group>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_is_present_with_lang(self):
sample = u"""<article>
<front>
<article-meta>
<title-group>
<article-title xml:lang="en">
Systematic review of day hospital care...
</article-title>
</title-group>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_is_present_in_elementcitation(self):
sample = u"""<article>
<back>
<ref-list>
<ref>
<mixed-citation>Aires M, Paz AA, Perosa CT. Situação de saúde e grau de dependência de pessoas idosas institucionalizadas. <italic>Rev Gaucha Enferm.</italic> 2009;30(3):192-9.</mixed-citation>
<element-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Aires</surname>
<given-names>M</given-names>
</name>
<name>
<surname>Paz</surname>
<given-names>AA</given-names>
</name>
<name>
<surname>Perosa</surname>
<given-names>CT</given-names>
</name>
</person-group>
<article-title>Situação de saúde e grau de dependência de pessoas idosas institucionalizadas</article-title>
<source>Rev Gaucha Enferm</source>
<year>2009</year>
<volume>30</volume>
<issue>3</issue>
<fpage>192</fpage>
<lpage>199</lpage>
</element-citation>
</ref>
</ref-list>
</back>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_is_present_in_elementcitation_with_lang(self):
sample = u"""<article>
<back>
<ref-list>
<ref>
<mixed-citation>Aires M, Paz AA, Perosa CT. Situação de saúde e grau de dependência de pessoas idosas institucionalizadas. <italic>Rev Gaucha Enferm.</italic> 2009;30(3):192-9.</mixed-citation>
<element-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Aires</surname>
<given-names>M</given-names>
</name>
<name>
<surname>Paz</surname>
<given-names>AA</given-names>
</name>
<name>
<surname>Perosa</surname>
<given-names>CT</given-names>
</name>
</person-group>
<article-title xml:lang="pt">Situação de saúde e grau de dependência de pessoas idosas institucionalizadas</article-title>
<source>Rev Gaucha Enferm</source>
<year>2009</year>
<volume>30</volume>
<issue>3</issue>
<fpage>192</fpage>
<lpage>199</lpage>
</element-citation>
</ref>
</ref-list>
</back>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class KwdGroupLangTests(PhaseBasedTestCase):
    """Validation of article/front/article-meta/kwd-group elements.

    Every kwd-group must carry an @xml:lang attribute; groups without it
    are rejected regardless of how many occurrences there are.
    """
    sch_phase = 'phase.kwd-group_lang'

    def test_single_occurence(self):
        # A lone kwd-group without @xml:lang: invalid.
        doc = u"""<article>
                      <front>
                        <article-meta>
                          <kwd-group>
                            <kwd>gene expression</kwd>
                          </kwd-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_many_occurencies(self):
        # Multiple kwd-groups, each with its own @xml:lang: valid.
        doc = u"""<article>
                      <front>
                        <article-meta>
                          <kwd-group xml:lang="en">
                            <kwd>gene expression</kwd>
                          </kwd-group>
                          <kwd-group xml:lang="pt">
                            <kwd>expressao do gene</kwd>
                          </kwd-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_many_occurencies_without_lang(self):
        # Multiple kwd-groups all missing @xml:lang: invalid.
        doc = u"""<article>
                      <front>
                        <article-meta>
                          <kwd-group>
                            <kwd>gene expression</kwd>
                          </kwd-group>
                          <kwd-group>
                            <kwd>expressao do gene</kwd>
                          </kwd-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(doc.encode('utf-8'))))
class AffContentTypeTests(PhaseBasedTestCase):
"""Tests for:
- article/front/article-meta/contrib-group
- article/front/article-meta
"""
sch_phase = 'phase.aff_contenttypes'
def test_original_is_present(self):
sample = u"""<article>
<front>
<article-meta>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
</aff>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_original_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
<aff>
<institution>
Grupo de ...
</institution>
</aff>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_many_original(self):
sample = u"""<article>
<front>
<article-meta>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
<institution content-type="original">
Galera de ...
</institution>
</aff>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_original_is_present_and_absent(self):
sample = u"""<article>
<front>
<article-meta>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
</aff>
<aff>
<institution>
Grupo de ...
</institution>
</aff>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_original_is_present_and_present(self):
sample = u"""<article>
<front>
<article-meta>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
</aff>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
</aff>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_allowed_orgdiv1(self):
sample = u"""<article>
<front>
<article-meta>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
<institution content-type="orgdiv1">
Instituto de Matematica e Estatistica
</institution>
</aff>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_allowed_orgdiv2(self):
sample = u"""<article>
<front>
<article-meta>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
<institution content-type="orgdiv2">
Instituto de Matematica e Estatistica
</institution>
</aff>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_allowed_orgdiv3(self):
sample = u"""<article>
<front>
<article-meta>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
<institution content-type="orgdiv3">
Instituto de Matematica e Estatistica
</institution>
</aff>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_allowed_normalized(self):
sample = u"""<article>
<front>
<article-meta>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
<institution content-type="normalized">
Instituto de Matematica e Estatistica
</institution>
</aff>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_disallowed_orgdiv4(self):
sample = u"""<article>
<front>
<article-meta>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
<institution content-type="orgdiv4">
Instituto de Matematica e Estatistica
</institution>
</aff>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_orgname_inside_contrib_group(self):
sample = u"""<article>
<front>
<article-meta>
<contrib-group>
<aff>
<institution content-type="original">
Grupo de ...
</institution>
<institution content-type="orgname">
Instituto de Matematica e Estatistica
</institution>
</aff>
</contrib-group>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
class CountsTests(PhaseBasedTestCase):
"""Tests for article/front/article-meta/counts elements.
"""
sch_phase = 'phase.counts'
def test_absent(self):
sample = u"""<article>
<front>
<article-meta>
<fpage>0</fpage>
<lpage>0</lpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_table_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
<counts>
<ref-count count="0"/>
<fig-count count="0"/>
<equation-count count="0"/>
<page-count count="0"/>
</counts>
<fpage>0</fpage>
<lpage>0</lpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_ref_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
<counts>
<table-count count="0"/>
<fig-count count="0"/>
<equation-count count="0"/>
<page-count count="0"/>
</counts>
<fpage>0</fpage>
<lpage>0</lpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_fig_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
<counts>
<table-count count="0"/>
<ref-count count="0"/>
<equation-count count="0"/>
<page-count count="0"/>
</counts>
<fpage>0</fpage>
<lpage>0</lpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_equation_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
<counts>
<table-count count="0"/>
<ref-count count="0"/>
<fig-count count="0"/>
<page-count count="0"/>
</counts>
<fpage>0</fpage>
<lpage>0</lpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_page_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
<counts>
<table-count count="0"/>
<ref-count count="0"/>
<fig-count count="0"/>
<equation-count count="0"/>
</counts>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_tables(self):
sample = u"""<article>
<front>
<article-meta>
<counts>
<table-count count="1"/>
<ref-count count="0"/>
<fig-count count="0"/>
<equation-count count="0"/>
<page-count count="0"/>
</counts>
<fpage>0</fpage>
<lpage>0</lpage>
</article-meta>
</front>
<body>
<sec>
<p>
<table-wrap>
<table frame="hsides" rules="groups">
<colgroup width="25%"><col/><col/><col/><col/></colgroup>
<thead>
<tr>
<th style="font-weight:normal" align="left">Modelo</th>
<th style="font-weight:normal">Estrutura</th>
<th style="font-weight:normal">Processos</th>
<th style="font-weight:normal">Resultados</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top">SIPA<sup>1,2</sup></td>
<td valign="top">Urgência e hospitalar.</td>
<td valign="top">Realiza triagem para fragilidade.</td>
<td valign="top">Maior gasto comunitário, menor gasto.</td>
</tr>
</tbody>
</table>
</table-wrap>
</p>
</sec>
</body>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_tables_as_graphic(self):
sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<article-meta>
<counts>
<table-count count="1"/>
<ref-count count="0"/>
<fig-count count="0"/>
<equation-count count="0"/>
<page-count count="0"/>
</counts>
<fpage>0</fpage>
<lpage>0</lpage>
</article-meta>
</front>
<body>
<sec>
<p>
<table-wrap id="t01">
<graphic mimetype="image"
xlink:href="1414-431X-bjmbr-1414-431X20142875-gt001">
</graphic>
</table-wrap>
</p>
</sec>
</body>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_ref(self):
sample = u"""<article>
<front>
<article-meta>
<counts>
<table-count count="0"/>
<ref-count count="1"/>
<fig-count count="0"/>
<equation-count count="0"/>
<page-count count="0"/>
</counts>
<fpage>0</fpage>
<lpage>0</lpage>
</article-meta>
</front>
<back>
<ref-list>
<title>REFERÊNCIAS</title>
<ref id="B1">
<label>1</label>
<mixed-citation>
Béland F, Bergman H, Lebel P, Clarfield AM, Tousignant P, ...
</mixed-citation>
</ref>
</ref-list>
</back>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_fig(self):
sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<article-meta>
<counts>
<table-count count="0"/>
<ref-count count="0"/>
<fig-count count="1"/>
<equation-count count="0"/>
<page-count count="0"/>
</counts>
<fpage>0</fpage>
<lpage>0</lpage>
</article-meta>
</front>
<body>
<sec>
<p>
<fig id="f01">
<label>Figura 1</label>
<caption>
<title>Modelo das cinco etapas da pesquisa translacional.</title>
</caption>
<graphic xlink:href="0034-8910-rsp-48-2-0347-gf01"/>
</fig>
</p>
</sec>
</body>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_equation(self):
sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<article-meta>
<counts>
<table-count count="0"/>
<ref-count count="0"/>
<fig-count count="0"/>
<equation-count count="1"/>
<page-count count="0"/>
</counts>
<fpage>0</fpage>
<lpage>0</lpage>
</article-meta>
</front>
<body>
<sec>
<disp-formula>
<tex-math id="M1">
</tex-math>
</disp-formula>
</sec>
</body>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_page(self):
sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<article-meta>
<counts>
<table-count count="0"/>
<ref-count count="0"/>
<fig-count count="0"/>
<equation-count count="0"/>
<page-count count="11"/>
</counts>
<fpage>140</fpage>
<lpage>150</lpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_page_wrong_count(self):
sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<article-meta>
<counts>
<table-count count="0"/>
<ref-count count="0"/>
<fig-count count="0"/>
<equation-count count="0"/>
<page-count count="50"/>
</counts>
<fpage>140</fpage>
<lpage>150</lpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_non_digit_pages(self):
"""Non-digit page interval cannot be checked automatically.
"""
sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<article-meta>
<counts>
<table-count count="0"/>
<ref-count count="0"/>
<fig-count count="0"/>
<equation-count count="0"/>
<page-count count="11"/>
</counts>
<fpage>A140</fpage>
<lpage>A150</lpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_elocationid_pages(self):
"""Electronic pagination cannot be checked automatically.
"""
sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<article-meta>
<counts>
<table-count count="0"/>
<ref-count count="0"/>
<fig-count count="0"/>
<equation-count count="0"/>
<page-count count="11"/>
</counts>
<elocation-id>A140</elocation-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
class AuthorNotesTests(PhaseBasedTestCase):
    """Validation of article/front/article-meta/author-notes/fn elements.

    Only a closed set of @fn-type values is accepted inside author-notes.
    """
    sch_phase = 'phase.fn-group'

    # The complete whitelist of fn-type values accepted in author-notes.
    _ALLOWED_FN_TYPES = (
        'author', 'con', 'conflict', 'corresp', 'current-aff',
        'deceased', 'edited-by', 'equal', 'on-leave',
        'participating-researchers', 'present-address', 'previously-at',
        'study-group-members', 'other',
    )

    _TEMPLATE = u"""<article>
                      <front>
                        <article-meta>
                          <author-notes>
                            <fn fn-type="%s">
                              <p>foobar</p>
                            </fn>
                          </author-notes>
                        </article-meta>
                      </front>
                    </article>
                 """

    def test_allowed_fn_types(self):
        # Each whitelisted fn-type must validate.
        for fn_type in self._ALLOWED_FN_TYPES:
            doc = io.BytesIO((self._TEMPLATE % fn_type).encode('utf-8'))
            self.assertTrue(self._run_validation(doc))

    def test_disallowed_fn_types(self):
        # Any fn-type outside the whitelist is rejected.
        doc = io.BytesIO((self._TEMPLATE % u'wtf').encode('utf-8'))
        self.assertFalse(self._run_validation(doc))
class PubDateTests(PhaseBasedTestCase):
    """Validation of article/front/article-meta/pub-date elements.

    pub-date requires a @pub-type attribute, restricted to
    'epub', 'epub-ppub' or 'collection'.
    """
    sch_phase = 'phase.pub-date'

    _TEMPLATE = u"""<article>
                      <front>
                        <article-meta>
                          <pub-date pub-type="%s">
                            <day>17</day>
                            <month>03</month>
                            <year>2014</year>
                          </pub-date>
                        </article-meta>
                      </front>
                    </article>
                 """

    def test_pub_type_absent(self):
        # A pub-date without @pub-type is rejected.
        doc = u"""<article>
                      <front>
                        <article-meta>
                          <pub-date>
                            <day>17</day>
                            <month>03</month>
                            <year>2014</year>
                          </pub-date>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_pub_type_allowed_values(self):
        # Each whitelisted @pub-type value validates.
        for pub_type in ('epub', 'epub-ppub', 'collection'):
            doc = io.BytesIO((self._TEMPLATE % pub_type).encode('utf-8'))
            self.assertTrue(self._run_validation(doc))

    def test_pub_type_disallowed_value(self):
        # Any other @pub-type value is rejected.
        doc = io.BytesIO((self._TEMPLATE % u'wtf').encode('utf-8'))
        self.assertFalse(self._run_validation(doc))
class VolumeTests(PhaseBasedTestCase):
    """Tests for:
      - article/front/article-meta/volume
      - article/back/ref-list/ref/element-citation/volume

    The element is optional, but when present it must not be empty.
    """
    sch_phase = 'phase.volume'

    def test_absent_in_front(self):
        # No <volume> at all: valid (the element is optional).
        doc = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_present_but_empty_in_front(self):
        # An empty <volume> element is rejected.
        doc = u"""<article>
                      <front>
                        <article-meta>
                          <volume></volume>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_present_in_front(self):
        # A non-empty <volume> validates.
        doc = u"""<article>
                      <front>
                        <article-meta>
                          <volume>10</volume>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(doc.encode('utf-8'))))
class IssueTests(PhaseBasedTestCase):
    """Tests for:
      - article/front/article-meta/issue
      - article/back/ref-list/ref/element-citation/issue

    The element is optional, but when present it must not be empty.
    """
    sch_phase = 'phase.issue'

    def test_absent_in_front(self):
        # No <issue> at all: valid (the element is optional).
        doc = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_present_but_empty_in_front(self):
        # An empty <issue> element is rejected.
        doc = u"""<article>
                      <front>
                        <article-meta>
                          <issue></issue>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_present_in_front(self):
        # A non-empty <issue> validates.
        doc = u"""<article>
                      <front>
                        <article-meta>
                          <issue>10</issue>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(doc.encode('utf-8'))))
class SupplementTests(PhaseBasedTestCase):
    """Tests for:
      - article/front/article-meta/supplement

    The <supplement> element is disallowed entirely by this phase.
    """
    sch_phase = 'phase.supplement'

    def test_absent(self):
        # No <supplement>: valid.
        doc = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_present(self):
        # Any <supplement> element is rejected.
        doc = u"""<article>
                      <front>
                        <article-meta>
                          <supplement>Suppl 2</supplement>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(doc.encode('utf-8'))))
class ElocationIdTests(PhaseBasedTestCase):
"""Tests for:
- article/front/article-meta/elocation-id
- article/back/ref-list/ref/element-citation/elocation-id
"""
sch_phase = 'phase.elocation-id'
def test_absent(self):
sample = u"""<article>
<front>
<article-meta>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_with_fpage(self):
sample = u"""<article>
<front>
<article-meta>
<elocation-id>E27</elocation-id>
<fpage>12</fpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_without_fpage(self):
sample = u"""<article>
<front>
<article-meta>
<elocation-id>E27</elocation-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_absent_back(self):
sample = u"""<article>
<back>
<ref-list>
<ref>
<element-citation>
</element-citation>
</ref>
</ref-list>
</back>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_with_fpage_back(self):
sample = u"""<article>
<back>
<ref-list>
<ref>
<element-citation>
<elocation-id>E27</elocation-id>
<fpage>12</fpage>
</element-citation>
</ref>
</ref-list>
</back>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_without_fpage_back(self):
sample = u"""<article>
<back>
<ref-list>
<ref>
<element-citation>
<elocation-id>E27</elocation-id>
</element-citation>
</ref>
</ref-list>
</back>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_with_and_without_fpage_back(self):
sample = u"""<article>
<back>
<ref-list>
<ref>
<element-citation>
<elocation-id>E27</elocation-id>
<fpage>12</fpage>
</element-citation>
</ref>
<ref>
<element-citation>
<elocation-id>E27</elocation-id>
</element-citation>
</ref>
</ref-list>
</back>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class HistoryTests(PhaseBasedTestCase):
    """Tests for:
      - article/front/article-meta/history

    history is optional; when present, each <date> must carry a
    @date-type restricted to 'received', 'accepted' or 'rev-recd'.
    """
    sch_phase = 'phase.history'

    def test_absent(self):
        # No <history> at all: valid (the element is optional).
        sample = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    # FIX: renamed from the meaningless `test_` (it was an exact duplicate
    # of test_absent with an empty descriptive suffix). Kept as a separate
    # case to preserve the historical test count.
    def test_history_is_optional(self):
        sample = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_date_type_allowed_values(self):
        # Each whitelisted @date-type value validates.
        for pub_type in ['received', 'accepted', 'rev-recd']:
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <history>
                                <date date-type="%s">
                                  <day>17</day>
                                  <month>03</month>
                                  <year>2014</year>
                                </date>
                              </history>
                            </article-meta>
                          </front>
                        </article>
                     """ % pub_type
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_date_type_disallowed_values(self):
        # Any other @date-type value is rejected.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <history>
                            <date date-type="invalid">
                              <day>17</day>
                              <month>03</month>
                              <year>2014</year>
                            </date>
                          </history>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_date_type_allowed_values_multi(self):
        # Several dates with distinct allowed @date-type values validate.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <history>
                            <date date-type="received">
                              <day>17</day>
                              <month>03</month>
                              <year>2014</year>
                            </date>
                            <date date-type="accepted">
                              <day>17</day>
                              <month>03</month>
                              <year>2014</year>
                            </date>
                          </history>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))
class ProductTests(PhaseBasedTestCase):
"""Tests for:
- article/front/article-meta/product
"""
sch_phase = 'phase.product'
def test_absent(self):
sample = u"""<article>
<front>
<article-meta>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_absent_allowed_types(self):
for art_type in ['book-review', 'product-review']:
sample = u"""<article article-type="%s">
<front>
<article-meta>
</article-meta>
</front>
</article>
""" % art_type
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_allowed_types(self):
for art_type in ['book-review', 'product-review']:
sample = u"""<article article-type="%s">
<front>
<article-meta>
<product product-type="book">
<person-group person-group-type="author">
<name>
<surname>Sobrenome do autor</surname>
<given-names>Prenomes do autor</given-names>
</name>
</person-group>
<source>Título do livro</source>
<year>Ano de publicação</year>
<publisher-name>Nome da casa publicadora/Editora</publisher-name>
<publisher-loc>Local de publicação</publisher-loc>
<page-count count="total de paginação do livro (opcional)"/>
<isbn>ISBN do livro, se houver</isbn>
<inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
</product>
</article-meta>
</front>
</article>
""" % art_type
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_disallowed_types(self):
sample = u"""<article article-type="research-article">
<front>
<article-meta>
<product product-type="book">
<person-group person-group-type="author">
<name>
<surname>Sobrenome do autor</surname>
<given-names>Prenomes do autor</given-names>
</name>
</person-group>
<source>Título do livro</source>
<year>Ano de publicação</year>
<publisher-name>Nome da casa publicadora/Editora</publisher-name>
<publisher-loc>Local de publicação</publisher-loc>
<page-count count="total de paginação do livro (opcional)"/>
<isbn>ISBN do livro, se houver</isbn>
<inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
</product>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_no_type(self):
sample = u"""<article>
<front>
<article-meta>
<product product-type="book">
<person-group person-group-type="author">
<name>
<surname>Sobrenome do autor</surname>
<given-names>Prenomes do autor</given-names>
</name>
</person-group>
<source>Título do livro</source>
<year>Ano de publicação</year>
<publisher-name>Nome da casa publicadora/Editora</publisher-name>
<publisher-loc>Local de publicação</publisher-loc>
<page-count count="total de paginação do livro (opcional)"/>
<isbn>ISBN do livro, se houver</isbn>
<inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
</product>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_product_type(self):
sample = u"""<article article-type="book-review">
<front>
<article-meta>
<product>
<person-group person-group-type="author">
<name>
<surname>Sobrenome do autor</surname>
<given-names>Prenomes do autor</given-names>
</name>
</person-group>
<source>Título do livro</source>
<year>Ano de publicação</year>
<publisher-name>Nome da casa publicadora/Editora</publisher-name>
<publisher-loc>Local de publicação</publisher-loc>
<page-count count="total de paginação do livro (opcional)"/>
<isbn>ISBN do livro, se houver</isbn>
<inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
</product>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_allowed_product_types(self):
for prod_type in ['book', 'software', 'article', 'chapter', 'other']:
sample = u"""<article article-type="book-review">
<front>
<article-meta>
<product product-type="%s">
<person-group person-group-type="author">
<name>
<surname>Sobrenome do autor</surname>
<given-names>Prenomes do autor</given-names>
</name>
</person-group>
<source>Título do livro</source>
<year>Ano de publicação</year>
<publisher-name>Nome da casa publicadora/Editora</publisher-name>
<publisher-loc>Local de publicação</publisher-loc>
<page-count count="total de paginação do livro (opcional)"/>
<isbn>ISBN do livro, se houver</isbn>
<inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
</product>
</article-meta>
</front>
</article>
""" % prod_type
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_disallowed_product_types(self):
sample = u"""<article article-type="book-review">
<front>
<article-meta>
<product product-type="invalid">
<person-group person-group-type="author">
<name>
<surname>Sobrenome do autor</surname>
<given-names>Prenomes do autor</given-names>
</name>
</person-group>
<source>Título do livro</source>
<year>Ano de publicação</year>
<publisher-name>Nome da casa publicadora/Editora</publisher-name>
<publisher-loc>Local de publicação</publisher-loc>
<page-count count="total de paginação do livro (opcional)"/>
<isbn>ISBN do livro, se houver</isbn>
<inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
</product>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class SecTitleTests(PhaseBasedTestCase):
    """Tests for:
      - article/body/sec/title

    Every <sec> must contain a non-empty <title>.
    """
    sch_phase = 'phase.sectitle'

    def test_absent(self):
        # A section without any <title>: invalid.
        doc = u"""<article>
                      <body>
                        <sec>
                          <p>Foo bar</p>
                        </sec>
                      </body>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_has_title(self):
        # A section with a non-empty <title>: valid.
        doc = u"""<article>
                      <body>
                        <sec>
                          <title>Introduction</title>
                          <p>Foo bar</p>
                        </sec>
                      </body>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_has_empty_title(self):
        # A section whose <title> is empty: invalid.
        doc = u"""<article>
                      <body>
                        <sec>
                          <title></title>
                          <p>Foo bar</p>
                        </sec>
                      </body>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(doc.encode('utf-8'))))
class ParagraphTests(PhaseBasedTestCase):
    """Tests for //p elements.

    Paragraphs must not carry an @id attribute, whether they appear
    inside a <sec> or directly under <body>.
    """
    sch_phase = 'phase.paragraph'

    def test_sec_without_id(self):
        # <p> inside a section, no @id: valid.
        doc = u"""<article>
                      <body>
                        <sec>
                          <title>Intro</title>
                          <p>Foo bar</p>
                        </sec>
                      </body>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_sec_with_id(self):
        # <p> inside a section, with @id: invalid.
        doc = u"""<article>
                      <body>
                        <sec>
                          <title>Intro</title>
                          <p id="p01">Foo bar</p>
                        </sec>
                      </body>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_body_without_id(self):
        # <p> directly under <body>, no @id: valid.
        doc = u"""<article>
                      <body>
                        <p>Foo bar</p>
                      </body>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(doc.encode('utf-8'))))

    def test_body_with_id(self):
        # <p> directly under <body>, with @id: invalid.
        doc = u"""<article>
                      <body>
                        <p id="p01">Foo bar</p>
                      </body>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(doc.encode('utf-8'))))
class XrefRidTests(PhaseBasedTestCase):
    """Tests for //xref[@rid]

    The phase checks referential integrity: every xref/@rid must point
    at an existing element @id, and the target's element type must agree
    with xref/@ref-type.
    """
    sch_phase = 'phase.rid_integrity'
    def test_mismatching_rid(self):
        """@rid="aff1" has no matching @id anywhere in the document."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <contrib-group>
                            <contrib>
                              <xref ref-type="aff" rid="aff1">
                                <sup>I</sup>
                              </xref>
                            </contrib>
                          </contrib-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_matching_rid(self):
        """@rid="aff1" resolves to the <aff id="aff1"> element."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <contrib-group>
                            <contrib>
                              <xref ref-type="aff" rid="aff1">
                                <sup>I</sup>
                              </xref>
                            </contrib>
                          </contrib-group>
                          <aff id="aff1">
                            <label>I</label>
                            <institution content-type="orgname">
                              Secretaria Municipal de Saude de Belo Horizonte
                            </institution>
                            <addr-line>
                              <named-content content-type="city">Belo Horizonte</named-content>
                              <named-content content-type="state">MG</named-content>
                            </addr-line>
                            <country>Brasil</country>
                            <institution content-type="original">
                              Secretaria Municipal de Saude de Belo Horizonte. Belo Horizonte, MG, Brasil
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_mismatching_reftype(self):
        """@rid resolves, but ref-type="aff" disagrees with the
        table-wrap target, so validation must fail.
        """
        sample = u"""<article>
                      <body>
                        <sec>
                          <table-wrap id="t01">
                          </table-wrap>
                        </sec>
                        <sec>
                          <p>
                            <xref ref-type="aff" rid="t01">table 1</xref>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class XrefRefTypeTests(PhaseBasedTestCase):
    """Tests for //xref[@ref-type]

    The phase restricts xref/@ref-type to a closed list of values.
    """
    sch_phase = 'phase.xref_reftype_integrity'
    def test_allowed_ref_types(self):
        """Every whitelisted @ref-type value must validate."""
        for reftype in ['aff', 'app', 'author-notes', 'bibr', 'contrib',
                'corresp', 'disp-formula', 'fig', 'fn', 'sec',
                'supplementary-material', 'table', 'table-fn',
                'boxed-text']:
            sample = u"""<article>
                          <body>
                            <sec>
                              <p>
                                <xref ref-type="%s">foo</xref>
                              </p>
                            </sec>
                          </body>
                        </article>
                     """ % reftype
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_ref_types(self):
        """Values outside the whitelist must be rejected.

        Bug fix: the original list read `'plate' 'scheme'` (missing
        comma), which Python implicitly concatenated into the single
        bogus value 'platescheme' -- so neither 'plate' nor 'scheme'
        was actually being exercised.
        """
        for reftype in ['chem', 'kwd', 'list', 'other', 'plate',
                'scheme', 'statement']:
            sample = u"""<article>
                          <body>
                            <sec>
                              <p>
                                <xref ref-type="%s">foo</xref>
                              </p>
                            </sec>
                          </body>
                        </article>
                     """ % reftype
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertFalse(self._run_validation(sample))
class CaptionTests(PhaseBasedTestCase):
    """Tests for //caption

    The phase requires every <caption> to contain a <title>; other
    children (e.g. <label>) may coexist with it.
    """
    sch_phase = 'phase.caption'
    def test_with_title(self):
        """<caption> containing a <title> is valid."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <fig id="f03">
                          <label>Figura 3</label>
                          <caption>
                            <title>
                              Percentual de atividade mitocondrial.
                            </title>
                          </caption>
                          <graphic xlink:href="1234-5678-rctb-45-05-0110-gf01.tif"/>
                        </fig>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_without_title(self):
        """<caption> with only a <label> (no <title>) must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <fig id="f03">
                          <label>Figura 3</label>
                          <caption>
                            <label>
                              Percentual de atividade mitocondrial.
                            </label>
                          </caption>
                          <graphic xlink:href="1234-5678-rctb-45-05-0110-gf01.tif"/>
                        </fig>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_with_title_and_more(self):
        """Extra children alongside <title> are tolerated."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <fig id="f03">
                          <label>Figura 3</label>
                          <caption>
                            <title>
                              Percentual de atividade mitocondrial.
                            </title>
                            <label>
                              Percentual de atividade mitocondrial.
                            </label>
                          </caption>
                          <graphic xlink:href="1234-5678-rctb-45-05-0110-gf01.tif"/>
                        </fig>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
class LicenseTests(PhaseBasedTestCase):
    """Tests for article/front/article-meta/permissions/license element.

    The phase requires a <permissions>/<license> with
    license-type="open-access" and an xlink:href pointing at a known
    Creative Commons URL (http or https; trailing slash optional).
    """
    sch_phase = 'phase.license'
    def test_missing_permissions_elem(self):
        """No <permissions> at all must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_license(self):
        """<permissions> without a <license> child must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <permissions>
                          </permissions>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_allowed_license_type(self):
        """license-type="open-access" is the accepted value."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <permissions>
                            <license license-type="open-access"
                                     xlink:href="http://creativecommons.org/licenses/by/4.0/">
                              <license-p>
                                This is an open-access article distributed under the terms...
                              </license-p>
                            </license>
                          </permissions>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_disallowed_license_type(self):
        """Any other license-type (here "closed-access") must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <permissions>
                            <license license-type="closed-access"
                                     xlink:href="http://creativecommons.org/licenses/by/4.0/">
                              <license-p>
                                This is an open-access article distributed under the terms...
                              </license-p>
                            </license>
                          </permissions>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_allowed_license_href(self):
        """All whitelisted CC URLs with http scheme must validate."""
        allowed_licenses = [
                'http://creativecommons.org/licenses/by-nc/4.0/',
                'http://creativecommons.org/licenses/by-nc/3.0/',
                'http://creativecommons.org/licenses/by/4.0/',
                'http://creativecommons.org/licenses/by/3.0/',
                'http://creativecommons.org/licenses/by-nc-nd/4.0/',
                'http://creativecommons.org/licenses/by-nc-nd/3.0/',
                'http://creativecommons.org/licenses/by/3.0/igo/',
                'http://creativecommons.org/licenses/by-nc/3.0/igo/',
                'http://creativecommons.org/licenses/by-nc-nd/3.0/igo/',
        ]
        for license in allowed_licenses:
            sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                          <front>
                            <article-meta>
                              <permissions>
                                <license license-type="open-access"
                                         xlink:href="%s">
                                  <license-p>
                                    This is an open-access article distributed under the terms...
                                  </license-p>
                                </license>
                              </permissions>
                            </article-meta>
                          </front>
                        </article>
                     """ % license
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_allowed_license_href_https_scheme(self):
        """Same whitelist with https scheme must also validate."""
        allowed_licenses = [
                'https://creativecommons.org/licenses/by-nc/4.0/',
                'https://creativecommons.org/licenses/by-nc/3.0/',
                'https://creativecommons.org/licenses/by/4.0/',
                'https://creativecommons.org/licenses/by/3.0/',
                'https://creativecommons.org/licenses/by-nc-nd/4.0/',
                'https://creativecommons.org/licenses/by-nc-nd/3.0/',
                'https://creativecommons.org/licenses/by/3.0/igo/',
                'https://creativecommons.org/licenses/by-nc/3.0/igo/',
                'https://creativecommons.org/licenses/by-nc-nd/3.0/igo/',
        ]
        for license in allowed_licenses:
            sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                          <front>
                            <article-meta>
                              <permissions>
                                <license license-type="open-access"
                                         xlink:href="%s">
                                  <license-p>
                                    This is an open-access article distributed under the terms...
                                  </license-p>
                                </license>
                              </permissions>
                            </article-meta>
                          </front>
                        </article>
                     """ % license
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_license_href(self):
        """A non-CC URL (here MIT) must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <permissions>
                            <license license-type="open-access"
                                     xlink:href="http://opensource.org/licenses/MIT">
                              <license-p>
                                This is an open-access article distributed under the terms...
                              </license-p>
                            </license>
                          </permissions>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_trailing_slash(self):
        """A CC URL without the trailing slash is still accepted."""
        allowed_licenses = [
                'https://creativecommons.org/licenses/by-nc/4.0',
        ]
        for license in allowed_licenses:
            sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                          <front>
                            <article-meta>
                              <permissions>
                                <license license-type="open-access"
                                         xlink:href="%s">
                                  <license-p>
                                    This is an open-access article distributed under the terms...
                                  </license-p>
                                </license>
                              </permissions>
                            </article-meta>
                          </front>
                        </article>
                     """ % license
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
class AckTests(PhaseBasedTestCase):
    """Tests for article/back/ack element.

    The phase forbids <sec> inside <ack>; a flat title + paragraphs
    structure is expected.
    """
    sch_phase = 'phase.ack'
    def test_with_sec(self):
        """<ack> containing a <sec> must fail."""
        sample = u"""<article>
                      <back>
                        <ack>
                          <sec>
                            <p>Some</p>
                          </sec>
                        </ack>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_without_sec(self):
        """<ack> with just <title> and <p> is valid."""
        sample = u"""<article>
                      <back>
                        <ack>
                          <title>Acknowledgment</title>
                          <p>Some text</p>
                        </ack>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
class ElementCitationTests(PhaseBasedTestCase):
    """Tests for article/back/ref-list/ref/element-citation element.

    The phase requires: <name>, <etal> and <collab> only inside
    <person-group>; publication-type from a closed list; and
    <element-citation> only inside <ref>.
    """
    sch_phase = 'phase.element-citation'
    def test_with_name_outside_persongroup(self):
        """<name> directly under element-citation must fail."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <name>Foo</name>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_with_name_inside_persongroup(self):
        """<name> wrapped in <person-group> is valid."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group>
                                <name>Foo</name>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_with_etal_outside_persongroup(self):
        """<etal> directly under element-citation must fail."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <etal>Foo</etal>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_with_etal_inside_persongroup(self):
        """<etal> wrapped in <person-group> is valid."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group>
                                <etal>Foo</etal>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_with_collab_outside_persongroup(self):
        """<collab> directly under element-citation must fail."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <collab>Foo</collab>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_with_collab_inside_persongroup(self):
        """<collab> wrapped in <person-group> is valid."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group>
                                <collab>Foo</collab>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_allowed_publication_types(self):
        """Every whitelisted publication-type must validate."""
        for pub_type in ['journal', 'book', 'webpage', 'thesis', 'confproc',
                'patent', 'software', 'database', 'legal-doc', 'newspaper',
                'other', 'report']:
            sample = u"""<article>
                          <back>
                            <ref-list>
                              <ref>
                                <element-citation publication-type="%s">
                                </element-citation>
                              </ref>
                            </ref-list>
                          </back>
                        </article>
                     """ % pub_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_publication_types(self):
        """A publication-type outside the whitelist must fail."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="invalid">
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_outside_ref(self):
        """<element-citation> anywhere other than inside <ref> must fail."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <element-citation publication-type="journal">
                              <person-group>
                                <collab>Foo</collab>
                              </person-group>
                            </element-citation>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class PersonGroupTests(PhaseBasedTestCase):
    """Tests for
      - article/back/ref-list/ref/element-citation/person-group
      - article/front/article-meta/product/person-group

    The phase requires person-group/@person-group-type from a closed
    list and forbids loose text directly under element-citation /
    product person-groups.
    """
    sch_phase = 'phase.person-group'
    def test_missing_type(self):
        """person-group without @person-group-type must fail (citation)."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <person-group>
                                <name>Foo</name>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_type_at_product(self):
        """person-group without @person-group-type must fail (product)."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product>
                            <person-group>
                              <name>Foo</name>
                            </person-group>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_with_type(self):
        """person-group-type="author" is accepted."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <person-group person-group-type="author">
                                <name>Foo</name>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_allowed_types(self):
        """Every whitelisted person-group-type must validate."""
        for group_type in ['author', 'compiler', 'editor', 'translator']:
            sample = u"""<article>
                          <back>
                            <ref-list>
                              <ref>
                                <element-citation>
                                  <person-group person-group-type="%s">
                                    <name>Foo</name>
                                  </person-group>
                                </element-citation>
                              </ref>
                            </ref-list>
                          </back>
                        </article>
                     """ % group_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_type(self):
        """A person-group-type outside the whitelist must fail."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <person-group person-group-type="invalid">
                                <name>Foo</name>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_loose_text_below_element_citation_node(self):
        """Loose text ("HERE") inside person-group must fail (citation)."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group person-group-type="author">HERE
                                <collab>Foo</collab>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_loose_text_below_product_node(self):
        """Loose text ("HERE") inside person-group must fail (product)."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product>
                            <person-group person-group-type="author">HERE
                              <collab>Foo</collab>
                            </person-group>
                          </product>
                        </article-meta>
                      </front>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group person-group-type="author">
                                <collab>Foo</collab>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class FNGroupTests(PhaseBasedTestCase):
    """Tests for article/back/fn-group/fn element.

    The phase restricts fn/@fn-type (in back matter) to a closed list.
    """
    sch_phase = 'phase.fn-group'
    def test_allowed_fn_types(self):
        """Every whitelisted back-matter fn-type must validate."""
        for fn_type in ['abbr', 'com', 'financial-disclosure', 'supported-by',
                'presented-at', 'supplementary-material', 'other']:
            sample = u"""<article>
                          <back>
                            <fn-group>
                              <fn fn-type="%s">
                                <p>foobar</p>
                              </fn>
                            </fn-group>
                          </back>
                        </article>
                     """ % fn_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_fn_types(self):
        """An fn-type outside the whitelist must fail."""
        sample = u"""<article>
                      <back>
                        <fn-group>
                          <fn fn-type="invalid">
                            <p>foobar</p>
                          </fn>
                        </fn-group>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class XHTMLTableTests(PhaseBasedTestCase):
    """Tests for //table elements.

    The phase enforces XHTML-style table structure: only certain
    elements at table top level (no bare <tr>), <th> only under
    <thead>, and <td> not under <thead>.
    """
    sch_phase = 'phase.xhtml-table'
    def test_valid_toplevel(self):
        """Each allowed direct child of <table> must validate."""
        for elem in ['caption', 'summary', 'col', 'colgroup', 'thead', 'tfoot', 'tbody']:
            sample = u"""<article>
                          <body>
                            <sec>
                              <p>
                                <table>
                                  <%s></%s>
                                </table>
                              </p>
                            </sec>
                          </body>
                        </article>
                     """ % (elem, elem)
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_invalid_toplevel(self):
        """<tr> directly under <table> must fail."""
        for elem in ['tr']:
            sample = u"""<article>
                          <body>
                            <sec>
                              <p>
                                <table>
                                  <%s></%s>
                                </table>
                              </p>
                            </sec>
                          </body>
                        </article>
                     """ % (elem, elem)
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertFalse(self._run_validation(sample))
    def test_tbody_upon_th(self):
        """<th> inside <tbody> must fail (header cells belong in thead)."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <table>
                              <tbody>
                                <tr>
                                  <th>Foo</th>
                                </tr>
                              </tbody>
                            </table>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_thead_upon_th(self):
        """<th> inside <thead> is valid."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <table>
                              <thead>
                                <tr>
                                  <th>Foo</th>
                                </tr>
                              </thead>
                            </table>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_thead_upon_td(self):
        """<td> inside <thead> must fail."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <table>
                              <thead>
                                <tr>
                                  <td>Foo</td>
                                </tr>
                              </thead>
                            </table>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class SupplementaryMaterialMimetypeTests(PhaseBasedTestCase):
    """Tests for article//supplementary-material elements.

    The phase requires @mimetype AND @mime-subtype to be present
    together: the element is valid only when both are given.
    """
    sch_phase = 'phase.supplementary-material'
    def test_case1(self):
        """mimetype present, mime-subtype present -> valid."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <supplementary-material id="S1"
                                                xlink:title="local_file"
                                                xlink:href="1471-2105-1-1-s1.pdf"
                                                mimetype="application"
                                                mime-subtype="pdf">
                          <label>Additional material</label>
                          <caption>
                            <p>Supplementary PDF file supplied by authors.</p>
                          </caption>
                        </supplementary-material>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_case2(self):
        """mimetype present, mime-subtype missing -> invalid."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <supplementary-material id="S1"
                                                xlink:title="local_file"
                                                xlink:href="1471-2105-1-1-s1.pdf"
                                                mimetype="application">
                          <label>Additional material</label>
                          <caption>
                            <p>Supplementary PDF file supplied by authors.</p>
                          </caption>
                        </supplementary-material>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_case3(self):
        """mimetype missing, mime-subtype present -> invalid."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <supplementary-material id="S1"
                                                xlink:title="local_file"
                                                xlink:href="1471-2105-1-1-s1.pdf"
                                                mime-subtype="pdf">
                          <label>Additional material</label>
                          <caption>
                            <p>Supplementary PDF file supplied by authors.</p>
                          </caption>
                        </supplementary-material>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_case4(self):
        """mimetype missing, mime-subtype missing -> invalid."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <supplementary-material id="S1"
                                                xlink:title="local_file"
                                                xlink:href="1471-2105-1-1-s1.pdf">
                          <label>Additional material</label>
                          <caption>
                            <p>Supplementary PDF file supplied by authors.</p>
                          </caption>
                        </supplementary-material>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class AuthorNotesFNTests(PhaseBasedTestCase):
    """Tests for article/front/article-meta/author-notes/fn element.

    Same schematron phase as FNGroupTests, but the author-notes context
    allows a different (wider) set of fn-type values.
    """
    sch_phase = 'phase.fn-group'
    def test_allowed_fn_types(self):
        """Every whitelisted author-notes fn-type must validate."""
        for fn_type in ['author', 'con', 'conflict', 'corresp', 'current-aff',
                'deceased', 'edited-by', 'equal', 'on-leave',
                'participating-researchers', 'present-address',
                'previously-at', 'study-group-members', 'other',
                'presented-at', 'presented-by']:
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <author-notes>
                                <fn fn-type="%s">
                                  <p>foobar</p>
                                </fn>
                              </author-notes>
                            </article-meta>
                          </front>
                        </article>
                     """ % fn_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_fn_types(self):
        """An fn-type outside the whitelist must fail."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <author-notes>
                            <fn fn-type="invalid">
                              <p>foobar</p>
                            </fn>
                          </author-notes>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class ArticleAttributesTests(PhaseBasedTestCase):
    """Tests for article element.

    The phase requires the root <article> to carry @article-type (from
    a closed list), @xml:lang, @dtd-version and @specific-use with an
    accepted SPS version.
    """
    sch_phase = 'phase.article-attrs'
    def test_allowed_article_types(self):
        """Every whitelisted article-type must validate."""
        for art_type in ['other', 'article-commentary', 'case-report',
                'editorial', 'correction', 'letter', 'research-article',
                'in-brief', 'review-article', 'book-review', 'retraction',
                'brief-report', 'rapid-communication', 'reply', 'translation']:
            sample = u"""<article article-type="%s" xml:lang="en" dtd-version="1.0" specific-use="sps-1.3">
                        </article>
                     """ % art_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_article_type(self):
        """An article-type outside the whitelist must fail."""
        sample = u"""<article article-type="invalid" dtd-version="1.0" specific-use="sps-1.3">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_article_type(self):
        """Missing @article-type must fail."""
        sample = u"""<article xml:lang="en" dtd-version="1.0" specific-use="sps-1.3">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_xmllang(self):
        """Missing @xml:lang must fail."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" specific-use="sps-1.3">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_dtdversion(self):
        """Missing @dtd-version must fail."""
        sample = u"""<article article-type="research-article" xml:lang="en" specific-use="sps-1.3">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_sps_version(self):
        """Missing @specific-use must fail."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" xml:lang="en">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_invalid_sps_version(self):
        """An unsupported SPS version (sps-1.0) must fail."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" xml:lang="en" specific-use="sps-1.0">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class NamedContentTests(PhaseBasedTestCase):
    """Tests for article/front/article-meta/aff/addr-line/named-content elements.

    The phase requires @content-type on named-content, restricted to
    'city' or 'state'.
    """
    sch_phase = 'phase.named-content_attrs'
    def test_missing_contenttype(self):
        """named-content without @content-type must fail."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <addr-line>
                              <named-content>Foo</named-content>
                            </addr-line>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_allowed_contenttype(self):
        """content-type 'city' and 'state' must validate."""
        for ctype in ['city', 'state']:
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <aff>
                                <addr-line>
                                  <named-content content-type="%s">Foo</named-content>
                                </addr-line>
                              </aff>
                            </article-meta>
                          </front>
                        </article>
                     """ % ctype
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_contenttype(self):
        """Any other content-type must fail."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <addr-line>
                              <named-content content-type="invalid">Foo</named-content>
                            </addr-line>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class MonthTests(PhaseBasedTestCase):
    """Tests for //month elements.

    The phase requires <month> to be an integer in 1..12, with or
    without a leading zero; month names are rejected.
    """
    sch_phase = 'phase.month'
    def test_range_1_12(self):
        """Unpadded values 1..12 must validate."""
        for month in range(1, 13):
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <pub-date>
                                <month>%s</month>
                              </pub-date>
                            </article-meta>
                          </front>
                        </article>
                     """ % month
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_range_01_12(self):
        """Zero-padded values 01..12 must validate."""
        for month in range(1, 13):
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <pub-date>
                                <month>%02d</month>
                              </pub-date>
                            </article-meta>
                          </front>
                        </article>
                     """ % month
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_out_of_range(self):
        """0 and 13 are outside the valid range and must fail."""
        for month in [0, 13]:
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <pub-date>
                                <month>%s</month>
                              </pub-date>
                            </article-meta>
                          </front>
                        </article>
                     """ % month
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertFalse(self._run_validation(sample))
    def test_must_be_integer(self):
        """A month name ("January") must fail."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <pub-date>
                            <month>January</month>
                          </pub-date>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class SizeTests(PhaseBasedTestCase):
    """Tests for:
      - article/front/article-meta/product/size
      - article/back/ref-list/ref/element-citation/size

    The phase requires size/@units to be present and equal to "pages".
    """
    sch_phase = 'phase.size'
    def test_in_element_citation(self):
        """<size units="pages"> inside element-citation is valid."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <size units="pages">2</size>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_in_product(self):
        """<size units="pages"> inside product is valid."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product>
                            <size units="pages">2</size>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_missing_units_in_product(self):
        """<size> without @units must fail (product context)."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product>
                            <size>2</size>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_units_in_element_citation(self):
        """<size> without @units must fail (citation context)."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <size>2</size>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_invalid_units_value(self):
        """@units with a value other than "pages" must fail."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product>
                            <size units="invalid">2</size>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class ListTests(PhaseBasedTestCase):
    """Tests for list elements.

    The phase requires @list-type from a closed list on every <list>,
    including nested lists.
    """
    sch_phase = 'phase.list'
    def test_allowed_list_type(self):
        """Every whitelisted list-type (outer and nested) must validate."""
        for list_type in ['order', 'bullet', 'alpha-lower', 'alpha-upper',
                'roman-lower', 'roman-upper', 'simple']:
            sample = u"""<article>
                          <body>
                            <sec>
                              <p>
                                <list list-type="%s">
                                  <title>Lista Númerica</title>
                                  <list-item>
                                    <p>Nullam gravida tellus eget condimentum egestas.</p>
                                  </list-item>
                                  <list-item>
                                    <list list-type="%s">
                                      <list-item>
                                        <p>Curabitur luctus lorem ac feugiat pretium.</p>
                                      </list-item>
                                    </list>
                                  </list-item>
                                  <list-item>
                                    <p>Donec pulvinar odio ut enim lobortis, eu dignissim elit accumsan.</p>
                                  </list-item>
                                </list>
                              </p>
                            </sec>
                          </body>
                        </article>
                     """ % (list_type, list_type)
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_list_type(self):
        """An invalid list-type on outer and nested lists must fail."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <list list-type="invalid">
                              <title>Lista Númerica</title>
                              <list-item>
                                <p>Nullam gravida tellus eget condimentum egestas.</p>
                              </list-item>
                              <list-item>
                                <list list-type="invalid">
                                  <list-item>
                                    <p>Curabitur luctus lorem ac feugiat pretium.</p>
                                  </list-item>
                                </list>
                              </list-item>
                              <list-item>
                                <p>Donec pulvinar odio ut enim lobortis, eu dignissim elit accumsan.</p>
                              </list-item>
                            </list>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_disallowed_sub_list_type(self):
        """A valid outer list with an invalid nested list-type must fail."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <list list-type="order">
                              <title>Lista Númerica</title>
                              <list-item>
                                <p>Nullam gravida tellus eget condimentum egestas.</p>
                              </list-item>
                              <list-item>
                                <list list-type="invalid">
                                  <list-item>
                                    <p>Curabitur luctus lorem ac feugiat pretium.</p>
                                  </list-item>
                                </list>
                              </list-item>
                              <list-item>
                                <p>Donec pulvinar odio ut enim lobortis, eu dignissim elit accumsan.</p>
                              </list-item>
                            </list>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_list_type(self):
        """Missing @list-type on the outer list must fail."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <list>
                              <title>Lista Númerica</title>
                              <list-item>
                                <p>Nullam gravida tellus eget condimentum egestas.</p>
                              </list-item>
                              <list-item>
                                <list>
                                  <list-item>
                                    <p>Curabitur luctus lorem ac feugiat pretium.</p>
                                  </list-item>
                                </list>
                              </list-item>
                              <list-item>
                                <p>Donec pulvinar odio ut enim lobortis, eu dignissim elit accumsan.</p>
                              </list-item>
                            </list>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_sub_list_type(self):
        """Missing @list-type on a nested list must fail."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <list list-type="order">
                              <title>Lista Númerica</title>
                              <list-item>
                                <p>Nullam gravida tellus eget condimentum egestas.</p>
                              </list-item>
                              <list-item>
                                <list>
                                  <list-item>
                                    <p>Curabitur luctus lorem ac feugiat pretium.</p>
                                  </list-item>
                                </list>
                              </list-item>
                              <list-item>
                                <p>Donec pulvinar odio ut enim lobortis, eu dignissim elit accumsan.</p>
                              </list-item>
                            </list>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class MediaTests(PhaseBasedTestCase):
    """Tests for article/body//p/media elements.

    The phase requires all three of @mimetype, @mime-subtype and
    @xlink:href on <media>.
    """
    sch_phase = 'phase.media_attributes'
    def test_missing_mimetype(self):
        """Missing @mimetype must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <p><media mime-subtype="mp4" xlink:href="1234-5678-rctb-45-05-0110-m01.mp4"/></p>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_mime_subtype(self):
        """Missing @mime-subtype must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <p><media mimetype="video" xlink:href="1234-5678-rctb-45-05-0110-m01.mp4"/></p>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_href(self):
        """Missing @xlink:href must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <p><media mimetype="video" mime-subtype="mp4"/></p>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_all_present(self):
        """All three attributes present -> valid."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <p><media mimetype="video" mime-subtype="mp4" xlink:href="1234-5678-rctb-45-05-0110-m01.mp4"/></p>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
class ExtLinkTests(PhaseBasedTestCase):
    """Tests for ext-link elements.

    The phase requires @ext-link-type (from a closed list) and
    @xlink:href; for type "uri" the href must carry a URI scheme.
    """
    sch_phase = 'phase.ext-link'
    def test_complete(self):
        """ext-link with both required attributes is valid."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <sec>
                          <p>Neque porro quisquam est <ext-link ext-link-type="uri" xlink:href="http://www.scielo.org">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))
    def test_allowed_extlinktype(self):
        """'uri' and 'clinical-trial' ext-link-types must validate."""
        for link_type in ['uri', 'clinical-trial' ]:
            sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                          <body>
                            <sec>
                              <p>Neque porro quisquam est <ext-link ext-link-type="%s" xlink:href="http://www.scielo.org">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                            </sec>
                          </body>
                        </article>
                     """ % link_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_extlinktype(self):
        """An ext-link-type outside the whitelist must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <sec>
                          <p>Neque porro quisquam est <ext-link ext-link-type="invalid" xlink:href="http://www.scielo.org">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_extlinktype(self):
        """Missing @ext-link-type must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <sec>
                          <p>Neque porro quisquam est <ext-link xlink:href="http://www.scielo.org">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_xlinkhref(self):
        """Missing @xlink:href must fail."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <sec>
                          <p>Neque porro quisquam est <ext-link ext-link-type="uri">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_uri_without_scheme(self):
        """A scheme-less href ("www.scielo.org") must fail for type uri."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <sec>
                          <p>Neque porro quisquam est <ext-link ext-link-type="uri" xlink:href="www.scielo.org">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class SubArticleAttributesTests(PhaseBasedTestCase):
    """Tests for sub-article element.

    The phase requires @article-type (from a closed list), @xml:lang
    and @id on every <sub-article>.
    """
    sch_phase = 'phase.sub-article-attrs'
    def test_allowed_article_types(self):
        """Every whitelisted sub-article article-type must validate."""
        for art_type in ['abstract', 'letter', 'reply', 'translation']:
            sample = u"""<article article-type="research-article" xml:lang="en" dtd-version="1.0" specific-use="sps-1.3">
                          <sub-article article-type="%s" xml:lang="pt" id="sa1"></sub-article>
                        </article>
                     """ % art_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))
    def test_disallowed_article_type(self):
        """An article-type outside the whitelist must fail."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" specific-use="sps-1.3">
                      <sub-article article-type="invalid" xml:lang="pt" id="trans_pt"></sub-article>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_article_type(self):
        """Missing @article-type on sub-article must fail."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" specific-use="sps-1.3">
                      <sub-article xml:lang="pt" id="trans_pt"></sub-article>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_xmllang(self):
        """Missing @xml:lang on sub-article must fail."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" specific-use="sps-1.3">
                      <sub-article article-type="translation" id="trans_pt"></sub-article>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
    def test_missing_id(self):
        """Missing @id on sub-article must fail."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" specific-use="sps-1.3">
                      <sub-article article-type="translation" xml:lang="pt"></sub-article>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class ResponseAttributesTests(PhaseBasedTestCase):
    """Tests for the response element.

    Exercises the schematron phase that checks the mandatory attributes
    of <response>: @response-type, @xml:lang and @id.
    """
    sch_phase = 'phase.response-attrs'

    def test_allowed_response_types(self):
        # `resp_type`, not `type`: the original shadowed the builtin.
        for resp_type in ['addendum', 'discussion', 'reply']:
            sample = u"""<article>
                           <response response-type="%s" xml:lang="pt" id="r1"></response>
                         </article>
                      """ % resp_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))

    def test_disallowed_response_type(self):
        # A value outside the allowed set must fail validation.
        sample = u"""<article>
                       <response response-type="invalid" xml:lang="pt" id="r1"></response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_response_type(self):
        # @response-type is mandatory on response.
        sample = u"""<article>
                       <response xml:lang="pt" id="r1"></response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_xmllang(self):
        # @xml:lang is mandatory on response.
        sample = u"""<article>
                       <response response-type="invalid" id="r1"></response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_id(self):
        # @id is mandatory on response.
        sample = u"""<article>
                       <response response-type="invalid" xml:lang="pt"></response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class ResponseReplyAttributeTests(PhaseBasedTestCase):
    """Tests for response[@response-type='reply'] elements.

    A reply response requires the enclosing article to be of type
    `article-commentary` and its front-stub to carry a <related-article>
    with bibliographic location info (@vol plus @page or @elocation-id).
    """
    sch_phase = 'phase.response-reply-type'

    def test_reply_type_demands_an_article_type(self):
        """ the article-type of value `article-commentary` is required
        """
        sample = u"""<article article-type="article-commentary">
                       <response response-type="reply" xml:lang="pt" id="r1">
                         <front-stub>
                           <related-article related-article-type="commentary-article" id="ra1" vol="109" page="87-92"/>
                         </front-stub>
                       </response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_reply_type_invalid_article_type(self):
        """ anything different of `article-commentary` is invalid
        """
        sample = u"""<article article-type="research-article">
                       <response response-type="reply" xml:lang="pt" id="r1">
                         <front-stub>
                           <related-article related-article-type="commentary-article" id="ra1" vol="109" page="87-92"/>
                         </front-stub>
                       </response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_reply_type_missing_related_article(self):
        """ the related-article element is required
        """
        sample = u"""<article article-type="article-commentary">
                       <response response-type="reply" xml:lang="pt" id="r1">
                         <front-stub>
                         </front-stub>
                       </response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_related_article_missing_vol(self):
        # @vol is mandatory on the related-article of a reply.
        sample = u"""<article article-type="article-commentary">
                       <response response-type="reply" xml:lang="pt" id="r1">
                         <front-stub>
                           <related-article related-article-type="commentary-article" id="ra1" page="87-92"/>
                         </front-stub>
                       </response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_related_article_missing_page(self):
        # @page may be absent as long as @elocation-id is present.
        sample = u"""<article article-type="article-commentary">
                       <response response-type="reply" xml:lang="pt" id="r1">
                         <front-stub>
                           <related-article related-article-type="commentary-article" id="ra1" vol="109" elocation-id="1q2w"/>
                         </front-stub>
                       </response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_related_article_missing_elocationid(self):
        # @elocation-id may be absent as long as @page is present.
        sample = u"""<article article-type="article-commentary">
                       <response response-type="reply" xml:lang="pt" id="r1">
                         <front-stub>
                           <related-article related-article-type="commentary-article" id="ra1" vol="109" page="87-92"/>
                         </front-stub>
                       </response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_related_article_missing_page_and_elocationid(self):
        # At least one of @page / @elocation-id is required.
        sample = u"""<article article-type="article-commentary">
                       <response response-type="reply" xml:lang="pt" id="r1">
                         <front-stub>
                           <related-article related-article-type="commentary-article" id="ra1" vol="109"/>
                         </front-stub>
                       </response>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class RelatedArticleTypesTests(PhaseBasedTestCase):
    """Tests for the related-article element.

    Exercises the schematron phase that checks the mandatory attributes
    of <related-article>: @related-article-type and @id.
    """
    sch_phase = 'phase.related-article-attrs'

    def test_allowed_related_article_types(self):
        # `rel_type`, not `type`: the original shadowed the builtin.
        for rel_type in ['corrected-article', 'press-release', 'commentary-article', 'article-reference']:
            sample = u"""<article>
                           <front>
                             <article-meta>
                               <related-article related-article-type="%s" id="01"/>
                             </article-meta>
                           </front>
                         </article>
                      """ % rel_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))

    def test_disallowed_related_article_type(self):
        # A value outside the allowed set must fail validation.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <related-article related-article-type="invalid" id="01"/>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_id(self):
        # @id is mandatory on related-article.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <related-article related-article-type="corrected-article"/>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_related_article_type(self):
        # @related-article-type is mandatory on related-article.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <related-article id="01"/>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class CorrectionTests(PhaseBasedTestCase):
    """Tests for article[@article-type="correction"] elements.

    A correction article must reference the article it corrects via
    related-article[@related-article-type='corrected-article'].
    """
    sch_phase = 'phase.correction'

    def test_expected_elements(self):
        # Valid case: correction article pointing at the corrected article.
        sample = u"""<article article-type="correction">
                       <front>
                         <article-meta>
                           <related-article related-article-type="corrected-article" id="01"/>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_missing_related_article(self):
        """ must have a related-article[@related-article-type='corrected-article']
        element.
        """
        sample = u"""<article article-type="correction">
                       <front>
                         <article-meta>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_article_type_must_be_correction(self):
        # A corrected-article reference is only valid inside a correction.
        sample = u"""<article article-type="research-article">
                       <front>
                         <article-meta>
                           <related-article related-article-type="corrected-article" id="01"/>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class InBriefTests(PhaseBasedTestCase):
    """Tests for article[@article-type="in-brief"] elements.

    An in-brief article must reference its full article via
    related-article[@related-article-type='article-reference'].
    """
    sch_phase = 'phase.in-brief'

    def test_expected_elements(self):
        # Valid case: in-brief article pointing at the referenced article.
        sample = u"""<article article-type="in-brief">
                       <front>
                         <article-meta>
                           <related-article related-article-type="article-reference" id="01"/>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_missing_related_article(self):
        """ must have a related-article[@related-article-type='article-reference']
        element.
        """
        sample = u"""<article article-type="in-brief">
                       <front>
                         <article-meta>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_article_type_must_be_in_brief(self):
        # An article-reference is only valid inside an in-brief article.
        sample = u"""<article article-type="research-article">
                       <front>
                         <article-meta>
                           <related-article related-article-type="article-reference" id="01"/>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class FundingGroupTests(PhaseBasedTestCase):
    """Tests for article/front/article-meta/funding-group elements.

    When a financial-disclosure footnote exists, the funding-group must
    carry both an award-group and a funding-statement.
    """
    sch_phase = 'phase.funding-group'

    def test_funding_statement_when_fn_is_present_missing_award_group(self):
        # funding-statement alone is not enough: award-group is also required.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <funding-group>
                             <funding-statement>This study was supported by FAPEST #12345</funding-statement>
                           </funding-group>
                         </article-meta>
                       </front>
                       <back>
                         <fn-group>
                           <fn id="fn01" fn-type="financial-disclosure">
                             <p>This study was supported by FAPEST #12345</p>
                           </fn>
                         </fn-group>
                       </back>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_funding_statement_when_fn_is_present(self):
        # Valid case: both award-group and funding-statement are present.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <funding-group>
                             <award-group>
                               <funding-source>FAPEST</funding-source>
                               <award-id>12345</award-id>
                             </award-group>
                             <funding-statement>This study was supported by FAPEST #12345</funding-statement>
                           </funding-group>
                         </article-meta>
                       </front>
                       <back>
                         <fn-group>
                           <fn id="fn01" fn-type="financial-disclosure">
                             <p>This study was supported by FAPEST #12345</p>
                           </fn>
                         </fn-group>
                       </back>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_missing_funding_statement_when_fn_is_present(self):
        # award-group alone is not enough: funding-statement is also required.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <funding-group>
                             <award-group>
                               <funding-source>FAPEST</funding-source>
                               <award-id>12345</award-id>
                             </award-group>
                           </funding-group>
                         </article-meta>
                       </front>
                       <back>
                         <fn-group>
                           <fn id="fn01" fn-type="financial-disclosure">
                             <p>This study was supported by FAPEST #12345</p>
                           </fn>
                         </fn-group>
                       </back>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class AffCountryTests(PhaseBasedTestCase):
    """ //aff/country/@country is required.

    See: https://github.com/scieloorg/packtools/issues/44
    """
    sch_phase = 'phase.aff_country'

    def test_attribute_is_present(self):
        # Valid case: @country attribute present on aff/country.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <aff>
                             <institution content-type="original">
                               Grupo de ...
                             </institution>
                             <country country="BR">Brasil</country>
                           </aff>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_attribute_is_absent(self):
        # @country is mandatory.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <aff>
                             <institution content-type="original">
                               Grupo de ...
                             </institution>
                             <country>Brasil</country>
                           </aff>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_attribute_value_is_not_validated(self):
        # Only presence is checked: "XZ" is not a real country code but passes.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <aff>
                             <institution content-type="original">
                               Grupo de ...
                             </institution>
                             <country country="XZ">Brasil</country>
                           </aff>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_country_cannot_be_empty(self):
        # The country element must have text content.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <aff>
                             <institution content-type="original">
                               Grupo de ...
                             </institution>
                             <country country="XZ"></country>
                           </aff>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_country_cannot_be_empty_closed_element(self):
        # Same rule applies to a self-closed (empty) country element.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <aff>
                             <institution content-type="original">
                               Grupo de ...
                             </institution>
                             <country country="XZ"/>
                           </aff>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class RefTests(PhaseBasedTestCase):
    """Tests for article/back/ref-list/ref elements.

    Each <ref> must carry both a non-empty <mixed-citation> and an
    <element-citation>.
    """
    sch_phase = 'phase.ref'

    def test_element_and_mixed_citation_elements(self):
        # Valid case: both citation forms are present.
        sample = u"""<article>
                       <back>
                         <ref-list>
                           <ref>
                             <mixed-citation>Aires M, Paz AA, Perosa CT. Situação de saúde e grau de dependência de pessoas idosas institucionalizadas. <italic>Rev Gaucha Enferm.</italic> 2009;30(3):192-9.</mixed-citation>
                             <element-citation publication-type="journal">
                               <person-group person-group-type="author">
                                 <name>
                                   <surname>Aires</surname>
                                   <given-names>M</given-names>
                                 </name>
                                 <name>
                                   <surname>Paz</surname>
                                   <given-names>AA</given-names>
                                 </name>
                                 <name>
                                   <surname>Perosa</surname>
                                   <given-names>CT</given-names>
                                 </name>
                               </person-group>
                               <article-title>Situação de saúde e grau de dependência de pessoas idosas institucionalizadas</article-title>
                               <source>Rev Gaucha Enferm</source>
                               <year>2009</year>
                               <volume>30</volume>
                               <issue>3</issue>
                               <fpage>192</fpage>
                               <lpage>199</lpage>
                             </element-citation>
                           </ref>
                         </ref-list>
                       </back>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_missing_element_citation(self):
        # element-citation is mandatory inside ref.
        sample = u"""<article>
                       <back>
                         <ref-list>
                           <ref>
                             <mixed-citation>Aires M, Paz AA, Perosa CT. Situação de saúde e grau de dependência de pessoas idosas institucionalizadas. <italic>Rev Gaucha Enferm.</italic> 2009;30(3):192-9.</mixed-citation>
                           </ref>
                         </ref-list>
                       </back>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_mixed_citation(self):
        # mixed-citation is mandatory inside ref.
        sample = u"""<article>
                       <back>
                         <ref-list>
                           <ref>
                             <element-citation publication-type="journal">
                               <person-group person-group-type="author">
                                 <name>
                                   <surname>Aires</surname>
                                   <given-names>M</given-names>
                                 </name>
                                 <name>
                                   <surname>Paz</surname>
                                   <given-names>AA</given-names>
                                 </name>
                                 <name>
                                   <surname>Perosa</surname>
                                   <given-names>CT</given-names>
                                 </name>
                               </person-group>
                               <article-title>Situação de saúde e grau de dependência de pessoas idosas institucionalizadas</article-title>
                               <source>Rev Gaucha Enferm</source>
                               <year>2009</year>
                               <volume>30</volume>
                               <issue>3</issue>
                               <fpage>192</fpage>
                               <lpage>199</lpage>
                             </element-citation>
                           </ref>
                         </ref-list>
                       </back>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_mixed_citation_cannot_be_empty(self):
        # An empty mixed-citation is as invalid as a missing one.
        sample = u"""<article>
                       <back>
                         <ref-list>
                           <ref>
                             <mixed-citation></mixed-citation>
                             <element-citation publication-type="journal">
                               <person-group person-group-type="author">
                                 <name>
                                   <surname>Aires</surname>
                                   <given-names>M</given-names>
                                 </name>
                                 <name>
                                   <surname>Paz</surname>
                                   <given-names>AA</given-names>
                                 </name>
                                 <name>
                                   <surname>Perosa</surname>
                                   <given-names>CT</given-names>
                                 </name>
                               </person-group>
                               <article-title>Situação de saúde e grau de dependência de pessoas idosas institucionalizadas</article-title>
                               <source>Rev Gaucha Enferm</source>
                               <year>2009</year>
                               <volume>30</volume>
                               <issue>3</issue>
                               <fpage>192</fpage>
                               <lpage>199</lpage>
                             </element-citation>
                           </ref>
                         </ref-list>
                       </back>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class AffTests(PhaseBasedTestCase):
    """ /article//aff is required.

    The aff element must contain a country element.
    """
    sch_phase = 'phase.aff'

    def test_country_is_present(self):
        # Valid case: aff carries a country element.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <aff>
                             <institution content-type="original">
                               Grupo de ...
                             </institution>
                             <country country="BR">Brasil</country>
                           </aff>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_country_is_absent(self):
        # aff without a country element must fail validation.
        sample = u"""<article>
                       <front>
                         <article-meta>
                           <aff>
                             <institution content-type="original">
                               Grupo de ...
                             </institution>
                           </aff>
                         </article-meta>
                       </front>
                     </article>
                  """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
|
gustavofonseca/packtools
|
tests/test_schematron_1_3.py
|
Python
|
bsd-2-clause
| 181,171 | 0.000746 |
from db.testing import DatabaseTestCase, TEST_DATA_PATH
import db.exceptions
import db.data
import os.path
import json
import mock
import copy
class DataDBTestCase(DatabaseTestCase):
    """Tests for the low-level / high-level data access layer in db.data."""

    def setUp(self):
        super(DataDBTestCase, self).setUp()
        self.test_mbid = "0dad432b-16cc-4bf0-8961-fd31d124b01b"
        # Use a context manager so the fixture file handle is closed
        # promptly (the original `open(...).read()` leaked it).
        with open(os.path.join(TEST_DATA_PATH, self.test_mbid + '.json')) as f:
            self.test_lowlevel_data_json = f.read()
        self.test_lowlevel_data = json.loads(self.test_lowlevel_data_json)

    @mock.patch("db.data.sanity_check_data")
    @mock.patch("db.data.write_low_level")
    @mock.patch("db.data.clean_metadata")
    def test_submit_low_level_data(self, clean, write, sanity):
        """Submission with valid data"""
        clean.side_effect = lambda x: x
        sanity.return_value = None
        db.data.submit_low_level_data(self.test_mbid, self.test_lowlevel_data)
        write.assert_called_with(self.test_mbid, self.test_lowlevel_data)

    @mock.patch("db.data.sanity_check_data")
    @mock.patch("db.data.write_low_level")
    @mock.patch("db.data.clean_metadata")
    def test_submit_low_level_data_rewrite_keys(self, clean, write, sanity):
        """submit rewrites trackid -> recordingid, and sets lossless to a boolean"""
        clean.side_effect = lambda x: x
        sanity.return_value = None
        input = {"metadata": {"tags": {"musicbrainz_trackid": [self.test_mbid]}, "audio_properties": {"lossless": 1}}}
        output = {"metadata": {"tags": {"musicbrainz_recordingid": [self.test_mbid]}, "audio_properties": {"lossless": True}}}
        db.data.submit_low_level_data(self.test_mbid, input)
        write.assert_called_with(self.test_mbid, output)

    @mock.patch("db.data.sanity_check_data")
    @mock.patch("db.data.write_low_level")
    @mock.patch("db.data.clean_metadata")
    def test_submit_low_level_data_bad_mbid(self, clean, write, sanity):
        """Check that hl write raises an error if the provided mbid is different to what is in the metadata"""
        clean.side_effect = lambda x: x
        sanity.return_value = None
        input = {"metadata": {"tags": {"musicbrainz_recordingid": ["not-the-recording-mbid"]}, "audio_properties": {"lossless": False}}}
        with self.assertRaises(db.exceptions.BadDataException):
            db.data.submit_low_level_data(self.test_mbid, input)

    @mock.patch("db.data.sanity_check_data")
    @mock.patch("db.data.write_low_level")
    @mock.patch("db.data.clean_metadata")
    def test_submit_low_level_data_missing_keys(self, clean, write, sanity):
        """Check that hl write raises an error if some required keys are missing"""
        clean.side_effect = lambda x: x
        sanity.return_value = ["missing", "key"]
        with self.assertRaises(db.exceptions.BadDataException):
            db.data.submit_low_level_data(self.test_mbid, self.test_lowlevel_data)

    def test_write_load_low_level(self):
        """Writing and loading a dict returns the same data"""
        one = {"data": "one", "metadata": {"audio_properties": {"lossless": True}, "version": {"essentia_build_sha": "x"}}}
        db.data.write_low_level(self.test_mbid, one)
        self.assertEqual(one, db.data.load_low_level(self.test_mbid))

    def test_load_low_level_offset(self):
        """If two items with the same mbid are added, you can select between them with offset"""
        one = {"data": "one", "metadata": {"audio_properties": {"lossless": True}, "version": {"essentia_build_sha": "x"}}}
        two = {"data": "two", "metadata": {"audio_properties": {"lossless": True}, "version": {"essentia_build_sha": "x"}}}
        db.data.write_low_level(self.test_mbid, one)
        db.data.write_low_level(self.test_mbid, two)
        # offset defaults to the first submission; explicit offsets select others.
        self.assertEqual(one, db.data.load_low_level(self.test_mbid))
        self.assertEqual(one, db.data.load_low_level(self.test_mbid, 0))
        self.assertEqual(two, db.data.load_low_level(self.test_mbid, 1))

    def test_load_low_level_none(self):
        """If no lowlevel data is loaded, or offset is too high, an exception is raised"""
        with self.assertRaises(db.exceptions.NoDataFoundException):
            db.data.load_low_level(self.test_mbid)
        one = {"data": "one", "metadata": {"audio_properties": {"lossless": True}, "version": {"essentia_build_sha": "x"}}}
        db.data.write_low_level(self.test_mbid, one)
        with self.assertRaises(db.exceptions.NoDataFoundException):
            db.data.load_low_level(self.test_mbid, 1)

    def _get_ll_id_from_mbid(self, mbid):
        # Helper: all lowlevel row ids for an mbid, in result order.
        with db.engine.connect() as connection:
            ret = []
            result = connection.execute("select id from lowlevel where mbid = %s", (mbid, ))
            for row in result:
                ret.append(row[0])
            return ret

    def test_write_load_high_level(self):
        """Writing and loading a dict returns the same data"""
        ll = {"data": "one", "metadata": {"audio_properties": {"lossless": True}, "version": {"essentia_build_sha": "x"}}}
        ver = {"hlversion": "123", "models_essentia_git_sha": "v1"}
        hl = {"highlevel": {"model1": {"x": "y"}, "model2": {"a": "b"}},
              "metadata": {"meta": "here",
                           "version": {"highlevel": ver}
                          }
             }
        db.data.add_model("model1", "v1", "show")
        db.data.add_model("model2", "v1", "show")
        build_sha = "test"
        db.data.write_low_level(self.test_mbid, ll)
        ll_id = self._get_ll_id_from_mbid(self.test_mbid)[0]
        db.data.write_high_level(self.test_mbid, ll_id, hl, build_sha)
        # load_high_level merges the model version into each model's result.
        hl_expected = copy.deepcopy(hl)
        for mname in ["model1", "model2"]:
            hl_expected["highlevel"][mname]["version"] = ver
        self.assertEqual(hl_expected, db.data.load_high_level(self.test_mbid))

    def test_load_high_level_offset(self):
        # If there are two lowlevel items, but only one highlevel, we should raise NoDataFound
        second_data = copy.deepcopy(self.test_lowlevel_data)
        second_data["metadata"]["tags"]["album"] = ["Another album"]
        db.data.write_low_level(self.test_mbid, self.test_lowlevel_data)
        db.data.write_low_level(self.test_mbid, second_data)
        ll_id1, ll_id2 = self._get_ll_id_from_mbid(self.test_mbid)
        db.data.add_model("model1", "v1", "show")
        db.data.add_model("model2", "v1", "show")
        build_sha = "sha"
        ver = {"hlversion": "123", "models_essentia_git_sha": "v1"}
        hl1 = {"highlevel": {"model1": {"x": "y"}, "model2": {"a": "b"}},
               "metadata": {"meta": "here",
                            "version": {"highlevel": ver}
                           }
              }
        hl2 = {"highlevel": {"model1": {"1": "2"}, "model2": {"3": "3"}},
               "metadata": {"meta": "for hl2",
                            "version": {"highlevel": ver}
                           }
              }
        db.data.write_high_level(self.test_mbid, ll_id1, hl1, build_sha)
        hl1_expected = copy.deepcopy(hl1)
        hl2_expected = copy.deepcopy(hl2)
        for mname in ["model1", "model2"]:
            hl1_expected["highlevel"][mname]["version"] = ver
            hl2_expected["highlevel"][mname]["version"] = ver
        # First highlevel item
        self.assertEqual(hl1_expected, db.data.load_high_level(self.test_mbid))
        self.assertEqual(hl1_expected, db.data.load_high_level(self.test_mbid, offset=0))
        # second has a ll, but no hl => exception
        with self.assertRaises(db.exceptions.NoDataFoundException):
            db.data.load_high_level(self.test_mbid, offset=1)
        # after adding the hl, no error
        db.data.write_high_level(self.test_mbid, ll_id2, hl2, build_sha)
        self.assertEqual(hl2_expected, db.data.load_high_level(self.test_mbid, offset=1))

    def test_load_high_level_offset_reverse(self):
        # If hl are added in a different order to ll, offset should return ll order
        second_data = copy.deepcopy(self.test_lowlevel_data)
        second_data["metadata"]["tags"]["album"] = ["Another album"]
        db.data.write_low_level(self.test_mbid, self.test_lowlevel_data)
        db.data.write_low_level(self.test_mbid, second_data)
        ll_id1, ll_id2 = self._get_ll_id_from_mbid(self.test_mbid)
        db.data.add_model("model1", "v1", "show")
        db.data.add_model("model2", "v1", "show")
        build_sha = "sha"
        ver = {"hlversion": "123", "models_essentia_git_sha": "v1"}
        hl1 = {"highlevel": {"model1": {"x": "y"}, "model2": {"a": "b"}},
               "metadata": {"meta": "here",
                            "version": {"highlevel": ver}
                           }
              }
        hl2 = {"highlevel": {"model1": {"1": "2"}, "model2": {"3": "3"}},
               "metadata": {"meta": "for hl2",
                            "version": {"highlevel": ver}
                           }
              }
        # Write the second lowlevel's highlevel first, on purpose.
        db.data.write_high_level(self.test_mbid, ll_id2, hl2, build_sha)
        db.data.write_high_level(self.test_mbid, ll_id1, hl1, build_sha)
        hl1_expected = copy.deepcopy(hl1)
        hl2_expected = copy.deepcopy(hl2)
        for mname in ["model1", "model2"]:
            hl1_expected["highlevel"][mname]["version"] = ver
            hl2_expected["highlevel"][mname]["version"] = ver
        self.assertEqual(hl1_expected, db.data.load_high_level(self.test_mbid))
        self.assertEqual(hl2_expected, db.data.load_high_level(self.test_mbid, offset=1))

    def test_load_high_level_none(self):
        """If no highlevel data is loaded, or offset is too high, an exception is raised"""
        # no data
        with self.assertRaises(db.exceptions.NoDataFoundException):
            db.data.load_high_level(self.test_mbid, offset=0)
        db.data.write_low_level(self.test_mbid, self.test_lowlevel_data)
        ll_id1 = self._get_ll_id_from_mbid(self.test_mbid)[0]
        db.data.add_model("model1", "1.0", "show")
        db.data.add_model("model2", "1.0", "show")
        build_sha = "sha"
        hl1 = {"highlevel": {"model1": {"x": "y"}, "model2": {"a": "b"}},
               "metadata": {"meta": "here",
                            "version": {"highlevel": {"hlversion": "123",
                                                      "models_essentia_git_sha": "v1"}}
                           }
              }
        db.data.write_high_level(self.test_mbid, ll_id1, hl1, build_sha)
        with self.assertRaises(db.exceptions.NoDataFoundException):
            db.data.load_high_level(self.test_mbid, offset=1)

    def test_count_lowlevel(self):
        db.data.submit_low_level_data(self.test_mbid, self.test_lowlevel_data)
        self.assertEqual(1, db.data.count_lowlevel(self.test_mbid))
        # Exact same data is deduplicated
        db.data.submit_low_level_data(self.test_mbid, self.test_lowlevel_data)
        self.assertEqual(1, db.data.count_lowlevel(self.test_mbid))
        # make a copy of the data and change it
        second_data = copy.deepcopy(self.test_lowlevel_data)
        second_data["metadata"]["tags"]["album"] = ["Another album"]
        db.data.submit_low_level_data(self.test_mbid, second_data)
        self.assertEqual(2, db.data.count_lowlevel(self.test_mbid))

    def test_add_get_model(self):
        # NOTE(review): add_model is called here with two args, elsewhere
        # with three — presumably the third has a default; confirm in db.data.
        self.assertIsNone(db.data._get_model_id("modelname", "v1"))
        modelid = db.data.add_model("modelname", "v1")
        get_id = db.data._get_model_id("modelname", "v1")
        self.assertEqual(modelid, get_id)

    def test_get_summary_data(self):
        # TODO: stub — no assertions yet.
        pass
class DataUtilTestCase(DatabaseTestCase):
    """ Tests for utility methods in db/data. Should be moved out of db at some time. """

    def test_has_key(self):
        """_has_key follows a path of nested keys and reports presence."""
        dictionary = {
            'test_1': {
                'inner_test': {
                    'secret_test_1': 'Hey there!',
                    'secret_test_2': 'Bye!',
                },
            },
            'test_2': 'Testing!',
        }
        self.assertTrue(db.data._has_key(dictionary, ['test_1', 'inner_test']))
        self.assertTrue(db.data._has_key(dictionary, ['test_1', 'inner_test', 'secret_test_2']))
        self.assertTrue(db.data._has_key(dictionary, ['test_2']))
        self.assertFalse(db.data._has_key(dictionary, ['test_3']))
        self.assertFalse(db.data._has_key(dictionary, ['test_1', 'inner_test', 'secret_test_3']))

    def test_sanity_check_data(self):
        """sanity_check_data returns None for valid data, else the missing key path."""
        d = {
            "metadata": {
                "audio_properties": {
                    "bit_rate": 253569,
                    "codec": "mp3",
                    "length": 280.685699463,
                    "lossless": False,
                },
                "tags": {
                    "file_name": "example.mp3",
                    "musicbrainz_recordingid": ["8c12af5a-f9a2-42fa-9dbe-032d7a1f4d5b"],
                },
                "version": {
                    "essentia": "2.1-beta2",
                    "essentia_build_sha": "26c37b627ab5a2028d412893e0969599b764ad4d",
                    "essentia_git_sha": "v2.1_beta2",
                    "extractor": "music 1.0"
                }
            },
            "lowlevel": None,
            "rhythm": None,
            "tonal": None,
        }
        self.assertIsNone(db.data.sanity_check_data(d))
        del d['metadata']['tags']['file_name']
        # assertEqual, not the deprecated assertEquals alias.
        self.assertEqual(db.data.sanity_check_data(d), ['metadata', 'tags', 'file_name'])

    def test_clean_metadata(self):
        """clean_metadata drops unknown tags and keeps whitelisted ones."""
        d = {
            "metadata": {
                "tags": {
                    "file_name": "example.mp3",
                    "musicbrainz_recordingid": ["8c12af5a-f9a2-42fa-9dbe-032d7a1f4d5b"],
                    "unknown_tag": "Hello! I am an unknown tag!",
                },
            },
        }
        db.data.clean_metadata(d)
        self.assertFalse('unknown_tag' in d['metadata']['tags'])
        self.assertTrue('file_name' in d['metadata']['tags'])
|
abhinavjain241/acousticbrainz-server
|
db/test/test_data.py
|
Python
|
gpl-2.0
| 14,021 | 0.003994 |
from . import RephoneTest
from re import match
class TestViews(RephoneTest):
    """Integration tests for the Flask views and the bias-sampling helper."""

    def test_index(self):
        # The index view redirects (303 See Other).
        with self.client:
            response = self.client.get('/')
            assert response.status_code == 303

    def test_outbound(self):
        # The outbound view renders TwiML dialing the configured number.
        with self.client:
            response = self.client.post('/outbound/1')
            assert response.status_code == 200
            assert match(r'.*<Dial><Number>\+33388175572</Number></Dial>.*', str(response.get_data()))

    def test_outbound_test_number(self):
        # TWILIO_TEST_NUMBER, when set, overrides the dialed number.
        with self.client:
            self.app.config['TWILIO_TEST_NUMBER'] = '+4940123456789'
            response = self.client.post('/outbound/1')
            assert response.status_code == 200
            assert match(r'.*<Dial><Number>\+4940123456789</Number></Dial>.*', str(response.get_data()))

    def _assert_bias_shift(self, audience_id, respondent_id):
        # Shared check (the two tests below were copy-paste duplicates):
        # adding a sample must increment index 1 and leave index 0 untouched.
        index_0_before = self.app.random[audience_id][0]
        index_1_before = self.app.random[audience_id][1]
        self.app.random.add_sample(audience_id=audience_id, respondent_id=respondent_id)
        index_0_after = self.app.random[audience_id][0]
        index_1_after = self.app.random[audience_id][1]
        assert index_0_before == index_0_after
        assert index_1_before == index_1_after - 1

    def test_bias_alteration_audience_1(self):
        self._assert_bias_shift(audience_id=1, respondent_id=1)

    def test_bias_alteration_audience_2(self):
        # Leftover debug print() calls from the original were removed.
        self._assert_bias_shift(audience_id=2, respondent_id=751)
|
rickmer/rephone
|
tests/test_views.py
|
Python
|
agpl-3.0
| 1,721 | 0.001162 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2012 ~ 2013 Deepin, Inc.
# 2012 ~ 2013 Hailong Qiu
#
# Author: Hailong Qiu <356752238@qq.com>
# Maintainer: Hailong Qiu <356752238@qq.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from dtk.ui.theme import ui_theme
from dtk.ui.scrolled_window import ScrolledWindow
from dtk.ui.utils import propagate_expose
from dtk.ui.draw import draw_vlinear
from skin import app_theme
from listview import ListView
from listview_base import Text
from treeview_base import TreeViewBase
from net_search import Search
from notebook import NoteBook
from color import alpha_color_hex_to_cairo
from utils import get_text_size
from draw import draw_text, draw_pixbuf
import gtk
class PlayListView(object):
    def __init__(self):
        """Build the playlist panel: a local-file list view plus a network
        tree view with a search box, both wrapped in a two-page notebook."""
        # Tree expander icons for the three nesting levels.
        self.one_close = app_theme.get_pixbuf("treeview/1-close.png")
        self.one_open = app_theme.get_pixbuf("treeview/1-open.png")
        self.two_close = app_theme.get_pixbuf("treeview/2-close.png")
        self.two_open = app_theme.get_pixbuf("treeview/2-open.png")
        self.three_close = app_theme.get_pixbuf("treeview/3-close.png")
        self.three_open = app_theme.get_pixbuf("treeview/3-open.png")
        #
        self.tree_view_open = app_theme.get_pixbuf("treeview/open.png")
        self.tree_view_close = app_theme.get_pixbuf("treeview/close.png")
        self.tree_view_right = app_theme.get_pixbuf("treeview/right.png")
        self.tree_view_bottom = app_theme.get_pixbuf("treeview/bottom.png")
        #
        # Highlight color shared by list and tree painting callbacks.
        self.listview_color = ui_theme.get_color("scrolledbar")
        self.play_list_vbox = gtk.VBox()
        #
        # Page 1: local playlist (list view + control bar).
        self.list_view_vbox = gtk.VBox()
        self.list_scroll_win = ScrolledWindow(0, 0)
        self.list_scroll_win.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.list_view = ListView()
        #
        self.play_list_con = PlayListControl()
        #
        self.list_view_vbox.pack_start(self.list_scroll_win, True, True)
        self.list_view_vbox.pack_start(self.play_list_con, False, False)
        # Network list and search box.
        self.tree_scroll_win = ScrolledWindow(0, 0)
        self.tree_scroll_win.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.tree_view_vbox = gtk.VBox()
        self.tree_view = TreeViewBase()
        self.search_ali = gtk.Alignment(0, 0, 1, 1)
        self.search = Search()
        self.search_ali.add(self.search)
        #
        # Padding: top, bottom, left, right.
        self.search_ali.set_padding(7, 5, 12, 12)
        self.tree_view_vbox.pack_start(self.search_ali, False, False)
        self.tree_view_vbox.pack_start(self.tree_scroll_win, True, True)
        self.search_ali.connect("expose-event", self.search_ali_expose_event)
        #
        self.note_book = NoteBook()
        #
        # Custom draw hook for list rows; two columns: filename and time.
        self.list_view.on_draw_sub_item = self.__listview_on_draw_sub_item
        self.list_view.columns.add_range(["filename", "time"])
        self.list_view.columns[0].width = 120
        self.list_view.columns[1].width = 95
        #
        self.note_book.hide_title()
        self.tree_view.paint_nodes_event = self.__treeview_paint_nodes_event
        #
        self.list_scroll_win.add_with_viewport(self.list_view)
        self.tree_scroll_win.add_with_viewport(self.tree_view)
        #self.note_book.add_layout1(self.list_scroll_win)
        self.note_book.add_layout1(self.list_view_vbox)
        self.note_book.add_layout2(self.tree_view_vbox)
        #self.play_list_vbox.pack_start(self.scroll_win, True, True)
        self.play_list_vbox.pack_start(self.note_book, True, True)
    def __listview_on_draw_sub_item(self, e):
        """Custom cell painter for the playlist ListView.

        First paints the row background according to the item's
        interaction state (double-clicked / selected / hovered / idle),
        then draws the cell text: column 0 (file name) is left-aligned
        and elided with "..." when too wide; the time column is
        right-aligned.

        e: the draw event object supplied by ListView; carries the cairo
           context (e.cr), cell geometry (e.x/e.y/e.w/e.h), the item and
           the selection/hover sets.
        """
        color = self.listview_color.get_color()
        # Pick background gradient strength + text colour from the row state.
        if e.double_items == e.item:
            # Double-clicked (currently playing) row: strongest highlight.
            e.text_color = "#000000"
            text_size=9
            color_info = [(0, (color, 0.8)), (1, (color, 0.8))]
            draw_vlinear(e.cr,
                        e.x, e.y, e.w, e.h,
                        color_info
                        )
        elif e.item in e.single_items:
            # Selected row.
            e.text_color = "#FFFFFF"
            text_size=9
            color_info = [(0, (color, 0.5)), (1, (color, 0.5))]
            draw_vlinear(e.cr,
                        e.x, e.y, e.w, e.h,
                        color_info
                        )
        elif e.motion_items == e.item:
            # Hovered row: faintest highlight.
            e.text_color = "#FFFFFF"
            text_size=9
            color_info = [(0, (color, 0.2)), (1, (color, 0.2))]
            draw_vlinear(e.cr,
                        e.x, e.y, e.w, e.h,
                        color_info
                        )
        else:
            # Idle row: no background, default text colour.
            e.text_color = "#FFFFFF"
            text_size=9
        #
        text = e.text.decode("utf-8")
        one_width = self.list_view.columns[0].width
        two_width = self.list_view.columns[1].width
        #if e.w == one_width:  # first column shows the playback name.
        if e.column_index == 0:
            # Elide the file name: accumulate glyph widths until the text
            # would overflow the column (20px reserved for the ellipsis).
            t_width = 0
            t_index = 0
            add_point = False
            for t in text:
                t_width += get_text_size(t, text_size=text_size)[0]
                if t_width > one_width - 20:
                    add_point = True
                    break
                t_index += 1
            if add_point:
                text = text[:t_index] + "..."
            #
            alignment = Text.LEFT
            x = e.x + 15
        elif e.w == two_width:
            alignment = Text.RIGHT
            x = e.x - 15
        # NOTE(review): if neither branch above matches (a cell that is not
        # column 0 and whose width differs from the time column), `alignment`
        # and `x` are unbound and draw_text below raises NameError -- appears
        # safe only because exactly two columns are configured; confirm.
        e.draw_text(e.cr,
                    str(text),
                    x, e.y, e.w, e.h,
                    text_color=e.text_color,
                    text_size=text_size,
                    alignment=alignment)
    def __treeview_paint_nodes_event(self, node_event):
        """Custom node painter for the network-source TreeViewBase.

        Paints selection/hover background, chooses an expander icon by
        node depth and expanded state, indents child nodes, and draws
        the node label.

        NOTE(review): the attribute is spelled ``leave`` but is used as
        the node's depth level (0 = root) -- presumably a typo for
        "level" in the tree widget; confirm against TreeViewBase.
        """
        color = self.listview_color.get_color()
        text_color = "#FFFFFF"
        # Selection and hover states (single click / mouse motion).
        if node_event.node in node_event.single_items:
            color_info = [(0, (color, 0.45)), (1, (color, 0.45))]
            draw_vlinear(node_event.cr,
                        node_event.x, node_event.y, node_event.w, node_event.h,
                        color_info
                        )
            #text_color = "#000000"
        elif node_event.node in node_event.motion_items:
            color_info = [(0, (color, 0.75)), (1, (color, 0.75))]
            draw_vlinear(node_event.cr,
                        node_event.x, node_event.y, node_event.w, node_event.h,
                        color_info
                        )
        #
        x_padding = 12 # keeps the icons aligned with the search box above.
        # Depth 0/1/2+ each use their own open/close expander pixbufs.
        if 0 == node_event.node.leave: # root nodes, e.g. "Watched", "Youku video", "PPS".
            if node_event.node.is_expanded:
                pixbuf = self.one_open.get_pixbuf()
            else:
                pixbuf = self.one_close.get_pixbuf()
        elif 1 == node_event.node.leave:
            if node_event.node.is_expanded:
                pixbuf = self.two_open.get_pixbuf()
            else:
                pixbuf = self.two_close.get_pixbuf()
        else:
            if node_event.node.is_expanded:
                pixbuf = self.three_open.get_pixbuf()
            else:
                pixbuf = self.three_close.get_pixbuf()
        #
        # Vertically centre the icon; indent one icon-width per extra depth.
        icon_x = node_event.x + x_padding
        icon_y = node_event.y + node_event.h/2 - pixbuf.get_height()/2 + 1
        if node_event.node.leave > 1:
            icon_x += (node_event.node.leave - 1) * pixbuf.get_width()
        if node_event.node.leave > 0:
            # Non-root entries are dimmed.
            text_color = "#a8a8a8"
        ##########
        # Draw the expander icon (only for nodes that have children).
        if node_event.node.nodes != []:
            draw_pixbuf(node_event.cr,
                        pixbuf,
                        icon_x,
                        icon_y)
        # Draw the node label, vertically centred next to the icon.
        text_x_padding = 15
        text_size = 9
        draw_text(node_event.cr,
                  node_event.node.text,
                  icon_x + text_x_padding,
                  node_event.y + node_event.h/2 - get_text_size(node_event.node.text, text_size=9)[1]/2,
                  text_color=text_color,
                  text_size=text_size
                  )
def search_ali_expose_event(self, widget, event):
cr = widget.window.cairo_create()
rect = widget.allocation
#
bg_color = "#272727"
cr.set_source_rgba(*alpha_color_hex_to_cairo((bg_color,1.0)))
cr.rectangle(rect.x, rect.y, rect.width + 1, rect.height)
cr.fill()
#
propagate_expose(widget, event)
return True
class PlayListControl(gtk.HBox):
    """Bottom button bar of the playlist: a stretchable spacer followed
    by the "add file" and "delete file" buttons.

    All three buttons are themed by custom expose handlers; the two
    icon buttons share one painter (previously duplicated verbatim).
    """
    def __init__(self):
        gtk.HBox.__init__(self)
        self.del_btn = gtk.Button("del")
        self.add_btn = gtk.Button("add")
        self.empty_btn = gtk.Button('')
        # Uniform bar height for all three buttons.
        height = 22
        for btn in (self.del_btn, self.add_btn, self.empty_btn):
            btn.set_size_request(-1, height)
        # Icon pixbufs for the themed buttons.
        self.del_pixbuf = app_theme.get_pixbuf("bottom_buttons/play_list_del_file.png").get_pixbuf()
        self.add_pixbuf = app_theme.get_pixbuf("bottom_buttons/play_list_add_file.png").get_pixbuf()
        # Custom painting replaces the default gtk.Button look.
        self.del_btn.connect("expose-event", self.del_btn_expose_event)
        self.add_btn.connect("expose-event", self.add_btn_expose_event)
        self.empty_btn.connect("expose-event", self.empty_btn_expose_event)
        # Spacer expands; the two icon buttons keep natural width on the right.
        self.pack_start(self.empty_btn, True, True)
        self.pack_start(self.add_btn, False, False)
        self.pack_start(self.del_btn, False, False)
    def _paint_icon_button(self, widget, pixbuf):
        """Shared painter: flat background plus a centred icon, nudged
        1px down-right while the button is pressed."""
        cr = widget.window.cairo_create()
        rect = widget.allocation
        self.paint_bg(cr, rect)
        x = rect.x + rect.width/2 - pixbuf.get_width()/2
        y = rect.y + rect.height/2 - pixbuf.get_height()/2
        if widget.state == gtk.STATE_ACTIVE:
            x += 1
            y += 1
        draw_pixbuf(cr, pixbuf, x, y)
        return True
    def del_btn_expose_event(self, widget, event):
        """Expose handler for the delete button (delegates to shared painter)."""
        return self._paint_icon_button(widget, self.del_pixbuf)
    def add_btn_expose_event(self, widget, event):
        """Expose handler for the add button (delegates to shared painter)."""
        return self._paint_icon_button(widget, self.add_pixbuf)
    def empty_btn_expose_event(self, widget, event):
        """Expose handler for the spacer: background only, no icon."""
        cr = widget.window.cairo_create()
        self.paint_bg(cr, widget.allocation)
        return True
    def paint_bg(self, cr, rect):
        """Fill the widget rectangle with the bar's flat dark colour."""
        cr.set_source_rgba(*alpha_color_hex_to_cairo(("#202020", 1.0)))
        cr.rectangle(*rect)
        cr.fill()
|
linuxdeepin/deepin-media-player
|
src/widget/playlistview.py
|
Python
|
gpl-3.0
| 11,382 | 0.00541 |
'''
Created on 26 Mar 2013
@author: hoekstra
'''
from flask.ext.login import login_required
import requests
from linkitup import app
from linkitup.util.baseplugin import plugin
from linkitup.util.provenance import provenance
LLD_AUTOCOMPLETE_URL = "http://linkedlifedata.com/autocomplete.json"
def _lld_types(hit):
    """Return a comma-joined type string for an autocomplete hit, or None.

    Mirrors the service's two response shapes: a 'types' list (empty
    list means untyped) or a single 'type' string.
    """
    if 'types' in hit:
        return ", ".join(hit['types']) if len(hit['types']) > 0 else None
    return hit.get('type')
def _lld_definition(hit):
    """Return the hit's definition text, or None when absent or blank."""
    definition = hit.get('definition')
    if definition is not None and definition.strip() != "":
        return definition
    return None
@app.route('/linkedlifedata', methods=['POST'])
@login_required
@plugin(fields=[('tags','id','name'),('categories','id','name')], link='mapping')
@provenance()
def link_to_lld(*args, **kwargs):
    """Link article tags/categories to LinkedLifeData concepts.

    For every input item (tag or category) the LinkedLifeData
    autocomplete service is queried with the item's label (limited to 2
    hits per query); each hit is turned into a 'mapping' match dict
    keyed by the concept URI.

    Returns a dict {concept_uri: match_dict} consumed by the plugin
    framework.
    """
    # Retrieve the article from the wrapper
    article_id = kwargs['article']['id']
    app.logger.debug("Running LinkedLifeData.com plugin for article {}".format(article_id))
    match_items = kwargs['inputs']
    search_parameters = {'limit': '2'}
    matches = {}
    for item in match_items:
        search_parameters['q'] = item['label']
        original_id = item['id']
        response = requests.get(LLD_AUTOCOMPLETE_URL, params=search_parameters)
        hits = response.json()['results']
        for h in hits:
            app.logger.debug(h)
            match_uri = h['uri']['namespace'] + h['uri']['localName']
            # Create the match dictionary; 'web' deliberately equals the
            # concept URI (LLD concept pages are dereferenceable).
            match = {'type': "mapping",
                     'uri': match_uri,
                     'web': match_uri,
                     'show': h['label'],
                     'short': h['uri']['localName'],
                     'description': _lld_definition(h),
                     'extra': _lld_types(h),
                     'subscript': "Score: {}".format(h['score']),
                     'original': original_id}
            # Append it to all matches
            matches[match_uri] = match
    # Return the matches
    return matches
|
Data2Semantics/linkitup
|
linkitup/linkedlifedata/plugin.py
|
Python
|
mit
| 2,673 | 0.014964 |
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import btp_annotation231
except ImportError:
btp_annotation231 = sys.modules["onshape_client.oas.models.btp_annotation231"]
try:
from onshape_client.oas.models import btp_argument_declaration232
except ImportError:
btp_argument_declaration232 = sys.modules[
"onshape_client.oas.models.btp_argument_declaration232"
]
try:
from onshape_client.oas.models import btp_conversion_function1362_all_of
except ImportError:
btp_conversion_function1362_all_of = sys.modules[
"onshape_client.oas.models.btp_conversion_function1362_all_of"
]
try:
from onshape_client.oas.models import btp_identifier8
except ImportError:
btp_identifier8 = sys.modules["onshape_client.oas.models.btp_identifier8"]
try:
from onshape_client.oas.models import btp_literal_number258
except ImportError:
btp_literal_number258 = sys.modules[
"onshape_client.oas.models.btp_literal_number258"
]
try:
from onshape_client.oas.models import btp_procedure_declaration_base266
except ImportError:
btp_procedure_declaration_base266 = sys.modules[
"onshape_client.oas.models.btp_procedure_declaration_base266"
]
try:
from onshape_client.oas.models import btp_space10
except ImportError:
btp_space10 = sys.modules["onshape_client.oas.models.btp_space10"]
try:
from onshape_client.oas.models import btp_statement269
except ImportError:
btp_statement269 = sys.modules["onshape_client.oas.models.btp_statement269"]
try:
from onshape_client.oas.models import btp_statement_block271
except ImportError:
btp_statement_block271 = sys.modules[
"onshape_client.oas.models.btp_statement_block271"
]
try:
from onshape_client.oas.models import btp_type_name290
except ImportError:
btp_type_name290 = sys.modules["onshape_client.oas.models.btp_type_name290"]
class BTPConversionFunction1362(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
("documentation_type",): {
"FUNCTION": "FUNCTION",
"PREDICATE": "PREDICATE",
"CONSTANT": "CONSTANT",
"ENUM": "ENUM",
"USER_TYPE": "USER_TYPE",
"FEATURE_DEFINITION": "FEATURE_DEFINITION",
"FILE_HEADER": "FILE_HEADER",
"UNDOCUMENTABLE": "UNDOCUMENTABLE",
"UNKNOWN": "UNKNOWN",
},
}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"bt_type": (str,), # noqa: E501
"_from": (btp_literal_number258.BTPLiteralNumber258,), # noqa: E501
"space_after_type": (btp_space10.BTPSpace10,), # noqa: E501
"to": (btp_literal_number258.BTPLiteralNumber258,), # noqa: E501
"type_name": (btp_identifier8.BTPIdentifier8,), # noqa: E501
"atomic": (bool,), # noqa: E501
"documentation_type": (str,), # noqa: E501
"end_source_location": (int,), # noqa: E501
"node_id": (str,), # noqa: E501
"short_descriptor": (str,), # noqa: E501
"space_after": (btp_space10.BTPSpace10,), # noqa: E501
"space_before": (btp_space10.BTPSpace10,), # noqa: E501
"space_default": (bool,), # noqa: E501
"start_source_location": (int,), # noqa: E501
"annotation": (btp_annotation231.BTPAnnotation231,), # noqa: E501
"arguments_to_document": (
[btp_argument_declaration232.BTPArgumentDeclaration232],
), # noqa: E501
"deprecated": (bool,), # noqa: E501
"deprecated_explanation": (str,), # noqa: E501
"for_export": (bool,), # noqa: E501
"space_after_export": (btp_space10.BTPSpace10,), # noqa: E501
"symbol_name": (btp_identifier8.BTPIdentifier8,), # noqa: E501
"arguments": (
[btp_argument_declaration232.BTPArgumentDeclaration232],
), # noqa: E501
"body": (btp_statement_block271.BTPStatementBlock271,), # noqa: E501
"precondition": (btp_statement269.BTPStatement269,), # noqa: E501
"return_type": (btp_type_name290.BTPTypeName290,), # noqa: E501
"space_after_arglist": (btp_space10.BTPSpace10,), # noqa: E501
"space_in_empty_list": (btp_space10.BTPSpace10,), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
"bt_type": "btType", # noqa: E501
"_from": "from", # noqa: E501
"space_after_type": "spaceAfterType", # noqa: E501
"to": "to", # noqa: E501
"type_name": "typeName", # noqa: E501
"atomic": "atomic", # noqa: E501
"documentation_type": "documentationType", # noqa: E501
"end_source_location": "endSourceLocation", # noqa: E501
"node_id": "nodeId", # noqa: E501
"short_descriptor": "shortDescriptor", # noqa: E501
"space_after": "spaceAfter", # noqa: E501
"space_before": "spaceBefore", # noqa: E501
"space_default": "spaceDefault", # noqa: E501
"start_source_location": "startSourceLocation", # noqa: E501
"annotation": "annotation", # noqa: E501
"arguments_to_document": "argumentsToDocument", # noqa: E501
"deprecated": "deprecated", # noqa: E501
"deprecated_explanation": "deprecatedExplanation", # noqa: E501
"for_export": "forExport", # noqa: E501
"space_after_export": "spaceAfterExport", # noqa: E501
"symbol_name": "symbolName", # noqa: E501
"arguments": "arguments", # noqa: E501
"body": "body", # noqa: E501
"precondition": "precondition", # noqa: E501
"return_type": "returnType", # noqa: E501
"space_after_arglist": "spaceAfterArglist", # noqa: E501
"space_in_empty_list": "spaceInEmptyList", # noqa: E501
}
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
"_composed_instances",
"_var_name_to_model_instances",
"_additional_properties_model_instances",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""btp_conversion_function1362.BTPConversionFunction1362 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
bt_type (str): [optional] # noqa: E501
_from (btp_literal_number258.BTPLiteralNumber258): [optional] # noqa: E501
space_after_type (btp_space10.BTPSpace10): [optional] # noqa: E501
to (btp_literal_number258.BTPLiteralNumber258): [optional] # noqa: E501
type_name (btp_identifier8.BTPIdentifier8): [optional] # noqa: E501
atomic (bool): [optional] # noqa: E501
documentation_type (str): [optional] # noqa: E501
end_source_location (int): [optional] # noqa: E501
node_id (str): [optional] # noqa: E501
short_descriptor (str): [optional] # noqa: E501
space_after (btp_space10.BTPSpace10): [optional] # noqa: E501
space_before (btp_space10.BTPSpace10): [optional] # noqa: E501
space_default (bool): [optional] # noqa: E501
start_source_location (int): [optional] # noqa: E501
annotation (btp_annotation231.BTPAnnotation231): [optional] # noqa: E501
arguments_to_document ([btp_argument_declaration232.BTPArgumentDeclaration232]): [optional] # noqa: E501
deprecated (bool): [optional] # noqa: E501
deprecated_explanation (str): [optional] # noqa: E501
for_export (bool): [optional] # noqa: E501
space_after_export (btp_space10.BTPSpace10): [optional] # noqa: E501
symbol_name (btp_identifier8.BTPIdentifier8): [optional] # noqa: E501
arguments ([btp_argument_declaration232.BTPArgumentDeclaration232]): [optional] # noqa: E501
body (btp_statement_block271.BTPStatementBlock271): [optional] # noqa: E501
precondition (btp_statement269.BTPStatement269): [optional] # noqa: E501
return_type (btp_type_name290.BTPTypeName290): [optional] # noqa: E501
space_after_arglist (btp_space10.BTPSpace10): [optional] # noqa: E501
space_in_empty_list (btp_space10.BTPSpace10): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
constant_args = {
"_check_type": _check_type,
"_path_to_item": _path_to_item,
"_from_server": _from_server,
"_configuration": _configuration,
}
required_args = {}
# remove args whose value is Null because they are unset
required_arg_names = list(required_args.keys())
for required_arg_name in required_arg_names:
if required_args[required_arg_name] is nulltype.Null:
del required_args[required_arg_name]
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in six.iteritems(kwargs):
if (
var_name in unused_args
and self._configuration is not None
and self._configuration.discard_unknown_keys
and not self._additional_properties_model_instances
):
# discard variable.
continue
setattr(self, var_name, var_value)
@staticmethod
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error beause the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return {
"anyOf": [],
"allOf": [
btp_conversion_function1362_all_of.BTPConversionFunction1362AllOf,
btp_procedure_declaration_base266.BTPProcedureDeclarationBase266,
],
"oneOf": [],
}
|
onshape-public/onshape-clients
|
python/onshape_client/oas/models/btp_conversion_function1362.py
|
Python
|
mit
| 14,003 | 0.000428 |
#! /usr/bin/env python
'''
oscutils.py -- Open Sound Control builtins for MFP
Copyright (c) 2013 Bill Gribble <grib@billgribble.com>
'''
from ..processor import Processor
from ..mfp_app import MFPApp
from ..bang import Uninit
class OSCPacket(object):
    """Wrapper marking a value as having arrived over OSC.

    OSCIn.trigger() unwraps the payload and forwards it on outlet 0;
    other inlet types (e.g. config dicts) are handled differently.
    """
    def __init__(self, payload):
        self.payload = payload
    def __repr__(self):
        # Aid debugging in processor chains; payload may be any value.
        return "OSCPacket({!r})".format(self.payload)
class OSCIn (Processor):
    """Processor that receives Open Sound Control messages.

    Creation args: [path, [typespec]].  An OSCPacket arriving on the
    single inlet is unwrapped and its payload emitted on outlet 0; a
    dict with "path"/"types" keys reconfigures the OSC registration at
    runtime.
    """
    doc_tooltip_obj = "Open Sound Control message receive"
    doc_tooltip_inlet = ["Config input"]
    doc_tooltip_outlet = ["OSC data output"]
    def __init__(self, init_type, init_args, patch, scope, name):
        # OSC address path, type spec, and the registration handle
        # returned by the OSC manager (None until registered).
        self.path = None
        self.types = None
        self.handler = None
        # 1 inlet, 1 outlet.
        Processor.__init__(self, 1, 1, init_type, init_args, patch, scope, name)
        initargs, kwargs = self.parse_args(init_args)
        if len(initargs) > 0:
            self.path = initargs[0]
        if len(initargs) > 1:
            self.types = initargs[1]
    def trigger(self):
        need_update = False
        if isinstance(self.inlets[0], OSCPacket):
            # Received OSC message: forward its payload downstream.
            self.outlets[0] = self.inlets[0].payload
            self.inlets[0] = Uninit
        elif isinstance(self.inlets[0], dict):
            # Config message: update path/types and (re-)register with
            # the OSC manager if anything changed.
            # NOTE(review): unlike the OSCPacket branch, the dict is not
            # reset to Uninit here -- confirm whether that is intended.
            path = self.inlets[0].get("path")
            if path:
                self.path = path
                need_update = True
            types = self.inlets[0].get("types")
            if types:
                self.types = types
                need_update = True
            if need_update:
                if self.handler is not None:
                    MFPApp().osc_mgr.del_method(self.handler, self.types)
                    self.handler = None
                # NOTE(review): self._handler is not defined in this class;
                # presumably supplied by Processor or patched in elsewhere --
                # verify before relying on runtime reconfiguration.
                self.handler = MFPApp().osc_mgr.add_method(self.path, self.types, self._handler)
class OSCOut (Processor):
    """Processor that sends Open Sound Control messages over UDP.

    Creation args: [host:port, [path]].  Inlet 0 carries the message
    data; inlets 1 and 2 override destination and OSC path per message.
    """
    doc_tooltip_obj = "Open Sound Control message send"
    doc_tooltip_inlet = ["Message data",
                         "Destination host:port (UDP) (default: initarg 0)",
                         "OSC path (default: initarg 1)" ]
    def __init__(self, init_type, init_args, patch, scope, name):
        # Destination endpoint and OSC address path; may be overridden
        # per message via inlets 1 and 2.
        self.host = None
        self.port = None
        self.path = None
        # 3 inlets, 0 outlets (pure sink).
        Processor.__init__(self, 3, 0, init_type, init_args, patch, scope, name)
        initargs, kwargs = self.parse_args(init_args)
        if len(initargs) > 0:
            # First initarg is "host" or "host:port".
            parts = initargs[0].split(":")
            self.host = parts[0]
            if len(parts) > 1:
                self.port = int(parts[1])
        if len(initargs) > 1:
            self.path = initargs[1]
    def trigger(self):
        # Inlet 2: new OSC path for this and subsequent messages.
        if self.inlets[2] is not Uninit:
            self.path = self.inlets[2]
            self.inlets[2] = Uninit
        # Inlet 1: new destination -- either "host[:port]" or a bare port.
        if self.inlets[1] is not Uninit:
            if isinstance(self.inlets[1], str):
                parts = self.inlets[1].split(":")
                self.host = parts[0]
                if len(parts) > 1:
                    self.port = int(parts[1])
            elif isinstance(self.inlets[1], (float, int)):
                self.port = int(self.inlets[1])
            self.inlets[1] = Uninit
        # Send the inlet-0 payload to (host, port) at self.path.
        MFPApp().osc_mgr.send((self.host, self.port), self.path, self.inlets[0])
        self.inlets[0] = Uninit
def register():
    """Register the OSC builtin processors with the running MFP app."""
    MFPApp().register("osc_in", OSCIn)
    MFPApp().register("osc_out", OSCOut)
|
bgribble/mfp
|
mfp/builtins/oscutils.py
|
Python
|
gpl-2.0
| 3,258 | 0.012277 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage mysql replication
(c) 2013, Balazs Pocze <banyek@gawker.com>
Certain parts are taken from Mark Theunissen's mysqldb module
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: mysql_replication
short_description: Manage MySQL replication
description:
- Manages MySQL server replication, slave, master status get and change master host.
version_added: "1.3"
author: "Balazs Pocze (@banyek)"
options:
mode:
description:
- module operating mode. Could be getslave (SHOW SLAVE STATUS), getmaster (SHOW MASTER STATUS), changemaster (CHANGE MASTER TO), startslave
(START SLAVE), stopslave (STOP SLAVE), resetslave (RESET SLAVE), resetslaveall (RESET SLAVE ALL)
required: False
choices:
- getslave
- getmaster
- changemaster
- stopslave
- startslave
- resetslave
- resetslaveall
default: getslave
master_host:
description:
- same as mysql variable
master_user:
description:
- same as mysql variable
master_password:
description:
- same as mysql variable
master_port:
description:
- same as mysql variable
master_connect_retry:
description:
- same as mysql variable
master_log_file:
description:
- same as mysql variable
master_log_pos:
description:
- same as mysql variable
relay_log_file:
description:
- same as mysql variable
relay_log_pos:
description:
- same as mysql variable
master_ssl:
description:
- same as mysql variable
choices: [ 0, 1 ]
master_ssl_ca:
description:
- same as mysql variable
master_ssl_capath:
description:
- same as mysql variable
master_ssl_cert:
description:
- same as mysql variable
master_ssl_key:
description:
- same as mysql variable
master_ssl_cipher:
description:
- same as mysql variable
master_auto_position:
description:
- does the host uses GTID based replication or not
required: false
default: null
version_added: "2.0"
extends_documentation_fragment: mysql
'''
EXAMPLES = '''
# Stop mysql slave thread
- mysql_replication:
mode: stopslave
# Get master binlog file name and binlog position
- mysql_replication:
mode: getmaster
# Change master to master server 192.0.2.1 and use binary log 'mysql-bin.000009' with position 4578
- mysql_replication:
mode: changemaster
master_host: 192.0.2.1
master_log_file: mysql-bin.000009
master_log_pos: 4578
# Check slave status using port 3308
- mysql_replication:
mode: getslave
login_host: ansible.example.com
login_port: 3308
'''
import os
import warnings
try:
import MySQLdb
except ImportError:
mysqldb_found = False
else:
mysqldb_found = True
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.mysql import mysql_connect
from ansible.module_utils.pycompat24 import get_exception
def get_master_status(cursor):
    """Return the row produced by SHOW MASTER STATUS.

    The module connects with a DictCursor, so a configured master yields
    a dict of column name -> value; main() treats a non-dict result as
    "not configured as master".
    """
    cursor.execute("SHOW MASTER STATUS")
    return cursor.fetchone()
def get_slave_status(cursor):
    """Return the row produced by SHOW SLAVE STATUS.

    With the module's DictCursor a configured slave yields a dict;
    main() treats a non-dict result as "not configured as slave".
    """
    cursor.execute("SHOW SLAVE STATUS")
    return cursor.fetchone()
def stop_slave(cursor):
    """Issue STOP SLAVE; return True on success, False on a server error.

    The original bare ``except:`` would also swallow KeyboardInterrupt
    and SystemExit; only Exception subclasses are treated as failure.
    """
    try:
        cursor.execute("STOP SLAVE")
        return True
    except Exception:
        return False
def reset_slave(cursor):
    """Issue RESET SLAVE; return True on success, False on a server error.

    Narrowed from a bare ``except:`` (which would also swallow
    KeyboardInterrupt/SystemExit) to Exception subclasses only.
    """
    try:
        cursor.execute("RESET SLAVE")
        return True
    except Exception:
        return False
def reset_slave_all(cursor):
    """Issue RESET SLAVE ALL; return True on success, False on a server error.

    Narrowed from a bare ``except:`` (which would also swallow
    KeyboardInterrupt/SystemExit) to Exception subclasses only.
    """
    try:
        cursor.execute("RESET SLAVE ALL")
        return True
    except Exception:
        return False
def start_slave(cursor):
    """Issue START SLAVE; return True on success, False on a server error.

    Narrowed from a bare ``except:`` (which would also swallow
    KeyboardInterrupt/SystemExit) to Exception subclasses only.
    """
    try:
        cursor.execute("START SLAVE")
        return True
    except Exception:
        return False
def changemaster(cursor, chm, chm_params):
    """Run CHANGE MASTER TO with the supplied clause fragments.

    chm is a list of "KEY=%(key)s" fragments and chm_params the matching
    parameter mapping; substitution is left to the DB driver so the
    values are escaped safely.
    """
    query = "CHANGE MASTER TO " + ",".join(chm)
    cursor.execute(query, chm_params)
def main():
    """Entry point of the mysql_replication Ansible module.

    Parses the module arguments, connects to MySQL, and dispatches on
    ``mode`` to the SHOW/CHANGE/START/STOP/RESET helpers defined above,
    reporting the outcome through module.exit_json / fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            login_user=dict(default=None),
            login_password=dict(default=None, no_log=True),
            login_host=dict(default="localhost"),
            login_port=dict(default=3306, type='int'),
            login_unix_socket=dict(default=None),
            mode=dict(default="getslave", choices=["getmaster", "getslave", "changemaster", "stopslave", "startslave", "resetslave", "resetslaveall"]),
            master_auto_position=dict(default=False, type='bool'),
            master_host=dict(default=None),
            master_user=dict(default=None),
            master_password=dict(default=None, no_log=True),
            master_port=dict(default=None, type='int'),
            master_connect_retry=dict(default=None, type='int'),
            master_log_file=dict(default=None),
            master_log_pos=dict(default=None, type='int'),
            relay_log_file=dict(default=None),
            relay_log_pos=dict(default=None, type='int'),
            master_ssl=dict(default=False, type='bool'),
            master_ssl_ca=dict(default=None),
            master_ssl_capath=dict(default=None),
            master_ssl_cert=dict(default=None),
            master_ssl_key=dict(default=None),
            master_ssl_cipher=dict(default=None),
            connect_timeout=dict(default=30, type='int'),
            config_file=dict(default="~/.my.cnf", type='path'),
            ssl_cert=dict(default=None),
            ssl_key=dict(default=None),
            ssl_ca=dict(default=None),
        )
    )
    # Unpack all parameters into locals for readability below.
    mode = module.params["mode"]
    master_host = module.params["master_host"]
    master_user = module.params["master_user"]
    master_password = module.params["master_password"]
    master_port = module.params["master_port"]
    master_connect_retry = module.params["master_connect_retry"]
    master_log_file = module.params["master_log_file"]
    master_log_pos = module.params["master_log_pos"]
    relay_log_file = module.params["relay_log_file"]
    relay_log_pos = module.params["relay_log_pos"]
    master_ssl = module.params["master_ssl"]
    master_ssl_ca = module.params["master_ssl_ca"]
    master_ssl_capath = module.params["master_ssl_capath"]
    master_ssl_cert = module.params["master_ssl_cert"]
    master_ssl_key = module.params["master_ssl_key"]
    master_ssl_cipher = module.params["master_ssl_cipher"]
    master_auto_position = module.params["master_auto_position"]
    ssl_cert = module.params["ssl_cert"]
    ssl_key = module.params["ssl_key"]
    ssl_ca = module.params["ssl_ca"]
    connect_timeout = module.params['connect_timeout']
    config_file = module.params['config_file']
    if not mysqldb_found:
        module.fail_json(msg="the python mysqldb module is required")
    else:
        # Promote MySQL warnings to exceptions so changemaster failures
        # surface in the except clauses below.
        warnings.filterwarnings('error', category=MySQLdb.Warning)
    login_password = module.params["login_password"]
    login_user = module.params["login_user"]
    try:
        cursor = mysql_connect(module, login_user, login_password, config_file, ssl_cert, ssl_key, ssl_ca, None, 'MySQLdb.cursors.DictCursor',
                               connect_timeout=connect_timeout)
    except Exception:
        e = get_exception()
        if os.path.exists(config_file):
            module.fail_json(msg="unable to connect to database, check login_user and login_password are correct or %s has the credentials. "
                                 "Exception message: %s" % (config_file, e))
        else:
            module.fail_json(msg="unable to find %s. Exception message: %s" % (config_file, e))
    # NOTE(review): ``mode in "getmaster"`` is substring containment, not
    # equality -- it only behaves like equality because the argument_spec
    # restricts mode to exact choice strings; confirm before reusing.
    if mode in "getmaster":
        status = get_master_status(cursor)
        if not isinstance(status, dict):
            status = dict(Is_Master=False, msg="Server is not configured as mysql master")
        else:
            status['Is_Master'] = True
        module.exit_json(**status)
    elif mode in "getslave":
        status = get_slave_status(cursor)
        if not isinstance(status, dict):
            status = dict(Is_Slave=False, msg="Server is not configured as mysql slave")
        else:
            status['Is_Slave'] = True
        module.exit_json(**status)
    elif mode in "changemaster":
        # Build the CHANGE MASTER TO clause list; values go separately in
        # chm_params so the driver escapes them (no SQL injection).
        chm = []
        chm_params = {}
        result = {}
        if master_host:
            chm.append("MASTER_HOST=%(master_host)s")
            chm_params['master_host'] = master_host
        if master_user:
            chm.append("MASTER_USER=%(master_user)s")
            chm_params['master_user'] = master_user
        if master_password:
            chm.append("MASTER_PASSWORD=%(master_password)s")
            chm_params['master_password'] = master_password
        if master_port is not None:
            chm.append("MASTER_PORT=%(master_port)s")
            chm_params['master_port'] = master_port
        if master_connect_retry is not None:
            chm.append("MASTER_CONNECT_RETRY=%(master_connect_retry)s")
            chm_params['master_connect_retry'] = master_connect_retry
        if master_log_file:
            chm.append("MASTER_LOG_FILE=%(master_log_file)s")
            chm_params['master_log_file'] = master_log_file
        if master_log_pos is not None:
            chm.append("MASTER_LOG_POS=%(master_log_pos)s")
            chm_params['master_log_pos'] = master_log_pos
        if relay_log_file:
            chm.append("RELAY_LOG_FILE=%(relay_log_file)s")
            chm_params['relay_log_file'] = relay_log_file
        if relay_log_pos is not None:
            chm.append("RELAY_LOG_POS=%(relay_log_pos)s")
            chm_params['relay_log_pos'] = relay_log_pos
        if master_ssl:
            chm.append("MASTER_SSL=1")
        if master_ssl_ca:
            chm.append("MASTER_SSL_CA=%(master_ssl_ca)s")
            chm_params['master_ssl_ca'] = master_ssl_ca
        if master_ssl_capath:
            chm.append("MASTER_SSL_CAPATH=%(master_ssl_capath)s")
            chm_params['master_ssl_capath'] = master_ssl_capath
        if master_ssl_cert:
            chm.append("MASTER_SSL_CERT=%(master_ssl_cert)s")
            chm_params['master_ssl_cert'] = master_ssl_cert
        if master_ssl_key:
            chm.append("MASTER_SSL_KEY=%(master_ssl_key)s")
            chm_params['master_ssl_key'] = master_ssl_key
        if master_ssl_cipher:
            chm.append("MASTER_SSL_CIPHER=%(master_ssl_cipher)s")
            chm_params['master_ssl_cipher'] = master_ssl_cipher
        if master_auto_position:
            chm.append("MASTER_AUTO_POSITION = 1")
        try:
            changemaster(cursor, chm, chm_params)
        except MySQLdb.Warning:
            # Warnings were promoted to errors above; report but succeed.
            e = get_exception()
            result['warning'] = str(e)
        except Exception:
            e = get_exception()
            module.fail_json(msg='%s. Query == CHANGE MASTER TO %s' % (e, chm))
        result['changed'] = True
        module.exit_json(**result)
    elif mode in "startslave":
        started = start_slave(cursor)
        if started is True:
            module.exit_json(msg="Slave started ", changed=True)
        else:
            module.exit_json(msg="Slave already started (Or cannot be started)", changed=False)
    elif mode in "stopslave":
        stopped = stop_slave(cursor)
        if stopped is True:
            module.exit_json(msg="Slave stopped", changed=True)
        else:
            module.exit_json(msg="Slave already stopped", changed=False)
    elif mode in "resetslave":
        reset = reset_slave(cursor)
        if reset is True:
            module.exit_json(msg="Slave reset", changed=True)
        else:
            module.exit_json(msg="Slave already reset", changed=False)
    elif mode in "resetslaveall":
        reset = reset_slave_all(cursor)
        if reset is True:
            module.exit_json(msg="Slave reset", changed=True)
        else:
            module.exit_json(msg="Slave already reset", changed=False)
if __name__ == '__main__':
    main()
# NOTE(review): this line sits after the __main__ guard; when run as a
# module script, main() exits via exit_json/fail_json before reaching it,
# so it looks like dead leftover code -- confirm before removing.
warnings.simplefilter("ignore")
|
andreaso/ansible
|
lib/ansible/modules/database/mysql/mysql_replication.py
|
Python
|
gpl-3.0
| 13,039 | 0.001534 |
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
"""
import numpy as np
from numpy.testing import assert_allclose
from cotede.fuzzy import fuzzyfy
CFG = {
"output": {
"low": {"type": "trimf", "params": [0.0, 0.225, 0.45]},
"medium": {"type": "trimf", "params": [0.275, 0.5, 0.725]},
"high": {"type": "smf", "params": [0.55, 0.775]},
},
"features": {
"f1": {
"weight": 1,
"low": {"type": "zmf", "params": [0.07, 0.2]},
"medium": {"type": "trapmf", "params": [0.07, 0.2, 2, 6]},
"high": {"type": "smf", "params": [2, 6]},
},
"f2": {
"weight": 1,
"low": {"type": "zmf", "params": [3, 4]},
"medium": {"type": "trapmf", "params": [3, 4, 5, 6]},
"high": {"type": "smf", "params": [5, 6]},
},
"f3": {
"weight": 1,
"low": {"type": "zmf", "params": [0.5, 1.5]},
"medium": {"type": "trapmf", "params": [0.5, 1.5, 3, 4]},
"high": {"type": "smf", "params": [3, 4]},
},
},
}
def test_fuzzyfy():
    """Fuzzyfy one complete measurement and check every output rule."""
    values = {"f1": 1.0, "f2": 5.2, "f3": 0.9}
    features = {name: np.array([v]) for name, v in values.items()}
    rules = fuzzyfy(features, **CFG)
    expected = {
        "low": [0.226666666],
        "medium": [0.733333333],
        "high": [0.08000000],
    }
    for level, membership in expected.items():
        assert_allclose(rules[level], membership)
def test_fuzzyfy_with_nan():
    """NaN handling: the default propagates NaN; require="any" is lenient."""
    nan = np.nan
    features = {
        "f1": np.array([1.0, nan, 1.0, 1.0, nan]),
        "f2": np.array([5.2, 5.2, nan, 5.2, nan]),
        "f3": np.array([0.9, 0.9, 0.9, nan, nan]),
    }
    # Default behaviour: any missing feature makes that measurement NaN.
    rules = fuzzyfy(features, **CFG)
    assert_allclose(rules["low"], [0.22666667, nan, nan, nan, nan])
    assert_allclose(rules["medium"], [0.733333333, nan, nan, nan, nan])
    assert_allclose(rules["high"], [0.08000000, nan, nan, nan, nan])
    # require="any": evaluate with whichever features are present; only the
    # all-NaN measurement stays NaN.
    rules = fuzzyfy(features, **CFG, require="any")
    assert_allclose(rules["low"], [0.22666667, 0.34, 0.34, 0, nan])
    assert_allclose(rules["medium"], [0.733333333, 0.6, 0.7, 0.9, nan])
    assert_allclose(rules["high"], [0.08, 0.08, 0, 0.08, nan])
def test_fuzzyfy_all_nan():
    """A measurement missing every feature fuzzyfies to all-NaN rules."""
    features = {name: np.array([np.nan]) for name in ("f1", "f2", "f3")}
    rules = fuzzyfy(features, **CFG)
    for level in ("low", "medium", "high"):
        assert_allclose(rules[level], [np.nan])
"""
# FIXME: If there is only one feature, it will return 1 value
# instead of an array with N values.
"""
|
castelao/CoTeDe
|
tests/fuzzy/test_fuzzyfy.py
|
Python
|
bsd-3-clause
| 2,666 | 0.001125 |
# -*- coding: utf-8 -*-
from django.db import models
from ..users.models import User
class Feedback(models.Model):
    """A free-form feedback comment submitted by a logged-in user."""

    # on_delete=CASCADE reproduces the implicit pre-Django-2.0 default and
    # is mandatory from Django 2.0 on: deleting a user removes their feedback.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # Free-form comment body; generous cap so long reports are not truncated.
    comments = models.CharField(max_length=50000)
    # auto_now: refreshed on every save; drives the newest-first ordering.
    date = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["-date"]

    def __str__(self):
        return "{} - {}".format(self.user, self.date)
|
jacobajit/ion
|
intranet/apps/feedback/models.py
|
Python
|
gpl-2.0
| 374 | 0 |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import os
import mock
import posix
import unittest
from tempfile import mkdtemp
from shutil import rmtree
from test.unit import FakeLogger
import itertools
import random
from io import BytesIO
import json
from six import StringIO
from six.moves.urllib.parse import quote
import xml.dom.minidom
from swift import __version__ as swift_version
from swift.common.swob import (Request, WsgiBytesIO, HTTPNoContent)
from swift.common.constraints import ACCOUNT_LISTING_LIMIT
from swift.account.backend import AccountBroker
from swift.account.server import AccountController
from swift.common.utils import (normalize_timestamp, replication, public,
mkdirs, storage_directory, Timestamp)
from swift.common.request_helpers import get_sys_meta_prefix, get_reserved_name
from test.unit import patch_policies, debug_logger, mock_check_drive, \
make_timestamp_iter
from swift.common.storage_policy import StoragePolicy, POLICIES
@patch_policies
class TestAccountController(unittest.TestCase):
"""Test swift.account.server.AccountController"""
    def setUp(self):
        """Set up for testing swift.account.server.AccountController"""
        # Each test gets its own scratch device tree with a single device
        # 'sda1', mount checking disabled, and a fresh controller/logger.
        self.testdir_base = mkdtemp()
        self.testdir = os.path.join(self.testdir_base, 'account_server')
        mkdirs(os.path.join(self.testdir, 'sda1'))
        self.logger = debug_logger()
        self.controller = AccountController(
            {'devices': self.testdir, 'mount_check': 'false'},
            logger=self.logger)
        self.ts = make_timestamp_iter()
    def tearDown(self):
        """Tear down for testing swift.account.server.AccountController"""
        try:
            rmtree(self.testdir_base)
        except OSError as err:
            # Tolerate an already-removed tree; re-raise anything else.
            if err.errno != errno.ENOENT:
                raise
    def test_init(self):
        # A plain construction logs no warnings; setting the deprecated
        # auto_create_account_prefix option must log one deprecation warning.
        conf = {
            'devices': self.testdir,
            'mount_check': 'false',
        }
        AccountController(conf, logger=self.logger)
        self.assertEqual(self.logger.get_lines_for_level('warning'), [])
        conf['auto_create_account_prefix'] = '-'
        AccountController(conf, logger=self.logger)
        self.assertEqual(self.logger.get_lines_for_level('warning'), [
            'Option auto_create_account_prefix is deprecated. '
            'Configure auto_create_account_prefix under the '
            'swift-constraints section of swift.conf. This option '
            'will be ignored in a future release.'
        ])
    def test_OPTIONS(self):
        # OPTIONS advertises all seven supported verbs plus server version.
        server_handler = AccountController(
            {'devices': self.testdir, 'mount_check': 'false'})
        req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
        req.content_length = 0
        resp = server_handler.OPTIONS(req)
        self.assertEqual(200, resp.status_int)
        for verb in 'OPTIONS GET POST PUT DELETE HEAD REPLICATE'.split():
            self.assertIn(verb, resp.headers['Allow'].split(', '))
        self.assertEqual(len(resp.headers['Allow'].split(', ')), 7)
        self.assertEqual(resp.headers['Server'],
                         (server_handler.server_type + '/' + swift_version))
    def test_insufficient_storage_mount_check_true(self):
        # With mount_check on, every verb except OPTIONS must 507 while the
        # device is unmounted and stop 507ing once ismount() returns True.
        conf = {'devices': self.testdir, 'mount_check': 'true'}
        account_controller = AccountController(conf)
        self.assertTrue(account_controller.mount_check)
        for method in account_controller.allowed_methods:
            if method == 'OPTIONS':
                continue
            req = Request.blank('/sda1/p/a-or-suff', method=method,
                                headers={'x-timestamp': '1'})
            with mock_check_drive() as mocks:
                try:
                    resp = req.get_response(account_controller)
                    self.assertEqual(resp.status_int, 507)
                    mocks['ismount'].return_value = True
                    resp = req.get_response(account_controller)
                    self.assertNotEqual(resp.status_int, 507)
                    # feel free to rip out this last assertion...
                    expected = 2 if method == 'PUT' else 4
                    self.assertEqual(resp.status_int // 100, expected)
                except AssertionError as e:
                    self.fail('%s for %s' % (e, method))
    def test_insufficient_storage_mount_check_false(self):
        # Same as above, but with mount_check off the gate is isdir().
        conf = {'devices': self.testdir, 'mount_check': 'false'}
        account_controller = AccountController(conf)
        self.assertFalse(account_controller.mount_check)
        for method in account_controller.allowed_methods:
            if method == 'OPTIONS':
                continue
            req = Request.blank('/sda1/p/a-or-suff', method=method,
                                headers={'x-timestamp': '1'})
            with mock_check_drive() as mocks:
                try:
                    resp = req.get_response(account_controller)
                    self.assertEqual(resp.status_int, 507)
                    mocks['isdir'].return_value = True
                    resp = req.get_response(account_controller)
                    self.assertNotEqual(resp.status_int, 507)
                    # feel free to rip out this last assertion...
                    expected = 2 if method == 'PUT' else 4
                    self.assertEqual(resp.status_int // 100, expected)
                except AssertionError as e:
                    self.fail('%s for %s' % (e, method))
    def test_DELETE_not_found(self):
        # DELETE of a never-created account: 404 with no X-Account-Status.
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '0'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn('X-Account-Status', resp.headers)
    def test_DELETE_empty(self):
        # Deleting an existing, container-less account succeeds (204).
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
    def test_DELETE_not_empty(self):
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        # We now allow deleting non-empty accounts
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
    def test_DELETE_now_empty(self):
        # An account whose container was created then deleted is deletable.
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank(
            '/sda1/p/a/c1',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Put-Timestamp': '1',
                     'X-Delete-Timestamp': '2',
                     'X-Object-Count': '0',
                     'X-Bytes-Used': '0',
                     'X-Timestamp': normalize_timestamp(0)})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
    def test_DELETE_invalid_partition(self):
        # '.' is not a valid partition component -> 400.
        req = Request.blank('/sda1/./a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 400)
    def test_DELETE_timestamp_not_float(self):
        # A non-numeric X-Timestamp is rejected with 400.
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': 'not-float'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 400)
    def test_REPLICATE_insufficient_space(self):
        # With a 2% fallocate reserve and just-under-2% free space reported
        # by a mocked statvfs, REPLICATE must answer 507.
        conf = {'devices': self.testdir,
                'mount_check': 'false',
                'fallocate_reserve': '2%'}
        account_controller = AccountController(conf)
        req = Request.blank('/sda1/p/a',
                            environ={'REQUEST_METHOD': 'REPLICATE'})
        statvfs_result = posix.statvfs_result([
            4096,  # f_bsize
            4096,  # f_frsize
            2854907,  # f_blocks
            59000,  # f_bfree
            57000,  # f_bavail (just under 2% free)
            1280000,  # f_files
            1266040,  # f_ffree,
            1266040,  # f_favail,
            4096,  # f_flag
            255,  # f_namemax
        ])
        with mock.patch('os.statvfs',
                        return_value=statvfs_result) as mock_statvfs:
            resp = req.get_response(account_controller)
        self.assertEqual(resp.status_int, 507)
        self.assertEqual(mock_statvfs.mock_calls,
                         [mock.call(os.path.join(self.testdir, 'sda1'))])
    def test_REPLICATE_rsync_then_merge_works(self):
        # The rsync_then_merge replication op is routed to ReplicatorRpc.
        def fake_rsync_then_merge(self, drive, db_file, args):
            return HTTPNoContent()
        with mock.patch("swift.common.db_replicator.ReplicatorRpc."
                        "rsync_then_merge", fake_rsync_then_merge):
            req = Request.blank('/sda1/p/a/',
                                environ={'REQUEST_METHOD': 'REPLICATE'},
                                headers={})
            json_string = b'["rsync_then_merge", "a.db"]'
            inbuf = WsgiBytesIO(json_string)
            req.environ['wsgi.input'] = inbuf
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int, 204)
    def test_REPLICATE_complete_rsync_works(self):
        def fake_complete_rsync(self, drive, db_file, args):
            return HTTPNoContent()
        # check complete_rsync
        with mock.patch("swift.common.db_replicator.ReplicatorRpc."
                        "complete_rsync", fake_complete_rsync):
            req = Request.blank('/sda1/p/a/',
                                environ={'REQUEST_METHOD': 'REPLICATE'},
                                headers={})
            json_string = b'["complete_rsync", "a.db"]'
            inbuf = WsgiBytesIO(json_string)
            req.environ['wsgi.input'] = inbuf
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int, 204)
    def test_REPLICATE_value_error_works(self):
        # A malformed (non-JSON) replication body is rejected with 400.
        req = Request.blank('/sda1/p/a/',
                            environ={'REQUEST_METHOD': 'REPLICATE'},
                            headers={})
        # check valuerror
        wsgi_input_valuerror = b'["sync" : sync, "-1"]'
        inbuf1 = WsgiBytesIO(wsgi_input_valuerror)
        req.environ['wsgi.input'] = inbuf1
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 400)
def test_REPLICATE_unknown_sync(self):
# First without existing DB file
req = Request.blank('/sda1/p/a/',
environ={'REQUEST_METHOD': 'REPLICATE'},
headers={})
json_string = b'["unknown_sync", "a.db"]'
inbuf = WsgiBytesIO(json_string)
req.environ['wsgi.input'] = inbuf
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
mkdirs(os.path.join(self.testdir, 'sda1', 'accounts', 'p', 'a', 'a'))
db_file = os.path.join(self.testdir, 'sda1',
storage_directory('accounts', 'p', 'a'),
'a' + '.db')
open(db_file, 'w')
req = Request.blank('/sda1/p/a/',
environ={'REQUEST_METHOD': 'REPLICATE'},
headers={})
json_string = b'["unknown_sync", "a.db"]'
inbuf = WsgiBytesIO(json_string)
req.environ['wsgi.input'] = inbuf
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 500)
    def test_HEAD_not_found(self):
        # Test the case in which account does not exist (can be recreated)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn('X-Account-Status', resp.headers)
        # Test the case in which account was deleted but not yet reaped
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
    def test_HEAD_empty_account(self):
        # A fresh account reports zero containers/objects/bytes.
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['x-account-container-count'], '0')
        self.assertEqual(resp.headers['x-account-object-count'], '0')
        self.assertEqual(resp.headers['x-account-bytes-used'], '0')
    def test_HEAD_with_containers(self):
        # Container PUTs roll their object/byte counts up into the account
        # stats reported by HEAD.
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '2',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['x-account-container-count'], '2')
        self.assertEqual(resp.headers['x-account-object-count'], '0')
        self.assertEqual(resp.headers['x-account-bytes-used'], '0')
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '1',
                                     'X-Bytes-Used': '2',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '2',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '3',
                                     'X-Bytes-Used': '4',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD',
                                                  'HTTP_X_TIMESTAMP': '5'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['x-account-container-count'], '2')
        self.assertEqual(resp.headers['x-account-object-count'], '4')
        self.assertEqual(resp.headers['x-account-bytes-used'], '6')
    def test_HEAD_invalid_partition(self):
        # '.' is not a valid partition component -> 400.
        req = Request.blank('/sda1/./a', environ={'REQUEST_METHOD': 'HEAD',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 400)
    def test_HEAD_invalid_content_type(self):
        # Unsatisfiable Accept type -> 406 Not Acceptable.
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'Accept': 'application/plain'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 406)
    def test_HEAD_invalid_accept(self):
        # A syntactically broken Accept header (duplicate q) -> 400.
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'},
                            headers={'Accept': 'application/plain;q=1;q=0.5'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 400)
        self.assertEqual(resp.body, b'')
    def test_HEAD_invalid_format(self):
        format = '%D1%BD%8A9'  # invalid UTF-8; should be %E1%BD%8A9 (E -> D)
        req = Request.blank('/sda1/p/a?format=' + format,
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 400)
    def test_PUT_not_found(self):
        # Container update for a nonexistent account -> 404, no status header.
        req = Request.blank(
            '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-PUT-Timestamp': normalize_timestamp(1),
                     'X-DELETE-Timestamp': normalize_timestamp(0),
                     'X-Object-Count': '1',
                     'X-Bytes-Used': '1',
                     'X-Timestamp': normalize_timestamp(0)})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn('X-Account-Status', resp.headers)
    def test_PUT_insufficient_space(self):
        # PUT must 507 when the mocked statvfs reports less free space than
        # the configured fallocate reserve.
        conf = {'devices': self.testdir,
                'mount_check': 'false',
                'fallocate_reserve': '2%'}
        account_controller = AccountController(conf)
        req = Request.blank(
            '/sda1/p/a',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '1517612949.541469'})
        statvfs_result = posix.statvfs_result([
            4096,  # f_bsize
            4096,  # f_frsize
            2854907,  # f_blocks
            59000,  # f_bfree
            57000,  # f_bavail (just under 2% free)
            1280000,  # f_files
            1266040,  # f_ffree,
            1266040,  # f_favail,
            4096,  # f_flag
            255,  # f_namemax
        ])
        with mock.patch('os.statvfs',
                        return_value=statvfs_result) as mock_statvfs:
            resp = req.get_response(account_controller)
        self.assertEqual(resp.status_int, 507)
        self.assertEqual(mock_statvfs.mock_calls,
                         [mock.call(os.path.join(self.testdir, 'sda1'))])
    def test_PUT(self):
        # First PUT creates (201); a later PUT of the same account is 202.
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
    def test_PUT_simulated_create_race(self):
        # Simulate two PUTs racing to create the same account DB: the broker
        # first sees no DB file, then finds one at initialize() time.
        state = ['initial']
        from swift.account.backend import AccountBroker as OrigAcBr
        class InterceptedAcBr(OrigAcBr):
            def __init__(self, *args, **kwargs):
                super(InterceptedAcBr, self).__init__(*args, **kwargs)
                if state[0] == 'initial':
                    # Do nothing initially
                    pass
                elif state[0] == 'race':
                    # Save the original db_file attribute value
                    self._saved_db_file = self.db_file
                    self._db_file += '.doesnotexist'
            def initialize(self, *args, **kwargs):
                if state[0] == 'initial':
                    # Do nothing initially
                    pass
                elif state[0] == 'race':
                    # Restore the original db_file attribute to get the race
                    # behavior
                    self._db_file = self._saved_db_file
                return super(InterceptedAcBr, self).initialize(*args, **kwargs)
        with mock.patch("swift.account.server.AccountBroker", InterceptedAcBr):
            req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                      'HTTP_X_TIMESTAMP': '0'})
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int, 201)
            state[0] = "race"
            req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                      'HTTP_X_TIMESTAMP': '1'})
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int, 202)
    def test_PUT_after_DELETE(self):
        # Re-creating a recently deleted account is refused with 403.
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': normalize_timestamp(1)})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': normalize_timestamp(1)})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': normalize_timestamp(2)})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 403)
        self.assertEqual(resp.body, b'Recently deleted')
        self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
    def test_create_reserved_namespace_account(self):
        # Accounts named with the full reserved-name prefix are accepted.
        path = '/sda1/p/%s' % get_reserved_name('a')
        req = Request.blank(path, method='PUT', headers={
            'X-Timestamp': next(self.ts).internal})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status, '201 Created')
        path = '/sda1/p/%s' % get_reserved_name('foo', 'bar')
        req = Request.blank(path, method='PUT', headers={
            'X-Timestamp': next(self.ts).internal})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status, '201 Created')
    def test_create_invalid_reserved_namespace_account(self):
        # Stripping the leading reserved byte makes the name invalid -> 400.
        account_name = get_reserved_name('foo', 'bar')[1:]
        path = '/sda1/p/%s' % account_name
        req = Request.blank(path, method='PUT', headers={
            'X-Timestamp': next(self.ts).internal})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status, '400 Bad Request')
    def test_create_reserved_container_in_account(self):
        # create account
        path = '/sda1/p/a'
        req = Request.blank(path, method='PUT', headers={
            'X-Timestamp': next(self.ts).internal})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        # put null container in it
        path += '/%s' % get_reserved_name('c', 'stuff')
        req = Request.blank(path, method='PUT', headers={
            'X-Timestamp': next(self.ts).internal,
            'X-Put-Timestamp': next(self.ts).internal,
            'X-Delete-Timestamp': 0,
            'X-Object-Count': 0,
            'X-Bytes-Used': 0,
        })
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status, '201 Created')
    def test_create_invalid_reserved_container_in_account(self):
        # create account
        path = '/sda1/p/a'
        req = Request.blank(path, method='PUT', headers={
            'X-Timestamp': next(self.ts).internal})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        # put invalid container in it
        path += '/%s' % get_reserved_name('c', 'stuff')[1:]
        req = Request.blank(path, method='PUT', headers={
            'X-Timestamp': next(self.ts).internal,
            'X-Put-Timestamp': next(self.ts).internal,
            'X-Delete-Timestamp': 0,
            'X-Object-Count': 0,
            'X-Bytes-Used': 0,
        })
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status, '400 Bad Request')
    def test_PUT_non_utf8_metadata(self):
        # Invalid UTF-8 in (sys)meta headers is rejected with 400; the same
        # bytes in a header that is never persisted are simply ignored.
        # Set metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     'X-Account-Meta-Test': b'\xff'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 400)
        # Set sysmeta header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     'X-Account-Sysmeta-Access-Control': b'\xff'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 400)
        # Send other
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     'X-Will-Not-Be-Saved': b'\xff'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
    def test_utf8_metadata(self):
        # WSGI-encoded UTF-8 header names/values must land on disk as proper
        # unicode. get_test_meta sends one header, reads the raw broker
        # metadata back, then clears it so each call starts clean.
        ts_str = normalize_timestamp(1)
        def get_test_meta(method, headers):
            # Set metadata header
            headers.setdefault('X-Timestamp', ts_str)
            req = Request.blank(
                '/sda1/p/a', environ={'REQUEST_METHOD': method},
                headers=headers)
            resp = req.get_response(self.controller)
            self.assertIn(resp.status_int, (201, 202, 204))
            db_path = os.path.join(*next(
                (dir_name, file_name)
                for dir_name, _, files in os.walk(self.testdir)
                for file_name in files if file_name.endswith('.db')
            ))
            broker = AccountBroker(db_path)
            # Why not use broker.metadata, you ask? Because we want to get
            # as close to the on-disk format as is reasonable.
            result = json.loads(broker.get_raw_metadata())
            # Clear it out for the next run
            with broker.get() as conn:
                conn.execute("UPDATE account_stat SET metadata=''")
                conn.commit()
            return result
        wsgi_str = '\xf0\x9f\x91\x8d'
        uni_str = u'\U0001f44d'
        self.assertEqual(
            get_test_meta('PUT', {'x-account-sysmeta-' + wsgi_str: wsgi_str}),
            {u'X-Account-Sysmeta-' + uni_str: [uni_str, ts_str]})
        self.assertEqual(
            get_test_meta('PUT', {'x-account-meta-' + wsgi_str: wsgi_str}),
            {u'X-Account-Meta-' + uni_str: [uni_str, ts_str]})
        self.assertEqual(
            get_test_meta('POST', {'x-account-sysmeta-' + wsgi_str: wsgi_str}),
            {u'X-Account-Sysmeta-' + uni_str: [uni_str, ts_str]})
        self.assertEqual(
            get_test_meta('POST', {'x-account-meta-' + wsgi_str: wsgi_str}),
            {u'X-Account-Meta-' + uni_str: [uni_str, ts_str]})
    def test_PUT_GET_metadata(self):
        # User metadata lifecycle via PUT: set, merge, newer-wins update,
        # stale update ignored, empty value removes the key.
        # Set metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     'X-Account-Meta-Test': 'Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'Value')
        # Set another metadata header, ensuring old one doesn't disappear
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     'X-Account-Meta-Test2': 'Value2'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'Value')
        self.assertEqual(resp.headers.get('x-account-meta-test2'), 'Value2')
        # Update metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(3),
                     'X-Account-Meta-Test': 'New Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'New Value')
        # Send old update to metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(2),
                     'X-Account-Meta-Test': 'Old Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'New Value')
        # Remove metadata header (by setting it to empty)
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(4),
                     'X-Account-Meta-Test': ''})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertNotIn('x-account-meta-test', resp.headers)
    def test_PUT_GET_sys_metadata(self):
        # Same lifecycle as above but for system metadata headers.
        prefix = get_sys_meta_prefix('account')
        hdr = '%stest' % prefix
        hdr2 = '%stest2' % prefix
        # Set metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     hdr.title(): 'Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'Value')
        # Set another metadata header, ensuring old one doesn't disappear
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     hdr2.title(): 'Value2'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'Value')
        self.assertEqual(resp.headers.get(hdr2), 'Value2')
        # Update metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(3),
                     hdr.title(): 'New Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'New Value')
        # Send old update to metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(2),
                     hdr.title(): 'Old Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'New Value')
        # Remove metadata header (by setting it to empty)
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(4),
                     hdr.title(): ''})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertNotIn(hdr, resp.headers)
    def test_PUT_invalid_partition(self):
        # '.' is not a valid partition component -> 400.
        req = Request.blank('/sda1/./a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 400)
    def test_POST_HEAD_metadata(self):
        # User metadata lifecycle via POST, observed through HEAD: set,
        # newer-wins update, stale update ignored, empty value removes.
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1)})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        # Set metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     'X-Account-Meta-Test': 'Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204, resp.body)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'Value')
        # Update metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(3),
                     'X-Account-Meta-Test': 'New Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'New Value')
        # Send old update to metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(2),
                     'X-Account-Meta-Test': 'Old Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'New Value')
        # Remove metadata header (by setting it to empty)
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(4),
                     'X-Account-Meta-Test': ''})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertNotIn('x-account-meta-test', resp.headers)
    def test_POST_HEAD_sys_metadata(self):
        """System metadata on an account follows the same lifecycle as user
        metadata (set, update, ignore stale updates, remove when empty),
        but uses the reserved sysmeta header prefix instead of
        ``X-Account-Meta-*``.
        """
        # Build the sysmeta header name from the reserved account prefix.
        prefix = get_sys_meta_prefix('account')
        hdr = '%stest' % prefix
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1)})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        # Set metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     hdr.title(): 'Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'Value')
        # Update metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(3),
                     hdr.title(): 'New Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'New Value')
        # Send old update to metadata header: timestamp 2 < 3, so the
        # stored 'New Value' must win.
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(2),
                     hdr.title(): 'Old Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'New Value')
        # Remove metadata header (by setting it to empty)
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(4),
                     hdr.title(): ''})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertNotIn(hdr, resp.headers)
def test_POST_invalid_partition(self):
req = Request.blank('/sda1/./a', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
    def test_POST_insufficient_space(self):
        """POST returns 507 Insufficient Storage when the device's free
        space is below the configured ``fallocate_reserve`` threshold.

        ``os.statvfs`` is mocked to report just under 2% free space
        against a 2% reserve.
        """
        conf = {'devices': self.testdir,
                'mount_check': 'false',
                'fallocate_reserve': '2%'}
        account_controller = AccountController(conf)
        req = Request.blank(
            '/sda1/p/a',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': '1517611584.937603'})
        statvfs_result = posix.statvfs_result([
            4096,     # f_bsize
            4096,     # f_frsize
            2854907,  # f_blocks
            59000,    # f_bfree
            57000,    # f_bavail (just under 2% free)
            1280000,  # f_files
            1266040,  # f_ffree,
            1266040,  # f_favail,
            4096,     # f_flag
            255,      # f_namemax
        ])
        with mock.patch('os.statvfs',
                        return_value=statvfs_result) as mock_statvfs:
            resp = req.get_response(account_controller)
        self.assertEqual(resp.status_int, 507)
        # The space check must have consulted statvfs on the target device.
        self.assertEqual(mock_statvfs.mock_calls,
                         [mock.call(os.path.join(self.testdir, 'sda1'))])
def test_POST_timestamp_not_float(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '0'},
headers={'X-Timestamp': 'not-float'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
def test_POST_after_DELETE_not_found(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '2'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
def test_GET_not_found_plain(self):
# Test the case in which account does not exist (can be recreated)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
self.assertNotIn('X-Account-Status', resp.headers)
# Test the case in which account was deleted but not yet reaped
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
def test_GET_not_found_json(self):
req = Request.blank('/sda1/p/a?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
def test_GET_not_found_xml(self):
req = Request.blank('/sda1/p/a?format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
def test_GET_empty_account_plain(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 204)
self.assertEqual(resp.headers['Content-Type'],
'text/plain; charset=utf-8')
def test_GET_empty_account_json(self):
req = Request.blank('/sda1/p/a?format=json',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['Content-Type'],
'application/json; charset=utf-8')
def test_GET_empty_account_xml(self):
req = Request.blank('/sda1/p/a?format=xml',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['Content-Type'],
'application/xml; charset=utf-8')
def test_GET_invalid_accept(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'},
headers={'Accept': 'application/plain;q=foo'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
self.assertEqual(resp.body, b'Invalid Accept header')
def test_GET_over_limit(self):
req = Request.blank(
'/sda1/p/a?limit=%d' % (ACCOUNT_LISTING_LIMIT + 1),
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 412)
    def test_GET_with_containers_plain(self):
        """A plain-text GET lists container names one per line; updating a
        container's stats (object count / bytes used) does not change the
        plain listing, and an unrecognized ``format=`` value falls back
        to plain text.
        """
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '2',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'c1', b'c2'])
        # Re-PUT both containers with non-zero stats; the plain listing
        # still contains only the names.
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '1',
                                     'X-Bytes-Used': '2',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '2',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '3',
                                     'X-Bytes-Used': '4',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'c1', b'c2'])
        self.assertEqual(resp.content_type, 'text/plain')
        self.assertEqual(resp.charset, 'utf-8')
        # test unknown format uses default plain
        req = Request.blank('/sda1/p/a?format=somethinglese',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'c1', b'c2'])
        self.assertEqual(resp.content_type, 'text/plain')
        self.assertEqual(resp.charset, 'utf-8')
    def test_GET_with_containers_json(self):
        """A JSON GET reports each container's name, count, bytes, and
        last_modified; the last_modified must track the most recent
        X-Put-Timestamp for each container.
        """
        # Maps container name -> the put timestamp the listing should echo
        # back as last_modified.
        put_timestamps = {}
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        put_timestamps['c1'] = normalize_timestamp(1)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': put_timestamps['c1'],
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        put_timestamps['c2'] = normalize_timestamp(2)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': put_timestamps['c2'],
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a?format=json',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(
            json.loads(resp.body),
            [{'count': 0, 'bytes': 0, 'name': 'c1',
              'last_modified': Timestamp(put_timestamps['c1']).isoformat},
             {'count': 0, 'bytes': 0, 'name': 'c2',
              'last_modified': Timestamp(put_timestamps['c2']).isoformat}])
        # Re-PUT with newer timestamps and non-zero stats; the listing
        # must reflect both the new stats and new last_modified values.
        put_timestamps['c1'] = normalize_timestamp(3)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': put_timestamps['c1'],
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '1',
                                     'X-Bytes-Used': '2',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        put_timestamps['c2'] = normalize_timestamp(4)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': put_timestamps['c2'],
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '3',
                                     'X-Bytes-Used': '4',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a?format=json',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(
            json.loads(resp.body),
            [{'count': 1, 'bytes': 2, 'name': 'c1',
              'last_modified': Timestamp(put_timestamps['c1']).isoformat},
             {'count': 3, 'bytes': 4, 'name': 'c2',
              'last_modified': Timestamp(put_timestamps['c2']).isoformat}])
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.charset, 'utf-8')
    def test_GET_with_containers_xml(self):
        """An XML GET emits an <account> root with one <container> element
        per container, each carrying name / count / bytes / last_modified
        child nodes; stats updates are reflected in a later listing.
        """
        # Maps container name -> expected last_modified timestamp.
        put_timestamps = {}
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        put_timestamps['c1'] = normalize_timestamp(1)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': put_timestamps['c1'],
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        put_timestamps['c2'] = normalize_timestamp(2)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': put_timestamps['c2'],
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a?format=xml',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.content_type, 'application/xml')
        self.assertEqual(resp.status_int, 200)
        dom = xml.dom.minidom.parseString(resp.body)
        self.assertEqual(dom.firstChild.nodeName, 'account')
        # Whitespace between elements parses as '#text' nodes; skip them.
        listing = \
            [n for n in dom.firstChild.childNodes if n.nodeName != '#text']
        self.assertEqual(len(listing), 2)
        # Check the first container element (c1) field by field.
        self.assertEqual(listing[0].nodeName, 'container')
        container = [n for n in listing[0].childNodes if n.nodeName != '#text']
        self.assertEqual(sorted([n.nodeName for n in container]),
                         ['bytes', 'count', 'last_modified', 'name'])
        node = [n for n in container if n.nodeName == 'name'][0]
        self.assertEqual(node.firstChild.nodeValue, 'c1')
        node = [n for n in container if n.nodeName == 'count'][0]
        self.assertEqual(node.firstChild.nodeValue, '0')
        node = [n for n in container if n.nodeName == 'bytes'][0]
        self.assertEqual(node.firstChild.nodeValue, '0')
        node = [n for n in container if n.nodeName == 'last_modified'][0]
        self.assertEqual(node.firstChild.nodeValue,
                         Timestamp(put_timestamps['c1']).isoformat)
        # And the last container element (c2).
        self.assertEqual(listing[-1].nodeName, 'container')
        container = \
            [n for n in listing[-1].childNodes if n.nodeName != '#text']
        self.assertEqual(sorted([n.nodeName for n in container]),
                         ['bytes', 'count', 'last_modified', 'name'])
        node = [n for n in container if n.nodeName == 'name'][0]
        self.assertEqual(node.firstChild.nodeValue, 'c2')
        node = [n for n in container if n.nodeName == 'count'][0]
        self.assertEqual(node.firstChild.nodeValue, '0')
        node = [n for n in container if n.nodeName == 'bytes'][0]
        self.assertEqual(node.firstChild.nodeValue, '0')
        node = [n for n in container if n.nodeName == 'last_modified'][0]
        self.assertEqual(node.firstChild.nodeValue,
                         Timestamp(put_timestamps['c2']).isoformat)
        # Re-PUT both containers with non-zero stats and re-check the
        # listing end to end.
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '1',
                                     'X-Bytes-Used': '2',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '2',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '3',
                                     'X-Bytes-Used': '4',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a?format=xml',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        dom = xml.dom.minidom.parseString(resp.body)
        self.assertEqual(dom.firstChild.nodeName, 'account')
        listing = \
            [n for n in dom.firstChild.childNodes if n.nodeName != '#text']
        self.assertEqual(len(listing), 2)
        self.assertEqual(listing[0].nodeName, 'container')
        container = [n for n in listing[0].childNodes if n.nodeName != '#text']
        self.assertEqual(sorted([n.nodeName for n in container]),
                         ['bytes', 'count', 'last_modified', 'name'])
        node = [n for n in container if n.nodeName == 'name'][0]
        self.assertEqual(node.firstChild.nodeValue, 'c1')
        node = [n for n in container if n.nodeName == 'count'][0]
        self.assertEqual(node.firstChild.nodeValue, '1')
        node = [n for n in container if n.nodeName == 'bytes'][0]
        self.assertEqual(node.firstChild.nodeValue, '2')
        node = [n for n in container if n.nodeName == 'last_modified'][0]
        self.assertEqual(node.firstChild.nodeValue,
                         Timestamp(put_timestamps['c1']).isoformat)
        self.assertEqual(listing[-1].nodeName, 'container')
        container = [
            n for n in listing[-1].childNodes if n.nodeName != '#text']
        self.assertEqual(sorted([n.nodeName for n in container]),
                         ['bytes', 'count', 'last_modified', 'name'])
        node = [n for n in container if n.nodeName == 'name'][0]
        self.assertEqual(node.firstChild.nodeValue, 'c2')
        node = [n for n in container if n.nodeName == 'count'][0]
        self.assertEqual(node.firstChild.nodeValue, '3')
        node = [n for n in container if n.nodeName == 'bytes'][0]
        self.assertEqual(node.firstChild.nodeValue, '4')
        node = [n for n in container if n.nodeName == 'last_modified'][0]
        self.assertEqual(node.firstChild.nodeValue,
                         Timestamp(put_timestamps['c2']).isoformat)
        self.assertEqual(resp.charset, 'utf-8')
def test_GET_xml_escapes_account_name(self):
req = Request.blank(
'/sda1/p/%22%27', # "'
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/%22%27?format=xml',
environ={'REQUEST_METHOD': 'GET', 'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
dom = xml.dom.minidom.parseString(resp.body)
self.assertEqual(dom.firstChild.attributes['name'].value, '"\'')
    def test_GET_xml_escapes_container_name(self):
        """Special characters (quote, angle bracket) in a container name
        are escaped so the XML listing parses and round-trips the name."""
        req = Request.blank(
            '/sda1/p/a',
            environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank(
            '/sda1/p/a/%22%3Cword',  # "<word
            environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
                     'HTTP_X_PUT_TIMESTAMP': '1', 'HTTP_X_OBJECT_COUNT': '0',
                     'HTTP_X_DELETE_TIMESTAMP': '0', 'HTTP_X_BYTES_USED': '1'})
        req.get_response(self.controller)
        req = Request.blank(
            '/sda1/p/a?format=xml',
            environ={'REQUEST_METHOD': 'GET', 'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        # parseString succeeding at all proves the markup is well-formed
        # despite the raw " and < in the name.
        dom = xml.dom.minidom.parseString(resp.body)
        # Walk the minidom tree down to the container's <name> text node;
        # this navigation chain depends on the exact node layout that
        # minidom produces for the listing.
        self.assertEqual(
            dom.firstChild.firstChild.nextSibling.firstChild.firstChild.data,
            '"<word')
    def test_GET_xml_escapes_container_name_as_subdir(self):
        """When a delimiter rolls a container with special characters up
        into a <subdir> element, its name attribute is escaped too."""
        req = Request.blank(
            '/sda1/p/a',
            environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank(
            '/sda1/p/a/%22%3Cword-test',  # "<word-test
            environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
                     'HTTP_X_PUT_TIMESTAMP': '1', 'HTTP_X_OBJECT_COUNT': '0',
                     'HTTP_X_DELETE_TIMESTAMP': '0', 'HTTP_X_BYTES_USED': '1'})
        req.get_response(self.controller)
        # delimiter=- rolls '"<word-test' up into the subdir '"<word-'.
        req = Request.blank(
            '/sda1/p/a?format=xml&delimiter=-',
            environ={'REQUEST_METHOD': 'GET', 'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        dom = xml.dom.minidom.parseString(resp.body)
        # Navigation chain depends on the exact minidom node layout of
        # the listing; the target is the <subdir> element's name attribute.
        self.assertEqual(
            dom.firstChild.firstChild.nextSibling.attributes['name'].value,
            '"<word-')
    def test_GET_limit_marker_plain(self):
        """Plain-text listings honor the ``limit`` and ``marker`` query
        parameters: limit truncates the page and marker resumes strictly
        after the named container."""
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        put_timestamp = normalize_timestamp(0)
        # Create five containers c0..c4.
        for c in range(5):
            req = Request.blank(
                '/sda1/p/a/c%d' % c,
                environ={'REQUEST_METHOD': 'PUT'},
                headers={'X-Put-Timestamp': put_timestamp,
                         'X-Delete-Timestamp': '0',
                         'X-Object-Count': '2',
                         'X-Bytes-Used': '3',
                         'X-Timestamp': put_timestamp})
            req.get_response(self.controller)
        req = Request.blank('/sda1/p/a?limit=3',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'c0', b'c1', b'c2'])
        # marker=c2 resumes after c2, yielding the remaining two.
        req = Request.blank('/sda1/p/a?limit=3&marker=c2',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'c3', b'c4'])
    def test_GET_limit_marker_json(self):
        """JSON listings honor ``limit`` and ``marker`` and carry each
        container's stats and last_modified in the page entries."""
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        # Create c0..c4 with put timestamps 1..5 so each entry has a
        # distinct last_modified.
        for c in range(5):
            put_timestamp = normalize_timestamp(c + 1)
            req = Request.blank(
                '/sda1/p/a/c%d' % c,
                environ={'REQUEST_METHOD': 'PUT'},
                headers={'X-Put-Timestamp': put_timestamp,
                         'X-Delete-Timestamp': '0',
                         'X-Object-Count': '2',
                         'X-Bytes-Used': '3',
                         'X-Timestamp': put_timestamp})
            req.get_response(self.controller)
        req = Request.blank('/sda1/p/a?limit=3&format=json',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        expected = [{'count': 2, 'bytes': 3, 'name': 'c0',
                     'last_modified': Timestamp('1').isoformat},
                    {'count': 2, 'bytes': 3, 'name': 'c1',
                     'last_modified': Timestamp('2').isoformat},
                    {'count': 2, 'bytes': 3, 'name': 'c2',
                     'last_modified': Timestamp('3').isoformat}]
        self.assertEqual(json.loads(resp.body), expected)
        # marker=c2 resumes after c2.
        req = Request.blank('/sda1/p/a?limit=3&marker=c2&format=json',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        expected = [{'count': 2, 'bytes': 3, 'name': 'c3',
                     'last_modified': Timestamp('4').isoformat},
                    {'count': 2, 'bytes': 3, 'name': 'c4',
                     'last_modified': Timestamp('5').isoformat}]
        self.assertEqual(json.loads(resp.body), expected)
    def test_GET_limit_marker_xml(self):
        """XML listings honor ``limit`` and ``marker``; the first page's
        first/last <container> elements are c0/c2 and the marker page's
        are c3/c4, each with correct stats and last_modified."""
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        # Create c0..c4 with put timestamps 1..5.
        for c in range(5):
            put_timestamp = normalize_timestamp(c + 1)
            req = Request.blank(
                '/sda1/p/a/c%d' % c,
                environ={'REQUEST_METHOD': 'PUT'},
                headers={'X-Put-Timestamp': put_timestamp,
                         'X-Delete-Timestamp': '0',
                         'X-Object-Count': '2',
                         'X-Bytes-Used': '3',
                         'X-Timestamp': put_timestamp})
            req.get_response(self.controller)
        req = Request.blank('/sda1/p/a?limit=3&format=xml',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        dom = xml.dom.minidom.parseString(resp.body)
        self.assertEqual(dom.firstChild.nodeName, 'account')
        # Skip '#text' whitespace nodes between elements.
        listing = \
            [n for n in dom.firstChild.childNodes if n.nodeName != '#text']
        self.assertEqual(len(listing), 3)
        self.assertEqual(listing[0].nodeName, 'container')
        container = [n for n in listing[0].childNodes if n.nodeName != '#text']
        self.assertEqual(sorted([n.nodeName for n in container]),
                         ['bytes', 'count', 'last_modified', 'name'])
        node = [n for n in container if n.nodeName == 'name'][0]
        self.assertEqual(node.firstChild.nodeValue, 'c0')
        node = [n for n in container if n.nodeName == 'count'][0]
        self.assertEqual(node.firstChild.nodeValue, '2')
        node = [n for n in container if n.nodeName == 'bytes'][0]
        self.assertEqual(node.firstChild.nodeValue, '3')
        node = [n for n in container if n.nodeName == 'last_modified'][0]
        self.assertEqual(node.firstChild.nodeValue,
                         Timestamp('1').isoformat)
        self.assertEqual(listing[-1].nodeName, 'container')
        container = [
            n for n in listing[-1].childNodes if n.nodeName != '#text']
        self.assertEqual(sorted([n.nodeName for n in container]),
                         ['bytes', 'count', 'last_modified', 'name'])
        node = [n for n in container if n.nodeName == 'name'][0]
        self.assertEqual(node.firstChild.nodeValue, 'c2')
        node = [n for n in container if n.nodeName == 'count'][0]
        self.assertEqual(node.firstChild.nodeValue, '2')
        node = [n for n in container if n.nodeName == 'bytes'][0]
        self.assertEqual(node.firstChild.nodeValue, '3')
        node = [n for n in container if n.nodeName == 'last_modified'][0]
        self.assertEqual(node.firstChild.nodeValue,
                         Timestamp('3').isoformat)
        # Second page: resume after c2.
        req = Request.blank('/sda1/p/a?limit=3&marker=c2&format=xml',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        dom = xml.dom.minidom.parseString(resp.body)
        self.assertEqual(dom.firstChild.nodeName, 'account')
        listing = \
            [n for n in dom.firstChild.childNodes if n.nodeName != '#text']
        self.assertEqual(len(listing), 2)
        self.assertEqual(listing[0].nodeName, 'container')
        container = [n for n in listing[0].childNodes if n.nodeName != '#text']
        self.assertEqual(sorted([n.nodeName for n in container]),
                         ['bytes', 'count', 'last_modified', 'name'])
        node = [n for n in container if n.nodeName == 'name'][0]
        self.assertEqual(node.firstChild.nodeValue, 'c3')
        node = [n for n in container if n.nodeName == 'count'][0]
        self.assertEqual(node.firstChild.nodeValue, '2')
        node = [n for n in container if n.nodeName == 'bytes'][0]
        self.assertEqual(node.firstChild.nodeValue, '3')
        node = [n for n in container if n.nodeName == 'last_modified'][0]
        self.assertEqual(node.firstChild.nodeValue,
                         Timestamp('4').isoformat)
        self.assertEqual(listing[-1].nodeName, 'container')
        container = [
            n for n in listing[-1].childNodes if n.nodeName != '#text']
        self.assertEqual(sorted([n.nodeName for n in container]),
                         ['bytes', 'count', 'last_modified', 'name'])
        node = [n for n in container if n.nodeName == 'name'][0]
        self.assertEqual(node.firstChild.nodeValue, 'c4')
        node = [n for n in container if n.nodeName == 'count'][0]
        self.assertEqual(node.firstChild.nodeValue, '2')
        node = [n for n in container if n.nodeName == 'bytes'][0]
        self.assertEqual(node.firstChild.nodeValue, '3')
        node = [n for n in container if n.nodeName == 'last_modified'][0]
        self.assertEqual(node.firstChild.nodeValue,
                         Timestamp('5').isoformat)
def test_GET_accept_wildcard(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
req.accept = '*/*'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, b'c1\n')
def test_GET_accept_application_wildcard(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/*'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(len(json.loads(resp.body)), 1)
def test_GET_accept_json(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/json'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(len(json.loads(resp.body)), 1)
def test_GET_accept_xml(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/xml'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
self.assertEqual(dom.firstChild.nodeName, 'account')
listing = \
[n for n in dom.firstChild.childNodes if n.nodeName != '#text']
self.assertEqual(len(listing), 1)
def test_GET_accept_conflicting(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?format=plain',
environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/json'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, b'c1\n')
def test_GET_accept_not_valid(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/xml*'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 406)
def test_GET_prefix_delimiter_plain(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for first in range(3):
req = Request.blank(
'/sda1/p/a/sub.%s' % first,
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
for second in range(3):
req = Request.blank(
'/sda1/p/a/sub.%s.%s' % (first, second),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?delimiter=.',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body.strip().split(b'\n'),
[b'sub.'])
req = Request.blank('/sda1/p/a?prefix=sub.&delimiter=.',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(
resp.body.strip().split(b'\n'),
[b'sub.0', b'sub.0.', b'sub.1', b'sub.1.', b'sub.2', b'sub.2.'])
req = Request.blank('/sda1/p/a?prefix=sub.1.&delimiter=.',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body.strip().split(b'\n'),
[b'sub.1.0', b'sub.1.1', b'sub.1.2'])
    def test_GET_prefix_delimiter_json(self):
        """JSON listings honor ``prefix`` and ``delimiter``: rolled-up
        entries come back as {'subdir': ...} dicts while real containers
        keep their 'name' key.

        Fixture: containers sub.0..sub.2 plus sub.F.S for F,S in 0..2.
        """
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        # NOTE(review): the account PUT response is not checked here.
        resp = req.get_response(self.controller)
        for first in range(3):
            req = Request.blank(
                '/sda1/p/a/sub.%s' % first,
                environ={'REQUEST_METHOD': 'PUT'},
                headers={'X-Put-Timestamp': '1',
                         'X-Delete-Timestamp': '0',
                         'X-Object-Count': '0',
                         'X-Bytes-Used': '0',
                         'X-Timestamp': normalize_timestamp(0)})
            req.get_response(self.controller)
            for second in range(3):
                req = Request.blank(
                    '/sda1/p/a/sub.%s.%s' % (first, second),
                    environ={'REQUEST_METHOD': 'PUT'},
                    headers={'X-Put-Timestamp': '1',
                             'X-Delete-Timestamp': '0',
                             'X-Object-Count': '0',
                             'X-Bytes-Used': '0',
                             'X-Timestamp': normalize_timestamp(0)})
                req.get_response(self.controller)
        # The comprehension below renders each entry as its 'name', or as
        # 's:<subdir>' for rolled-up entries, to assert both kinds at once.
        req = Request.blank('/sda1/p/a?delimiter=.&format=json',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual([n.get('name', 's:' + n.get('subdir', 'error'))
                          for n in json.loads(resp.body)], ['s:sub.'])
        req = Request.blank('/sda1/p/a?prefix=sub.&delimiter=.&format=json',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(
            [n.get('name', 's:' + n.get('subdir', 'error'))
             for n in json.loads(resp.body)],
            ['sub.0', 's:sub.0.', 'sub.1', 's:sub.1.', 'sub.2', 's:sub.2.'])
        req = Request.blank('/sda1/p/a?prefix=sub.1.&delimiter=.&format=json',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(
            [n.get('name', 's:' + n.get('subdir', 'error'))
             for n in json.loads(resp.body)],
            ['sub.1.0', 'sub.1.1', 'sub.1.2'])
def test_GET_prefix_delimiter_xml(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for first in range(3):
req = Request.blank(
'/sda1/p/a/sub.%s' % first,
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
for second in range(3):
req = Request.blank(
'/sda1/p/a/sub.%s.%s' % (first, second),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a?delimiter=.&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
listing = []
for node1 in dom.firstChild.childNodes:
if node1.nodeName == 'subdir':
listing.append('s:' + node1.attributes['name'].value)
elif node1.nodeName == 'container':
for node2 in node1.childNodes:
if node2.nodeName == 'name':
listing.append(node2.firstChild.nodeValue)
self.assertEqual(listing, ['s:sub.'])
req = Request.blank(
'/sda1/p/a?prefix=sub.&delimiter=.&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
listing = []
for node1 in dom.firstChild.childNodes:
if node1.nodeName == 'subdir':
listing.append('s:' + node1.attributes['name'].value)
elif node1.nodeName == 'container':
for node2 in node1.childNodes:
if node2.nodeName == 'name':
listing.append(node2.firstChild.nodeValue)
self.assertEqual(
listing,
['sub.0', 's:sub.0.', 'sub.1', 's:sub.1.', 'sub.2', 's:sub.2.'])
req = Request.blank(
'/sda1/p/a?prefix=sub.1.&delimiter=.&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
listing = []
for node1 in dom.firstChild.childNodes:
if node1.nodeName == 'subdir':
listing.append('s:' + node1.attributes['name'].value)
elif node1.nodeName == 'container':
for node2 in node1.childNodes:
if node2.nodeName == 'name':
listing.append(node2.firstChild.nodeValue)
self.assertEqual(listing, ['sub.1.0', 'sub.1.1', 'sub.1.2'])
    def test_GET_leading_delimiter(self):
        """Container names that begin with the delimiter collapse into a
        bare-delimiter subdir entry at the top level of the listing.
        """
        # Create the account.
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        resp = req.get_response(self.controller)
        # Fixture: containers .sub.0...sub.2 plus .sub.<i>.<j> for i, j in 0..2.
        for first in range(3):
            req = Request.blank(
                '/sda1/p/a/.sub.%s' % first,
                environ={'REQUEST_METHOD': 'PUT'},
                headers={'X-Put-Timestamp': '1',
                         'X-Delete-Timestamp': '0',
                         'X-Object-Count': '0',
                         'X-Bytes-Used': '0',
                         'X-Timestamp': normalize_timestamp(0)})
            req.get_response(self.controller)
            for second in range(3):
                req = Request.blank(
                    '/sda1/p/a/.sub.%s.%s' % (first, second),
                    environ={'REQUEST_METHOD': 'PUT'},
                    headers={'X-Put-Timestamp': '1',
                             'X-Delete-Timestamp': '0',
                             'X-Object-Count': '0',
                             'X-Bytes-Used': '0',
                             'X-Timestamp': normalize_timestamp(0)})
                req.get_response(self.controller)
        # No prefix: every name rolls up into the bare subdir '.'.
        req = Request.blank('/sda1/p/a?delimiter=.',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'.'])
        # prefix '.' exposes the next level: the subdir '.sub.'.
        req = Request.blank('/sda1/p/a?prefix=.&delimiter=.',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'.sub.'])
        # prefix '.sub.' interleaves containers with their nested subdirs.
        req = Request.blank('/sda1/p/a?prefix=.sub.&delimiter=.',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(
            resp.body.strip().split(b'\n'),
            [b'.sub.0', b'.sub.0.', b'.sub.1', b'.sub.1.',
             b'.sub.2', b'.sub.2.'])
        # prefix '.sub.1.' lists only the leaf containers.
        req = Request.blank('/sda1/p/a?prefix=.sub.1.&delimiter=.',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'.sub.1.0', b'.sub.1.1', b'.sub.1.2'])
    def test_GET_multichar_delimiter(self):
        """A multi-character delimiter (~~) groups container names into
        subdir entries, in both forward and reversed listings.
        """
        self.maxDiff = None
        # Create the account.
        req = Request.blank('/sda1/p/a', method='PUT', headers={
            'x-timestamp': '0'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201, resp.body)
        # Containers named US~~<state>~~<suffix>, including one with a
        # single '~' inside a segment (OK~Tulsa) and one with a tripled
        # '~~~' (US~~UT~~~B) to exercise delimiter-boundary edge cases.
        for i in ('US~~TX~~A', 'US~~TX~~B', 'US~~OK~~A', 'US~~OK~~B',
                  'US~~OK~Tulsa~~A', 'US~~OK~Tulsa~~B',
                  'US~~UT~~A', 'US~~UT~~~B'):
            req = Request.blank('/sda1/p/a/%s' % i, method='PUT', headers={
                'X-Put-Timestamp': '1',
                'X-Delete-Timestamp': '0',
                'X-Object-Count': '0',
                'X-Bytes-Used': '0',
                'X-Timestamp': normalize_timestamp(0)})
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int, 201)
        # prefix=US~~ rolls everything up into per-state subdirs.
        req = Request.blank(
            '/sda1/p/a?prefix=US~~&delimiter=~~&format=json',
            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            json.loads(resp.body),
            [{"subdir": "US~~OK~Tulsa~~"},
             {"subdir": "US~~OK~~"},
             {"subdir": "US~~TX~~"},
             {"subdir": "US~~UT~~"}])
        # Same listing, reversed.
        req = Request.blank(
            '/sda1/p/a?prefix=US~~&delimiter=~~&format=json&reverse=on',
            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            json.loads(resp.body),
            [{"subdir": "US~~UT~~"},
             {"subdir": "US~~TX~~"},
             {"subdir": "US~~OK~~"},
             {"subdir": "US~~OK~Tulsa~~"}])
        req = Request.blank(
            '/sda1/p/a?prefix=US~~UT&delimiter=~~&format=json',
            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            json.loads(resp.body),
            [{"subdir": "US~~UT~~"}])
        req = Request.blank(
            '/sda1/p/a?prefix=US~~UT&delimiter=~~&format=json&reverse=on',
            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            json.loads(resp.body),
            [{"subdir": "US~~UT~~"}])
        # Longer prefixes start exposing real container names alongside
        # deeper subdir placeholders.
        req = Request.blank(
            '/sda1/p/a?prefix=US~~UT~&delimiter=~~&format=json',
            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            [{k: v for k, v in item.items() if k in ('subdir', 'name')}
             for item in json.loads(resp.body)],
            [{"name": "US~~UT~~A"},
             {"subdir": "US~~UT~~~"}])
        req = Request.blank(
            '/sda1/p/a?prefix=US~~UT~&delimiter=~~&format=json&reverse=on',
            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            [{k: v for k, v in item.items() if k in ('subdir', 'name')}
             for item in json.loads(resp.body)],
            [{"subdir": "US~~UT~~~"},
             {"name": "US~~UT~~A"}])
        req = Request.blank(
            '/sda1/p/a?prefix=US~~UT~~&delimiter=~~&format=json',
            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            [{k: v for k, v in item.items() if k in ('subdir', 'name')}
             for item in json.loads(resp.body)],
            [{"name": "US~~UT~~A"},
             {"name": "US~~UT~~~B"}])
        req = Request.blank(
            '/sda1/p/a?prefix=US~~UT~~&delimiter=~~&format=json&reverse=on',
            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            [{k: v for k, v in item.items() if k in ('subdir', 'name')}
             for item in json.loads(resp.body)],
            [{"name": "US~~UT~~~B"},
             {"name": "US~~UT~~A"}])
        req = Request.blank(
            '/sda1/p/a?prefix=US~~UT~~~&delimiter=~~&format=json',
            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            [{k: v for k, v in item.items() if k in ('subdir', 'name')}
             for item in json.loads(resp.body)],
            [{"name": "US~~UT~~~B"}])
def _expected_listing(self, containers):
return [dict(
last_modified=c['timestamp'].isoformat, **{
k: v for k, v in c.items()
if k != 'timestamp'
}) for c in sorted(containers, key=lambda c: c['name'])]
    def _report_containers(self, containers, account='a'):
        """Create *account* and PUT one container record per dict in
        *containers* (keys: name, timestamp, count, bytes, optionally
        deleted), asserting each backend request succeeds.
        """
        req = Request.blank('/sda1/p/%s' % account, method='PUT', headers={
            'x-timestamp': next(self.ts).internal})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int // 100, 2, resp.body)
        for container in containers:
            path = '/sda1/p/%s/%s' % (account, container['name'])
            req = Request.blank(path, method='PUT', headers={
                'X-Put-Timestamp': container['timestamp'].internal,
                # Absent 'deleted' means the container is live.
                'X-Delete-Timestamp': container.get(
                    'deleted', Timestamp(0)).internal,
                'X-Object-Count': container['count'],
                'X-Bytes-Used': container['bytes'],
            })
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int // 100, 2, resp.body)
    def test_delimiter_with_reserved_and_no_public(self):
        """With only a reserved-namespace container, listings are empty
        unless the X-Backend-Allow-Reserved-Names header is supplied.
        """
        containers = [{
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # Without the backend header the reserved container is hidden.
        req = Request.blank('/sda1/p/a', headers={
            'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [])
        # With the header the full listing becomes visible.
        req = Request.blank('/sda1/p/a', headers={
            'X-Backend-Allow-Reserved-Names': 'true',
            'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers))
        # Same pattern with a reserved prefix plus a delimiter: hidden...
        req = Request.blank('/sda1/p/a?prefix=%s&delimiter=l' %
                            get_reserved_name('nul'), headers={
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [])
        # ...unless the backend header is present, which rolls the name
        # up into a subdir entry.
        req = Request.blank('/sda1/p/a?prefix=%s&delimiter=l' %
                            get_reserved_name('nul'), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [{
            'subdir': '%s' % get_reserved_name('null')}])
    def test_delimiter_with_reserved_and_public(self):
        """Mixing a reserved container with a public one: delimiter
        grouping applies to the public namespace, and the reserved name
        only appears with the X-Backend-Allow-Reserved-Names header.
        """
        containers = [{
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': 'nullish',
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # Public prefix/delimiter query sees only the public subdir.
        req = Request.blank('/sda1/p/a?prefix=nul&delimiter=l', headers={
            'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [{'subdir': 'null'}])
        # allow-reserved header doesn't really make a difference
        req = Request.blank('/sda1/p/a?prefix=nul&delimiter=l', headers={
            'X-Backend-Allow-Reserved-Names': 'true',
            'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [{'subdir': 'null'}])
        # Reserved prefix: hidden without the header...
        req = Request.blank('/sda1/p/a?prefix=%s&delimiter=l' %
                            get_reserved_name('nul'), headers={
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [])
        # ...rolled up into a reserved subdir with it.
        req = Request.blank('/sda1/p/a?prefix=%s&delimiter=l' %
                            get_reserved_name('nul'), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [{
            'subdir': '%s' % get_reserved_name('null')}])
        # Using the reserved byte itself (%00) as the delimiter.
        req = Request.blank('/sda1/p/a?delimiter=%00', headers={
            'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
        req = Request.blank('/sda1/p/a?delimiter=%00', headers={
            'X-Backend-Allow-Reserved-Names': 'true',
            'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         [{'subdir': '\x00'}] +
                         self._expected_listing(containers)[1:])
    def test_markers_with_reserved(self):
        """``marker=`` works with reserved-namespace names, but only when
        the X-Backend-Allow-Reserved-Names header is supplied.
        """
        containers = [{
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'test02'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # A reserved marker without the backend header hides everything.
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [])
        # With the header, listing resumes after the reserved marker.
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers))
        # A marker equal to the first container name skips it.
        req = Request.blank('/sda1/p/a?marker=%s' % quote(
            self._expected_listing(containers)[0]['name']), headers={
                'X-Backend-Allow-Reserved-Names': 'true',
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
        # Add a third container and re-check marker behaviour.
        containers.append({
            'name': get_reserved_name('null', 'test03'),
            'bytes': 300,
            'count': 30,
            'timestamp': next(self.ts),
        })
        self._report_containers(containers)
        req = Request.blank('/sda1/p/a?marker=%s' % quote(
            self._expected_listing(containers)[0]['name']), headers={
                'X-Backend-Allow-Reserved-Names': 'true',
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
        req = Request.blank('/sda1/p/a?marker=%s' % quote(
            self._expected_listing(containers)[1]['name']), headers={
                'X-Backend-Allow-Reserved-Names': 'true',
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[-1:])
    def test_prefix_with_reserved(self):
        """A reserved-namespace ``prefix=`` matches only with the
        X-Backend-Allow-Reserved-Names header.
        """
        containers = [{
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'test02'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'foo'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('nullish'),
            'bytes': 300,
            'count': 32,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # Hidden without the backend header.
        req = Request.blank('/sda1/p/a?prefix=%s' %
                            get_reserved_name('null', 'test'), headers={
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [])
        # With the header, only the two 'test*' containers match.
        req = Request.blank('/sda1/p/a?prefix=%s' %
                            get_reserved_name('null', 'test'), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers[:2]))
    def test_prefix_and_delim_with_reserved(self):
        """Combining a reserved ``prefix=`` with the reserved byte as
        ``delimiter=`` rolls reserved containers into one subdir entry,
        visible only with the X-Backend-Allow-Reserved-Names header.
        """
        containers = [{
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'test02'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'foo'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('nullish'),
            'bytes': 300,
            'count': 32,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # Hidden without the backend header.
        req = Request.blank('/sda1/p/a?prefix=%s&delimiter=%s' % (
            get_reserved_name('null'), get_reserved_name()), headers={
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [])
        # With the header: one subdir for the 'null/...' names plus the
        # plain 'nullish' container.
        req = Request.blank('/sda1/p/a?prefix=%s&delimiter=%s' % (
            get_reserved_name('null'), get_reserved_name()), headers={
                'X-Backend-Allow-Reserved-Names': 'true',
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        expected = [{'subdir': get_reserved_name('null', '')}] + \
            self._expected_listing(containers[-1:])
        self.assertEqual(json.loads(resp.body), expected)
    def test_reserved_markers_with_non_reserved(self):
        """Reserved markers against a mixed reserved/public listing: the
        backend header controls whether reserved names are included.
        """
        containers = [{
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'test02'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }, {
            'name': 'nullish',
            'bytes': 300,
            'count': 32,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # Reserved marker with the backend header: full listing.
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers))
        # Without the header only non-reserved names are listed.
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         [c for c in self._expected_listing(containers)
                          if get_reserved_name() not in c['name']])
        # NOTE(review): this request/assertion pair repeats the first one
        # verbatim; likely redundant.
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers))
        # Marker at the first container name skips it.
        req = Request.blank('/sda1/p/a?marker=%s' % quote(
            self._expected_listing(containers)[0]['name']), headers={
                'X-Backend-Allow-Reserved-Names': 'true',
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
    def test_null_markers(self):
        """Markers at, and just inside, the reserved 'null' namespace
        resume the listing at the right place.
        """
        containers = [{
            'name': get_reserved_name('null', ''),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': 'null',
            'bytes': 300,
            'count': 32,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # Without the backend header only the public 'null' remains.
        req = Request.blank('/sda1/p/a?marker=%s' % get_reserved_name('null'),
                            headers={'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[-1:])
        # With the header the whole listing sorts after the bare marker.
        req = Request.blank('/sda1/p/a?marker=%s' % get_reserved_name('null'),
                            headers={'X-Backend-Allow-Reserved-Names': 'true',
                                     'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers))
        # Marker equal to the first (empty-suffix) name skips it.
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
        # A marker between the first and second names gives the same tail.
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', 'test00'), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
    def test_through_call(self):
        """Drive the controller via the raw WSGI ``__call__`` interface:
        a GET on a nonexistent account yields a 404 status line.
        """
        inbuf = BytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        def start_response(*args):
            # Capture the status line passed by the app.
            outbuf.write(args[0])
        self.controller.__call__({'REQUEST_METHOD': 'GET',
                                  'SCRIPT_NAME': '',
                                  'PATH_INFO': '/sda1/p/a',
                                  'SERVER_NAME': '127.0.0.1',
                                  'SERVER_PORT': '8080',
                                  'SERVER_PROTOCOL': 'HTTP/1.0',
                                  'CONTENT_LENGTH': '0',
                                  'wsgi.version': (1, 0),
                                  'wsgi.url_scheme': 'http',
                                  'wsgi.input': inbuf,
                                  'wsgi.errors': errbuf,
                                  'wsgi.multithread': False,
                                  'wsgi.multiprocess': False,
                                  'wsgi.run_once': False},
                                 start_response)
        self.assertEqual(errbuf.getvalue(), '')
        self.assertEqual(outbuf.getvalue()[:4], '404 ')
    def test_through_call_invalid_path(self):
        """A request path with too few components ('/bob') is rejected
        with a 400 status line via the raw WSGI interface.
        """
        inbuf = BytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        def start_response(*args):
            # Capture the status line passed by the app.
            outbuf.write(args[0])
        self.controller.__call__({'REQUEST_METHOD': 'GET',
                                  'SCRIPT_NAME': '',
                                  'PATH_INFO': '/bob',
                                  'SERVER_NAME': '127.0.0.1',
                                  'SERVER_PORT': '8080',
                                  'SERVER_PROTOCOL': 'HTTP/1.0',
                                  'CONTENT_LENGTH': '0',
                                  'wsgi.version': (1, 0),
                                  'wsgi.url_scheme': 'http',
                                  'wsgi.input': inbuf,
                                  'wsgi.errors': errbuf,
                                  'wsgi.multithread': False,
                                  'wsgi.multiprocess': False,
                                  'wsgi.run_once': False},
                                 start_response)
        self.assertEqual(errbuf.getvalue(), '')
        self.assertEqual(outbuf.getvalue()[:4], '400 ')
    def test_through_call_invalid_path_utf8(self):
        """A path containing invalid UTF-8 bytes is rejected with a 412
        status line via the raw WSGI interface.
        """
        inbuf = BytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        def start_response(*args):
            # Capture the status line passed by the app.
            outbuf.write(args[0])
        self.controller.__call__({'REQUEST_METHOD': 'GET',
                                  'SCRIPT_NAME': '',
                                  'PATH_INFO': '/sda1/p/a/c\xd8\x3e%20',
                                  'SERVER_NAME': '127.0.0.1',
                                  'SERVER_PORT': '8080',
                                  'SERVER_PROTOCOL': 'HTTP/1.0',
                                  'CONTENT_LENGTH': '0',
                                  'wsgi.version': (1, 0),
                                  'wsgi.url_scheme': 'http',
                                  'wsgi.input': inbuf,
                                  'wsgi.errors': errbuf,
                                  'wsgi.multithread': False,
                                  'wsgi.multiprocess': False,
                                  'wsgi.run_once': False},
                                 start_response)
        self.assertEqual(errbuf.getvalue(), '')
        self.assertEqual(outbuf.getvalue()[:4], '412 ')
def test_invalid_method_doesnt_exist(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.write(args[0])
self.controller.__call__({'REQUEST_METHOD': 'method_doesnt_exist',
'PATH_INFO': '/sda1/p/a'},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_invalid_method_is_not_public(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.write(args[0])
self.controller.__call__({'REQUEST_METHOD': '__init__',
'PATH_INFO': '/sda1/p/a'},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_params_format(self):
Request.blank('/sda1/p/a',
headers={'X-Timestamp': normalize_timestamp(1)},
environ={'REQUEST_METHOD': 'PUT'}).get_response(
self.controller)
for format in ('xml', 'json'):
req = Request.blank('/sda1/p/a?format=%s' % format,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
    def test_params_utf8(self):
        """Invalid UTF-8 bytes in any query parameter produce a 400;
        valid UTF-8 is accepted everywhere (and ignored for limit).
        """
        # Bad UTF8 sequence, all parameters should cause 400 error
        for param in ('delimiter', 'limit', 'marker', 'prefix', 'end_marker',
                      'format'):
            req = Request.blank('/sda1/p/a?%s=\xce' % param,
                                environ={'REQUEST_METHOD': 'GET'})
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int, 400,
                             "%d on param %s" % (resp.status_int, param))
        Request.blank('/sda1/p/a',
                      headers={'X-Timestamp': normalize_timestamp(1)},
                      environ={'REQUEST_METHOD': 'PUT'}).get_response(
                          self.controller)
        # Good UTF8 sequence, ignored for limit, doesn't affect other queries
        for param in ('limit', 'marker', 'prefix', 'end_marker', 'format',
                      'delimiter'):
            req = Request.blank('/sda1/p/a?%s=\xce\xa9' % param,
                                environ={'REQUEST_METHOD': 'GET'})
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int, 204,
                             "%d on param %s" % (resp.status_int, param))
def test_PUT_auto_create(self):
headers = {'x-put-timestamp': normalize_timestamp(1),
'x-delete-timestamp': normalize_timestamp(0),
'x-object-count': '0',
'x-bytes-used': '0'}
req = Request.blank('/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
req = Request.blank('/sda1/p/.a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/.c',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
    def test_content_type_on_HEAD(self):
        """HEAD honours both the format= query parameter and the Accept
        header when choosing the response content type and charset.
        """
        Request.blank('/sda1/p/a',
                      headers={'X-Timestamp': normalize_timestamp(1)},
                      environ={'REQUEST_METHOD': 'PUT'}).get_response(
                          self.controller)
        env = {'REQUEST_METHOD': 'HEAD'}
        # format= parameter selects the content type.
        req = Request.blank('/sda1/p/a?format=xml', environ=env)
        resp = req.get_response(self.controller)
        self.assertEqual(resp.content_type, 'application/xml')
        req = Request.blank('/sda1/p/a?format=json', environ=env)
        resp = req.get_response(self.controller)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.charset, 'utf-8')
        # No format and no Accept: plain text.
        req = Request.blank('/sda1/p/a', environ=env)
        resp = req.get_response(self.controller)
        self.assertEqual(resp.content_type, 'text/plain')
        self.assertEqual(resp.charset, 'utf-8')
        # Accept header alone also selects the content type.
        req = Request.blank(
            '/sda1/p/a', headers={'Accept': 'application/json'}, environ=env)
        resp = req.get_response(self.controller)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.charset, 'utf-8')
        req = Request.blank(
            '/sda1/p/a', headers={'Accept': 'application/xml'}, environ=env)
        resp = req.get_response(self.controller)
        self.assertEqual(resp.content_type, 'application/xml')
        self.assertEqual(resp.charset, 'utf-8')
def test_serv_reserv(self):
# Test replication_server flag was set from configuration file.
conf = {'devices': self.testdir, 'mount_check': 'false'}
self.assertTrue(AccountController(conf).replication_server)
for val in [True, '1', 'True', 'true']:
conf['replication_server'] = val
self.assertTrue(AccountController(conf).replication_server)
for val in [False, 0, '0', 'False', 'false', 'test_string']:
conf['replication_server'] = val
self.assertFalse(AccountController(conf).replication_server)
def test_list_allowed_methods(self):
# Test list of allowed_methods
obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST']
repl_methods = ['REPLICATE']
for method_name in obj_methods:
method = getattr(self.controller, method_name)
self.assertFalse(hasattr(method, 'replication'))
for method_name in repl_methods:
method = getattr(self.controller, method_name)
self.assertEqual(method.replication, True)
    def test_correct_allowed_method(self):
        """A public, non-replication method is dispatched by __call__ even
        when replication_server is disabled.
        """
        # Test correct work for allowed method using
        # swift.account.server.AccountController.__call__
        inbuf = BytesIO()
        errbuf = StringIO()
        self.controller = AccountController(
            {'devices': self.testdir,
             'mount_check': 'false',
             'replication_server': 'false'})
        def start_response(*args):
            pass
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        method_res = mock.MagicMock()
        # Patch PUT with a public, non-replication mock handler and check
        # that __call__ routes the request to it.
        mock_method = public(lambda x: mock.MagicMock(return_value=method_res))
        with mock.patch.object(self.controller, method,
                               new=mock_method):
            mock_method.replication = False
            response = self.controller(env, start_response)
            self.assertEqual(response, method_res)
    def test_not_allowed_method(self):
        """A replication-only method is refused with 405 when
        replication_server is disabled.
        """
        # Test correct work for NOT allowed method using
        # swift.account.server.AccountController.__call__
        inbuf = BytesIO()
        errbuf = StringIO()
        self.controller = AccountController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false'})
        def start_response(*args):
            pass
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        answer = [b'<html><h1>Method Not Allowed</h1><p>The method is not '
                  b'allowed for this resource.</p></html>']
        # Patch PUT with a replication-marked handler; with the replication
        # server disabled, __call__ must refuse it.
        mock_method = replication(public(lambda x: mock.MagicMock()))
        with mock.patch.object(self.controller, method,
                               new=mock_method):
            mock_method.replication = True
            response = self.controller.__call__(env, start_response)
            self.assertEqual(response, answer)
    def test_replicaiton_server_call_all_methods(self):
        """With replication_server enabled, every public method remains
        callable through __call__ and succeeds.
        """
        # NOTE(review): 'replicaiton' is a typo for 'replication' in the
        # method name; kept as-is to avoid changing the test's name.
        inbuf = BytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.controller = AccountController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'true'})
        def start_response(*args):
            # Capture the status line passed by the app.
            outbuf.write(args[0])
        obj_methods = ['PUT', 'HEAD', 'GET', 'POST', 'DELETE', 'OPTIONS']
        for method in obj_methods:
            env = {'REQUEST_METHOD': method,
                   'SCRIPT_NAME': '',
                   'PATH_INFO': '/sda1/p/a',
                   'SERVER_NAME': '127.0.0.1',
                   'SERVER_PORT': '8080',
                   'SERVER_PROTOCOL': 'HTTP/1.0',
                   'HTTP_X_TIMESTAMP': next(self.ts).internal,
                   'CONTENT_LENGTH': '0',
                   'wsgi.version': (1, 0),
                   'wsgi.url_scheme': 'http',
                   'wsgi.input': inbuf,
                   'wsgi.errors': errbuf,
                   'wsgi.multithread': False,
                   'wsgi.multiprocess': False,
                   'wsgi.run_once': False}
            self.controller(env, start_response)
            self.assertEqual(errbuf.getvalue(), '')
            self.assertIn(outbuf.getvalue()[:4], ('200 ', '201 ', '204 '))
    def test__call__raise_timeout(self):
        """If a handler raises, __call__ returns a traceback body and logs
        an ERROR line (and no info line, since log_requests is false).
        """
        # NOTE(review): despite the name, the mocked handler raises a bare
        # Exception, not a timeout.
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        self.logger = debug_logger('test')
        self.account_controller = AccountController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false'},
            logger=self.logger)
        def start_response(*args):
            pass
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        @public
        def mock_put_method(*args, **kwargs):
            raise Exception()
        with mock.patch.object(self.account_controller, method,
                               new=mock_put_method):
            response = self.account_controller.__call__(env, start_response)
            self.assertTrue(response[0].decode('ascii').startswith(
                'Traceback (most recent call last):'))
            self.assertEqual(self.logger.get_lines_for_level('error'), [
                'ERROR __call__ error with %(method)s %(path)s : ' % {
                    'method': 'PUT', 'path': '/sda1/p/a/c'},
            ])
            self.assertEqual(self.logger.get_lines_for_level('info'), [])
def test_GET_log_requests_true(self):
self.controller.logger = FakeLogger()
self.controller.log_requests = True
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
self.assertTrue(self.controller.logger.log_dict['info'])
def test_GET_log_requests_false(self):
self.controller.logger = FakeLogger()
self.controller.log_requests = False
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
self.assertFalse(self.controller.logger.log_dict['info'])
    def test_log_line_format(self):
        """The access-log info line has the expected fixed format; time
        and pid are mocked so the line is fully deterministic.
        """
        req = Request.blank(
            '/sda1/p/a',
            environ={'REQUEST_METHOD': 'HEAD', 'REMOTE_ADDR': '1.2.3.4'})
        self.controller.logger = FakeLogger()
        # side_effect values feed successive time.time() calls; the 2.0000
        # request duration in the log comes from 10002.0 - 10000.0.
        with mock.patch(
                'time.time',
                mock.MagicMock(side_effect=[10000.0, 10001.0, 10002.0,
                                            10002.0])):
            with mock.patch(
                    'os.getpid', mock.MagicMock(return_value=1234)):
                req.get_response(self.controller)
        self.assertEqual(
            self.controller.logger.log_dict['info'],
            [(('1.2.3.4 - - [01/Jan/1970:02:46:42 +0000] "HEAD /sda1/p/a" 404 '
               '- "-" "-" "-" 2.0000 "-" 1234 -',), {})])
    def test_policy_stats_with_legacy(self):
        """A container PUT without a storage-policy index rolls up into
        the default policy's per-policy account headers.
        """
        ts = itertools.count()
        # create the account
        req = Request.blank('/sda1/p/a', method='PUT', headers={
            'X-Timestamp': normalize_timestamp(next(ts))})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)  # sanity
        # add a container
        req = Request.blank('/sda1/p/a/c1', method='PUT', headers={
            'X-Put-Timestamp': normalize_timestamp(next(ts)),
            'X-Delete-Timestamp': '0',
            'X-Object-Count': '2',
            'X-Bytes-Used': '4',
        })
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        # read back rollup
        for method in ('GET', 'HEAD'):
            req = Request.blank('/sda1/p/a', method=method)
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int // 100, 2)
            self.assertEqual(resp.headers['X-Account-Object-Count'], '2')
            self.assertEqual(resp.headers['X-Account-Bytes-Used'], '4')
            self.assertEqual(
                resp.headers['X-Account-Storage-Policy-%s-Object-Count' %
                             POLICIES[0].name], '2')
            self.assertEqual(
                resp.headers['X-Account-Storage-Policy-%s-Bytes-Used' %
                             POLICIES[0].name], '4')
            self.assertEqual(
                resp.headers['X-Account-Storage-Policy-%s-Container-Count' %
                             POLICIES[0].name], '1')
def test_policy_stats_non_default(self):
    """Usage reported under a non-default policy index must roll up
    under that policy's own per-policy headers."""
    ts = itertools.count()
    # create the account
    resp = Request.blank('/sda1/p/a', method='PUT', headers={
        'X-Timestamp': normalize_timestamp(next(ts))}).get_response(
            self.controller)
    self.assertEqual(resp.status_int, 201)  # sanity
    # pick any policy that is not the default and add a container to it
    policy = random.choice([p for p in POLICIES if not p.is_default])
    resp = Request.blank('/sda1/p/a/c1', method='PUT', headers={
        'X-Put-Timestamp': normalize_timestamp(next(ts)),
        'X-Delete-Timestamp': '0',
        'X-Object-Count': '2',
        'X-Bytes-Used': '4',
        'X-Backend-Storage-Policy-Index': policy.idx,
    }).get_response(self.controller)
    self.assertEqual(resp.status_int, 201)
    # read back the rollup
    for verb in ('GET', 'HEAD'):
        resp = Request.blank('/sda1/p/a', method=verb).get_response(
            self.controller)
        self.assertEqual(resp.status_int // 100, 2)
        self.assertEqual(resp.headers['X-Account-Object-Count'], '2')
        self.assertEqual(resp.headers['X-Account-Bytes-Used'], '4')
        prefix = 'X-Account-Storage-Policy-%s' % policy.name
        self.assertEqual(resp.headers[prefix + '-Object-Count'], '2')
        self.assertEqual(resp.headers[prefix + '-Bytes-Used'], '4')
        self.assertEqual(resp.headers[prefix + '-Container-Count'], '1')
def test_empty_policy_stats(self):
    """A freshly created account with no containers must expose no
    per-policy stat headers at all."""
    ts = itertools.count()
    # create the account
    resp = Request.blank('/sda1/p/a', method='PUT', headers={
        'X-Timestamp': normalize_timestamp(next(ts))}).get_response(
            self.controller)
    self.assertEqual(resp.status_int, 201)  # sanity
    for verb in ('GET', 'HEAD'):
        resp = Request.blank('/sda1/p/a', method=verb).get_response(
            self.controller)
        self.assertEqual(resp.status_int // 100, 2)
        # no header may mention any storage policy
        policy_keys = [k for k in resp.headers
                       if 'storage-policy' in k.lower()]
        self.assertEqual([], policy_keys)
def test_empty_except_for_used_policies(self):
    """Per-policy headers appear only for policies that actually hold
    containers: none at first, then exactly one policy's worth."""
    ts = itertools.count()
    # create the account
    resp = Request.blank('/sda1/p/a', method='PUT', headers={
        'X-Timestamp': normalize_timestamp(next(ts))}).get_response(
            self.controller)
    self.assertEqual(resp.status_int, 201)  # sanity
    # a fresh account exposes no policy headers
    for verb in ('GET', 'HEAD'):
        resp = Request.blank('/sda1/p/a', method=verb).get_response(
            self.controller)
        self.assertEqual(resp.status_int // 100, 2)
        for key in resp.headers:
            self.assertNotIn('storage-policy', key.lower())
    # create one container under a randomly chosen policy
    policy = random.choice(POLICIES)
    resp = Request.blank('/sda1/p/a/c1', method='PUT', headers={
        'X-Put-Timestamp': normalize_timestamp(next(ts)),
        'X-Delete-Timestamp': '0',
        'X-Object-Count': '2',
        'X-Bytes-Used': '4',
        'X-Backend-Storage-Policy-Index': policy.idx,
    }).get_response(self.controller)
    self.assertEqual(resp.status_int, 201)
    # now every policy header must mention exactly that policy
    for verb in ('GET', 'HEAD'):
        resp = Request.blank('/sda1/p/a', method=verb).get_response(
            self.controller)
        self.assertEqual(resp.status_int // 100, 2)
        for key in resp.headers:
            if 'storage-policy' in key.lower():
                self.assertIn(policy.name.lower(), key.lower())
def test_multiple_policies_in_use(self):
    """With one container per policy, each policy's rollup headers
    must report that container's usage, and the per-policy figures
    must sum to the expected grand total."""
    ts = itertools.count()
    # create the account
    resp = Request.blank('/sda1/p/a', method='PUT', headers={
        'X-Timestamp': normalize_timestamp(next(ts))}).get_response(
            self.controller)
    self.assertEqual(resp.status_int, 201)  # sanity
    # one container per policy, with usage scaled by the policy index
    for policy in POLICIES:
        count = policy.idx * 100  # good as any integer
        resp = Request.blank(
            '/sda1/p/a/c_%s' % policy.name, method='PUT', headers={
                'X-Put-Timestamp': normalize_timestamp(next(ts)),
                'X-Delete-Timestamp': '0',
                'X-Object-Count': count,
                'X-Bytes-Used': count,
                'X-Backend-Storage-Policy-Index': policy.idx,
            }).get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
    resp = Request.blank('/sda1/p/a', method='HEAD').get_response(
        self.controller)
    self.assertEqual(resp.status_int // 100, 2)
    # verify each policy's rollup headers while accumulating totals
    total_object_count = 0
    total_bytes_used = 0
    for key, value in resp.headers.items():
        key_lower = key.lower()
        if 'storage-policy' not in key_lower:
            continue
        for policy in POLICIES:
            if policy.name.lower() not in key_lower:
                continue
            if key_lower.endswith('object-count'):
                object_count = int(value)
                self.assertEqual(policy.idx * 100, object_count)
                total_object_count += object_count
            if key_lower.endswith('bytes-used'):
                bytes_used = int(value)
                self.assertEqual(policy.idx * 100, bytes_used)
                total_bytes_used += bytes_used
    expected_total = sum(p.idx * 100 for p in POLICIES)
    self.assertEqual(expected_total, total_object_count)
    self.assertEqual(expected_total, total_bytes_used)
@patch_policies([StoragePolicy(0, 'zero', False),
                 StoragePolicy(1, 'one', True),
                 StoragePolicy(2, 'two', False),
                 StoragePolicy(3, 'three', False)])
class TestNonLegacyDefaultStoragePolicy(TestAccountController):
    """Re-run the entire TestAccountController suite against a policy
    configuration whose default policy is NOT index 0 (here index 1),
    to catch code paths that wrongly assume policy 0 is the default.
    """
    pass
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
|
swiftstack/swift
|
test/unit/account/test_server.py
|
Python
|
apache-2.0
| 135,836 | 0 |
from . import config
from django.shortcuts import render
from mwoauth import ConsumerToken, Handshaker, tokens
def requests_handshaker():
    """Build an mwoauth Handshaker against meta.wikimedia.org using the
    OAuth consumer credentials taken from the local config module."""
    token = ConsumerToken(config.OAUTH_CONSUMER_KEY,
                          config.OAUTH_CONSUMER_SECRET)
    return Handshaker("https://meta.wikimedia.org/w/index.php", token)
def get_username(request):
    """Return the Wikimedia username for the logged-in user, or None.

    The OAuth access token is expected in the Django session under the
    'access_token_key' / 'access_token_secret' keys.  A session missing
    EITHER half is treated as anonymous (the original code checked only
    the key and would raise KeyError on a half-populated session).

    NOTE(review): identify() appears to perform a network round-trip to
    the wiki on every call -- consider caching the username in the
    session; confirm against mwoauth docs.
    """
    session = request.session
    if ('access_token_key' not in session or
            'access_token_secret' not in session):
        return None
    handshaker = requests_handshaker()
    access_token = tokens.AccessToken(
        key=session['access_token_key'].encode('utf-8'),
        secret=session['access_token_secret'].encode('utf-8'))
    return handshaker.identify(access_token)['username']
|
harej/requestoid
|
authentication.py
|
Python
|
mit
| 819 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#----------------------------------------------------------------------------------------------------------------------*
#
# Options for all compilers
#
#----------------------------------------------------------------------------------------------------------------------*
def allCompilerOptions (platformOptions):
    """Append the warning / code-generation flags shared by every
    compiler to *platformOptions* (mutated in place) and return it."""
    platformOptions.extend([
        "-Wall",
        "-Werror",
        "-Wreturn-type",
        "-Wformat",
        "-Wsign-compare",
        "-Wpointer-arith",
        # --- Options added for GALGAS 1.9.0
        "-ansi",
        "-W",
        "-Wshadow",
        # "-Wcast-qual" is deliberately left disabled
        "-Wwrite-strings",
        "-ffunction-sections",
        "-fdata-sections",
    ])
    return platformOptions
#----------------------------------------------------------------------------------------------------------------------*
#
# Release options
#
#----------------------------------------------------------------------------------------------------------------------*
def compilerReleaseOptions (platformOptions):
    """Append release-build flags to *platformOptions* (mutated in
    place) and return it: checkings disabled, unused vars flagged."""
    for flag in ("-DDO_NOT_GENERATE_CHECKINGS", "-Wunused-variable"):
        platformOptions.append(flag)
    return platformOptions
#----------------------------------------------------------------------------------------------------------------------*
#
# Debug options
#
#----------------------------------------------------------------------------------------------------------------------*
def compilerDebugOptions (platformOptions):
    """Append debug-build flags to *platformOptions* (mutated in
    place) and return it: just "-g" for debug symbols."""
    platformOptions.append("-g")
    return platformOptions
#----------------------------------------------------------------------------------------------------------------------*
#
# C compiler options
#
#----------------------------------------------------------------------------------------------------------------------*
def C_CompilerOptions (platformOptions):
    """Append C-specific flags to *platformOptions* (mutated in
    place) and return it: compile as C99."""
    platformOptions.append("-std=c99")
    return platformOptions
#----------------------------------------------------------------------------------------------------------------------*
#
# C++ compiler options
#
#----------------------------------------------------------------------------------------------------------------------*
def Cpp_CompilerOptions (platformOptions):
    """Append C++-specific flags to *platformOptions* (mutated in
    place) and return it: C++14, warn on hidden virtual methods."""
    platformOptions += ["-std=c++14", "-Woverloaded-virtual"]
    return platformOptions
#----------------------------------------------------------------------------------------------------------------------*
#
# Objective C compiler options
#
#----------------------------------------------------------------------------------------------------------------------*
def ObjectiveC_CompilerOptions (platformOptions):
    """Objective-C adds no extra flags; return the list unchanged."""
    return platformOptions
#----------------------------------------------------------------------------------------------------------------------*
#
# Objective C++ compiler options
#
#----------------------------------------------------------------------------------------------------------------------*
def ObjectiveCpp_CompilerOptions (platformOptions):
    """Objective-C++ adds no extra flags; return the list unchanged."""
    return platformOptions
#----------------------------------------------------------------------------------------------------------------------*
|
TrampolineRTOS/trampoline
|
goil/build/libpm/python-makefiles/default_build_options.py
|
Python
|
gpl-2.0
| 3,374 | 0.02786 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.