| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34 |
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from . import account_journal
from . import account_tax
from . import account_document_letter
from . import afip_responsability_type
from . import account_document_type
from . import afip_incoterm
from . import res_partner
from . import res_country
from . import res_currency
from . import res_company
from . import account_invoice_tax
from . import account_invoice
from . import account_invoice_line
from . import product_uom
from . import account_chart_template
from . import afip_vat_f2002_category
from . import product_template
from . import account_move_line
from . import account_move
from . import afip_padron
from . import account_account
from . import account_account_tag
| bmya/odoo-argentina | l10n_ar_account/models/__init__.py | Python | agpl-3.0 | 929 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Citrix Systems
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = '''
---
module: netscaler_cs_action
short_description: Manage content switching actions
description:
- Manage content switching actions
- This module is intended to run either on the ansible control node or a bastion (jumpserver) with access to the actual netscaler instance
version_added: "2.4.0"
author: George Nikolopoulos (@giorgos-nikolopoulos)
options:
name:
description:
- >-
Name for the content switching action. Must begin with an ASCII alphanumeric or underscore C(_)
character, and must contain only ASCII alphanumeric, underscore C(_), hash C(#), period C(.), space C( ), colon
C(:), at sign C(@), equal sign C(=), and hyphen C(-) characters. Can be changed after the content
switching action is created.
targetlbvserver:
description:
- "Name of the load balancing virtual server to which the content is switched."
targetvserver:
description:
- "Name of the VPN virtual server to which the content is switched."
targetvserverexpr:
description:
- "Information about this content switching action."
comment:
description:
- "Comments associated with this cs action."
extends_documentation_fragment: netscaler
requirements:
- nitro python sdk
'''
EXAMPLES = '''
# lb_vserver_1 must have been already created with the netscaler_lb_vserver module
- name: Configure netscaler content switching action
delegate_to: localhost
netscaler_cs_action:
nsip: 172.18.0.2
nitro_user: nsroot
nitro_pass: nsroot
validate_certs: no
state: present
name: action-1
targetlbvserver: lb_vserver_1
'''
RETURN = '''
loglines:
description: list of logged messages by the module
returned: always
type: list
sample: "['message 1', 'message 2']"
msg:
description: Message detailing the failure reason
returned: failure
type: string
sample: "Action does not exist"
diff:
description: List of differences between the actual configured object and the configuration specified in the module
returned: failure
type: dictionary
sample: "{ 'targetlbvserver': 'difference. ours: (str) server1 other: (str) server2' }"
'''
import json
try:
from nssrc.com.citrix.netscaler.nitro.resource.config.cs.csaction import csaction
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
PYTHON_SDK_IMPORTED = True
except ImportError as e:
PYTHON_SDK_IMPORTED = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netscaler import (
ConfigProxy,
get_nitro_client,
netscaler_common_arguments,
log, loglines,
ensure_feature_is_enabled,
get_immutables_intersection
)
def action_exists(client, module):
if csaction.count_filtered(client, 'name:%s' % module.params['name']) > 0:
return True
else:
return False
def action_identical(client, module, csaction_proxy):
if len(diff_list(client, module, csaction_proxy)) == 0:
return True
else:
return False
def diff_list(client, module, csaction_proxy):
action_list = csaction.get_filtered(client, 'name:%s' % module.params['name'])
diff_list = csaction_proxy.diff_object(action_list[0])
    # Note: the `False` guard below disables this JSON normalization of
    # targetvserverexpr, so the branch never runs.
    if False and 'targetvserverexpr' in diff_list:
json_value = json.loads(action_list[0].targetvserverexpr)
if json_value == module.params['targetvserverexpr']:
del diff_list['targetvserverexpr']
return diff_list
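# Illustrative sketch (not part of this module): the helpers above answer
# three questions per run -- does the action exist, does it match the
# requested configuration, and if not, do any differing attributes belong to
# the immutable set?  A rough, SDK-free outline of that decision, using
# hypothetical dict-shaped "actual"/"desired" objects:
def _plan_state_present(actual, desired, immutables=('name', 'targetvserverexpr')):
    """Return 'create', 'update' or 'noop' for a declarative 'present' run."""
    if actual is None:
        return 'create'
    diff = dict((k, v) for k, v in desired.items() if actual.get(k) != v)
    if not diff:
        return 'noop'
    if any(k in immutables for k in diff):
        raise ValueError('Cannot update immutable attributes %s' % sorted(diff))
    return 'update'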
def main():
module_specific_arguments = dict(
name=dict(type='str'),
targetlbvserver=dict(type='str'),
targetvserverexpr=dict(type='str'),
comment=dict(type='str'),
)
argument_spec = dict()
argument_spec.update(netscaler_common_arguments)
argument_spec.update(module_specific_arguments)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
module_result = dict(
changed=False,
failed=False,
loglines=loglines
)
# Fail the module if imports failed
if not PYTHON_SDK_IMPORTED:
module.fail_json(msg='Could not load nitro python sdk')
# Fallthrough to rest of execution
client = get_nitro_client(module)
try:
client.login()
except nitro_exception as e:
msg = "nitro exception during login. errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg)
except Exception as e:
if str(type(e)) == "<class 'requests.exceptions.ConnectionError'>":
module.fail_json(msg='Connection error %s' % str(e))
elif str(type(e)) == "<class 'requests.exceptions.SSLError'>":
module.fail_json(msg='SSL Error %s' % str(e))
else:
module.fail_json(msg='Unexpected error during login %s' % str(e))
readwrite_attrs = [
'name',
'targetlbvserver',
'targetvserverexpr',
'comment',
]
readonly_attrs = [
'hits',
'referencecount',
'undefhits',
'builtin',
]
immutable_attrs = [
'name',
'targetvserverexpr',
]
transforms = {
}
json_encodes = ['targetvserverexpr']
# Instantiate config proxy
csaction_proxy = ConfigProxy(
actual=csaction(),
client=client,
attribute_values_dict=module.params,
readwrite_attrs=readwrite_attrs,
readonly_attrs=readonly_attrs,
immutable_attrs=immutable_attrs,
transforms=transforms,
json_encodes=json_encodes,
)
try:
ensure_feature_is_enabled(client, 'CS')
# Apply appropriate state
if module.params['state'] == 'present':
log('Applying actions for state present')
if not action_exists(client, module):
if not module.check_mode:
csaction_proxy.add()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
elif not action_identical(client, module, csaction_proxy):
# Check if we try to change value of immutable attributes
immutables_changed = get_immutables_intersection(csaction_proxy, diff_list(client, module, csaction_proxy).keys())
if immutables_changed != []:
module.fail_json(
msg='Cannot update immutable attributes %s' % (immutables_changed,),
diff=diff_list(client, module, csaction_proxy),
**module_result
)
if not module.check_mode:
csaction_proxy.update()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
else:
module_result['changed'] = False
# Sanity check for state
log('Sanity checks for state present')
if not module.check_mode:
if not action_exists(client, module):
module.fail_json(msg='Content switching action does not exist', **module_result)
if not action_identical(client, module, csaction_proxy):
module.fail_json(
msg='Content switching action differs from configured',
diff=diff_list(client, module, csaction_proxy),
**module_result
)
elif module.params['state'] == 'absent':
log('Applying actions for state absent')
if action_exists(client, module):
if not module.check_mode:
csaction_proxy.delete()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
else:
module_result['changed'] = False
# Sanity check for state
if not module.check_mode:
log('Sanity checks for state absent')
if action_exists(client, module):
module.fail_json(msg='Content switching action still exists', **module_result)
except nitro_exception as e:
msg = "nitro exception errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg, **module_result)
client.logout()
module.exit_json(**module_result)
if __name__ == "__main__":
main()
| bearstech/ansible | lib/ansible/modules/network/netscaler/netscaler_cs_action.py | Python | gpl-3.0 | 9,091 | 0.00242 |
"""Some utility functions"""
# Authors: Eric Larson <larsoner@uw.edu>
#
# License: BSD (3-clause)
import warnings
import operator
from copy import deepcopy
import subprocess
import importlib
import os
import os.path as op
import inspect
import sys
import time
import tempfile
import traceback
import ssl
from shutil import rmtree
import atexit
import json
from functools import partial
from distutils.version import LooseVersion
import logging
import datetime
from timeit import default_timer as clock
from threading import Timer
import numpy as np
import scipy as sp
from ._externals import decorator
# set this first thing to make sure it "takes"
try:
import pyglet
pyglet.options['debug_gl'] = False
del pyglet
except Exception:
pass
# for py3k (eventually)
if sys.version.startswith('2'):
string_types = basestring # noqa
input = raw_input # noqa, input is raw_input in py3k
text_type = unicode # noqa
from __builtin__ import reload
from urllib2 import urlopen # noqa
from cStringIO import StringIO # noqa
else:
string_types = str
text_type = str
from urllib.request import urlopen
input = input
from io import StringIO # noqa, analysis:ignore
from importlib import reload # noqa, analysis:ignore
###############################################################################
# LOGGING
EXP = 25
logging.addLevelName(EXP, 'EXP')
def exp(self, message, *args, **kwargs):
"""Experiment-level logging."""
self.log(EXP, message, *args, **kwargs)
logging.Logger.exp = exp
logger = logging.getLogger('expyfun')
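# Illustrative note (not part of expyfun): the custom EXP level (25) sits
# between INFO (20) and WARNING (30), so experiment-level messages survive
# the default 'INFO' threshold applied by set_log_level below.
#
#     >>> logger.exp('trial 3 started')  # doctest: +SKIP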
def flush_logger():
"""Flush expyfun logger"""
for handler in logger.handlers:
handler.flush()
def set_log_level(verbose=None, return_old_level=False):
"""Convenience function for setting the logging level
Parameters
----------
verbose : bool, str, int, or None
The verbosity of messages to print. If a str, it can be either DEBUG,
INFO, WARNING, ERROR, or CRITICAL. Note that these are for
convenience and are equivalent to passing in logging.DEBUG, etc.
For bool, True is the same as 'INFO', False is the same as 'WARNING'.
If None, the environment variable EXPYFUN_LOGGING_LEVEL is read, and if
it doesn't exist, defaults to INFO.
return_old_level : bool
If True, return the old verbosity level.
"""
if verbose is None:
verbose = get_config('EXPYFUN_LOGGING_LEVEL', 'INFO')
elif isinstance(verbose, bool):
verbose = 'INFO' if verbose is True else 'WARNING'
if isinstance(verbose, string_types):
verbose = verbose.upper()
logging_types = dict(DEBUG=logging.DEBUG, INFO=logging.INFO,
WARNING=logging.WARNING, ERROR=logging.ERROR,
CRITICAL=logging.CRITICAL)
if verbose not in logging_types:
raise ValueError('verbose must be of a valid type')
verbose = logging_types[verbose]
old_verbose = logger.level
logger.setLevel(verbose)
return (old_verbose if return_old_level else None)
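# Illustrative usage (not part of expyfun): booleans map onto named levels,
# and the previous level can be captured and later restored -- the same
# pattern the verbose_dec decorator below relies on.
#
#     >>> old = set_log_level(False, return_old_level=True)  # doctest: +SKIP
#     >>> set_log_level(old)                                  # doctest: +SKIP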
def set_log_file(fname=None,
output_format='%(asctime)s - %(levelname)-7s - %(message)s',
overwrite=None):
"""Convenience function for setting the log to print to a file
Parameters
----------
fname : str, or None
Filename of the log to print to. If None, stdout is used.
To suppress log outputs, use set_log_level('WARN').
output_format : str
Format of the output messages. See the following for examples:
http://docs.python.org/dev/howto/logging.html
e.g., "%(asctime)s - %(levelname)s - %(message)s".
overwrite : bool, or None
Overwrite the log file (if it exists). Otherwise, statements
will be appended to the log (default). None is the same as False,
but additionally raises a warning to notify the user that log
entries will be appended.
"""
handlers = logger.handlers
for h in handlers:
if isinstance(h, logging.FileHandler):
h.close()
logger.removeHandler(h)
if fname is not None:
if op.isfile(fname) and overwrite is None:
warnings.warn('Log entries will be appended to the file. Use '
'overwrite=False to avoid this message in the '
'future.')
mode = 'w' if overwrite is True else 'a'
lh = logging.FileHandler(fname, mode=mode)
else:
""" we should just be able to do:
lh = logging.StreamHandler(sys.stdout)
        but because doctest uses some magic on stdout, we have to do this:
"""
lh = logging.StreamHandler(WrapStdOut())
lh.setFormatter(logging.Formatter(output_format))
# actually add the stream handler
logger.addHandler(lh)
###############################################################################
# RANDOM UTILITIES
building_doc = any('sphinx-build' in ((''.join(i[4]).lower() + i[1])
if i[4] is not None else '')
for i in inspect.stack())
def run_subprocess(command, **kwargs):
"""Run command using subprocess.Popen
Run command and wait for command to complete. If the return code was zero
then return, otherwise raise CalledProcessError.
    By default, this will also add stdout= and stderr=subprocess.PIPE
to the call to Popen to suppress printing to the terminal.
Parameters
----------
command : list of str
Command to run as subprocess (see subprocess.Popen documentation).
**kwargs : objects
        Keyword arguments to pass to ``subprocess.Popen``.
Returns
-------
stdout : str
Stdout returned by the process.
stderr : str
Stderr returned by the process.
"""
# code adapted with permission from mne-python
kw = dict(stderr=subprocess.PIPE, stdout=subprocess.PIPE)
kw.update(kwargs)
p = subprocess.Popen(command, **kw)
stdout_, stderr = p.communicate()
output = (stdout_.decode(), stderr.decode())
if p.returncode:
err_fun = subprocess.CalledProcessError.__init__
if 'output' in _get_args(err_fun):
raise subprocess.CalledProcessError(p.returncode, command, output)
else:
raise subprocess.CalledProcessError(p.returncode, command)
return output
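# Illustrative usage (not part of expyfun): both streams are captured and
# returned as decoded strings; a non-zero exit raises CalledProcessError.
#
#     >>> out, err = run_subprocess(['python', '--version'])  # doctest: +SKIP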
class ZeroClock(object):
"""Clock that uses "clock" function but starts at zero on init."""
def __init__(self):
self._start_time = clock()
def get_time(self):
"""Get time."""
return clock() - self._start_time
def date_str():
"""Produce a date string for the current date and time
Returns
-------
datestr : str
The date string.
"""
return str(datetime.datetime.today()).replace(':', '_')
class WrapStdOut(object):
"""Ridiculous class to work around how doctest captures stdout."""
def __getattr__(self, name):
# Even more ridiculous than this class, this must be sys.stdout (not
# just stdout) in order for this to work (tested on OSX and Linux)
return getattr(sys.stdout, name)
class _TempDir(str):
"""Class for creating and auto-destroying temp dir
This is designed to be used with testing modules.
    We cannot simply use the __del__() method for cleanup here because the
    rmtree function may be cleaned up before this object, so we use the atexit
    module instead. Passing the del_after and print_del kwargs to the
    constructor is helpful primarily for debugging purposes.
"""
def __new__(self, del_after=True, print_del=False):
new = str.__new__(self, tempfile.mkdtemp())
self._del_after = del_after
self._print_del = print_del
return new
def __init__(self):
self._path = self.__str__()
atexit.register(self.cleanup)
def cleanup(self):
if self._del_after is True:
if self._print_del is True:
print('Deleting {} ...'.format(self._path))
rmtree(self._path, ignore_errors=True)
def check_units(units):
"""Ensure user passed valid units type
Parameters
----------
units : str
Must be ``'norm'``, ``'deg'``, ``'pix'``, or ``'cm'``.
"""
good_units = ['norm', 'pix', 'deg', 'cm']
if units not in good_units:
raise ValueError('"units" must be one of {}, not {}'
''.format(good_units, units))
###############################################################################
# DECORATORS
# Following deprecated class copied from scikit-learn
class deprecated(object):
"""Decorator to mark a function or class as deprecated.
Issue a warning when the function is called/the class is instantiated and
adds a warning to the docstring.
The optional extra argument will be appended to the deprecation message
and the docstring. Note: to use this with the default value for extra, put
    in an empty set of parentheses:
>>> from expyfun._utils import deprecated
>>> deprecated() # doctest: +ELLIPSIS
<expyfun._utils.deprecated object at ...>
>>> @deprecated()
... def some_function(): pass
"""
# Adapted from http://wiki.python.org/moin/PythonDecoratorLibrary,
# but with many changes.
# scikit-learn will not import on all platforms b/c it can be
# sklearn or scikits.learn, so a self-contained example is used above
def __init__(self, extra=''):
"""
Parameters
----------
extra: string
to be added to the deprecation messages
"""
self.extra = extra
def __call__(self, obj):
"""Call."""
if isinstance(obj, type):
return self._decorate_class(obj)
else:
return self._decorate_fun(obj)
def _decorate_class(self, cls):
msg = "Class %s is deprecated" % cls.__name__
if self.extra:
msg += "; %s" % self.extra
# FIXME: we should probably reset __new__ for full generality
init = cls.__init__
def wrapped(*args, **kwargs):
warnings.warn(msg, category=DeprecationWarning)
return init(*args, **kwargs)
cls.__init__ = wrapped
wrapped.__name__ = '__init__'
wrapped.__doc__ = self._update_doc(init.__doc__)
wrapped.deprecated_original = init
return cls
def _decorate_fun(self, fun):
"""Decorate function fun"""
msg = "Function %s is deprecated" % fun.__name__
if self.extra:
msg += "; %s" % self.extra
def wrapped(*args, **kwargs):
warnings.warn(msg, category=DeprecationWarning)
return fun(*args, **kwargs)
wrapped.__name__ = fun.__name__
wrapped.__dict__ = fun.__dict__
wrapped.__doc__ = self._update_doc(fun.__doc__)
return wrapped
def _update_doc(self, olddoc):
newdoc = "DEPRECATED"
if self.extra:
newdoc = "%s: %s" % (newdoc, self.extra)
if olddoc:
newdoc = "%s\n\n%s" % (newdoc, olddoc)
return newdoc
if hasattr(inspect, 'signature'): # py35
def _get_args(function, varargs=False):
params = inspect.signature(function).parameters
args = [key for key, param in params.items()
if param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD)]
if varargs:
varargs = [param.name for param in params.values()
if param.kind == param.VAR_POSITIONAL]
if len(varargs) == 0:
varargs = None
return args, varargs
else:
return args
else:
def _get_args(function, varargs=False):
out = inspect.getargspec(function) # args, varargs, keywords, defaults
if varargs:
return out[:2]
else:
return out[0]
@decorator
def verbose_dec(function, *args, **kwargs):
"""Improved verbose decorator to allow functions to override log-level
    Do not call this directly to set the global verbosity level; instead use
set_log_level().
Parameters
----------
function : callable
Function to be decorated by setting the verbosity level.
Returns
-------
    dec : function
The decorated function
"""
arg_names = _get_args(function)
if len(arg_names) > 0 and arg_names[0] == 'self':
default_level = getattr(args[0], 'verbose', None)
else:
default_level = None
if('verbose' in arg_names):
verbose_level = args[arg_names.index('verbose')]
else:
verbose_level = default_level
if verbose_level is not None:
old_level = set_log_level(verbose_level, True)
# set it back if we get an exception
try:
ret = function(*args, **kwargs)
except Exception:
set_log_level(old_level)
raise
set_log_level(old_level)
return ret
else:
ret = function(*args, **kwargs)
return ret
def _new_pyglet():
import pyglet
return LooseVersion(pyglet.version) >= LooseVersion('1.4')
def _has_video(raise_error=False):
exceptions = list()
good = True
if _new_pyglet():
try:
from pyglet.media.codecs.ffmpeg import FFmpegSource # noqa
except ImportError:
exceptions.append(traceback.format_exc())
good = False
else:
if raise_error:
print('Found FFmpegSource for new Pyglet')
else:
try:
from pyglet.media.avbin import AVbinSource # noqa
except ImportError:
exceptions.append(traceback.format_exc())
try:
from pyglet.media.sources.avbin import AVbinSource # noqa
except ImportError:
exceptions.append(traceback.format_exc())
good = False
else:
if raise_error:
print('Found AVbinSource for old Pyglet 1')
else:
if raise_error:
print('Found AVbinSource for old Pyglet 2')
if raise_error and not good:
raise RuntimeError('Video support not enabled, got exception(s):\n'
'\n***********************\n'.join(exceptions))
return good
def requires_video():
"""Require FFmpeg/AVbin."""
import pytest
return pytest.mark.skipif(not _has_video(), reason='Requires FFmpeg/AVbin')
def requires_opengl21(func):
"""Require OpenGL."""
import pytest
import pyglet.gl
vendor = pyglet.gl.gl_info.get_vendor()
version = pyglet.gl.gl_info.get_version()
sufficient = pyglet.gl.gl_info.have_version(2, 0)
return pytest.mark.skipif(not sufficient,
reason='OpenGL too old: %s %s'
% (vendor, version,))(func)
def requires_lib(lib):
"""Requires lib decorator."""
import pytest
try:
importlib.import_module(lib)
except Exception as exp:
val = True
reason = 'Needs %s (%s)' % (lib, exp)
else:
val = False
reason = ''
return pytest.mark.skipif(val, reason=reason)
def _has_scipy_version(version):
return (LooseVersion(sp.__version__) >= LooseVersion(version))
def _get_user_home_path():
"""Return standard preferences path"""
# this has been checked on OSX64, Linux64, and Win32
val = os.getenv('APPDATA' if 'nt' == os.name.lower() else 'HOME', None)
if val is None:
raise ValueError('expyfun config file path could '
'not be determined, please report this '
'error to expyfun developers')
return val
def fetch_data_file(fname):
"""Fetch example remote file
Parameters
----------
fname : str
The remote filename to get. If the filename already exists
on the local system, the file will not be fetched again.
Returns
-------
fname : str
The filename on the local system where the file was downloaded.
"""
path = get_config('EXPYFUN_DATA_PATH', op.join(_get_user_home_path(),
'.expyfun', 'data'))
fname_out = op.join(path, fname)
if not op.isdir(op.dirname(fname_out)):
os.makedirs(op.dirname(fname_out))
fname_url = ('https://github.com/LABSN/expyfun-data/raw/master/{0}'
''.format(fname))
try:
# until we get proper certificates
context = ssl._create_unverified_context()
this_urlopen = partial(urlopen, context=context)
except AttributeError:
context = None
this_urlopen = urlopen
if not op.isfile(fname_out):
try:
with open(fname_out, 'wb') as fid:
www = this_urlopen(fname_url, timeout=30.0)
try:
fid.write(www.read())
finally:
www.close()
except Exception:
os.remove(fname_out)
raise
return fname_out
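# Illustrative usage (not part of expyfun; the remote path below is
# hypothetical): the file is cached under EXPYFUN_DATA_PATH (or
# <home>/.expyfun/data by default) and only downloaded on the first call.
#
#     >>> local = fetch_data_file('some/remote/file.wav')  # doctest: +SKIP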
def get_config_path():
r"""Get path to standard expyfun config file.
Returns
-------
config_path : str
The path to the expyfun configuration file. On windows, this
will be '%APPDATA%\.expyfun\expyfun.json'. On every other
system, this will be $HOME/.expyfun/expyfun.json.
"""
val = op.join(_get_user_home_path(), '.expyfun', 'expyfun.json')
return val
# List the known configuration values
known_config_types = ('RESPONSE_DEVICE',
'AUDIO_CONTROLLER',
'DB_OF_SINE_AT_1KHZ_1RMS',
'EXPYFUN_EYELINK',
'SOUND_CARD_API',
'SOUND_CARD_API_OPTIONS',
'SOUND_CARD_BACKEND',
'SOUND_CARD_FS',
'SOUND_CARD_NAME',
'SOUND_CARD_FIXED_DELAY',
'SOUND_CARD_TRIGGER_CHANNELS',
'SOUND_CARD_TRIGGER_INSERTION',
'SOUND_CARD_TRIGGER_SCALE',
'SOUND_CARD_TRIGGER_ID_AFTER_ONSET',
'SOUND_CARD_DRIFT_TRIGGER',
'TDT_CIRCUIT_PATH',
'TDT_DELAY',
'TDT_INTERFACE',
'TDT_MODEL',
'TDT_TRIG_DELAY',
'TRIGGER_CONTROLLER',
'TRIGGER_ADDRESS',
'WINDOW_SIZE',
'SCREEN_NUM',
'SCREEN_WIDTH',
'SCREEN_DISTANCE',
'SCREEN_SIZE_PIX',
'EXPYFUN_LOGGING_LEVEL',
)
# These allow for partial matches: 'NAME_1' is okay key if 'NAME' is listed
known_config_wildcards = ()
def get_config(key=None, default=None, raise_error=False):
"""Read expyfun preference from env, then expyfun config
Parameters
----------
key : str
The preference key to look for. The os environment is searched first,
then the expyfun config file is parsed.
default : str | None
Value to return if the key is not found.
raise_error : bool
If True, raise an error if the key is not found (instead of returning
default).
Returns
-------
value : str | None
The preference key value.
"""
if key is not None and not isinstance(key, string_types):
raise ValueError('key must be a string')
# first, check to see if key is in env
if key is not None and key in os.environ:
return os.environ[key]
# second, look for it in expyfun config file
config_path = get_config_path()
if not op.isfile(config_path):
key_found = False
val = default
else:
with open(config_path, 'r') as fid:
config = json.load(fid)
if key is None:
return config
key_found = True if key in config else False
val = config.get(key, default)
if not key_found and raise_error is True:
meth_1 = 'os.environ["%s"] = VALUE' % key
meth_2 = 'expyfun.utils.set_config("%s", VALUE)' % key
raise KeyError('Key "%s" not found in environment or in the '
'expyfun config file:\n%s\nTry either:\n'
' %s\nfor a temporary solution, or:\n'
' %s\nfor a permanent one. You can also '
'set the environment variable before '
'running python.'
% (key, config_path, meth_1, meth_2))
return val
def set_config(key, value):
"""Set expyfun preference in config
Parameters
----------
key : str | None
The preference key to set. If None, a tuple of the valid
keys is returned, and ``value`` is ignored.
value : str | None
The value to assign to the preference key. If None, the key is
deleted.
"""
if key is None:
return sorted(known_config_types)
if not isinstance(key, string_types):
raise ValueError('key must be a string')
# While JSON allow non-string types, we allow users to override config
# settings using env, which are strings, so we enforce that here
if not isinstance(value, string_types) and value is not None:
raise ValueError('value must be a string or None')
if key not in known_config_types and not \
any(k in key for k in known_config_wildcards):
warnings.warn('Setting non-standard config type: "%s"' % key)
# Read all previous values
config_path = get_config_path()
if op.isfile(config_path):
with open(config_path, 'r') as fid:
config = json.load(fid)
else:
config = dict()
logger.info('Attempting to create new expyfun configuration '
'file:\n%s' % config_path)
if value is None:
config.pop(key, None)
else:
config[key] = value
# Write all values
directory = op.split(config_path)[0]
if not op.isdir(directory):
os.mkdir(directory)
with open(config_path, 'w') as fid:
json.dump(config, fid, sort_keys=True, indent=0)
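# Illustrative round trip (not part of expyfun): values written with
# set_config() land in the JSON file at get_config_path() and are read back
# by get_config(); an environment variable of the same name always wins on
# read.
#
#     >>> set_config('EXPYFUN_LOGGING_LEVEL', 'DEBUG')  # doctest: +SKIP
#     >>> get_config('EXPYFUN_LOGGING_LEVEL')           # doctest: +SKIP
#     'DEBUG'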
###############################################################################
# MISC
def fake_button_press(ec, button='1', delay=0.):
"""Fake a button press after a delay
Notes
-----
This function only works with the keyboard controller (not TDT)!
It uses threads to ensure that control is passed back, so other commands
can be called (like wait_for_presses).
"""
def send():
ec._response_handler._on_pyglet_keypress(button, [], True)
Timer(delay, send).start() if delay > 0. else send()
def fake_mouse_click(ec, pos, button='left', delay=0.):
"""Fake a mouse click after a delay"""
button = dict(left=1, middle=2, right=4)[button] # trans to pyglet
def send():
ec._mouse_handler._on_pyglet_mouse_click(pos[0], pos[1], button, [])
Timer(delay, send).start() if delay > 0. else send()
def _check_pyglet_version(raise_error=False):
"""Check pyglet version, return True if usable.
"""
import pyglet
is_usable = LooseVersion(pyglet.version) >= LooseVersion('1.2')
if raise_error is True and is_usable is False:
raise ImportError('On Linux, you must run at least Pyglet '
'version 1.2, and you are running '
'{0}'.format(pyglet.version))
return is_usable
def _wait_secs(secs, ec=None):
"""Wait a specified number of seconds.
Parameters
----------
secs : float
Number of seconds to wait.
ec : None | expyfun.ExperimentController instance
The ExperimentController.
Notes
-----
This function uses a while loop. Although this slams the CPU, it will
guarantee that events (keypresses, etc.) are processed.
"""
# hog the cpu, checking time
t0 = clock()
if ec is not None:
while (clock() - t0) < secs:
ec._dispatch_events()
ec.check_force_quit()
time.sleep(0.0001)
else:
wins = _get_display().get_windows()
while (clock() - t0) < secs:
for win in wins:
win.dispatch_events()
time.sleep(0.0001)
def running_rms(signal, win_length):
"""RMS of ``signal`` with rectangular window ``win_length`` samples long.
Parameters
----------
signal : array_like
        The (1-dimensional) signal of interest.
win_length : int
Length (in samples) of the rectangular window
"""
assert signal.ndim == 1
assert win_length > 0
# The following is equivalent to:
# sqrt(convolve(signal ** 2, ones(win_length) / win_length, 'valid'))
# But an order of magnitude faster: 60 ms vs 7 ms for:
#
# x = np.random.RandomState(0).randn(1000001)
# %timeit expyfun._utils.running_rms(x, 441)
#
sig2 = signal * signal
c1 = np.cumsum(sig2)
out = c1[win_length - 1:].copy()
if len(out) == 0: # len(signal) < len(win_length)
out = np.array([np.sqrt(c1[-1] / signal.size)])
else:
out[1:] -= c1[:-win_length]
out /= win_length
np.sqrt(out, out=out)
return out
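# Illustrative cross-check (not part of expyfun): the naive formulation that
# the comment above refers to.  For a 1-D signal at least win_length samples
# long this agrees with running_rms up to floating-point rounding; the fast
# version above additionally handles signals shorter than the window.
def _running_rms_reference(signal, win_length):
    """Slow but obvious RMS with a rectangular window (comparison only)."""
    kernel = np.ones(win_length) / float(win_length)
    return np.sqrt(np.convolve(signal ** 2, kernel, 'valid'))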
def _fix_audio_dims(signal, n_channels):
"""Make it so a valid audio buffer is in the standard dimensions.
Parameters
----------
signal : array_like
The signal whose dimensions should be checked and fixed.
n_channels : int
The number of channels that the output should have.
If the input is mono and n_channels=2, it will be tiled to be
shape (2, n_samples). Otherwise, the number of channels in signal
must match n_channels.
Returns
-------
signal_fixed : array
The signal with standard dimensions (n_channels, N).
"""
# Check requested channel output
n_channels = int(operator.index(n_channels))
signal = np.asarray(np.atleast_2d(signal), dtype=np.float32)
# Check dimensionality
if signal.ndim != 2:
raise ValueError('Sound data must have one or two dimensions, got %s.'
% (signal.ndim,))
# Return data with correct dimensions
if n_channels == 2 and signal.shape[0] == 1:
signal = np.tile(signal, (n_channels, 1))
if signal.shape[0] != n_channels:
raise ValueError('signal channel count %d did not match required '
'channel count %d' % (signal.shape[0], n_channels))
return signal
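# Illustrative behavior (not part of expyfun): a mono vector is promoted to
# shape (1, n) and, when two channels are requested, tiled to (2, n); a
# channel-count mismatch raises ValueError.
#
#     >>> _fix_audio_dims(np.zeros(480), n_channels=2).shape  # doctest: +SKIP
#     (2, 480)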
def _sanitize(text_like):
"""Cast as string, encode as UTF-8 and sanitize any escape characters.
"""
return text_type(text_like).encode('unicode_escape').decode('utf-8')
def _sort_keys(x):
"""Sort and return keys of dict"""
keys = list(x.keys()) # note: not thread-safe
idx = np.argsort([str(k) for k in keys])
keys = [keys[ii] for ii in idx]
return keys
def object_diff(a, b, pre=''):
"""Compute all differences between two python variables
Parameters
----------
a : object
Currently supported: dict, list, tuple, ndarray, int, str, bytes,
float, StringIO, BytesIO.
b : object
Must be same type as ``a``.
pre : str
String to prepend to each line.
Returns
-------
diffs : str
A string representation of the differences.
Notes
-----
Taken from mne-python with permission.
"""
out = ''
if type(a) != type(b):
out += pre + ' type mismatch (%s, %s)\n' % (type(a), type(b))
elif isinstance(a, dict):
k1s = _sort_keys(a)
k2s = _sort_keys(b)
m1 = set(k2s) - set(k1s)
if len(m1):
out += pre + ' x1 missing keys %s\n' % (m1)
for key in k1s:
if key not in k2s:
out += pre + ' x2 missing key %s\n' % key
else:
out += object_diff(a[key], b[key], pre + 'd1[%s]' % repr(key))
elif isinstance(a, (list, tuple)):
if len(a) != len(b):
out += pre + ' length mismatch (%s, %s)\n' % (len(a), len(b))
else:
for xx1, xx2 in zip(a, b):
out += object_diff(xx1, xx2, pre='')
elif isinstance(a, (string_types, int, float, bytes)):
if a != b:
out += pre + ' value mismatch (%s, %s)\n' % (a, b)
elif a is None:
if b is not None:
out += pre + ' a is None, b is not (%s)\n' % (b)
elif isinstance(a, np.ndarray):
if not np.array_equal(a, b):
out += pre + ' array mismatch\n'
else:
raise RuntimeError(pre + ': unsupported type %s (%s)' % (type(a), a))
return out
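# Illustrative example (not part of expyfun): nested containers are compared
# recursively, so a single changed list element inside a dict surfaces as one
# "value mismatch" line.
#
#     >>> object_diff(dict(a=1, b=[1, 2]), dict(a=1, b=[1, 3]))  # doctest: +SKIP
#     ' value mismatch (2, 3)\n'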
def _check_skip_backend(backend):
from expyfun._sound_controllers import _import_backend
import pytest
if isinstance(backend, dict): # actually an AC
backend = backend['SOUND_CARD_BACKEND']
try:
_import_backend(backend)
except Exception as exc:
pytest.skip('Skipping test for backend %s: %s' % (backend, exc))
def _check_params(params, keys, defaults, name):
if not isinstance(params, dict):
raise TypeError('{0} must be a dict, got type {1}'
.format(name, type(params)))
params = deepcopy(params)
if not isinstance(params, dict):
raise TypeError('{0} must be a dict, got {1}'
.format(name, type(params)))
# Set sensible defaults for values that are not passed
for k in keys:
params[k] = params.get(k, get_config(k, defaults.get(k, None)))
# Check keys
for k in params.keys():
if k not in keys:
raise KeyError('Unrecognized key in {0}["{1}"], must be '
'one of {2}'.format(name, k, ', '.join(keys)))
return params
def _get_display():
import pyglet
try:
display = pyglet.canvas.get_display()
except AttributeError: # < 1.4
display = pyglet.window.get_platform().get_default_display()
return display
| drammock/expyfun | expyfun/_utils.py | Python | bsd-3-clause | 30,243 | 0 |
# -*- coding: utf-8 -*-
import os
import platform
import subprocess
import unittest
import pytest
import six
from conans.client import tools
from conans.client.conf import get_default_settings_yml
from conans.client.tools.files import which
from conans.client.tools.win import vswhere
from conans.errors import ConanException
from conans.model.settings import Settings
from conans.test.utils.mocks import TestBufferConanOutput
from conans.test.utils.test_files import temp_folder
from conans.util.env_reader import get_env
from conans.util.files import save
from conans.util.runners import check_output_runner
class FunctionalToolsTest(unittest.TestCase):
output = TestBufferConanOutput()
@pytest.mark.tool_file # Needs the "file" command, not by default in linux
@pytest.mark.skipif(which("file") is None,
reason="Needs the 'file' command, not by default in linux")
def test_unix_to_dos_unit(self):
def save_file(contents):
tmp = temp_folder()
filepath = os.path.join(tmp, "a_file.txt")
save(filepath, contents)
return filepath
fp = save_file(b"a line\notherline\n")
if platform.system() != "Windows":
output = check_output_runner(["file", fp], stderr=subprocess.STDOUT)
self.assertIn("ASCII text", str(output))
self.assertNotIn("CRLF", str(output))
tools.unix2dos(fp)
output = check_output_runner(["file", fp], stderr=subprocess.STDOUT)
self.assertIn("ASCII text", str(output))
self.assertIn("CRLF", str(output))
else:
fc = tools.load(fp)
self.assertNotIn("\r\n", fc)
tools.unix2dos(fp)
fc = tools.load(fp)
self.assertIn("\r\n", fc)
self.assertEqual("a line\r\notherline\r\n", str(tools.load(fp)))
fp = save_file(b"a line\r\notherline\r\n")
if platform.system() != "Windows":
output = check_output_runner(["file", fp], stderr=subprocess.STDOUT)
self.assertIn("ASCII text", str(output))
self.assertIn("CRLF", str(output))
tools.dos2unix(fp)
output = check_output_runner(["file", fp], stderr=subprocess.STDOUT)
self.assertIn("ASCII text", str(output))
self.assertNotIn("CRLF", str(output))
else:
fc = tools.load(fp)
self.assertIn("\r\n", fc)
tools.dos2unix(fp)
fc = tools.load(fp)
self.assertNotIn("\r\n", fc)
self.assertEqual("a line\notherline\n", str(tools.load(fp)))
@pytest.mark.skipif(platform.system() != "Windows", reason="Requires Visual Studio")
@pytest.mark.tool_visual_studio
class VisualStudioToolsTest(unittest.TestCase):
output = TestBufferConanOutput()
@pytest.mark.skipif(six.PY2, reason="Does not pass on Py2 with Pytest")
def test_msvc_build_command(self):
settings = Settings.loads(get_default_settings_yml())
settings.os = "Windows"
settings.compiler = "Visual Studio"
settings.compiler.version = "14"
# test build_type and arch override, for multi-config packages
cmd = tools.msvc_build_command(settings, "project.sln", build_type="Debug",
arch="x86", output=self.output)
self.assertIn('msbuild "project.sln" /p:Configuration="Debug" '
'/p:UseEnv=false /p:Platform="x86"', cmd)
self.assertIn('vcvarsall.bat', cmd)
# tests errors if args not defined
with six.assertRaisesRegex(self, ConanException, "Cannot build_sln_command"):
tools.msvc_build_command(settings, "project.sln", output=self.output)
settings.arch = "x86"
with six.assertRaisesRegex(self, ConanException, "Cannot build_sln_command"):
tools.msvc_build_command(settings, "project.sln", output=self.output)
# successful definition via settings
settings.build_type = "Debug"
cmd = tools.msvc_build_command(settings, "project.sln", output=self.output)
self.assertIn('msbuild "project.sln" /p:Configuration="Debug" '
'/p:UseEnv=false /p:Platform="x86"', cmd)
self.assertIn('vcvarsall.bat', cmd)
def test_vswhere_path(self):
"""
Locate vswhere in PATH or in ProgramFiles
"""
# vswhere not found
with tools.environment_append({"ProgramFiles": None, "ProgramFiles(x86)": None, "PATH": ""}):
with six.assertRaisesRegex(self, ConanException, "Cannot locate vswhere"):
vswhere()
# vswhere in ProgramFiles but not in PATH
program_files = get_env("ProgramFiles(x86)") or get_env("ProgramFiles")
vswhere_path = None
if program_files:
expected_path = os.path.join(program_files, "Microsoft Visual Studio", "Installer",
"vswhere.exe")
if os.path.isfile(expected_path):
vswhere_path = expected_path
with tools.environment_append({"PATH": ""}):
self.assertTrue(vswhere())
# vswhere in PATH but not in ProgramFiles
env = {"ProgramFiles": None, "ProgramFiles(x86)": None}
if not which("vswhere") and vswhere_path:
vswhere_folder = os.path.join(program_files, "Microsoft Visual Studio", "Installer")
env.update({"PATH": [vswhere_folder]})
with tools.environment_append(env):
self.assertTrue(vswhere())
| conan-io/conan | conans/test/functional/util/tools_test.py | Python | mit | 5,578 | 0.002868 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""SQLAlchemy models for heat data."""
import uuid
from oslo_db.sqlalchemy import models
from oslo_utils import timeutils
import six
import sqlalchemy
from sqlalchemy.ext import declarative
from sqlalchemy.orm import backref
from sqlalchemy.orm import relationship
from sqlalchemy.orm import session as orm_session
from heat.db.sqlalchemy import types
BASE = declarative.declarative_base()
def get_session():
from heat.db.sqlalchemy import api as db_api
return db_api.get_session()
class HeatBase(models.ModelBase, models.TimestampMixin):
"""Base class for Heat Models."""
__table_args__ = {'mysql_engine': 'InnoDB'}
def expire(self, session=None, attrs=None):
"""Expire this object ()."""
if not session:
session = orm_session.Session.object_session(self)
if not session:
session = get_session()
session.expire(self, attrs)
def refresh(self, session=None, attrs=None):
"""Refresh this object."""
if not session:
session = orm_session.Session.object_session(self)
if not session:
session = get_session()
session.refresh(self, attrs)
def delete(self, session=None):
"""Delete this object."""
if not session:
session = orm_session.Session.object_session(self)
if not session:
session = get_session()
session.begin(subtransactions=True)
session.delete(self)
session.commit()
def update_and_save(self, values, session=None):
if not session:
session = orm_session.Session.object_session(self)
if not session:
session = get_session()
session.begin(subtransactions=True)
for k, v in six.iteritems(values):
setattr(self, k, v)
session.commit()
class SoftDelete(object):
deleted_at = sqlalchemy.Column(sqlalchemy.DateTime)
def soft_delete(self, session=None):
"""Mark this object as deleted."""
self.update_and_save({'deleted_at': timeutils.utcnow()},
session=session)
class StateAware(object):
action = sqlalchemy.Column('action', sqlalchemy.String(255))
status = sqlalchemy.Column('status', sqlalchemy.String(255))
status_reason = sqlalchemy.Column('status_reason', sqlalchemy.Text)
class RawTemplate(BASE, HeatBase):
"""Represents an unparsed template which should be in JSON format."""
__tablename__ = 'raw_template'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
template = sqlalchemy.Column(types.Json)
files = sqlalchemy.Column(types.Json)
environment = sqlalchemy.Column('environment', types.Json)
class StackTag(BASE, HeatBase):
"""Key/value store of arbitrary stack tags."""
__tablename__ = 'stack_tag'
id = sqlalchemy.Column('id',
sqlalchemy.Integer,
primary_key=True,
nullable=False)
tag = sqlalchemy.Column('tag', sqlalchemy.Unicode(80))
stack_id = sqlalchemy.Column('stack_id',
sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
class SyncPoint(BASE, HeatBase):
"""Represents a syncpoint for a stack that is being worked on."""
__tablename__ = 'sync_point'
__table_args__ = (
sqlalchemy.PrimaryKeyConstraint('entity_id',
'traversal_id',
'is_update'),
sqlalchemy.ForeignKeyConstraint(['stack_id'], ['stack.id'])
)
entity_id = sqlalchemy.Column(sqlalchemy.String(36))
traversal_id = sqlalchemy.Column(sqlalchemy.String(36))
is_update = sqlalchemy.Column(sqlalchemy.Boolean)
# integer field for atomic update operations
atomic_key = sqlalchemy.Column(sqlalchemy.Integer, nullable=False)
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
nullable=False)
input_data = sqlalchemy.Column(types.Json)
class Stack(BASE, HeatBase, SoftDelete, StateAware):
"""Represents a stack created by the heat engine."""
__tablename__ = 'stack'
__table_args__ = (
sqlalchemy.Index('ix_stack_name', 'name', mysql_length=255),
sqlalchemy.Index('ix_stack_tenant', 'tenant', mysql_length=255),
)
id = sqlalchemy.Column(sqlalchemy.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
name = sqlalchemy.Column(sqlalchemy.String(255))
raw_template_id = sqlalchemy.Column(
sqlalchemy.Integer,
sqlalchemy.ForeignKey('raw_template.id'),
nullable=False)
raw_template = relationship(RawTemplate, backref=backref('stack'),
foreign_keys=[raw_template_id])
prev_raw_template_id = sqlalchemy.Column(
'prev_raw_template_id',
sqlalchemy.Integer,
sqlalchemy.ForeignKey('raw_template.id'))
prev_raw_template = relationship(RawTemplate,
foreign_keys=[prev_raw_template_id])
username = sqlalchemy.Column(sqlalchemy.String(256))
tenant = sqlalchemy.Column(sqlalchemy.String(256))
user_creds_id = sqlalchemy.Column(
sqlalchemy.Integer,
sqlalchemy.ForeignKey('user_creds.id'))
owner_id = sqlalchemy.Column(sqlalchemy.String(36), index=True)
parent_resource_name = sqlalchemy.Column(sqlalchemy.String(255))
timeout = sqlalchemy.Column(sqlalchemy.Integer)
disable_rollback = sqlalchemy.Column(sqlalchemy.Boolean, nullable=False)
stack_user_project_id = sqlalchemy.Column(sqlalchemy.String(64))
backup = sqlalchemy.Column('backup', sqlalchemy.Boolean)
nested_depth = sqlalchemy.Column('nested_depth', sqlalchemy.Integer)
convergence = sqlalchemy.Column('convergence', sqlalchemy.Boolean)
tags = relationship(StackTag, cascade="all,delete",
backref=backref('stack'))
current_traversal = sqlalchemy.Column('current_traversal',
sqlalchemy.String(36))
current_deps = sqlalchemy.Column('current_deps', types.Json)
# Override timestamp column to store the correct value: it should be the
# time the create/update call was issued, not the time the DB entry is
# created/modified. (bug #1193269)
updated_at = sqlalchemy.Column(sqlalchemy.DateTime)
class StackLock(BASE, HeatBase):
"""Store stack locks for deployments with multiple-engines."""
__tablename__ = 'stack_lock'
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
primary_key=True)
engine_id = sqlalchemy.Column(sqlalchemy.String(36))
class UserCreds(BASE, HeatBase):
"""Represents user credentials.
Also, mirrors the 'context' handed in by wsgi.
"""
__tablename__ = 'user_creds'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
username = sqlalchemy.Column(sqlalchemy.String(255))
password = sqlalchemy.Column(sqlalchemy.String(255))
region_name = sqlalchemy.Column(sqlalchemy.String(255))
decrypt_method = sqlalchemy.Column(sqlalchemy.String(64))
tenant = sqlalchemy.Column(sqlalchemy.String(1024))
auth_url = sqlalchemy.Column(sqlalchemy.Text)
tenant_id = sqlalchemy.Column(sqlalchemy.String(256))
trust_id = sqlalchemy.Column(sqlalchemy.String(255))
trustor_user_id = sqlalchemy.Column(sqlalchemy.String(64))
stack = relationship(Stack, backref=backref('user_creds'),
cascade_backrefs=False)
class Event(BASE, HeatBase):
"""Represents an event generated by the heat engine."""
__tablename__ = 'event'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
stack = relationship(Stack, backref=backref('events'))
uuid = sqlalchemy.Column(sqlalchemy.String(36),
default=lambda: str(uuid.uuid4()),
unique=True)
resource_action = sqlalchemy.Column(sqlalchemy.String(255))
resource_status = sqlalchemy.Column(sqlalchemy.String(255))
resource_name = sqlalchemy.Column(sqlalchemy.String(255))
physical_resource_id = sqlalchemy.Column(sqlalchemy.String(255))
_resource_status_reason = sqlalchemy.Column(
'resource_status_reason', sqlalchemy.String(255))
resource_type = sqlalchemy.Column(sqlalchemy.String(255))
resource_properties = sqlalchemy.Column(sqlalchemy.PickleType)
@property
def resource_status_reason(self):
return self._resource_status_reason
    @resource_status_reason.setter
    def resource_status_reason(self, reason):
        # Truncate to fit the String(255) column; a falsy reason becomes ''.
        self._resource_status_reason = reason and reason[:255] or ''
class ResourceData(BASE, HeatBase):
"""Key/value store of arbitrary, resource-specific data."""
__tablename__ = 'resource_data'
id = sqlalchemy.Column('id',
sqlalchemy.Integer,
primary_key=True,
nullable=False)
key = sqlalchemy.Column('key', sqlalchemy.String(255))
value = sqlalchemy.Column('value', sqlalchemy.Text)
redact = sqlalchemy.Column('redact', sqlalchemy.Boolean)
decrypt_method = sqlalchemy.Column(sqlalchemy.String(64))
resource_id = sqlalchemy.Column('resource_id',
sqlalchemy.Integer,
sqlalchemy.ForeignKey('resource.id'),
nullable=False)
class Resource(BASE, HeatBase, StateAware):
"""Represents a resource created by the heat engine."""
__tablename__ = 'resource'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
uuid = sqlalchemy.Column(sqlalchemy.String(36),
default=lambda: str(uuid.uuid4()),
unique=True)
name = sqlalchemy.Column('name', sqlalchemy.String(255))
physical_resource_id = sqlalchemy.Column('nova_instance',
sqlalchemy.String(255))
# odd name as "metadata" is reserved
rsrc_metadata = sqlalchemy.Column('rsrc_metadata', types.Json)
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
stack = relationship(Stack, backref=backref('resources'))
root_stack_id = sqlalchemy.Column(sqlalchemy.String(36), index=True)
data = relationship(ResourceData,
cascade="all,delete",
backref=backref('resource'))
# Override timestamp column to store the correct value: it should be the
# time the create/update call was issued, not the time the DB entry is
# created/modified. (bug #1193269)
updated_at = sqlalchemy.Column(sqlalchemy.DateTime)
properties_data = sqlalchemy.Column('properties_data', types.Json)
properties_data_encrypted = sqlalchemy.Column('properties_data_encrypted',
sqlalchemy.Boolean)
engine_id = sqlalchemy.Column(sqlalchemy.String(36))
atomic_key = sqlalchemy.Column(sqlalchemy.Integer)
needed_by = sqlalchemy.Column('needed_by', types.List)
requires = sqlalchemy.Column('requires', types.List)
replaces = sqlalchemy.Column('replaces', sqlalchemy.Integer,
default=None)
replaced_by = sqlalchemy.Column('replaced_by', sqlalchemy.Integer,
default=None)
current_template_id = sqlalchemy.Column(
'current_template_id',
sqlalchemy.Integer,
sqlalchemy.ForeignKey('raw_template.id'))
class WatchRule(BASE, HeatBase):
"""Represents a watch_rule created by the heat engine."""
__tablename__ = 'watch_rule'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
name = sqlalchemy.Column('name', sqlalchemy.String(255))
rule = sqlalchemy.Column('rule', types.Json)
state = sqlalchemy.Column('state', sqlalchemy.String(255))
last_evaluated = sqlalchemy.Column(sqlalchemy.DateTime,
default=timeutils.utcnow)
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
stack = relationship(Stack, backref=backref('watch_rule'))
class WatchData(BASE, HeatBase):
"""Represents a watch_data created by the heat engine."""
__tablename__ = 'watch_data'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
data = sqlalchemy.Column('data', types.Json)
watch_rule_id = sqlalchemy.Column(
sqlalchemy.Integer,
sqlalchemy.ForeignKey('watch_rule.id'),
nullable=False)
watch_rule = relationship(WatchRule, backref=backref('watch_data'))
class SoftwareConfig(BASE, HeatBase):
"""Represents a software configuration resource.
Represents a software configuration resource to be applied to one or more
servers.
"""
__tablename__ = 'software_config'
id = sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
name = sqlalchemy.Column('name', sqlalchemy.String(255))
group = sqlalchemy.Column('group', sqlalchemy.String(255))
config = sqlalchemy.Column('config', types.Json)
tenant = sqlalchemy.Column(
'tenant', sqlalchemy.String(64), nullable=False, index=True)
class SoftwareDeployment(BASE, HeatBase, StateAware):
"""Represents a software deployment resource.
Represents applying a software configuration resource to a single server
resource.
"""
__tablename__ = 'software_deployment'
__table_args__ = (
sqlalchemy.Index('ix_software_deployment_created_at', 'created_at'),)
id = sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
config_id = sqlalchemy.Column(
'config_id',
sqlalchemy.String(36),
sqlalchemy.ForeignKey('software_config.id'),
nullable=False)
config = relationship(SoftwareConfig, backref=backref('deployments'))
server_id = sqlalchemy.Column('server_id', sqlalchemy.String(36),
nullable=False, index=True)
input_values = sqlalchemy.Column('input_values', types.Json)
output_values = sqlalchemy.Column('output_values', types.Json)
tenant = sqlalchemy.Column(
'tenant', sqlalchemy.String(64), nullable=False, index=True)
stack_user_project_id = sqlalchemy.Column(sqlalchemy.String(64))
updated_at = sqlalchemy.Column(sqlalchemy.DateTime)
class Snapshot(BASE, HeatBase):
__tablename__ = 'snapshot'
id = sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
name = sqlalchemy.Column('name', sqlalchemy.String(255))
data = sqlalchemy.Column('data', types.Json)
tenant = sqlalchemy.Column(
'tenant', sqlalchemy.String(64), nullable=False, index=True)
status = sqlalchemy.Column('status', sqlalchemy.String(255))
status_reason = sqlalchemy.Column('status_reason', sqlalchemy.String(255))
stack = relationship(Stack, backref=backref('snapshot'))
class Service(BASE, HeatBase, SoftDelete):
__tablename__ = 'service'
id = sqlalchemy.Column('id',
sqlalchemy.String(36),
primary_key=True,
default=lambda: str(uuid.uuid4()))
engine_id = sqlalchemy.Column('engine_id',
sqlalchemy.String(36),
nullable=False)
host = sqlalchemy.Column('host',
sqlalchemy.String(255),
nullable=False)
hostname = sqlalchemy.Column('hostname',
sqlalchemy.String(255),
nullable=False)
binary = sqlalchemy.Column('binary',
sqlalchemy.String(255),
nullable=False)
topic = sqlalchemy.Column('topic',
sqlalchemy.String(255),
nullable=False)
report_interval = sqlalchemy.Column('report_interval',
sqlalchemy.Integer,
nullable=False)
| steveb/heat | heat/db/sqlalchemy/models.py | Python | apache-2.0 | 17,625 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-08 15:45
from __future__ import unicode_literals
from django.db import migrations
import share.robot
class Migration(migrations.Migration):
dependencies = [
('share', '0001_initial'),
('djcelery', '0001_initial'),
]
operations = [
migrations.RunPython(
code=share.robot.RobotUserMigration('org.sldr'),
),
migrations.RunPython(
code=share.robot.RobotOauthTokenMigration('org.sldr'),
),
migrations.RunPython(
code=share.robot.RobotScheduleMigration('org.sldr'),
),
]
| zamattiac/SHARE | providers/org/sldr/migrations/0001_initial.py | Python | apache-2.0 | 649 | 0 |
from keras.layers.core import Dropout, Flatten
from keras.layers.convolutional import MaxPooling2D, Conv2D
from keras.models import Model
from keras.layers import Input, Dense
FRAME_H = 70
FRAME_W = 180
def build_model():
inp = Input(shape=(FRAME_H, FRAME_W, 3))
x = Conv2D(filters=8, kernel_size=(5, 5), activation='relu')(inp)
x = MaxPooling2D((2, 2))(x)
x = Conv2D(filters=16, kernel_size=(5, 5), activation='relu')(x)
x = MaxPooling2D((2, 2))(x)
x = Conv2D(filters=32, kernel_size=(5, 5), activation='relu')(x)
x = MaxPooling2D((2, 2))(x)
x = Flatten()(x)
x = Dropout(0.5)(x)
x = Dense(128, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(1, activation='tanh')(x)
return Model(inputs=[inp], outputs=[x])
if __name__ == '__main__':
model = build_model()
model.summary()
| raghakot/keras-vis | applications/self_driving/model.py | Python | mit | 845 | 0 |
from __future__ import print_function, division
from sympy.core import Basic
from sympy.core.compatibility import xrange
import random
class GrayCode(Basic):
"""
A Gray code is essentially a Hamiltonian walk on
a n-dimensional cube with edge length of one.
The vertices of the cube are represented by vectors
whose values are binary. The Hamilton walk visits
each vertex exactly once. The Gray code for a 3d
cube is ['000','100','110','010','011','111','101',
'001'].
A Gray code solves the problem of sequentially
generating all possible subsets of n objects in such
a way that each subset is obtained from the previous
one by either deleting or adding a single object.
In the above example, 1 indicates that the object is
    present, and 0 indicates that it is absent.
Gray codes have applications in statistics as well when
we want to compute various statistics related to subsets
in an efficient manner.
References:
[1] Nijenhuis,A. and Wilf,H.S.(1978).
Combinatorial Algorithms. Academic Press.
[2] Knuth, D. (2011). The Art of Computer Programming, Vol 4
Addison Wesley
Examples
========
>>> from sympy.combinatorics.graycode import GrayCode
>>> a = GrayCode(3)
>>> list(a.generate_gray())
['000', '001', '011', '010', '110', '111', '101', '100']
>>> a = GrayCode(4)
>>> list(a.generate_gray())
['0000', '0001', '0011', '0010', '0110', '0111', '0101', '0100', \
'1100', '1101', '1111', '1110', '1010', '1011', '1001', '1000']
"""
_skip = False
_current = 0
_rank = None
def __new__(cls, n, *args, **kw_args):
"""
Default constructor.
It takes a single argument ``n`` which gives the dimension of the Gray
code. The starting Gray code string (``start``) or the starting ``rank``
may also be given; the default is to start at rank = 0 ('0...0').
Examples
========
>>> from sympy.combinatorics.graycode import GrayCode
>>> a = GrayCode(3)
>>> a
GrayCode(3)
>>> a.n
3
>>> a = GrayCode(3, start='100')
>>> a.current
'100'
>>> a = GrayCode(4, rank=4)
>>> a.current
'0110'
>>> a.rank
4
"""
if n < 1 or int(n) != n:
raise ValueError(
'Gray code dimension must be a positive integer, not %i' % n)
n = int(n)
args = (n,) + args
obj = Basic.__new__(cls, *args)
if 'start' in kw_args:
obj._current = kw_args["start"]
if len(obj._current) > n:
raise ValueError('Gray code start has length %i but '
'should not be greater than %i' % (len(obj._current), n))
elif 'rank' in kw_args:
if int(kw_args["rank"]) != kw_args["rank"]:
raise ValueError('Gray code rank must be a positive integer, '
'not %i' % kw_args["rank"])
obj._rank = int(kw_args["rank"]) % obj.selections
obj._current = obj.unrank(n, obj._rank)
return obj
def next(self, delta=1):
"""
Returns the Gray code a distance ``delta`` (default = 1) from the
current value in canonical order.
Examples
========
>>> from sympy.combinatorics.graycode import GrayCode
>>> a = GrayCode(3, start='110')
>>> a.next().current
'111'
>>> a.next(-1).current
'010'
"""
return GrayCode(self.n, rank=(self.rank + delta) % self.selections)
@property
def selections(self):
"""
Returns the number of bit vectors in the Gray code.
Examples
========
>>> from sympy.combinatorics.graycode import GrayCode
>>> a = GrayCode(3)
>>> a.selections
8
"""
return 2**self.n
@property
def n(self):
"""
Returns the dimension of the Gray code.
Examples
========
>>> from sympy.combinatorics.graycode import GrayCode
>>> a = GrayCode(5)
>>> a.n
5
"""
return self.args[0]
def generate_gray(self, **hints):
"""
Generates the sequence of bit vectors of a Gray Code.
[1] Knuth, D. (2011). The Art of Computer Programming,
Vol 4, Addison Wesley
Examples
========
>>> from sympy.combinatorics.graycode import GrayCode
>>> a = GrayCode(3)
>>> list(a.generate_gray())
['000', '001', '011', '010', '110', '111', '101', '100']
>>> list(a.generate_gray(start='011'))
['011', '010', '110', '111', '101', '100']
>>> list(a.generate_gray(rank=4))
['110', '111', '101', '100']
See Also
========
skip
"""
bits = self.n
start = None
if "start" in hints:
start = hints["start"]
elif "rank" in hints:
start = GrayCode.unrank(self.n, hints["rank"])
if start is not None:
self._current = start
current = self.current
graycode_bin = gray_to_bin(current)
if len(graycode_bin) > self.n:
raise ValueError('Gray code start has length %i but should '
'not be greater than %i' % (len(graycode_bin), bits))
self._current = int(current, 2)
graycode_int = int(''.join(graycode_bin), 2)
for i in xrange(graycode_int, 1 << bits):
if self._skip:
self._skip = False
else:
yield self.current
bbtc = (i ^ (i + 1))
gbtc = (bbtc ^ (bbtc >> 1))
self._current = (self._current ^ gbtc)
self._current = 0
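    # Added commentary on the loop above: ``i ^ (i + 1)`` is a mask of the bits
    # that change when a binary counter steps from i to i + 1, and
    # ``bbtc ^ (bbtc >> 1)`` converts that mask to its Gray-code image, which is
    # always a single set bit -- so each iteration flips exactly one bit of
    # ``self._current``, as a Gray code walk requires.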
def skip(self):
"""
Skips the bit generation.
Examples
========
>>> from sympy.combinatorics.graycode import GrayCode
>>> a = GrayCode(3)
>>> for i in a.generate_gray():
... if i == '010':
... a.skip()
... print(i)
...
000
001
011
010
111
101
100
See Also
========
generate_gray
"""
self._skip = True
@property
def rank(self):
"""
Ranks the Gray code.
A ranking algorithm determines the position (or rank)
of a combinatorial object among all the objects w.r.t.
a given order. For example, the 4 bit binary reflected
Gray code (BRGC) '0101' has a rank of 6 as it appears in
the 6th position in the canonical ordering of the family
of 4 bit Gray codes.
References:
[1] http://www-stat.stanford.edu/~susan/courses/s208/node12.html
Examples
========
>>> from sympy.combinatorics.graycode import GrayCode
>>> a = GrayCode(3)
>>> list(a.generate_gray())
['000', '001', '011', '010', '110', '111', '101', '100']
>>> GrayCode(3, start='100').rank
7
>>> GrayCode(3, rank=7).current
'100'
See Also
========
unrank
"""
if self._rank is None:
self._rank = int(gray_to_bin(self.current), 2)
return self._rank
@property
def current(self):
"""
Returns the currently referenced Gray code as a bit string.
Examples
========
>>> from sympy.combinatorics.graycode import GrayCode
>>> GrayCode(3, start='100').current
'100'
"""
rv = self._current or '0'
if type(rv) is not str:
rv = bin(rv)[2:]
return rv.rjust(self.n, '0')
@classmethod
def unrank(self, n, rank):
"""
Unranks an n-bit sized Gray code of rank k. This method exists
so that a derivative GrayCode class can define its own code of
a given rank.
The string here is generated in reverse order to allow for tail-call
optimization.
Examples
========
>>> from sympy.combinatorics.graycode import GrayCode
>>> GrayCode(5, rank=3).current
'00010'
>>> GrayCode.unrank(5, 3)
'00010'
See Also
========
rank
"""
def _unrank(k, n):
if n == 1:
return str(k % 2)
m = 2**(n - 1)
if k < m:
return '0' + _unrank(k, n - 1)
return '1' + _unrank(m - (k % m) - 1, n - 1)
return _unrank(rank, n)
def random_bitstring(n):
"""
Generates a random bitlist of length n.
Examples
========
>>> from sympy.combinatorics.graycode import random_bitstring
>>> random_bitstring(3) # doctest: +SKIP
100
"""
return ''.join([random.choice('01') for i in xrange(n)])
def gray_to_bin(bin_list):
"""
Convert from Gray coding to binary coding.
We assume big endian encoding.
Examples
========
>>> from sympy.combinatorics.graycode import gray_to_bin
>>> gray_to_bin('100')
'111'
See Also
========
bin_to_gray
"""
b = [bin_list[0]]
for i in xrange(1, len(bin_list)):
b += str(int(b[i - 1] != bin_list[i]))
return ''.join(b)
def bin_to_gray(bin_list):
"""
Convert from binary coding to gray coding.
We assume big endian encoding.
Examples
========
>>> from sympy.combinatorics.graycode import bin_to_gray
>>> bin_to_gray('111')
'100'
See Also
========
gray_to_bin
"""
b = [bin_list[0]]
    # gray[i] = bin[i] xor bin[i - 1]
    for i in xrange(1, len(bin_list)):
        b += str(int(bin_list[i]) ^ int(bin_list[i - 1]))
return ''.join(b)
def get_subset_from_bitstring(super_set, bitstring):
"""
Gets the subset defined by the bitstring.
Examples
========
>>> from sympy.combinatorics.graycode import get_subset_from_bitstring
>>> get_subset_from_bitstring(['a','b','c','d'], '0011')
['c', 'd']
>>> get_subset_from_bitstring(['c','a','c','c'], '1100')
['c', 'a']
See Also
========
graycode_subsets
"""
if len(super_set) != len(bitstring):
raise ValueError("The sizes of the lists are not equal")
return [super_set[i] for i, j in enumerate(bitstring)
if bitstring[i] == '1']
def graycode_subsets(gray_code_set):
"""
Generates the subsets as enumerated by a Gray code.
Examples
========
>>> from sympy.combinatorics.graycode import graycode_subsets
>>> list(graycode_subsets(['a','b','c']))
[[], ['c'], ['b', 'c'], ['b'], ['a', 'b'], ['a', 'b', 'c'], \
['a', 'c'], ['a']]
>>> list(graycode_subsets(['a','b','c','c']))
[[], ['c'], ['c', 'c'], ['c'], ['b', 'c'], ['b', 'c', 'c'], \
['b', 'c'], ['b'], ['a', 'b'], ['a', 'b', 'c'], ['a', 'b', 'c', 'c'], \
['a', 'b', 'c'], ['a', 'c'], ['a', 'c', 'c'], ['a', 'c'], ['a']]
See Also
========
get_subset_from_bitstring
"""
for bitstring in list(GrayCode(len(gray_code_set)).generate_gray()):
yield get_subset_from_bitstring(gray_code_set, bitstring)
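# Added, self-contained sanity check (not part of the upstream module): it
# exercises the two properties described in the docstrings above -- adjacent
# code words differ in exactly one bit, and rank/unrank are mutual inverses.
if __name__ == "__main__":
    codes = list(GrayCode(4).generate_gray())
    for a, b in zip(codes, codes[1:]):
        # Each adjacent pair of code words differs in exactly one position.
        assert sum(x != y for x, y in zip(a, b)) == 1
    for k, code in enumerate(codes):
        # unrank reproduces the k-th word; rank recovers k from that word.
        assert GrayCode.unrank(4, k) == code
        assert GrayCode(4, start=code).rank == k
    print("Gray code sanity checks passed")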
|
hrashk/sympy
|
sympy/combinatorics/graycode.py
|
Python
|
bsd-3-clause
| 11,202 | 0.000357 |
# -*- coding: utf-8 -*-
# This file is part of Mumoro.
#
# Mumoro is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mumoro is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mumoro. If not, see <http://www.gnu.org/licenses/>.
#
# © Université de Toulouse 1 2010
# Author: Tristram Gräbener, Odysseas Gabrielides
from lib.core import mumoro
from lib.core.mumoro import Bike, Car, Foot, PublicTransport, cost, co2, dist, elevation, line_change, mode_change, Costs
from lib import layer
from lib import bikestations as bikestations
from web import shorturl
from sqlalchemy import *
from sqlalchemy.orm import mapper, sessionmaker, clear_mappers
import cherrypy
import sys
import simplejson as json
import os
import time
import urllib
import httplib
import hashlib
import datetime
from cherrypy import request
from genshi.template import TemplateLoader
loader = TemplateLoader(
os.path.join(os.path.dirname(__file__), 'web/templates'),
auto_reload=True
)
layer_array = []
bike_stations_array = []
same_nodes_connection_array = []
nearest_nodes_connection_array = []
nodes_list_connection_array = []
paths_array = []
def md5_of_file(filename):
block_size=2**20
md5 = hashlib.md5()
while True:
data = filename.read(block_size)
if not data:
break
md5.update(data)
filename.close()
return md5.hexdigest()
def is_color_valid( color ):
if len( color ) == 7:
if color[0] == '#':
try:
r = int( color[1:3], 16)
if r <= 255 and r >= 0:
try:
g = int( color[3:5], 16)
if g <= 255 and g >= 0:
try:
b = int( color[5:7], 16)
if b <= 255 and b >= 0:
return True
except ValueError:
return False
except ValueError:
return False
except ValueError:
return False
return False
#Loads an osm (compressed or not) file and inserts data into the database
def import_street_data( filename ):
engine = create_engine( db_type + ":///" + db_params )
metadata = MetaData(bind = engine)
mumoro_metadata = Table('metadata', metadata, autoload = True)
s = mumoro_metadata.select((mumoro_metadata.c.origin == filename) & (mumoro_metadata.c.node_or_edge == 'Nodes'))
rs = s.execute()
nd = 0
for row in rs:
nd = row[0]
s = mumoro_metadata.select((mumoro_metadata.c.origin == filename) & (mumoro_metadata.c.node_or_edge == 'Edges'))
rs = s.execute()
ed = 0
for row in rs:
ed = row[0]
return {'nodes': str(nd), 'edges' : str(ed)}
# Loads the tables corresponding to the public transport layer
def import_gtfs_data( filename, network_name = "Public Transport"):
engine = create_engine(db_type + ":///" + db_params)
metadata = MetaData(bind = engine)
mumoro_metadata = Table('metadata', metadata, autoload = True)
nd = mumoro_metadata.select((mumoro_metadata.c.origin == filename) & (mumoro_metadata.c.node_or_edge == 'Nodes')).execute().first()[0]
ed = mumoro_metadata.select((mumoro_metadata.c.origin == filename) & (mumoro_metadata.c.node_or_edge == 'Edges')).execute().first()[0]
services = mumoro_metadata.select((mumoro_metadata.c.origin == filename) & (mumoro_metadata.c.node_or_edge == 'Services')).execute().first()[0]
return {'nodes': str(nd), 'edges' : str(ed), 'services': str(services) }
def import_kalkati_data(filename, network_name = "Public Transport"):
return import_gtfs_data(filename, network_name)
def import_freq(self, line_name, nodesf, linesf):
return import_gtfs_data(line_name)
#Loads a bike service API (from an already formatted URL). Inserts bike stations into the database and enables a scheduled re-check.
def import_bike_service( url, name ):
engine = create_engine(db_type + ":///" + db_params)
metadata = MetaData(bind = engine)
mumoro_metadata = Table('metadata', metadata, autoload = True)
s = mumoro_metadata.select((mumoro_metadata.c.origin == url) & (mumoro_metadata.c.node_or_edge == 'bike_stations'))
rs = s.execute()
for row in rs:
bt = row[0]
bike_stations_array.append( {'url_api': url,'table': str(bt)} )
return {'url_api': url,'table': str(bt)}
#Loads previously inserted data and creates a layer used in the multi-modal graph
def street_layer( data, name, color, mode ):
if not data or not name:
raise NameError('One or more parameters are missing')
if not is_color_valid( color ):
raise NameError('Color for the layer is invalid')
if mode != mumoro.Foot and mode != mumoro.Bike and mode != mumoro.Car and mode != None:
        raise NameError('Wrong layer mode parameter')
engine = create_engine(db_type + ":///" + db_params)
metadata = MetaData(bind = engine)
res = layer.Layer(name, mode, data, metadata)
layer_array.append( {'layer':res,'name':name,'mode':mode,'origin':data,'color':color} )
return {'layer':res,'name':name,'mode':mode,'origin':data,'color':color}
def public_transport_layer(data, name, color):
engine = create_engine(db_type + ":///" + db_params)
metadata = MetaData(bind = engine)
res = layer.GTFSLayer(name, data, metadata)
layer_array.append( {'layer':res,'name':name,'mode':PublicTransport,'origin':data,'color':color} )
return {'layer':res,'name':name,'mode':PublicTransport,'origin':PublicTransport,'color':color}
def paths( starting_layer, destination_layer, objectives ):
if not starting_layer or not destination_layer:
raise NameError('Empty layer(s)')
for i in range( len( objectives ) ):
if objectives[i] != mumoro.dist and objectives[i] != mumoro.cost and objectives[i] != mumoro.elevation and objectives[i] != mumoro.co2 and objectives[i] != mumoro.mode_change and objectives[i] != mumoro.line_change:
raise NameError('Wrong objective parameter')
paths_array.append( {'starting_layer':starting_layer,'destination_layer':destination_layer,'objectives':objectives} )
#Creates a transit cost variable, including the duration in seconds of the transit and if the mode is changed
def cost( duration, mode_change ):
e = mumoro.Edge()
if mode_change:
e.mode_change = 1
else:
e.mode_change = 0
e.duration = mumoro.Duration( duration );
return e
#Connects 2 given layers on same nodes with the given cost(s)
def connect_layers_same_nodes( layer1, layer2, cost ):
if not layer1 or not layer2 or not cost:
        raise NameError('One or more parameters are empty')
same_nodes_connection_array.append( { 'layer1':layer1, 'layer2':layer2, 'cost':cost } )
#Connect 2 given layers on a node list (arg 3 which should be the returned data from import_municipal_data or import_bike_service) with the given cost(s)
def connect_layers_from_node_list( layer1, layer2, node_list, cost, cost2 = None ):
if not layer1 or not layer2 or not node_list or not cost:
        raise NameError('One or more parameters are empty')
if not cost2:
nodes_list_connection_array.append( { 'layer1':layer1, 'layer2':layer2, 'node_list':node_list, 'cost1':cost, 'cost2':cost } )
else:
nodes_list_connection_array.append( { 'layer1':layer1, 'layer2':layer2, 'node_list':node_list, 'cost1':cost, 'cost2':cost2 } )
#Connect 2 given layers on nearest nodes
def connect_layers_on_nearest_nodes( layer1 , layer2, cost, cost2 = None):
if not layer1 or not layer2 or not cost:
        raise NameError('One or more parameters are empty')
nearest_nodes_connection_array.append( { 'layer1':layer1, 'layer2':layer2, 'cost':cost, 'cost2':cost2 } )
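# Added, hedged illustration (not from the original project): the functions above
# make up the vocabulary of the configuration file that this server executes with
# exec(file(sys.argv[1])) further below. A minimal configuration could look roughly
# like the following sketch; every file name and value is a made-up assumption.
#
#     db_type = 'sqlite'
#     db_params = 'mumoro.db'
#     start_date = '20100701'
#     listening_port = 3001
#     admin_email = 'admin@example.org'
#     web_url = 'http://localhost:3001/'
#
#     streets = import_street_data('toulouse.osm.bz2')
#     transit = import_gtfs_data('tisseo-gtfs.zip')
#     foot = street_layer(streets, 'Foot layer', '#7f7f7f', Foot)
#     pt = public_transport_layer(transit, 'Bus layer', '#ff0000')
#     connect_layers_on_nearest_nodes(foot, pt, cost(60, True))
#     paths(foot, foot, [dist, elevation])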
class Mumoro:
def __init__(self,db_string,config_file,admin_email,web_url):
if not admin_email:
raise NameError('Administrator email is empty')
self.admin_email = admin_email
if not web_url:
raise NameError('Website URL is empty')
self.web_url = web_url
if not layer_array:
            raise NameError('Cannot create multimodal graph because there are no layers')
layers = []
for i in layer_array:
layers.append( i['layer'] )
for i in range( len( layer_array ) ):
for j in range( len( layer_array ) ):
if i != j:
if layer_array[i]['name'] == layer_array[j]['name']:
raise NameError('Layers can not have the same name')
if len( paths_array ) == 0:
print 'Warning: there are no defined paths !'
self.engine = create_engine(db_string)
self.metadata = MetaData(bind = self.engine)
Session = sessionmaker(bind=self.engine)
self.session = Session()
self.bike_stations = []
for i in bike_stations_array:
self.bike_stations.append( bikestations.BikeStationImporter(i['url_api'],i['table'],self.metadata) )
for i in self.bike_stations:
i.update_from_db()
self.timestamp = time.time()
self.config_table = Table('config', self.metadata,
Column('config_file', String, primary_key = True),
Column('binary_file', String, primary_key = True),
Column('md5', String, index = True)
)
self.hash_table = Table('hurl', self.metadata,
Column('id', String(length=16), primary_key=True),
Column('zoom', Integer),
Column('lonMap', Float),
Column('latMap', Float),
Column('lonStart', Float),
Column('latStart', Float),
Column('lonDest', Float),
Column('latDest', Float),
Column('addressStart', Text),
Column('addressDest', Text),
Column('chrone', DateTime(timezone=False))
)
self.metadata.create_all()
s = self.config_table.select()
rs = s.execute()
row = rs.fetchone()
if row and row['md5']== md5_of_file( file( config_file ) ) and os.path.exists( os.getcwd() + "/" + row['binary_file'] ):
print "No need to rebuilt graph : configuration didn't change so loading from binary file"
self.g = layer.MultimodalGraph(layers, str(row['binary_file']))
else:
if not row:
print "This is the first time of launch: creating multimodal graph from scratch"
elif row['md5'] != md5_of_file( file( config_file ) ):
print "Configuration has changed since last launch. Rebuilding multimodal graph"
if os.path.exists( os.getcwd() + "/" + row['binary_file'] ) :
os.remove(os.getcwd() + "/" + row['binary_file'])
self.config_table.delete().execute()
self.session.commit()
elif not os.path.exists( os.getcwd() + "/" + row['binary_file'] ) :
print "Configuration has not changed since last launch but binary file is missing. Rebuilding multimodal graph"
self.config_table.delete().execute()
self.session.commit()
self.g = layer.MultimodalGraph( layers )
for i in same_nodes_connection_array:
self.g.connect_same_nodes( i['layer1']['layer'],i['layer2']['layer'],i['cost'] )
for i in nearest_nodes_connection_array:
self.g.connect_nearest_nodes( i['layer1']['layer'],i['layer2']['layer'],i['cost'], i['cost2'] )
for i in nodes_list_connection_array:
try:
i['node_list']['url_api']
            print 'Assuming that the node list contains bike stations'
for j in self.bike_stations:
if j.url == i['node_list']['url_api']:
self.g.connect_nodes_from_list( i['layer1']['layer'],i['layer2']['layer'],j.stations,i['cost1'],i['cost2'] )
break
except KeyError:
try:
i['node_list']['layer']
                print 'Assuming that the node list is a public transport layer'
self.g.connect_nodes_from_list( i['layer1']['layer'],i['layer2']['layer'],i['node_list']['layer'].nodes(),i['cost1'],i['cost2'] )
except KeyError:
raise NameError('Can not connect layers from the node list')
md5_config_checksum = md5_of_file( file( config_file ) )
self.g.save( md5_config_checksum + '.dump' )
i = self.config_table.insert()
i.execute({'config_file': config_file, 'md5': md5_config_checksum, 'binary_file': md5_config_checksum + '.dump'})
@cherrypy.expose
def path(self, slon, slat, dlon, dlat, time):
u_obj = []
#Creates the union of all used objectives
for p in paths_array:
for o in p['objectives'] :
if not o in u_obj:
u_obj.append( o )
u_obj = self.sort_objectives( u_obj )
p = tuple()
c = self.analyse_date( time )
#Call martins with the sorted objectives
for y in paths_array:
s = self.g.match( y['starting_layer']['name'], float(slon), float(slat))
d = self.g.match( y['destination_layer']['name'], float(dlon), float(dlat))
tmp = y['objectives']
tmp = self.sort_objectives( tmp )
t = len( tmp )
print c['seconds'], c['days']
if t == 0:
p = p + self.normalise_paths( mumoro.martins(s, d, self.g.graph,c['seconds'], c['days']),[],u_obj )
elif t == 1:
p = p + self.normalise_paths( mumoro.martins(s, d, self.g.graph,c['seconds'], c['days'], tmp[0] ), [ tmp[0] ], u_obj )
elif t == 2:
p = p + self.normalise_paths( mumoro.martins(s, d, self.g.graph,c['seconds'], c['days'], tmp[0], tmp[1] ), [ tmp[0], tmp[1] ], u_obj )
elif t >= 3:
p = p + self.normalise_paths( mumoro.martins(s, d, self.g.graph,c['seconds'], c['days'], tmp[0], tmp[1], tmp[2] ), [ tmp[0], tmp[1], tmp[2] ], u_obj )
#Creates the array containing the user-oriented string for each objective
str_obj = ['Duration']
for j in u_obj:
if j == mumoro.dist:
str_obj.append( 'Distance' )
elif j == mumoro.cost:
str_obj.append( 'Cost' )
elif j == mumoro.elevation:
str_obj.append( 'Elevation ' )
elif j == mumoro.co2:
str_obj.append( 'CO2 Emission' )
elif j == mumoro.mode_change:
str_obj.append( 'Mode Change' )
elif j == mumoro.line_change:
str_obj.append( 'Line Change' )
cherrypy.response.headers['Content-Type']= 'application/json'
if len(p) == 0:
return json.dumps({'error': 'No route found'})
print "Len of routes " + str( len( p ) )
ret = {
'objectives': str_obj,
'paths': []
}
for path in p:
p_str = {
'cost': [],
'type': 'FeatureCollection',
'crs': {
'type': 'EPSG',
'properties': {
'code': 4326,
'coordinate_order': [1,0]
}
}
}
for c in path.cost:
p_str['cost'].append(c)
features = []
feature = {'type': 'feature'}
geometry = {'type': 'Linestring'}
coordinates = []
last_node = path.nodes[0]
last_coord = self.g.coordinates(last_node)
for node in path.nodes:
coord = self.g.coordinates(node)
if(last_coord[3] != coord[3]):
geometry['coordinates'] = coordinates
feature['geometry'] = geometry
feature['properties'] = {'layer': last_coord[3]}
features.append(feature)
feature = {'type': 'feature'}
geometry = {'type': 'Linestring'}
coordinates = []
connection = {
'type': 'feature',
'geometry': {
'type': 'Linestring',
'coordinates': [[last_coord[0], last_coord[1]], [coord[0], coord[1]]]
},
'properties': {'layer': 'connection'}
}
features.append(connection);
last_node = node
last_coord = coord
coordinates.append([coord[0], coord[1]])
geometry['coordinates'] = coordinates
feature['geometry'] = geometry
feature['properties'] = {'layer': last_coord[3]}
features.append(feature)
p_str['features'] = features
ret['paths'].append(p_str)
return json.dumps(ret)
@cherrypy.expose
def bikes(self):
if len( self.bike_stations ) > 0:
if time.time() > self.timestamp + 60 * 5:
print "Updating bikestations"
for i in self.bike_stations:
i.import_data()
print "Done !"
for i in self.bike_stations:
i.update_from_db()
res = 'lat\tlon\ttitle\tdescription\ticon\ticonSize\ticonOffset\n'
for i in self.bike_stations:
res += i.to_string()
print "Got string"
return res;
else:
print "No bike stations imported so no string available to generate"
return None
@cherrypy.expose
def addhash(self,mlon,mlat,zoom,slon,slat,dlon,dlat,saddress,daddress,time):
cherrypy.response.headers['Content-Type']= 'application/json'
hashAdd = shorturl.shortURL(self.metadata)
hmd5 =hashAdd.addRouteToDatabase(mlon,mlat,zoom,slon,slat,dlon,dlat,saddress,daddress, time)
if( len(hmd5) > 0 ):
ret = {
'h': hmd5
}
return json.dumps(ret)
else:
return '{"error": "Add to DB failed"}'
@cherrypy.expose
def h(self,id):
hashCheck = shorturl.shortURL(self.metadata)
res = hashCheck.getDataFromHash(id)
if( len(res) > 0 ):
return self.index(True,res)
else:
return self.index(False,res)
@cherrypy.expose
def index(self,fromHash=False,hashData=[]):
tmpl = loader.load('index.html')
t = "{"
for i in range( len( layer_array ) ):
t = t + "\"" + layer_array[i]['name'] + "\": { strokeColor : \"" + layer_array[i]['color'] + "\"}"
if i != len( layer_array ) - 1 :
t = t + ","
t = t + "}"
if( not fromHash ):
a = paths_array[0]['starting_layer']['layer'].average()
b = paths_array[0]['starting_layer']['layer'].borders()
return tmpl.generate(fromHash='false',lonMap=a['avg_lon'],latMap=a['avg_lat'],zoom=14,lonStart=b['min_lon'],latStart=b['min_lat'],lonDest=b['max_lon'],latDest=b['max_lat'],addressStart='',addressDest='',hashUrl=self.web_url,layers=t, date=datetime.datetime.today().strftime("%d/%m/%Y %H:%M")).render('html', doctype='html')
else:
return tmpl.generate(fromHash='true',lonMap=hashData[2],latMap=hashData[3],zoom=hashData[1],lonStart=hashData[4],latStart=hashData[5],lonDest=hashData[6],latDest=hashData[7],addressStart=hashData[8].decode('utf-8'),addressDest=hashData[9].decode('utf-8'),hashUrl=self.web_url,layers=t,date=hashData[10]).render('html', doctype='html')
@cherrypy.expose
def info(self):
tmpl = loader.load('info.html')
return tmpl.generate().render('html', doctype='html')
@cherrypy.expose
def geo(self,q):
cherrypy.response.headers['Content-Type']= 'application/json'
url = "nominatim.openstreetmap.org:80"
params = urllib.urlencode({
"q": q.encode("utf-8"),
"format":"json",
"polygon": 0,
"addressdetails" : 1,
"email" : self.admin_email
})
conn = httplib.HTTPConnection(url)
conn.request("GET", "/search?" + params)
response = conn.getresponse()
ret = json.loads(response.read())
is_covered = False
if ret:
cord_error = ""
lon = ret[0]['lon']
lat = ret[0]['lat']
display_name = ret[0]['display_name']
if self.arecovered(lon,lat):
node_error = ""
is_covered = True
else:
node_error = "not covered area"
else:
cord_error = "geocoding failed"
lon = 0
lat = 0
display_name = ""
node_error = "match failed because geocoding failed"
data = {
'lon': lon,
'lat': lat,
'display_name': display_name,
'node_error': node_error,
'cord_error': cord_error,
'is_covered': is_covered
}
return json.dumps(data)
@cherrypy.expose
def revgeo(self,lon,lat):
cherrypy.response.headers['Content-Type']= 'application/json'
url = "nominatim.openstreetmap.org:80"
params = urllib.urlencode({
"lon": lon,
"lat": lat,
"format":"json",
"zoom": 18,
"addressdetails" : 1,
"email" : self.admin_email
})
conn = httplib.HTTPConnection(url)
conn.request("GET", "/reverse?" + params)
response = conn.getresponse()
ret = json.loads(response.read())
is_covered = False
if ret:
cord_error = ""
display_name = ret['display_name']
if self.arecovered(float(lon),float(lat)):
node_error = ""
is_covered = True
else:
node_error = "not covered area"
else:
cord_error = "geocoding failed"
display_name = ""
node_error = "match failed because revgeocoding failed"
data = {
'display_name': display_name,
'node_error': node_error,
'cord_error': cord_error,
'is_covered': is_covered
}
return json.dumps(data)
def arecovered(self,lon,lat):
return True
for i in layer_array:
b = i['layer'].borders()
if lon >= b['min_lon'] and lon <= b['max_lon'] and lat >= b['min_lat'] and lat <= b['max_lat']:
return True
else:
print lon,lat, b
return False
def sort_objectives(self,obj):
res = []
#Sorts the objective array in a unique way
if mumoro.cost in obj:
res.append( mumoro.cost )
if mumoro.co2 in obj:
res.append( mumoro.co2 )
if mumoro.dist in obj:
res.append( mumoro.dist )
if mumoro.elevation in obj and len( res ) < 3:
res.append( mumoro.elevation )
if mumoro.line_change in obj and len( res ) < 3:
res.append( mumoro.line_change )
if mumoro.mode_change in obj and len( res ) < 3:
res.append( mumoro.mode_change )
return res
def normalise_paths(self,route,used_objectives,union_objectives):
for i in route:
if len( union_objectives ) + 1 != len( i.cost ):
tmp = Costs()
tmp.append( i.cost[0] )
for j in range( len( union_objectives ) ):
if j < len( used_objectives ):
if used_objectives[j] == union_objectives[j]:
tmp.append( i.cost[j + 1] )
else:
tmp.append( 0.0 )
else:
tmp.append( 0.0 )
i.cost = tmp
return route
def analyse_date(self,date):
now_chrone = datetime.datetime.strptime(date, "%Y-%m-%d %H:%M:%S")
start_chrone = datetime.datetime.strptime(start_date, "%Y%m%d")
past_seconds = now_chrone.hour * 60 * 60 + now_chrone.minute * 60 + now_chrone.second
delta = now_chrone - start_chrone
return {'seconds':past_seconds,'days':delta.days}
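    # Added worked example of analyse_date (commentary only): with
    # start_date = '20100701', a query date of '2010-07-02 08:30:00' yields
    # past_seconds = 8*3600 + 30*60 = 30600 and delta.days = 1.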
total = len( sys.argv )
if total != 2:
sys.exit("Usage: python server.py {config_file.py}")
if not os.path.exists( os.getcwd() + "/" + sys.argv[1] ):
raise NameError('Configuration file does not exist')
exec( file( sys.argv[1] ) )
cherrypy.config.update({
'tools.encode.on': True,
'tools.encode.encoding': 'utf-8',
'tools.decode.on': True,
'tools.trailing_slash.on': True,
'tools.staticdir.root': os.path.abspath(os.path.dirname(__file__)) + "/web/",
'server.socket_port': listening_port,
'server.socket_host': '0.0.0.0'
})
cherrypy.tree.mount(Mumoro(db_type + ":///" + db_params,sys.argv[1],admin_email,web_url), '/', config={
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'static'
},
})
cherrypy.quickstart()
def main():
print "Goodbye!"
|
Tristramg/mumoro
|
server.py
|
Python
|
gpl-3.0
| 26,253 | 0.020876 |
#!/usr/bin/env python3
"""Test behaviour of the test running and the term program interaction."""
import sys
import pexpect
from testrunner import run
def _shellping(child, timeout=1):
"""Issue a 'shellping' command.
Raises a pexpect exception on failure.
:param timeout: timeout for the answer
"""
child.sendline('shellping')
child.expect_exact('shellpong\r\n', timeout=timeout)
def _wait_shell_ready(child, numtries=5):
"""Wait until the shell is ready by using 'shellping'."""
for _ in range(numtries - 1):
try:
_shellping(child)
except pexpect.TIMEOUT:
pass
else:
break
else:
# This one should fail
_shellping(child)
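# Added commentary: the bare `else` above belongs to the `for` loop -- it runs
# only when no attempt hit `break`, i.e. every silent retry timed out, so the
# final _shellping() is allowed to raise and fail the test.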
def _test_no_local_echo(child):
"""Verify that there is not local echo while testing."""
msg = 'true this should not be echoed'
child.sendline(msg)
res = child.expect_exact([pexpect.TIMEOUT, msg], timeout=1)
    assert res == 0, "There should have been a timeout, not a match of stdin"
def testfunc(child):
"""Run some tests to verify the board under test behaves correctly.
It currently tests:
* local echo
"""
child.expect_exact("Running 'tests_tools' application")
_wait_shell_ready(child)
# Verify there is no local and remote echo as it is disabled
_test_no_local_echo(child)
# The node should still answer after the previous one
_shellping(child)
if __name__ == "__main__":
sys.exit(run(testfunc))
|
cladmi/RIOT
|
tests/test_tools/tests/01-run.py
|
Python
|
lgpl-2.1
| 1,512 | 0 |
#
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import zipfile
import pandas as pd
from PIL import Image
import tensorflow as tf
from tensorflow.python.keras import layers
from tensorflow.python.keras import losses
from tensorflow.python.keras import models
from tensorflow.python.keras import backend as K
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
import numpy as np
from zoo.orca import init_orca_context, stop_orca_context
from zoo.orca.data import XShards
from zoo.orca.learn.tf.estimator import Estimator
def load_data_from_zip(file_path, file):
with zipfile.ZipFile(os.path.join(file_path, file), "r") as zip_ref:
unzipped_file = zip_ref.namelist()[0]
zip_ref.extractall(file_path)
def load_data(file_path):
load_data_from_zip(file_path, 'train.zip')
load_data_from_zip(file_path, 'train_masks.zip')
load_data_from_zip(file_path, 'train_masks.csv.zip')
def main(cluster_mode, max_epoch, file_path, batch_size, platform, non_interactive):
import matplotlib
if not non_interactive and platform == "mac":
matplotlib.use('qt5agg')
if cluster_mode == "local":
init_orca_context(cluster_mode="local", cores=4, memory="3g")
elif cluster_mode == "yarn":
init_orca_context(cluster_mode="yarn-client", num_nodes=2, cores=2, driver_memory="3g")
load_data(file_path)
img_dir = os.path.join(file_path, "train")
label_dir = os.path.join(file_path, "train_masks")
# Here we only take the first 1000 files for simplicity
df_train = pd.read_csv(os.path.join(file_path, 'train_masks.csv'))
ids_train = df_train['img'].map(lambda s: s.split('.')[0])
ids_train = ids_train[:1000]
x_train_filenames = []
y_train_filenames = []
for img_id in ids_train:
x_train_filenames.append(os.path.join(img_dir, "{}.jpg".format(img_id)))
y_train_filenames.append(os.path.join(label_dir, "{}_mask.gif".format(img_id)))
x_train_filenames, x_val_filenames, y_train_filenames, y_val_filenames = \
train_test_split(x_train_filenames, y_train_filenames, test_size=0.2, random_state=42)
def load_and_process_image(path):
array = mpimg.imread(path)
result = np.array(Image.fromarray(array).resize(size=(128, 128)))
result = result.astype(float)
result /= 255.0
return result
def load_and_process_image_label(path):
array = mpimg.imread(path)
result = np.array(Image.fromarray(array).resize(size=(128, 128)))
result = np.expand_dims(result[:, :, 1], axis=-1)
result = result.astype(float)
result /= 255.0
return result
train_images = np.stack([load_and_process_image(filepath) for filepath in x_train_filenames])
train_label_images = np.stack([load_and_process_image_label(filepath)
for filepath in y_train_filenames])
val_images = np.stack([load_and_process_image(filepath) for filepath in x_val_filenames])
val_label_images = np.stack([load_and_process_image_label(filepath)
for filepath in y_val_filenames])
train_shards = XShards.partition({"x": train_images, "y": train_label_images})
val_shards = XShards.partition({"x": val_images, "y": val_label_images})
# Build the U-Net model
def conv_block(input_tensor, num_filters):
encoder = layers.Conv2D(num_filters, (3, 3), padding='same')(input_tensor)
encoder = layers.Activation('relu')(encoder)
encoder = layers.Conv2D(num_filters, (3, 3), padding='same')(encoder)
encoder = layers.Activation('relu')(encoder)
return encoder
def encoder_block(input_tensor, num_filters):
encoder = conv_block(input_tensor, num_filters)
encoder_pool = layers.MaxPooling2D((2, 2), strides=(2, 2))(encoder)
return encoder_pool, encoder
def decoder_block(input_tensor, concat_tensor, num_filters):
decoder = layers.Conv2DTranspose(num_filters, (2, 2), strides=(2, 2), padding='same')(
input_tensor)
decoder = layers.concatenate([concat_tensor, decoder], axis=-1)
decoder = layers.Activation('relu')(decoder)
decoder = layers.Conv2D(num_filters, (3, 3), padding='same')(decoder)
decoder = layers.Activation('relu')(decoder)
decoder = layers.Conv2D(num_filters, (3, 3), padding='same')(decoder)
decoder = layers.Activation('relu')(decoder)
return decoder
inputs = layers.Input(shape=(128, 128, 3)) # 128
encoder0_pool, encoder0 = encoder_block(inputs, 16) # 64
encoder1_pool, encoder1 = encoder_block(encoder0_pool, 32) # 32
encoder2_pool, encoder2 = encoder_block(encoder1_pool, 64) # 16
encoder3_pool, encoder3 = encoder_block(encoder2_pool, 128) # 8
center = conv_block(encoder3_pool, 256) # center
decoder3 = decoder_block(center, encoder3, 128) # 16
decoder2 = decoder_block(decoder3, encoder2, 64) # 32
decoder1 = decoder_block(decoder2, encoder1, 32) # 64
decoder0 = decoder_block(decoder1, encoder0, 16) # 128
outputs = layers.Conv2D(1, (1, 1), activation='sigmoid')(decoder0)
net = models.Model(inputs=[inputs], outputs=[outputs])
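    # Added commentary: each encoder_block halves the spatial size
    # (128 -> 64 -> 32 -> 16 -> 8) while doubling the filter count, and each
    # decoder_block upsamples back and concatenates the matching encoder output
    # as a skip connection, so the final 1x1 sigmoid layer emits a 128x128x1
    # mask aligned with the input image.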
# Define custom metrics
def dice_coeff(y_true, y_pred):
smooth = 1.
# Flatten
y_true_f = tf.reshape(y_true, [-1])
y_pred_f = tf.reshape(y_pred, [-1])
intersection = tf.reduce_sum(y_true_f * y_pred_f)
score = (2. * intersection + smooth) / \
(tf.reduce_sum(y_true_f) + tf.reduce_sum(y_pred_f) + smooth)
return score
# Define custom loss function
def dice_loss(y_true, y_pred):
loss = 1 - dice_coeff(y_true, y_pred)
return loss
def bce_dice_loss(y_true, y_pred):
loss = losses.binary_crossentropy(y_true, y_pred) + dice_loss(y_true, y_pred)
return loss
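    # Added worked example of the loss above (commentary only, values illustrative):
    # for flattened masks y_true = [1, 1, 0] and y_pred = [1, 0, 0] the
    # intersection is 1, so dice_coeff = (2*1 + 1) / (2 + 1 + 1) = 0.75 and
    # dice_loss = 0.25; bce_dice_loss then adds the usual binary cross-entropy.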
# compile model
net.compile(optimizer=tf.keras.optimizers.Adam(2e-3), loss=bce_dice_loss)
print(net.summary())
# create an estimator from keras model
est = Estimator.from_keras(keras_model=net)
# fit with estimator
est.fit(data=train_shards,
batch_size=batch_size,
epochs=max_epoch)
# evaluate with estimator
result = est.evaluate(val_shards)
print(result)
# predict with estimator
val_shards.cache()
val_image_shards = val_shards.transform_shard(lambda val_dict: {"x": val_dict["x"]})
pred_shards = est.predict(data=val_image_shards, batch_size=batch_size)
pred = pred_shards.collect()[0]["prediction"]
val_image_label = val_shards.collect()[0]
val_image = val_image_label["x"]
val_label = val_image_label["y"]
if not non_interactive:
# visualize 5 predicted results
plt.figure(figsize=(10, 20))
for i in range(5):
img = val_image[i]
label = val_label[i]
predicted_label = pred[i]
plt.subplot(5, 3, 3 * i + 1)
plt.imshow(img)
plt.title("Input image")
plt.subplot(5, 3, 3 * i + 2)
plt.imshow(label[:, :, 0], cmap='gray')
plt.title("Actual Mask")
plt.subplot(5, 3, 3 * i + 3)
plt.imshow(predicted_label, cmap='gray')
plt.title("Predicted Mask")
plt.suptitle("Examples of Input Image, Label, and Prediction")
plt.show()
stop_orca_context()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--cluster_mode', type=str, default="local",
help='The mode for the Spark cluster. local or yarn.')
parser.add_argument('--file_path', type=str, default="/tmp/carvana/",
help="The path to carvana train.zip, train_mask.zip and train_mask.csv.zip")
parser.add_argument('--epochs', type=int, default=8,
help="The number of epochs to train the model")
parser.add_argument('--batch_size', type=int, default=8,
help="Batch size for training and prediction")
parser.add_argument('--platform', type=str, default="linux",
help="The platform you used. Only linux and mac are supported.")
parser.add_argument('--non_interactive', default=False, action="store_true",
help="Flag to not visualize the result.")
args = parser.parse_args()
main(args.cluster_mode, args.epochs, args.file_path, args.batch_size, args.platform,
args.non_interactive)
|
intel-analytics/analytics-zoo
|
pyzoo/zoo/examples/orca/learn/tf/image_segmentation/image_segmentation.py
|
Python
|
apache-2.0
| 9,188 | 0.001741 |
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.dsdv', cpp_namespace='::ns3')
return root_module
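# Added, hedged note (not part of the generated bindings): files like this are
# normally driven by a small main that assembles the module and writes the C++
# wrapper to a code sink, roughly:
#
#     import sys
#     from pybindgen import FileCodeSink
#     root = module_init()
#     register_types(root)
#     root.generate(FileCodeSink(sys.stdout))
#
# The exact driver used by the ns-3 build system may differ from this sketch.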
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
module.add_class('Inet6SocketAddress', import_from_module='ns.network')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
module.add_class('InetSocketAddress', import_from_module='ns.network')
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## int-to-type.h (module 'core'): ns3::IntToType<0> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['0'])
## int-to-type.h (module 'core'): ns3::IntToType<0>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 0 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<1> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['1'])
## int-to-type.h (module 'core'): ns3::IntToType<1>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 1 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<2> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['2'])
## int-to-type.h (module 'core'): ns3::IntToType<2>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 2 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<3> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['3'])
## int-to-type.h (module 'core'): ns3::IntToType<3>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 3 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<4> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['4'])
## int-to-type.h (module 'core'): ns3::IntToType<4>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 4 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<5> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['5'])
## int-to-type.h (module 'core'): ns3::IntToType<5>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 5 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<6> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['6'])
## int-to-type.h (module 'core'): ns3::IntToType<6>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 6 >'], import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress [class]
module.add_class('Ipv4InterfaceAddress', import_from_module='ns.internet')
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e [enumeration]
module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'], import_from_module='ns.internet')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv4-routing-helper.h (module 'internet'): ns3::Ipv4RoutingHelper [class]
module.add_class('Ipv4RoutingHelper', allow_subclassing=True, import_from_module='ns.internet')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress [class]
module.add_class('Ipv6InterfaceAddress', import_from_module='ns.internet')
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::State_e [enumeration]
module.add_enum('State_e', ['TENTATIVE', 'DEPRECATED', 'PREFERRED', 'PERMANENT', 'HOMEADDRESS', 'TENTATIVE_OPTIMISTIC', 'INVALID'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Scope_e [enumeration]
module.add_enum('Scope_e', ['HOST', 'LINKLOCAL', 'GLOBAL'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## node-container.h (module 'network'): ns3::NodeContainer [class]
module.add_class('NodeContainer', import_from_module='ns.network')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData_e [enumeration]
module.add_enum('TagData_e', ['MAX_SIZE'], outer_class=root_module['ns3::PacketTagList::TagData'], import_from_module='ns.network')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simulator.h (module 'core'): ns3::Simulator [class]
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## timer.h (module 'core'): ns3::Timer [class]
module.add_class('Timer', import_from_module='ns.core')
## timer.h (module 'core'): ns3::Timer::DestroyPolicy [enumeration]
module.add_enum('DestroyPolicy', ['CANCEL_ON_DESTROY', 'REMOVE_ON_DESTROY', 'CHECK_ON_DESTROY'], outer_class=root_module['ns3::Timer'], import_from_module='ns.core')
## timer.h (module 'core'): ns3::Timer::State [enumeration]
module.add_enum('State', ['RUNNING', 'EXPIRED', 'SUSPENDED'], outer_class=root_module['ns3::Timer'], import_from_module='ns.core')
## timer-impl.h (module 'core'): ns3::TimerImpl [class]
module.add_class('TimerImpl', allow_subclassing=True, import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## dsdv-helper.h (module 'dsdv'): ns3::DsdvHelper [class]
module.add_class('DsdvHelper', parent=root_module['ns3::Ipv4RoutingHelper'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## ipv4-header.h (module 'internet'): ns3::Ipv4Header [class]
module.add_class('Ipv4Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType [enumeration]
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType [enumeration]
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
## ipv6-header.h (module 'internet'): ns3::Ipv6Header [class]
module.add_class('Ipv6Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::NextHeader_e [enumeration]
module.add_enum('NextHeader_e', ['IPV6_EXT_HOP_BY_HOP', 'IPV6_IPV4', 'IPV6_TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## random-variable-stream.h (module 'core'): ns3::RandomVariableStream [class]
module.add_class('RandomVariableStream', import_from_module='ns.core', parent=root_module['ns3::Object'])
## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable [class]
module.add_class('SequentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## socket.h (module 'network'): ns3::Socket [class]
module.add_class('Socket', import_from_module='ns.network', parent=root_module['ns3::Object'])
## socket.h (module 'network'): ns3::Socket::SocketErrno [enumeration]
module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::Socket::SocketType [enumeration]
module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::SocketAddressTag [class]
module.add_class('SocketAddressTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpTosTag [class]
module.add_class('SocketIpTosTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpTtlTag [class]
module.add_class('SocketIpTtlTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag [class]
module.add_class('SocketIpv6HopLimitTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpv6TclassTag [class]
module.add_class('SocketIpv6TclassTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag [class]
module.add_class('SocketSetDontFragmentTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## trailer.h (module 'network'): ns3::Trailer [class]
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable [class]
module.add_class('TriangularRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable [class]
module.add_class('UniformRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable [class]
module.add_class('WeibullRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable [class]
module.add_class('ZetaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable [class]
module.add_class('ZipfRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable [class]
module.add_class('ConstantRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable [class]
module.add_class('DeterministicRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable [class]
module.add_class('EmpiricalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable [class]
module.add_class('ErlangRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## event-impl.h (module 'core'): ns3::EventImpl [class]
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable [class]
module.add_class('ExponentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable [class]
module.add_class('GammaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol [class]
module.add_class('IpL4Protocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::RxStatus [enumeration]
module.add_enum('RxStatus', ['RX_OK', 'RX_CSUM_FAILED', 'RX_ENDPOINT_CLOSED', 'RX_ENDPOINT_UNREACH'], outer_class=root_module['ns3::IpL4Protocol'], import_from_module='ns.internet')
## ipv4.h (module 'internet'): ns3::Ipv4 [class]
module.add_class('Ipv4', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface [class]
module.add_class('Ipv4Interface', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol [class]
module.add_class('Ipv4L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv4'])
## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::DropReason [enumeration]
module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_BAD_CHECKSUM', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv4L3Protocol'], import_from_module='ns.internet')
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute [class]
module.add_class('Ipv4MulticastRoute', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
## ipv4-route.h (module 'internet'): ns3::Ipv4Route [class]
module.add_class('Ipv4Route', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol [class]
module.add_class('Ipv4RoutingProtocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-interface.h (module 'internet'): ns3::Ipv6Interface [class]
module.add_class('Ipv6Interface', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable [class]
module.add_class('LogNormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
## nix-vector.h (module 'network'): ns3::NixVector [class]
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
## node.h (module 'network'): ns3::Node [class]
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable [class]
module.add_class('NormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
## packet.h (module 'network'): ns3::Packet [class]
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable [class]
module.add_class('ParetoRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-list-routing.h (module 'internet'): ns3::Ipv4ListRouting [class]
module.add_class('Ipv4ListRouting', import_from_module='ns.internet', parent=root_module['ns3::Ipv4RoutingProtocol'])
module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type='map')
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace Hash
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
## Register a nested module for the namespace dsdv
nested_module = module.add_cpp_namespace('dsdv')
register_types_ns3_dsdv(nested_module)
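## Per-namespace type registration helpers. Each helper receives the sub-module
## created by add_cpp_namespace() above and registers the classes, enums and
## type aliases that belong to that C++ namespace.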
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_Hash(module):
root_module = module.get_root()
## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
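    ## Type aliases so that the raw hash-callback function pointer types can be
    ## referred to by their ns3::Hash typedef names (plus pointer and reference forms).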
typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) *', 'ns3::Hash::Hash32Function_ptr')
typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) **', 'ns3::Hash::Hash32Function_ptr*')
typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) *&', 'ns3::Hash::Hash32Function_ptr&')
typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) *', 'ns3::Hash::Hash64Function_ptr')
typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) **', 'ns3::Hash::Hash64Function_ptr*')
typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) *&', 'ns3::Hash::Hash64Function_ptr&')
## Register a nested module for the namespace Function
nested_module = module.add_cpp_namespace('Function')
register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
root_module = module.get_root()
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
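## Types defined by the dsdv module itself (note: no import_from_module argument,
## these classes are registered directly in this binding).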
def register_types_ns3_dsdv(module):
root_module = module.get_root()
## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RouteFlags [enumeration]
module.add_enum('RouteFlags', ['VALID', 'INVALID'])
## dsdv-packet.h (module 'dsdv'): ns3::dsdv::DsdvHeader [class]
module.add_class('DsdvHeader', parent=root_module['ns3::Header'])
## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::PacketQueue [class]
module.add_class('PacketQueue')
## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::QueueEntry [class]
module.add_class('QueueEntry')
## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol [class]
module.add_class('RoutingProtocol', parent=root_module['ns3::Ipv4RoutingProtocol'])
## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTable [class]
module.add_class('RoutingTable')
## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTableEntry [class]
module.add_class('RoutingTableEntry')
module.add_container('std::map< ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry >', ('ns3::Ipv4Address', 'ns3::dsdv::RoutingTableEntry'), container_type='map')
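## Method registration: register_methods() dispatches to one helper per class.
## Each register_Ns3*_methods() helper below adds that class's constructors,
## operators and member functions to the binding.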
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
register_Ns3Inet6SocketAddress_methods(root_module, root_module['ns3::Inet6SocketAddress'])
register_Ns3InetSocketAddress_methods(root_module, root_module['ns3::InetSocketAddress'])
register_Ns3IntToType__0_methods(root_module, root_module['ns3::IntToType< 0 >'])
register_Ns3IntToType__1_methods(root_module, root_module['ns3::IntToType< 1 >'])
register_Ns3IntToType__2_methods(root_module, root_module['ns3::IntToType< 2 >'])
register_Ns3IntToType__3_methods(root_module, root_module['ns3::IntToType< 3 >'])
register_Ns3IntToType__4_methods(root_module, root_module['ns3::IntToType< 4 >'])
register_Ns3IntToType__5_methods(root_module, root_module['ns3::IntToType< 5 >'])
register_Ns3IntToType__6_methods(root_module, root_module['ns3::IntToType< 6 >'])
register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
register_Ns3Ipv4InterfaceAddress_methods(root_module, root_module['ns3::Ipv4InterfaceAddress'])
register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
register_Ns3Ipv4RoutingHelper_methods(root_module, root_module['ns3::Ipv4RoutingHelper'])
register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
register_Ns3Ipv6InterfaceAddress_methods(root_module, root_module['ns3::Ipv6InterfaceAddress'])
register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
register_Ns3Timer_methods(root_module, root_module['ns3::Timer'])
register_Ns3TimerImpl_methods(root_module, root_module['ns3::TimerImpl'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
register_Ns3DsdvHelper_methods(root_module, root_module['ns3::DsdvHelper'])
register_Ns3Header_methods(root_module, root_module['ns3::Header'])
register_Ns3Ipv4Header_methods(root_module, root_module['ns3::Ipv4Header'])
register_Ns3Ipv6Header_methods(root_module, root_module['ns3::Ipv6Header'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3RandomVariableStream_methods(root_module, root_module['ns3::RandomVariableStream'])
register_Ns3SequentialRandomVariable_methods(root_module, root_module['ns3::SequentialRandomVariable'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3Socket_methods(root_module, root_module['ns3::Socket'])
register_Ns3SocketAddressTag_methods(root_module, root_module['ns3::SocketAddressTag'])
register_Ns3SocketIpTosTag_methods(root_module, root_module['ns3::SocketIpTosTag'])
register_Ns3SocketIpTtlTag_methods(root_module, root_module['ns3::SocketIpTtlTag'])
register_Ns3SocketIpv6HopLimitTag_methods(root_module, root_module['ns3::SocketIpv6HopLimitTag'])
register_Ns3SocketIpv6TclassTag_methods(root_module, root_module['ns3::SocketIpv6TclassTag'])
register_Ns3SocketSetDontFragmentTag_methods(root_module, root_module['ns3::SocketSetDontFragmentTag'])
register_Ns3Time_methods(root_module, root_module['ns3::Time'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
register_Ns3TriangularRandomVariable_methods(root_module, root_module['ns3::TriangularRandomVariable'])
register_Ns3UniformRandomVariable_methods(root_module, root_module['ns3::UniformRandomVariable'])
register_Ns3WeibullRandomVariable_methods(root_module, root_module['ns3::WeibullRandomVariable'])
register_Ns3ZetaRandomVariable_methods(root_module, root_module['ns3::ZetaRandomVariable'])
register_Ns3ZipfRandomVariable_methods(root_module, root_module['ns3::ZipfRandomVariable'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3ConstantRandomVariable_methods(root_module, root_module['ns3::ConstantRandomVariable'])
register_Ns3DeterministicRandomVariable_methods(root_module, root_module['ns3::DeterministicRandomVariable'])
register_Ns3EmpiricalRandomVariable_methods(root_module, root_module['ns3::EmpiricalRandomVariable'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3ErlangRandomVariable_methods(root_module, root_module['ns3::ErlangRandomVariable'])
register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
register_Ns3ExponentialRandomVariable_methods(root_module, root_module['ns3::ExponentialRandomVariable'])
register_Ns3GammaRandomVariable_methods(root_module, root_module['ns3::GammaRandomVariable'])
register_Ns3IpL4Protocol_methods(root_module, root_module['ns3::IpL4Protocol'])
register_Ns3Ipv4_methods(root_module, root_module['ns3::Ipv4'])
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
register_Ns3Ipv4Interface_methods(root_module, root_module['ns3::Ipv4Interface'])
register_Ns3Ipv4L3Protocol_methods(root_module, root_module['ns3::Ipv4L3Protocol'])
register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
register_Ns3Ipv4MulticastRoute_methods(root_module, root_module['ns3::Ipv4MulticastRoute'])
register_Ns3Ipv4Route_methods(root_module, root_module['ns3::Ipv4Route'])
register_Ns3Ipv4RoutingProtocol_methods(root_module, root_module['ns3::Ipv4RoutingProtocol'])
register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
register_Ns3Ipv6Interface_methods(root_module, root_module['ns3::Ipv6Interface'])
register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
register_Ns3LogNormalRandomVariable_methods(root_module, root_module['ns3::LogNormalRandomVariable'])
register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
register_Ns3Node_methods(root_module, root_module['ns3::Node'])
register_Ns3NormalRandomVariable_methods(root_module, root_module['ns3::NormalRandomVariable'])
register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
register_Ns3ParetoRandomVariable_methods(root_module, root_module['ns3::ParetoRandomVariable'])
register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
register_Ns3Ipv4ListRouting_methods(root_module, root_module['ns3::Ipv4ListRouting'])
register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
register_Ns3DsdvDsdvHeader_methods(root_module, root_module['ns3::dsdv::DsdvHeader'])
register_Ns3DsdvPacketQueue_methods(root_module, root_module['ns3::dsdv::PacketQueue'])
register_Ns3DsdvQueueEntry_methods(root_module, root_module['ns3::dsdv::QueueEntry'])
register_Ns3DsdvRoutingProtocol_methods(root_module, root_module['ns3::dsdv::RoutingProtocol'])
register_Ns3DsdvRoutingTable_methods(root_module, root_module['ns3::dsdv::RoutingTable'])
register_Ns3DsdvRoutingTableEntry_methods(root_module, root_module['ns3::dsdv::RoutingTableEntry'])
return
def register_Ns3Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## address.h (module 'network'): ns3::Address::Address() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
cls.add_constructor([param('ns3::Address const &', 'address')])
## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
cls.add_method('CheckCompatible',
'bool',
[param('uint8_t', 'type'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyAllFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
cls.add_method('CopyAllTo',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'uint32_t',
[param('uint8_t *', 'buffer')],
is_const=True)
## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'buffer')])
## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
cls.add_method('GetLength',
'uint8_t',
[],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
cls.add_method('IsInvalid',
'bool',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
cls.add_method('IsMatchingType',
'bool',
[param('uint8_t', 'type')],
is_const=True)
## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
cls.add_method('Register',
'uint8_t',
[],
is_static=True)
## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'buffer')],
is_const=True)
return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3Buffer_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [copy constructor]
cls.add_constructor([param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtEnd(uint32_t end) [member function]
cls.add_method('AddAtEnd',
'bool',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtStart(uint32_t start) [member function]
cls.add_method('AddAtStart',
'bool',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
cls.add_method('Begin',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Buffer',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFullCopy() const [member function]
cls.add_method('CreateFullCopy',
'ns3::Buffer',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
cls.add_method('End',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentEndOffset() const [member function]
cls.add_method('GetCurrentEndOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentStartOffset() const [member function]
cls.add_method('GetCurrentStartOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3BufferIterator_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
cls.add_method('GetDistanceFrom',
'uint32_t',
[param('ns3::Buffer::Iterator const &', 'o')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
cls.add_method('IsEnd',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
cls.add_method('IsStart',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
cls.add_method('Next',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
cls.add_method('Next',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
cls.add_method('Prev',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
cls.add_method('Prev',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
cls.add_method('ReadLsbtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
cls.add_method('ReadLsbtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
cls.add_method('ReadLsbtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
cls.add_method('ReadNtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
cls.add_method('ReadNtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
cls.add_method('ReadNtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
cls.add_method('Write',
'void',
[param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
cls.add_method('WriteHtolsbU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
cls.add_method('WriteHtolsbU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
cls.add_method('WriteHtolsbU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
cls.add_method('WriteHtonU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
cls.add_method('WriteHtonU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
cls.add_method('WriteHtonU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data'), param('uint32_t', 'len')])
return
def register_Ns3ByteTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::ByteTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator::Item ns3::ByteTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagIterator::Item',
[])
return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function]
cls.add_method('GetEnd',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function]
cls.add_method('GetStart',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3ByteTagList_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor]
cls.add_constructor([])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function]
cls.add_method('Add',
'ns3::TagBuffer',
[param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function]
cls.add_method('Add',
'void',
[param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t adjustment, int32_t appendOffset) [member function]
cls.add_method('AddAtEnd',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'appendOffset')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t adjustment, int32_t prependOffset) [member function]
cls.add_method('AddAtStart',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'prependOffset')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function]
cls.add_method('Begin',
'ns3::ByteTagList::Iterator',
[param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
is_const=True)
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Iterator(ns3::ByteTagList::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
## byte-tag-list.h (module 'network'): uint32_t ns3::ByteTagList::Iterator::GetOffsetStart() const [member function]
cls.add_method('GetOffsetStart',
'uint32_t',
[],
is_const=True)
## byte-tag-list.h (module 'network'): bool ns3::ByteTagList::Iterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item ns3::ByteTagList::Iterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagList::Iterator::Item',
[])
return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::ByteTagList::Iterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::TagBuffer buf) [constructor]
cls.add_constructor([param('ns3::TagBuffer', 'buf')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::buf [variable]
cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::end [variable]
cls.add_instance_attribute('end', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::size [variable]
cls.add_instance_attribute('size', 'uint32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::start [variable]
cls.add_instance_attribute('start', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3EventId_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('==')
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventId const &', 'arg0')])
## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
cls.add_constructor([])
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
cls.add_method('GetContext',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
cls.add_method('GetTs',
'uint64_t',
[],
is_const=True)
## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
cls.add_method('GetUid',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
cls.add_method('IsExpired',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
cls.add_method('IsRunning',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
cls.add_method('PeekEventImpl',
'ns3::EventImpl *',
[],
is_const=True)
return
def register_Ns3Hasher_methods(root_module, cls):
## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
cls.add_constructor([])
## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')])
## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('std::string const', 's')])
## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')])
## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('std::string const', 's')])
## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
cls.add_method('clear',
'ns3::Hasher &',
[])
return
def register_Ns3Inet6SocketAddress_methods(root_module, cls):
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Inet6SocketAddress const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Inet6SocketAddress const &', 'arg0')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Ipv6Address ipv6, uint16_t port) [constructor]
cls.add_constructor([param('ns3::Ipv6Address', 'ipv6'), param('uint16_t', 'port')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Ipv6Address ipv6) [constructor]
cls.add_constructor([param('ns3::Ipv6Address', 'ipv6')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(uint16_t port) [constructor]
cls.add_constructor([param('uint16_t', 'port')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(char const * ipv6, uint16_t port) [constructor]
cls.add_constructor([param('char const *', 'ipv6'), param('uint16_t', 'port')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(char const * ipv6) [constructor]
cls.add_constructor([param('char const *', 'ipv6')])
## inet6-socket-address.h (module 'network'): static ns3::Inet6SocketAddress ns3::Inet6SocketAddress::ConvertFrom(ns3::Address const & addr) [member function]
cls.add_method('ConvertFrom',
'ns3::Inet6SocketAddress',
[param('ns3::Address const &', 'addr')],
is_static=True)
## inet6-socket-address.h (module 'network'): ns3::Ipv6Address ns3::Inet6SocketAddress::GetIpv6() const [member function]
cls.add_method('GetIpv6',
'ns3::Ipv6Address',
[],
is_const=True)
## inet6-socket-address.h (module 'network'): uint16_t ns3::Inet6SocketAddress::GetPort() const [member function]
cls.add_method('GetPort',
'uint16_t',
[],
is_const=True)
## inet6-socket-address.h (module 'network'): static bool ns3::Inet6SocketAddress::IsMatchingType(ns3::Address const & addr) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'addr')],
is_static=True)
## inet6-socket-address.h (module 'network'): void ns3::Inet6SocketAddress::SetIpv6(ns3::Ipv6Address ipv6) [member function]
cls.add_method('SetIpv6',
'void',
[param('ns3::Ipv6Address', 'ipv6')])
## inet6-socket-address.h (module 'network'): void ns3::Inet6SocketAddress::SetPort(uint16_t port) [member function]
cls.add_method('SetPort',
'void',
[param('uint16_t', 'port')])
return
def register_Ns3InetSocketAddress_methods(root_module, cls):
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::InetSocketAddress const & arg0) [copy constructor]
cls.add_constructor([param('ns3::InetSocketAddress const &', 'arg0')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::Ipv4Address ipv4, uint16_t port) [constructor]
cls.add_constructor([param('ns3::Ipv4Address', 'ipv4'), param('uint16_t', 'port')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::Ipv4Address ipv4) [constructor]
cls.add_constructor([param('ns3::Ipv4Address', 'ipv4')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(uint16_t port) [constructor]
cls.add_constructor([param('uint16_t', 'port')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(char const * ipv4, uint16_t port) [constructor]
cls.add_constructor([param('char const *', 'ipv4'), param('uint16_t', 'port')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(char const * ipv4) [constructor]
cls.add_constructor([param('char const *', 'ipv4')])
## inet-socket-address.h (module 'network'): static ns3::InetSocketAddress ns3::InetSocketAddress::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::InetSocketAddress',
[param('ns3::Address const &', 'address')],
is_static=True)
## inet-socket-address.h (module 'network'): ns3::Ipv4Address ns3::InetSocketAddress::GetIpv4() const [member function]
cls.add_method('GetIpv4',
'ns3::Ipv4Address',
[],
is_const=True)
## inet-socket-address.h (module 'network'): uint16_t ns3::InetSocketAddress::GetPort() const [member function]
cls.add_method('GetPort',
'uint16_t',
[],
is_const=True)
## inet-socket-address.h (module 'network'): static bool ns3::InetSocketAddress::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## inet-socket-address.h (module 'network'): void ns3::InetSocketAddress::SetIpv4(ns3::Ipv4Address address) [member function]
cls.add_method('SetIpv4',
'void',
[param('ns3::Ipv4Address', 'address')])
## inet-socket-address.h (module 'network'): void ns3::InetSocketAddress::SetPort(uint16_t port) [member function]
cls.add_method('SetPort',
'void',
[param('uint16_t', 'port')])
return
def register_Ns3IntToType__0_methods(root_module, cls):
## int-to-type.h (module 'core'): ns3::IntToType<0>::IntToType() [constructor]
cls.add_constructor([])
## int-to-type.h (module 'core'): ns3::IntToType<0>::IntToType(ns3::IntToType<0> const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntToType< 0 > const &', 'arg0')])
return
def register_Ns3IntToType__1_methods(root_module, cls):
## int-to-type.h (module 'core'): ns3::IntToType<1>::IntToType() [constructor]
cls.add_constructor([])
## int-to-type.h (module 'core'): ns3::IntToType<1>::IntToType(ns3::IntToType<1> const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntToType< 1 > const &', 'arg0')])
return
def register_Ns3IntToType__2_methods(root_module, cls):
## int-to-type.h (module 'core'): ns3::IntToType<2>::IntToType() [constructor]
cls.add_constructor([])
## int-to-type.h (module 'core'): ns3::IntToType<2>::IntToType(ns3::IntToType<2> const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntToType< 2 > const &', 'arg0')])
return
def register_Ns3IntToType__3_methods(root_module, cls):
## int-to-type.h (module 'core'): ns3::IntToType<3>::IntToType() [constructor]
cls.add_constructor([])
## int-to-type.h (module 'core'): ns3::IntToType<3>::IntToType(ns3::IntToType<3> const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntToType< 3 > const &', 'arg0')])
return
def register_Ns3IntToType__4_methods(root_module, cls):
## int-to-type.h (module 'core'): ns3::IntToType<4>::IntToType() [constructor]
cls.add_constructor([])
## int-to-type.h (module 'core'): ns3::IntToType<4>::IntToType(ns3::IntToType<4> const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntToType< 4 > const &', 'arg0')])
return
def register_Ns3IntToType__5_methods(root_module, cls):
## int-to-type.h (module 'core'): ns3::IntToType<5>::IntToType() [constructor]
cls.add_constructor([])
## int-to-type.h (module 'core'): ns3::IntToType<5>::IntToType(ns3::IntToType<5> const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntToType< 5 > const &', 'arg0')])
return
def register_Ns3IntToType__6_methods(root_module, cls):
## int-to-type.h (module 'core'): ns3::IntToType<6>::IntToType() [constructor]
cls.add_constructor([])
## int-to-type.h (module 'core'): ns3::IntToType<6>::IntToType(ns3::IntToType<6> const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntToType< 6 > const &', 'arg0')])
return
def register_Ns3Ipv4Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
cls.add_constructor([param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('CombineMask',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv4Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv4Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('GetSubnetDirectedBroadcast',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Address const &', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
cls.add_method('IsLocalMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('IsSubnetDirectedBroadcast',
'bool',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
return
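# add_binary_comparison_operator() and add_output_stream_operator() above map
# the C++ operators of Ipv4Address onto the Python protocol (==, !=, < and
# str()/print via operator<<). An illustrative sketch of what the generated
# bindings allow from a script, assuming the built ns.network module is
# importable:
#
#   import ns.network
#   addr = ns.network.Ipv4Address("10.1.1.1")
#   net = addr.CombineMask(ns.network.Ipv4Mask("255.255.255.0"))
#   print(net)  # network address rendered through operator<<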
def register_Ns3Ipv4InterfaceAddress_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::Ipv4InterfaceAddress() [constructor]
cls.add_constructor([])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::Ipv4InterfaceAddress(ns3::Ipv4Address local, ns3::Ipv4Mask mask) [constructor]
cls.add_constructor([param('ns3::Ipv4Address', 'local'), param('ns3::Ipv4Mask', 'mask')])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::Ipv4InterfaceAddress(ns3::Ipv4InterfaceAddress const & o) [copy constructor]
cls.add_constructor([param('ns3::Ipv4InterfaceAddress const &', 'o')])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4InterfaceAddress::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4InterfaceAddress::GetLocal() const [member function]
cls.add_method('GetLocal',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4Mask ns3::Ipv4InterfaceAddress::GetMask() const [member function]
cls.add_method('GetMask',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e ns3::Ipv4InterfaceAddress::GetScope() const [member function]
cls.add_method('GetScope',
'ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e',
[],
is_const=True)
## ipv4-interface-address.h (module 'internet'): bool ns3::Ipv4InterfaceAddress::IsSecondary() const [member function]
cls.add_method('IsSecondary',
'bool',
[],
is_const=True)
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetBroadcast(ns3::Ipv4Address broadcast) [member function]
cls.add_method('SetBroadcast',
'void',
[param('ns3::Ipv4Address', 'broadcast')])
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetLocal(ns3::Ipv4Address local) [member function]
cls.add_method('SetLocal',
'void',
[param('ns3::Ipv4Address', 'local')])
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetMask(ns3::Ipv4Mask mask) [member function]
cls.add_method('SetMask',
'void',
[param('ns3::Ipv4Mask', 'mask')])
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetPrimary() [member function]
cls.add_method('SetPrimary',
'void',
[])
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetScope(ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
cls.add_method('SetScope',
'void',
[param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')])
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetSecondary() [member function]
cls.add_method('SetSecondary',
'void',
[])
return
def register_Ns3Ipv4Mask_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
cls.add_constructor([param('uint32_t', 'mask')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
cls.add_constructor([param('char const *', 'mask')])
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
cls.add_method('GetInverse',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint16_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Mask', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'mask')])
return
def register_Ns3Ipv4RoutingHelper_methods(root_module, cls):
## ipv4-routing-helper.h (module 'internet'): ns3::Ipv4RoutingHelper::Ipv4RoutingHelper() [constructor]
cls.add_constructor([])
## ipv4-routing-helper.h (module 'internet'): ns3::Ipv4RoutingHelper::Ipv4RoutingHelper(ns3::Ipv4RoutingHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4RoutingHelper const &', 'arg0')])
## ipv4-routing-helper.h (module 'internet'): ns3::Ipv4RoutingHelper * ns3::Ipv4RoutingHelper::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ipv4RoutingHelper *',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4-routing-helper.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4RoutingHelper::Create(ns3::Ptr<ns3::Node> node) const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4-routing-helper.h (module 'internet'): void ns3::Ipv4RoutingHelper::PrintRoutingTableAllAt(ns3::Time printTime, ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('PrintRoutingTableAllAt',
'void',
[param('ns3::Time', 'printTime'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_const=True)
## ipv4-routing-helper.h (module 'internet'): void ns3::Ipv4RoutingHelper::PrintRoutingTableAllEvery(ns3::Time printInterval, ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('PrintRoutingTableAllEvery',
'void',
[param('ns3::Time', 'printInterval'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_const=True)
## ipv4-routing-helper.h (module 'internet'): void ns3::Ipv4RoutingHelper::PrintRoutingTableAt(ns3::Time printTime, ns3::Ptr<ns3::Node> node, ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('PrintRoutingTableAt',
'void',
[param('ns3::Time', 'printTime'), param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_const=True)
## ipv4-routing-helper.h (module 'internet'): void ns3::Ipv4RoutingHelper::PrintRoutingTableEvery(ns3::Time printInterval, ns3::Ptr<ns3::Node> node, ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('PrintRoutingTableEvery',
'void',
[param('ns3::Time', 'printInterval'), param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_const=True)
return
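# The is_pure_virtual=True flags record that Ipv4RoutingHelper::Copy() and
# Ipv4RoutingHelper::Create() are abstract in C++; the concrete routing
# helpers (static, list, global routing, ...) implement them and are
# registered in their own binding modules.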
def register_Ns3Ipv6Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
cls.add_constructor([param('uint8_t *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
cls.add_method('CombinePrefix',
'ns3::Ipv6Address',
[param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv6Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv6Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
cls.add_method('GetAllHostsMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
cls.add_method('GetAllNodesMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
cls.add_method('GetAllRoutersMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function]
cls.add_method('GetIpv4MappedAddress',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
cls.add_method('IsAllHostsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
cls.add_method('IsAllNodesMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
cls.add_method('IsAllRoutersMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
cls.add_method('IsAny',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsDocumentation() const [member function]
cls.add_method('IsDocumentation',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Address const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() const [member function]
cls.add_method('IsIpv4MappedAddress',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
cls.add_method('IsLinkLocal',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function]
cls.add_method('IsLinkLocalMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
cls.add_method('IsLocalhost',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
cls.add_method('IsSolicitedMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac16Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac64Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac16Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac16Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac64Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac64Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function]
cls.add_method('MakeIpv4MappedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv4Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
cls.add_method('MakeSolicitedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv6Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
cls.add_method('Set',
'void',
[param('uint8_t *', 'address')])
return
def register_Ns3Ipv6InterfaceAddress_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Ipv6InterfaceAddress() [constructor]
cls.add_constructor([])
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Ipv6InterfaceAddress(ns3::Ipv6Address address) [constructor]
cls.add_constructor([param('ns3::Ipv6Address', 'address')])
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Ipv6InterfaceAddress(ns3::Ipv6Address address, ns3::Ipv6Prefix prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6Prefix', 'prefix')])
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Ipv6InterfaceAddress(ns3::Ipv6InterfaceAddress const & o) [copy constructor]
cls.add_constructor([param('ns3::Ipv6InterfaceAddress const &', 'o')])
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6InterfaceAddress::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-interface-address.h (module 'internet'): uint32_t ns3::Ipv6InterfaceAddress::GetNsDadUid() const [member function]
cls.add_method('GetNsDadUid',
'uint32_t',
[],
is_const=True)
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6Prefix ns3::Ipv6InterfaceAddress::GetPrefix() const [member function]
cls.add_method('GetPrefix',
'ns3::Ipv6Prefix',
[],
is_const=True)
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Scope_e ns3::Ipv6InterfaceAddress::GetScope() const [member function]
cls.add_method('GetScope',
'ns3::Ipv6InterfaceAddress::Scope_e',
[],
is_const=True)
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::State_e ns3::Ipv6InterfaceAddress::GetState() const [member function]
cls.add_method('GetState',
'ns3::Ipv6InterfaceAddress::State_e',
[],
is_const=True)
## ipv6-interface-address.h (module 'internet'): void ns3::Ipv6InterfaceAddress::SetAddress(ns3::Ipv6Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Ipv6Address', 'address')])
## ipv6-interface-address.h (module 'internet'): void ns3::Ipv6InterfaceAddress::SetNsDadUid(uint32_t uid) [member function]
cls.add_method('SetNsDadUid',
'void',
[param('uint32_t', 'uid')])
## ipv6-interface-address.h (module 'internet'): void ns3::Ipv6InterfaceAddress::SetScope(ns3::Ipv6InterfaceAddress::Scope_e scope) [member function]
cls.add_method('SetScope',
'void',
[param('ns3::Ipv6InterfaceAddress::Scope_e', 'scope')])
## ipv6-interface-address.h (module 'internet'): void ns3::Ipv6InterfaceAddress::SetState(ns3::Ipv6InterfaceAddress::State_e state) [member function]
cls.add_method('SetState',
'void',
[param('ns3::Ipv6InterfaceAddress::State_e', 'state')])
return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
cls.add_constructor([param('uint8_t *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
cls.add_constructor([param('char const *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
cls.add_constructor([param('uint8_t', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint8_t',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Prefix const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
def register_Ns3NodeContainer_methods(root_module, cls):
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer() [constructor]
cls.add_constructor([])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::Ptr<ns3::Node> node) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(std::string nodeName) [constructor]
cls.add_constructor([param('std::string', 'nodeName')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d, ns3::NodeContainer const & e) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd'), param('ns3::NodeContainer const &', 'e')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::NodeContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NodeContainer', 'other')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(std::string nodeName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'nodeName')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n')])
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n, uint32_t systemId) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n'), param('uint32_t', 'systemId')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NodeContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::Node >',
[param('uint32_t', 'i')],
is_const=True)
## node-container.h (module 'network'): static ns3::NodeContainer ns3::NodeContainer::GetGlobal() [member function]
cls.add_method('GetGlobal',
'ns3::NodeContainer',
[],
is_static=True)
## node-container.h (module 'network'): uint32_t ns3::NodeContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
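# Begin()/End() return the underlying std::vector iterators, which are clumsy
# to drive from Python; scripts normally iterate by index instead. A minimal
# sketch, assuming an ns.network.NodeContainer built from these bindings:
#
#   nodes = ns.network.NodeContainer()
#   nodes.Create(3)
#   for i in range(nodes.GetN()):
#       node = nodes.Get(i)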
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3ObjectFactory_methods(root_module, cls):
cls.add_output_stream_operator()
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
cls.add_constructor([param('std::string', 'typeId')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::Object >',
[],
is_const=True)
## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('Set',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('ns3::TypeId', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('char const *', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('std::string', 'tid')])
return
def register_Ns3PacketMetadata_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(uint64_t uid, uint32_t size) [constructor]
cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(ns3::PacketMetadata const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddAtEnd(ns3::PacketMetadata const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddPaddingAtEnd(uint32_t end) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::PacketMetadata::BeginItem(ns3::Buffer buffer) const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[param('ns3::Buffer', 'buffer')],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata ns3::PacketMetadata::CreateFragment(uint32_t start, uint32_t end) const [member function]
cls.add_method('CreateFragment',
'ns3::PacketMetadata',
[param('uint32_t', 'start'), param('uint32_t', 'end')],
is_const=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::Enable() [member function]
cls.add_method('Enable',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): uint64_t ns3::PacketMetadata::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('RemoveHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('RemoveTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item() [constructor]
cls.add_constructor([])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item(ns3::PacketMetadata::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::current [variable]
cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentSize [variable]
cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromEnd [variable]
cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromStart [variable]
cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::isFragment [variable]
cls.add_instance_attribute('isFragment', 'bool', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
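# add_instance_attribute() exposes the public data members of
# PacketMetadata::Item (current, currentSize, tid, ...) as plain read/write
# Python attributes rather than as getter/setter methods.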
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata::ItemIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata const * metadata, ns3::Buffer buffer) [constructor]
cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
## packet-metadata.h (module 'network'): bool ns3::PacketMetadata::ItemIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item ns3::PacketMetadata::ItemIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketMetadata::Item',
[])
return
def register_Ns3PacketTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::PacketTagIterator(ns3::PacketTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::PacketTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator::Item ns3::PacketTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketTagIterator::Item',
[])
return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::Item::Item(ns3::PacketTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): void ns3::PacketTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::PacketTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3PacketTagList_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList(ns3::PacketTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::Add(ns3::Tag const & tag) const [member function]
cls.add_method('Add',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData const * ns3::PacketTagList::Head() const [member function]
cls.add_method('Head',
'ns3::PacketTagList::TagData const *',
[],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Peek(ns3::Tag & tag) const [member function]
cls.add_method('Peek',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Remove(ns3::Tag & tag) [member function]
cls.add_method('Remove',
'bool',
[param('ns3::Tag &', 'tag')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Replace(ns3::Tag & tag) [member function]
cls.add_method('Replace',
'bool',
[param('ns3::Tag &', 'tag')])
return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData(ns3::PacketTagList::TagData const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::count [variable]
cls.add_instance_attribute('count', 'uint32_t', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::data [variable]
cls.add_instance_attribute('data', 'uint8_t [ 20 ]', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::next [variable]
cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
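# SimpleRefCount<Object, ObjectBase, ObjectDeleter> is the intrusive
# reference-counting base of ns3::Object; only its constructors and the
# static Cleanup() hook are exposed here, and the Ref()/Unref() bookkeeping
# is presumably handled by the wrapper's memory policy rather than by
# explicit method bindings.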
def register_Ns3Simulator_methods(root_module, cls):
## simulator.h (module 'core'): ns3::Simulator::Simulator(ns3::Simulator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
## simulator.h (module 'core'): static void ns3::Simulator::Cancel(ns3::EventId const & id) [member function]
cls.add_method('Cancel',
'void',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Destroy() [member function]
cls.add_method('Destroy',
'void',
[],
is_static=True)
## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetContext() [member function]
cls.add_method('GetContext',
'uint32_t',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetDelayLeft(ns3::EventId const & id) [member function]
cls.add_method('GetDelayLeft',
'ns3::Time',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static ns3::Ptr<ns3::SimulatorImpl> ns3::Simulator::GetImplementation() [member function]
cls.add_method('GetImplementation',
'ns3::Ptr< ns3::SimulatorImpl >',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetMaximumSimulationTime() [member function]
cls.add_method('GetMaximumSimulationTime',
'ns3::Time',
[],
is_static=True)
## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetSystemId() [member function]
cls.add_method('GetSystemId',
'uint32_t',
[],
is_static=True)
## simulator.h (module 'core'): static bool ns3::Simulator::IsExpired(ns3::EventId const & id) [member function]
cls.add_method('IsExpired',
'bool',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static bool ns3::Simulator::IsFinished() [member function]
cls.add_method('IsFinished',
'bool',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Now() [member function]
cls.add_method('Now',
'ns3::Time',
[],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Remove(ns3::EventId const & id) [member function]
cls.add_method('Remove',
'void',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl) [member function]
cls.add_method('SetImplementation',
'void',
[param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
cls.add_method('SetScheduler',
'void',
[param('ns3::ObjectFactory', 'schedulerFactory')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Stop() [member function]
cls.add_method('Stop',
'void',
[],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Stop(ns3::Time const & time) [member function]
cls.add_method('Stop',
'void',
[param('ns3::Time const &', 'time')],
is_static=True)
return
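# The registration above exposes the static ns3::Simulator control API to
# Python. Rough usage sketch, assuming the conventional 'ns.core' import path
# of the ns-3 Python bindings and the Seconds() helper bound in the core module
# (both are assumptions, shown for illustration only):
#
#   import ns.core
#   ns.core.Simulator.Stop(ns.core.Seconds(10.0))   # Stop(Time) as registered above
#   # ... schedule and run events elsewhere ...
#   now = ns.core.Simulator.Now()                    # current simulation time
#   ns.core.Simulator.Destroy()                      # release simulator resources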
def register_Ns3Tag_methods(root_module, cls):
## tag.h (module 'network'): ns3::Tag::Tag() [constructor]
cls.add_constructor([])
## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Tag const &', 'arg0')])
## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_virtual=True)
## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3TagBuffer_methods(root_module, cls):
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
cls.add_method('CopyFrom',
'void',
[param('ns3::TagBuffer', 'o')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
cls.add_method('ReadDouble',
'double',
[])
## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
cls.add_method('TrimAtEnd',
'void',
[param('uint32_t', 'trim')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
cls.add_method('WriteDouble',
'void',
[param('double', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'v')])
return
def register_Ns3Timer_methods(root_module, cls):
## timer.h (module 'core'): ns3::Timer::Timer(ns3::Timer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Timer const &', 'arg0')])
## timer.h (module 'core'): ns3::Timer::Timer() [constructor]
cls.add_constructor([])
## timer.h (module 'core'): ns3::Timer::Timer(ns3::Timer::DestroyPolicy destroyPolicy) [constructor]
cls.add_constructor([param('ns3::Timer::DestroyPolicy', 'destroyPolicy')])
## timer.h (module 'core'): void ns3::Timer::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## timer.h (module 'core'): ns3::Time ns3::Timer::GetDelay() const [member function]
cls.add_method('GetDelay',
'ns3::Time',
[],
is_const=True)
## timer.h (module 'core'): ns3::Time ns3::Timer::GetDelayLeft() const [member function]
cls.add_method('GetDelayLeft',
'ns3::Time',
[],
is_const=True)
## timer.h (module 'core'): ns3::Timer::State ns3::Timer::GetState() const [member function]
cls.add_method('GetState',
'ns3::Timer::State',
[],
is_const=True)
## timer.h (module 'core'): bool ns3::Timer::IsExpired() const [member function]
cls.add_method('IsExpired',
'bool',
[],
is_const=True)
## timer.h (module 'core'): bool ns3::Timer::IsRunning() const [member function]
cls.add_method('IsRunning',
'bool',
[],
is_const=True)
## timer.h (module 'core'): bool ns3::Timer::IsSuspended() const [member function]
cls.add_method('IsSuspended',
'bool',
[],
is_const=True)
## timer.h (module 'core'): void ns3::Timer::Remove() [member function]
cls.add_method('Remove',
'void',
[])
## timer.h (module 'core'): void ns3::Timer::Resume() [member function]
cls.add_method('Resume',
'void',
[])
## timer.h (module 'core'): void ns3::Timer::Schedule() [member function]
cls.add_method('Schedule',
'void',
[])
## timer.h (module 'core'): void ns3::Timer::Schedule(ns3::Time delay) [member function]
cls.add_method('Schedule',
'void',
[param('ns3::Time', 'delay')])
## timer.h (module 'core'): void ns3::Timer::SetDelay(ns3::Time const & delay) [member function]
cls.add_method('SetDelay',
'void',
[param('ns3::Time const &', 'delay')])
## timer.h (module 'core'): void ns3::Timer::Suspend() [member function]
cls.add_method('Suspend',
'void',
[])
return
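# ns3::Timer is exposed with its delay and state-management API; the templated
# SetFunction/SetArguments members do not appear in this scan. Minimal sketch
# (the 'ns.core' import path and Seconds() helper are assumptions):
#
#   t = ns.core.Timer()
#   t.SetDelay(ns.core.Seconds(1.0))   # configure the delay, as registered above
#   state = t.GetState()               # query timer state
#   running = t.IsRunning()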
def register_Ns3TimerImpl_methods(root_module, cls):
## timer-impl.h (module 'core'): ns3::TimerImpl::TimerImpl() [constructor]
cls.add_constructor([])
## timer-impl.h (module 'core'): ns3::TimerImpl::TimerImpl(ns3::TimerImpl const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimerImpl const &', 'arg0')])
## timer-impl.h (module 'core'): void ns3::TimerImpl::Invoke() [member function]
cls.add_method('Invoke',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## timer-impl.h (module 'core'): ns3::EventId ns3::TimerImpl::Schedule(ns3::Time const & delay) [member function]
cls.add_method('Schedule',
'ns3::EventId',
[param('ns3::Time const &', 'delay')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
cls.add_method('GetHash',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
cls.add_method('LookupByHash',
'ns3::TypeId',
[param('uint32_t', 'hash')],
is_static=True)
## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
cls.add_method('LookupByHashFailSafe',
'bool',
[param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
is_static=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
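# ns3::TypeId carries ns-3's attribute and trace-source metadata; the block
# above exposes lookup and introspection to Python. Sketch (import path assumed;
# the DSDV routing protocol TypeId name is used purely as an example):
#
#   tid = ns.core.TypeId.LookupByName("ns3::dsdv::RoutingProtocol")
#   for i in range(tid.GetAttributeN()):
#       info = tid.GetAttribute(i)     # AttributeInformation, registered below
#       print(info.name, info.help)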
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Int64x64_t_methods(root_module, cls):
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_unary_numeric_operator('-')
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
cls.add_constructor([])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
cls.add_method('GetHigh',
'int64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
cls.add_method('GetLow',
'uint64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
cls.add_method('Invert',
'ns3::int64x64_t',
[param('uint64_t', 'v')],
is_static=True)
## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
cls.add_method('MulByInvert',
'void',
[param('ns3::int64x64_t const &', 'o')])
return
def register_Ns3Chunk_methods(root_module, cls):
## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
cls.add_constructor([])
## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3DsdvHelper_methods(root_module, cls):
## dsdv-helper.h (module 'dsdv'): ns3::DsdvHelper::DsdvHelper(ns3::DsdvHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DsdvHelper const &', 'arg0')])
## dsdv-helper.h (module 'dsdv'): ns3::DsdvHelper::DsdvHelper() [constructor]
cls.add_constructor([])
## dsdv-helper.h (module 'dsdv'): ns3::DsdvHelper * ns3::DsdvHelper::Copy() const [member function]
cls.add_method('Copy',
'ns3::DsdvHelper *',
[],
is_const=True, is_virtual=True)
## dsdv-helper.h (module 'dsdv'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::DsdvHelper::Create(ns3::Ptr<ns3::Node> node) const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_const=True, is_virtual=True)
## dsdv-helper.h (module 'dsdv'): void ns3::DsdvHelper::Set(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('Set',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
return
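# DsdvHelper is the user-facing class of this module: Create() instantiates a
# DSDV routing protocol for a node and Set() forwards attribute values to it.
# Rough sketch (the 'ns.dsdv'/'ns.core' import paths and the attribute name are
# assumptions for illustration):
#
#   import ns.core
#   import ns.dsdv
#   dsdv = ns.dsdv.DsdvHelper()
#   dsdv.Set("PeriodicUpdateInterval", ns.core.TimeValue(ns.core.Seconds(15)))
#   # typically passed to an InternetStackHelper as the routing helper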
def register_Ns3Header_methods(root_module, cls):
cls.add_output_stream_operator()
## header.h (module 'network'): ns3::Header::Header() [constructor]
cls.add_constructor([])
## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Header const &', 'arg0')])
## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Ipv4Header_methods(root_module, cls):
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::Ipv4Header(ns3::Ipv4Header const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Header const &', 'arg0')])
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::Ipv4Header() [constructor]
cls.add_constructor([])
## ipv4-header.h (module 'internet'): uint32_t ns3::Ipv4Header::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## ipv4-header.h (module 'internet'): std::string ns3::Ipv4Header::DscpTypeToString(ns3::Ipv4Header::DscpType dscp) const [member function]
cls.add_method('DscpTypeToString',
'std::string',
[param('ns3::Ipv4Header::DscpType', 'dscp')],
is_const=True)
## ipv4-header.h (module 'internet'): std::string ns3::Ipv4Header::EcnTypeToString(ns3::Ipv4Header::EcnType ecn) const [member function]
cls.add_method('EcnTypeToString',
'std::string',
[param('ns3::Ipv4Header::EcnType', 'ecn')],
is_const=True)
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::EnableChecksum() [member function]
cls.add_method('EnableChecksum',
'void',
[])
## ipv4-header.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Header::GetDestination() const [member function]
cls.add_method('GetDestination',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType ns3::Ipv4Header::GetDscp() const [member function]
cls.add_method('GetDscp',
'ns3::Ipv4Header::DscpType',
[],
is_const=True)
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType ns3::Ipv4Header::GetEcn() const [member function]
cls.add_method('GetEcn',
'ns3::Ipv4Header::EcnType',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetFragmentOffset() const [member function]
cls.add_method('GetFragmentOffset',
'uint16_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetIdentification() const [member function]
cls.add_method('GetIdentification',
'uint16_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): ns3::TypeId ns3::Ipv4Header::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetPayloadSize() const [member function]
cls.add_method('GetPayloadSize',
'uint16_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetProtocol() const [member function]
cls.add_method('GetProtocol',
'uint8_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint32_t ns3::Ipv4Header::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ipv4-header.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Header::GetSource() const [member function]
cls.add_method('GetSource',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetTos() const [member function]
cls.add_method('GetTos',
'uint8_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetTtl() const [member function]
cls.add_method('GetTtl',
'uint8_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): static ns3::TypeId ns3::Ipv4Header::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsChecksumOk() const [member function]
cls.add_method('IsChecksumOk',
'bool',
[],
is_const=True)
## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsDontFragment() const [member function]
cls.add_method('IsDontFragment',
'bool',
[],
is_const=True)
## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsLastFragment() const [member function]
cls.add_method('IsLastFragment',
'bool',
[],
is_const=True)
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDestination(ns3::Ipv4Address destination) [member function]
cls.add_method('SetDestination',
'void',
[param('ns3::Ipv4Address', 'destination')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDontFragment() [member function]
cls.add_method('SetDontFragment',
'void',
[])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDscp(ns3::Ipv4Header::DscpType dscp) [member function]
cls.add_method('SetDscp',
'void',
[param('ns3::Ipv4Header::DscpType', 'dscp')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetEcn(ns3::Ipv4Header::EcnType ecn) [member function]
cls.add_method('SetEcn',
'void',
[param('ns3::Ipv4Header::EcnType', 'ecn')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetFragmentOffset(uint16_t offsetBytes) [member function]
cls.add_method('SetFragmentOffset',
'void',
[param('uint16_t', 'offsetBytes')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetIdentification(uint16_t identification) [member function]
cls.add_method('SetIdentification',
'void',
[param('uint16_t', 'identification')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetLastFragment() [member function]
cls.add_method('SetLastFragment',
'void',
[])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetMayFragment() [member function]
cls.add_method('SetMayFragment',
'void',
[])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetMoreFragments() [member function]
cls.add_method('SetMoreFragments',
'void',
[])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetPayloadSize(uint16_t size) [member function]
cls.add_method('SetPayloadSize',
'void',
[param('uint16_t', 'size')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetProtocol(uint8_t num) [member function]
cls.add_method('SetProtocol',
'void',
[param('uint8_t', 'num')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetSource(ns3::Ipv4Address source) [member function]
cls.add_method('SetSource',
'void',
[param('ns3::Ipv4Address', 'source')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetTos(uint8_t tos) [member function]
cls.add_method('SetTos',
'void',
[param('uint8_t', 'tos')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetTtl(uint8_t ttl) [member function]
cls.add_method('SetTtl',
'void',
[param('uint8_t', 'ttl')])
return
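# Ipv4Header appears in this module's scan through the internet-module
# dependency. Minimal sketch of the setters registered above (the
# 'ns.internet'/'ns.network' import paths are assumptions):
#
#   h = ns.internet.Ipv4Header()
#   h.SetTtl(64)
#   h.SetProtocol(17)                                  # e.g. UDP
#   h.SetSource(ns.network.Ipv4Address("10.1.1.1"))
#   size = h.GetSerializedSize()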
def register_Ns3Ipv6Header_methods(root_module, cls):
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::Ipv6Header(ns3::Ipv6Header const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Header const &', 'arg0')])
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::Ipv6Header() [constructor]
cls.add_constructor([])
## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## ipv6-header.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6Header::GetDestinationAddress() const [member function]
cls.add_method('GetDestinationAddress',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::GetFlowLabel() const [member function]
cls.add_method('GetFlowLabel',
'uint32_t',
[],
is_const=True)
## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetHopLimit() const [member function]
cls.add_method('GetHopLimit',
'uint8_t',
[],
is_const=True)
## ipv6-header.h (module 'internet'): ns3::TypeId ns3::Ipv6Header::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetNextHeader() const [member function]
cls.add_method('GetNextHeader',
'uint8_t',
[],
is_const=True)
## ipv6-header.h (module 'internet'): uint16_t ns3::Ipv6Header::GetPayloadLength() const [member function]
cls.add_method('GetPayloadLength',
'uint16_t',
[],
is_const=True)
## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ipv6-header.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6Header::GetSourceAddress() const [member function]
cls.add_method('GetSourceAddress',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetTrafficClass() const [member function]
cls.add_method('GetTrafficClass',
'uint8_t',
[],
is_const=True)
## ipv6-header.h (module 'internet'): static ns3::TypeId ns3::Ipv6Header::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetDestinationAddress(ns3::Ipv6Address dst) [member function]
cls.add_method('SetDestinationAddress',
'void',
[param('ns3::Ipv6Address', 'dst')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetFlowLabel(uint32_t flow) [member function]
cls.add_method('SetFlowLabel',
'void',
[param('uint32_t', 'flow')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetHopLimit(uint8_t limit) [member function]
cls.add_method('SetHopLimit',
'void',
[param('uint8_t', 'limit')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetNextHeader(uint8_t next) [member function]
cls.add_method('SetNextHeader',
'void',
[param('uint8_t', 'next')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetPayloadLength(uint16_t len) [member function]
cls.add_method('SetPayloadLength',
'void',
[param('uint16_t', 'len')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetSourceAddress(ns3::Ipv6Address src) [member function]
cls.add_method('SetSourceAddress',
'void',
[param('ns3::Ipv6Address', 'src')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetTrafficClass(uint8_t traffic) [member function]
cls.add_method('SetTrafficClass',
'void',
[param('uint8_t', 'traffic')])
return
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Initialize() [member function]
cls.add_method('Initialize',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
cls.add_method('DoInitialize',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
def register_Ns3RandomVariableStream_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::RandomVariableStream::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::RandomVariableStream::RandomVariableStream() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetStream(int64_t stream) [member function]
cls.add_method('SetStream',
'void',
[param('int64_t', 'stream')])
## random-variable-stream.h (module 'core'): int64_t ns3::RandomVariableStream::GetStream() const [member function]
cls.add_method('GetStream',
'int64_t',
[],
is_const=True)
## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetAntithetic(bool isAntithetic) [member function]
cls.add_method('SetAntithetic',
'void',
[param('bool', 'isAntithetic')])
## random-variable-stream.h (module 'core'): bool ns3::RandomVariableStream::IsAntithetic() const [member function]
cls.add_method('IsAntithetic',
'bool',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::RandomVariableStream::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_pure_virtual=True, is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::RandomVariableStream::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_pure_virtual=True, is_virtual=True)
## random-variable-stream.h (module 'core'): ns3::RngStream * ns3::RandomVariableStream::Peek() const [member function]
cls.add_method('Peek',
'ns3::RngStream *',
[],
is_const=True, visibility='protected')
return
def register_Ns3SequentialRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::SequentialRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable::SequentialRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMin() const [member function]
cls.add_method('GetMin',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMax() const [member function]
cls.add_method('GetMax',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): ns3::Ptr<ns3::RandomVariableStream> ns3::SequentialRandomVariable::GetIncrement() const [member function]
cls.add_method('GetIncrement',
'ns3::Ptr< ns3::RandomVariableStream >',
[],
is_const=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetConsecutive() const [member function]
cls.add_method('GetConsecutive',
'uint32_t',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4MulticastRoute > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4Route > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount(ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount(ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
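# --- Hand-written usage sketch (not pybindgen output) -------------------------
# The SimpleRefCount<...> registrations above exist so that objects handed
# around as ns3::Ptr<> keep a correct reference count when they cross into
# Python; scripts treat them as ordinary Python objects and never call
# Ref()/Unref() by hand. A minimal sketch, assuming the usual ns.network
# module layout of an ns-3 Python build:
def _sketch_ptr_handling():
    import ns.network
    pkt = ns.network.Packet(128)   # Ptr<Packet>-managed object with a 128-byte payload
    copy = pkt.Copy()              # a second Ptr<Packet>; both are released when the refs drop
    return copy.GetSize()          # -> 128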
def register_Ns3Socket_methods(root_module, cls):
## socket.h (module 'network'): ns3::Socket::Socket(ns3::Socket const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Socket const &', 'arg0')])
## socket.h (module 'network'): ns3::Socket::Socket() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): int ns3::Socket::Bind(ns3::Address const & address) [member function]
cls.add_method('Bind',
'int',
[param('ns3::Address const &', 'address')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Bind() [member function]
cls.add_method('Bind',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Bind6() [member function]
cls.add_method('Bind6',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::BindToNetDevice(ns3::Ptr<ns3::NetDevice> netdevice) [member function]
cls.add_method('BindToNetDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'netdevice')],
is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Close() [member function]
cls.add_method('Close',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Connect(ns3::Address const & address) [member function]
cls.add_method('Connect',
'int',
[param('ns3::Address const &', 'address')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): static ns3::Ptr<ns3::Socket> ns3::Socket::CreateSocket(ns3::Ptr<ns3::Node> node, ns3::TypeId tid) [member function]
cls.add_method('CreateSocket',
'ns3::Ptr< ns3::Socket >',
[param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::TypeId', 'tid')],
is_static=True)
## socket.h (module 'network'): bool ns3::Socket::GetAllowBroadcast() const [member function]
cls.add_method('GetAllowBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Socket::GetBoundNetDevice() [member function]
cls.add_method('GetBoundNetDevice',
'ns3::Ptr< ns3::NetDevice >',
[])
## socket.h (module 'network'): ns3::Socket::SocketErrno ns3::Socket::GetErrno() const [member function]
cls.add_method('GetErrno',
'ns3::Socket::SocketErrno',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpTos() const [member function]
cls.add_method('GetIpTos',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpTtl() const [member function]
cls.add_method('GetIpTtl',
'uint8_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpv6HopLimit() const [member function]
cls.add_method('GetIpv6HopLimit',
'uint8_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpv6Tclass() const [member function]
cls.add_method('GetIpv6Tclass',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Node> ns3::Socket::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::Socket::GetRxAvailable() const [member function]
cls.add_method('GetRxAvailable',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::GetSockName(ns3::Address & address) const [member function]
cls.add_method('GetSockName',
'int',
[param('ns3::Address &', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Socket::SocketType ns3::Socket::GetSocketType() const [member function]
cls.add_method('GetSocketType',
'ns3::Socket::SocketType',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::Socket::GetTxAvailable() const [member function]
cls.add_method('GetTxAvailable',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::Socket::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpRecvTos() const [member function]
cls.add_method('IsIpRecvTos',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpRecvTtl() const [member function]
cls.add_method('IsIpRecvTtl',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpv6RecvHopLimit() const [member function]
cls.add_method('IsIpv6RecvHopLimit',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpv6RecvTclass() const [member function]
cls.add_method('IsIpv6RecvTclass',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsRecvPktInfo() const [member function]
cls.add_method('IsRecvPktInfo',
'bool',
[],
is_const=True)
## socket.h (module 'network'): int ns3::Socket::Listen() [member function]
cls.add_method('Listen',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv(uint32_t maxSize, uint32_t flags) [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv() [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[])
## socket.h (module 'network'): int ns3::Socket::Recv(uint8_t * buf, uint32_t size, uint32_t flags) [member function]
cls.add_method('Recv',
'int',
[param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(uint32_t maxSize, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('ns3::Address &', 'fromAddress')])
## socket.h (module 'network'): int ns3::Socket::RecvFrom(uint8_t * buf, uint32_t size, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'int',
[param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')])
## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p')])
## socket.h (module 'network'): int ns3::Socket::Send(uint8_t const * buf, uint32_t size, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
## socket.h (module 'network'): int ns3::Socket::SendTo(ns3::Ptr<ns3::Packet> p, uint32_t flags, ns3::Address const & toAddress) [member function]
cls.add_method('SendTo',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags'), param('ns3::Address const &', 'toAddress')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::SendTo(uint8_t const * buf, uint32_t size, uint32_t flags, ns3::Address const & address) [member function]
cls.add_method('SendTo',
'int',
[param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address const &', 'address')])
## socket.h (module 'network'): void ns3::Socket::SetAcceptCallback(ns3::Callback<bool, ns3::Ptr<ns3::Socket>, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionRequest, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> newConnectionCreated) [member function]
cls.add_method('SetAcceptCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionRequest'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'newConnectionCreated')])
## socket.h (module 'network'): bool ns3::Socket::SetAllowBroadcast(bool allowBroadcast) [member function]
cls.add_method('SetAllowBroadcast',
'bool',
[param('bool', 'allowBroadcast')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetCloseCallbacks(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> normalClose, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> errorClose) [member function]
cls.add_method('SetCloseCallbacks',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'normalClose'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'errorClose')])
## socket.h (module 'network'): void ns3::Socket::SetConnectCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionSucceeded, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionFailed) [member function]
cls.add_method('SetConnectCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionSucceeded'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionFailed')])
## socket.h (module 'network'): void ns3::Socket::SetDataSentCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> dataSent) [member function]
cls.add_method('SetDataSentCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'dataSent')])
## socket.h (module 'network'): void ns3::Socket::SetIpRecvTos(bool ipv4RecvTos) [member function]
cls.add_method('SetIpRecvTos',
'void',
[param('bool', 'ipv4RecvTos')])
## socket.h (module 'network'): void ns3::Socket::SetIpRecvTtl(bool ipv4RecvTtl) [member function]
cls.add_method('SetIpRecvTtl',
'void',
[param('bool', 'ipv4RecvTtl')])
## socket.h (module 'network'): void ns3::Socket::SetIpTos(uint8_t ipTos) [member function]
cls.add_method('SetIpTos',
'void',
[param('uint8_t', 'ipTos')])
## socket.h (module 'network'): void ns3::Socket::SetIpTtl(uint8_t ipTtl) [member function]
cls.add_method('SetIpTtl',
'void',
[param('uint8_t', 'ipTtl')],
is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetIpv6HopLimit(uint8_t ipHopLimit) [member function]
cls.add_method('SetIpv6HopLimit',
'void',
[param('uint8_t', 'ipHopLimit')],
is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetIpv6RecvHopLimit(bool ipv6RecvHopLimit) [member function]
cls.add_method('SetIpv6RecvHopLimit',
'void',
[param('bool', 'ipv6RecvHopLimit')])
## socket.h (module 'network'): void ns3::Socket::SetIpv6RecvTclass(bool ipv6RecvTclass) [member function]
cls.add_method('SetIpv6RecvTclass',
'void',
[param('bool', 'ipv6RecvTclass')])
## socket.h (module 'network'): void ns3::Socket::SetIpv6Tclass(int ipTclass) [member function]
cls.add_method('SetIpv6Tclass',
'void',
[param('int', 'ipTclass')])
## socket.h (module 'network'): void ns3::Socket::SetRecvCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> arg0) [member function]
cls.add_method('SetRecvCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'arg0')])
## socket.h (module 'network'): void ns3::Socket::SetRecvPktInfo(bool flag) [member function]
cls.add_method('SetRecvPktInfo',
'void',
[param('bool', 'flag')])
## socket.h (module 'network'): void ns3::Socket::SetSendCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> sendCb) [member function]
cls.add_method('SetSendCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'sendCb')])
## socket.h (module 'network'): int ns3::Socket::ShutdownRecv() [member function]
cls.add_method('ShutdownRecv',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::ShutdownSend() [member function]
cls.add_method('ShutdownSend',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## socket.h (module 'network'): bool ns3::Socket::IsManualIpTos() const [member function]
cls.add_method('IsManualIpTos',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpTtl() const [member function]
cls.add_method('IsManualIpTtl',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpv6HopLimit() const [member function]
cls.add_method('IsManualIpv6HopLimit',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpv6Tclass() const [member function]
cls.add_method('IsManualIpv6Tclass',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyConnectionFailed() [member function]
cls.add_method('NotifyConnectionFailed',
'void',
[],
visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::NotifyConnectionRequest(ns3::Address const & from) [member function]
cls.add_method('NotifyConnectionRequest',
'bool',
[param('ns3::Address const &', 'from')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyConnectionSucceeded() [member function]
cls.add_method('NotifyConnectionSucceeded',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyDataRecv() [member function]
cls.add_method('NotifyDataRecv',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyDataSent(uint32_t size) [member function]
cls.add_method('NotifyDataSent',
'void',
[param('uint32_t', 'size')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyErrorClose() [member function]
cls.add_method('NotifyErrorClose',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyNewConnectionCreated(ns3::Ptr<ns3::Socket> socket, ns3::Address const & from) [member function]
cls.add_method('NotifyNewConnectionCreated',
'void',
[param('ns3::Ptr< ns3::Socket >', 'socket'), param('ns3::Address const &', 'from')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyNormalClose() [member function]
cls.add_method('NotifyNormalClose',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifySend(uint32_t spaceAvailable) [member function]
cls.add_method('NotifySend',
'void',
[param('uint32_t', 'spaceAvailable')],
visibility='protected')
return
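# --- Hand-written usage sketch (not pybindgen output) -------------------------
# How the ns3::Socket API registered above is typically driven from a Python
# script. The ns.core/ns.network module split and the "ns3::UdpSocketFactory"
# TypeId string are assumptions about a standard ns-3 build; node is any
# existing ns3::Node.
def _sketch_socket_usage(node):
    import ns.core
    import ns.network
    tid = ns.core.TypeId.LookupByName("ns3::UdpSocketFactory")
    sock = ns.network.Socket.CreateSocket(node, tid)   # static factory bound above
    sock.Bind()                                        # parameterless Bind() overload bound above
    sock.SetIpTtl(32)                                  # per-socket TTL setter bound above
    def _on_recv(s):
        # ns-3 hands the receiving socket to the callback; drain one packet per call.
        s.Recv()
    # A Python callable is typically accepted where an ns3::Callback is expected.
    sock.SetRecvCallback(_on_recv)
    return sock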
def register_Ns3SocketAddressTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketAddressTag::SocketAddressTag(ns3::SocketAddressTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketAddressTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketAddressTag::SocketAddressTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketAddressTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): ns3::Address ns3::SocketAddressTag::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_const=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketAddressTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketAddressTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketAddressTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketAddressTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketAddressTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketAddressTag::SetAddress(ns3::Address addr) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'addr')])
return
def register_Ns3SocketIpTosTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketIpTosTag::SocketIpTosTag(ns3::SocketIpTosTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketIpTosTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketIpTosTag::SocketIpTosTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketIpTosTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketIpTosTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketIpTosTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::SocketIpTosTag::GetTos() const [member function]
cls.add_method('GetTos',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpTosTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketIpTosTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpTosTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpTosTag::SetTos(uint8_t tos) [member function]
cls.add_method('SetTos',
'void',
[param('uint8_t', 'tos')])
return
def register_Ns3SocketIpTtlTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketIpTtlTag::SocketIpTtlTag(ns3::SocketIpTtlTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketIpTtlTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketIpTtlTag::SocketIpTtlTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketIpTtlTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketIpTtlTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketIpTtlTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::SocketIpTtlTag::GetTtl() const [member function]
cls.add_method('GetTtl',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpTtlTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketIpTtlTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpTtlTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpTtlTag::SetTtl(uint8_t ttl) [member function]
cls.add_method('SetTtl',
'void',
[param('uint8_t', 'ttl')])
return
def register_Ns3SocketIpv6HopLimitTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag::SocketIpv6HopLimitTag(ns3::SocketIpv6HopLimitTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketIpv6HopLimitTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag::SocketIpv6HopLimitTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::SocketIpv6HopLimitTag::GetHopLimit() const [member function]
cls.add_method('GetHopLimit',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketIpv6HopLimitTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketIpv6HopLimitTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpv6HopLimitTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::SetHopLimit(uint8_t hopLimit) [member function]
cls.add_method('SetHopLimit',
'void',
[param('uint8_t', 'hopLimit')])
return
def register_Ns3SocketIpv6TclassTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketIpv6TclassTag::SocketIpv6TclassTag(ns3::SocketIpv6TclassTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketIpv6TclassTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketIpv6TclassTag::SocketIpv6TclassTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketIpv6TclassTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketIpv6TclassTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::SocketIpv6TclassTag::GetTclass() const [member function]
cls.add_method('GetTclass',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpv6TclassTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::SetTclass(uint8_t tclass) [member function]
cls.add_method('SetTclass',
'void',
[param('uint8_t', 'tclass')])
return
def register_Ns3SocketSetDontFragmentTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag::SocketSetDontFragmentTag(ns3::SocketSetDontFragmentTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketSetDontFragmentTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag::SocketSetDontFragmentTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Disable() [member function]
cls.add_method('Disable',
'void',
[])
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Enable() [member function]
cls.add_method('Enable',
'void',
[])
## socket.h (module 'network'): ns3::TypeId ns3::SocketSetDontFragmentTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketSetDontFragmentTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketSetDontFragmentTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): bool ns3::SocketSetDontFragmentTag::IsEnabled() const [member function]
cls.add_method('IsEnabled',
'bool',
[],
is_const=True)
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
return
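# --- Hand-written usage sketch (not pybindgen output) -------------------------
# The Socket*Tag classes registered above are packet tags that carry per-packet
# socket options (TTL, TOS, hop limit, ...). A minimal sketch of the generic
# set/add/peek pattern, assuming the Packet tag methods are exposed under
# ns.network as in a standard ns-3 build:
def _sketch_socket_tag_usage():
    import ns.network
    pkt = ns.network.Packet()
    ttl_tag = ns.network.SocketIpTtlTag()
    ttl_tag.SetTtl(32)
    pkt.AddPacketTag(ttl_tag)
    probe = ns.network.SocketIpTtlTag()
    if pkt.PeekPacketTag(probe):       # fills probe in place when the tag is present
        return probe.GetTtl()          # -> 32
    return None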
def register_Ns3Time_methods(root_module, cls):
cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## nstime.h (module 'core'): ns3::Time::Time() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
cls.add_constructor([param('ns3::Time const &', 'o')])
## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
cls.add_constructor([param('std::string const &', 's')])
## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & value) [constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
cls.add_method('Compare',
'int',
[param('ns3::Time const &', 'o')],
is_const=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & from, ns3::Time::Unit timeUnit) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'value')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromDouble',
'ns3::Time',
[param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromInteger',
'ns3::Time',
[param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetDays() const [member function]
cls.add_method('GetDays',
'double',
[],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
cls.add_method('GetFemtoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::GetHours() const [member function]
cls.add_method('GetHours',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
cls.add_method('GetInteger',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
cls.add_method('GetMicroSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
cls.add_method('GetMilliSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::GetMinutes() const [member function]
cls.add_method('GetMinutes',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
cls.add_method('GetNanoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
cls.add_method('GetPicoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
cls.add_method('GetResolution',
'ns3::Time::Unit',
[],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
cls.add_method('GetSeconds',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
cls.add_method('GetTimeStep',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::GetYears() const [member function]
cls.add_method('GetYears',
'double',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
cls.add_method('IsNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
cls.add_method('IsPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
cls.add_method('IsStrictlyNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
cls.add_method('IsStrictlyPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
cls.add_method('IsZero',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::Max() [member function]
cls.add_method('Max',
'ns3::Time',
[],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function]
cls.add_method('Min',
'ns3::Time',
[],
is_static=True)
## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
cls.add_method('SetResolution',
'void',
[param('ns3::Time::Unit', 'resolution')],
is_static=True)
## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function]
cls.add_method('StaticInit',
'bool',
[],
is_static=True)
## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('To',
'ns3::int64x64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToDouble',
'double',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToInteger',
'int64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
return
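# --- Hand-written usage sketch (not pybindgen output) -------------------------
# Arithmetic and comparison on ns3::Time as registered above (operator+,
# operator-, the comparison operators and the Get*Seconds accessors). The
# ns.core module path and the Seconds/MilliSeconds helpers are assumptions
# about a standard ns-3 Python build.
def _sketch_time_usage():
    import ns.core
    t1 = ns.core.Seconds(1.5)
    t2 = ns.core.MilliSeconds(250)
    total = t1 + t2                       # binary operator+ registered above
    assert total > t1 and total >= t2     # comparison operators registered above
    return total.GetSeconds()             # -> 1.75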
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Trailer_methods(root_module, cls):
cls.add_output_stream_operator()
## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
cls.add_constructor([])
## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'end')],
is_pure_virtual=True, is_virtual=True)
## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3TriangularRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::TriangularRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable::TriangularRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMean() const [member function]
cls.add_method('GetMean',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMin() const [member function]
cls.add_method('GetMin',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMax() const [member function]
cls.add_method('GetMax',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue(double mean, double min, double max) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'mean'), param('double', 'min'), param('double', 'max')])
## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger(uint32_t mean, uint32_t min, uint32_t max) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'mean'), param('uint32_t', 'min'), param('uint32_t', 'max')])
## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3UniformRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::UniformRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable::UniformRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMin() const [member function]
cls.add_method('GetMin',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMax() const [member function]
cls.add_method('GetMax',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue(double min, double max) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'min'), param('double', 'max')])
## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger(uint32_t min, uint32_t max) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'min'), param('uint32_t', 'max')])
## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
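# --- Hand-written usage sketch (not pybindgen output) -------------------------
# Drawing samples from ns3::UniformRandomVariable through the bindings above.
# The attribute names "Min"/"Max" and the ns.core module path are assumptions
# about a standard ns-3 build; the same pattern applies to the other
# RandomVariableStream subclasses registered in this file.
def _sketch_uniform_random_variable_usage():
    import ns.core
    rv = ns.core.UniformRandomVariable()
    rv.SetAttribute("Min", ns.core.DoubleValue(0.0))
    rv.SetAttribute("Max", ns.core.DoubleValue(10.0))
    sample = rv.GetValue()            # parameterless overload registered above
    bounded = rv.GetValue(2.0, 5.0)   # explicit (min, max) overload registered above
    return sample, bounded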
def register_Ns3WeibullRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::WeibullRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable::WeibullRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetScale() const [member function]
cls.add_method('GetScale',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetShape() const [member function]
cls.add_method('GetShape',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetBound() const [member function]
cls.add_method('GetBound',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue(double scale, double shape, double bound) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'scale'), param('double', 'shape'), param('double', 'bound')])
## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger(uint32_t scale, uint32_t shape, uint32_t bound) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3ZetaRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZetaRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable::ZetaRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetAlpha() const [member function]
cls.add_method('GetAlpha',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue(double alpha) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'alpha')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger(uint32_t alpha) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'alpha')])
## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3ZipfRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZipfRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable::ZipfRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetAlpha() const [member function]
cls.add_method('GetAlpha',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue(uint32_t n, double alpha) [member function]
cls.add_method('GetValue',
'double',
[param('uint32_t', 'n'), param('double', 'alpha')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger(uint32_t n, uint32_t alpha) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'n'), param('uint32_t', 'alpha')])
## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
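# --- Hand-written usage sketch (not pybindgen output) -------------------------
# AttributeAccessor/AttributeChecker/AttributeValue are the plumbing behind the
# ns-3 attribute system; scripts normally reach them through SetAttribute and
# GetAttribute with concrete value types. A hedged sketch: the "MaxPackets"
# attribute name is a placeholder and obj is any attribute-bearing ns3::Object.
def _sketch_attribute_usage(obj):
    import ns.core
    obj.SetAttribute("MaxPackets", ns.core.UintegerValue(100))   # checker validates, accessor stores
    readback = ns.core.UintegerValue()
    obj.GetAttribute("MaxPackets", readback)                     # accessor fills the value in place
    return readback.Get()                                        # -> 100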
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3ConstantRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ConstantRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable::ConstantRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetConstant() const [member function]
cls.add_method('GetConstant',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue(double constant) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'constant')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger(uint32_t constant) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'constant')])
## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
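# Illustrative usage sketch (editor's note, not generated output): the
# ConstantRandomVariable methods registered above map to direct Python calls.
# Assuming the bindings import as ns.core; the attribute name is taken from
# the class's TypeId and is an assumption here:
#
#   import ns.core
#   rv = ns.core.ConstantRandomVariable()
#   rv.SetAttribute("Constant", ns.core.DoubleValue(5.0))
#   sample = rv.GetValue()    # always returns the configured constant
#   k = rv.GetInteger()       # integer variant registered above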
def register_Ns3DeterministicRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::DeterministicRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable::DeterministicRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): void ns3::DeterministicRandomVariable::SetValueArray(double * values, uint64_t length) [member function]
cls.add_method('SetValueArray',
'void',
[param('double *', 'values'), param('uint64_t', 'length')])
## random-variable-stream.h (module 'core'): double ns3::DeterministicRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::DeterministicRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3EmpiricalRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable::EmpiricalRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::CDF(double v, double c) [member function]
cls.add_method('CDF',
'void',
[param('double', 'v'), param('double', 'c')])
## random-variable-stream.h (module 'core'): uint32_t ns3::EmpiricalRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::EmpiricalRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::Interpolate(double arg0, double arg1, double arg2, double arg3, double arg4) [member function]
cls.add_method('Interpolate',
'double',
[param('double', 'arg0'), param('double', 'arg1'), param('double', 'arg2'), param('double', 'arg3'), param('double', 'arg4')],
visibility='private', is_virtual=True)
## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::Validate() [member function]
cls.add_method('Validate',
'void',
[],
visibility='private', is_virtual=True)
return
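# Illustrative usage sketch (editor's note, not generated output): the
# CDF(v, c) registration above lets a Python script build the empirical
# distribution point by point before sampling.  Assuming ns.core bindings:
#
#   import ns.core
#   rv = ns.core.EmpiricalRandomVariable()
#   rv.CDF(0.0, 0.0)      # value 0.0 at cumulative probability 0.0
#   rv.CDF(5.0, 0.5)      # half of the mass lies at or below 5.0
#   rv.CDF(10.0, 1.0)     # the CDF reaches 1.0 at value 10.0
#   sample = rv.GetValue()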
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3ErlangRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ErlangRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable::ErlangRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetK() const [member function]
cls.add_method('GetK',
'uint32_t',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetLambda() const [member function]
cls.add_method('GetLambda',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue(uint32_t k, double lambda) [member function]
cls.add_method('GetValue',
'double',
[param('uint32_t', 'k'), param('double', 'lambda')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger(uint32_t k, uint32_t lambda) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'k'), param('uint32_t', 'lambda')])
## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3EventImpl_methods(root_module, cls):
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
cls.add_constructor([])
## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
cls.add_method('Invoke',
'void',
[])
## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
cls.add_method('IsCancelled',
'bool',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
cls.add_method('Notify',
'void',
[],
is_pure_virtual=True, visibility='protected', is_virtual=True)
return
def register_Ns3ExponentialRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ExponentialRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable::ExponentialRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetMean() const [member function]
cls.add_method('GetMean',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetBound() const [member function]
cls.add_method('GetBound',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue(double mean, double bound) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'mean'), param('double', 'bound')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger(uint32_t mean, uint32_t bound) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'mean'), param('uint32_t', 'bound')])
## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
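# Illustrative usage sketch (editor's note, not generated output): both the
# attribute-driven GetValue() and the explicit GetValue(mean, bound) overloads
# are registered above.  Assuming ns.core bindings; the "Mean" and "Bound"
# attribute names are taken from the class's TypeId and are assumptions here:
#
#   import ns.core
#   rv = ns.core.ExponentialRandomVariable()
#   rv.SetAttribute("Mean", ns.core.DoubleValue(2.0))
#   rv.SetAttribute("Bound", ns.core.DoubleValue(10.0))
#   x = rv.GetValue()            # draw using the configured Mean/Bound
#   y = rv.GetValue(3.0, 10.0)   # one-off draw with mean 3.0, bounded at 10.0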
def register_Ns3GammaRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::GammaRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable::GammaRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetAlpha() const [member function]
cls.add_method('GetAlpha',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetBeta() const [member function]
cls.add_method('GetBeta',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue(double alpha, double beta) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'alpha'), param('double', 'beta')])
## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger(uint32_t alpha, uint32_t beta) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'alpha'), param('uint32_t', 'beta')])
## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3IpL4Protocol_methods(root_module, cls):
## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::IpL4Protocol() [constructor]
cls.add_constructor([])
## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::IpL4Protocol(ns3::IpL4Protocol const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IpL4Protocol const &', 'arg0')])
## ip-l4-protocol.h (module 'internet'): ns3::Callback<void,ns3::Ptr<ns3::Packet>,ns3::Ipv4Address,ns3::Ipv4Address,unsigned char,ns3::Ptr<ns3::Ipv4Route>,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::IpL4Protocol::GetDownTarget() const [member function]
cls.add_method('GetDownTarget',
'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv4Address, ns3::Ipv4Address, unsigned char, ns3::Ptr< ns3::Ipv4Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ip-l4-protocol.h (module 'internet'): ns3::Callback<void,ns3::Ptr<ns3::Packet>,ns3::Ipv6Address,ns3::Ipv6Address,unsigned char,ns3::Ptr<ns3::Ipv6Route>,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::IpL4Protocol::GetDownTarget6() const [member function]
cls.add_method('GetDownTarget6',
'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv6Address, ns3::Ipv6Address, unsigned char, ns3::Ptr< ns3::Ipv6Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ip-l4-protocol.h (module 'internet'): int ns3::IpL4Protocol::GetProtocolNumber() const [member function]
cls.add_method('GetProtocolNumber',
'int',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ip-l4-protocol.h (module 'internet'): static ns3::TypeId ns3::IpL4Protocol::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::RxStatus ns3::IpL4Protocol::Receive(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::Ipv4Interface> incomingInterface) [member function]
cls.add_method('Receive',
'ns3::IpL4Protocol::RxStatus',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::Ipv4Interface >', 'incomingInterface')],
is_pure_virtual=True, is_virtual=True)
## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::RxStatus ns3::IpL4Protocol::Receive(ns3::Ptr<ns3::Packet> p, ns3::Ipv6Header const & header, ns3::Ptr<ns3::Ipv6Interface> incomingInterface) [member function]
cls.add_method('Receive',
'ns3::IpL4Protocol::RxStatus',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv6Header const &', 'header'), param('ns3::Ptr< ns3::Ipv6Interface >', 'incomingInterface')],
is_pure_virtual=True, is_virtual=True)
## ip-l4-protocol.h (module 'internet'): void ns3::IpL4Protocol::ReceiveIcmp(ns3::Ipv4Address icmpSource, uint8_t icmpTtl, uint8_t icmpType, uint8_t icmpCode, uint32_t icmpInfo, ns3::Ipv4Address payloadSource, ns3::Ipv4Address payloadDestination, uint8_t const * payload) [member function]
cls.add_method('ReceiveIcmp',
'void',
[param('ns3::Ipv4Address', 'icmpSource'), param('uint8_t', 'icmpTtl'), param('uint8_t', 'icmpType'), param('uint8_t', 'icmpCode'), param('uint32_t', 'icmpInfo'), param('ns3::Ipv4Address', 'payloadSource'), param('ns3::Ipv4Address', 'payloadDestination'), param('uint8_t const *', 'payload')],
is_virtual=True)
## ip-l4-protocol.h (module 'internet'): void ns3::IpL4Protocol::ReceiveIcmp(ns3::Ipv6Address icmpSource, uint8_t icmpTtl, uint8_t icmpType, uint8_t icmpCode, uint32_t icmpInfo, ns3::Ipv6Address payloadSource, ns3::Ipv6Address payloadDestination, uint8_t const * payload) [member function]
cls.add_method('ReceiveIcmp',
'void',
[param('ns3::Ipv6Address', 'icmpSource'), param('uint8_t', 'icmpTtl'), param('uint8_t', 'icmpType'), param('uint8_t', 'icmpCode'), param('uint32_t', 'icmpInfo'), param('ns3::Ipv6Address', 'payloadSource'), param('ns3::Ipv6Address', 'payloadDestination'), param('uint8_t const *', 'payload')],
is_virtual=True)
## ip-l4-protocol.h (module 'internet'): void ns3::IpL4Protocol::SetDownTarget(ns3::Callback<void,ns3::Ptr<ns3::Packet>,ns3::Ipv4Address,ns3::Ipv4Address,unsigned char,ns3::Ptr<ns3::Ipv4Route>,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetDownTarget',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv4Address, ns3::Ipv4Address, unsigned char, ns3::Ptr< ns3::Ipv4Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## ip-l4-protocol.h (module 'internet'): void ns3::IpL4Protocol::SetDownTarget6(ns3::Callback<void,ns3::Ptr<ns3::Packet>,ns3::Ipv6Address,ns3::Ipv6Address,unsigned char,ns3::Ptr<ns3::Ipv6Route>,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetDownTarget6',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv6Address, ns3::Ipv6Address, unsigned char, ns3::Ptr< ns3::Ipv6Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3Ipv4_methods(root_module, cls):
## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4(ns3::Ipv4 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4 const &', 'arg0')])
## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4() [constructor]
cls.add_constructor([])
## ipv4.h (module 'internet'): bool ns3::Ipv4::AddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('AddAddress',
'bool',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('AddInterface',
'uint32_t',
[param('ns3::Ptr< ns3::NetDevice >', 'device')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4::CreateRawSocket() [member function]
cls.add_method('CreateRawSocket',
'ns3::Ptr< ns3::Socket >',
[],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
cls.add_method('DeleteRawSocket',
'void',
[param('ns3::Ptr< ns3::Socket >', 'socket')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4::GetAddress(uint32_t interface, uint32_t addressIndex) const [member function]
cls.add_method('GetAddress',
'ns3::Ipv4InterfaceAddress',
[param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForAddress(ns3::Ipv4Address address) const [member function]
cls.add_method('GetInterfaceForAddress',
'int32_t',
[param('ns3::Ipv4Address', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
cls.add_method('GetInterfaceForDevice',
'int32_t',
[param('ns3::Ptr< ns3::NetDevice const >', 'device')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForPrefix(ns3::Ipv4Address address, ns3::Ipv4Mask mask) const [member function]
cls.add_method('GetInterfaceForPrefix',
'int32_t',
[param('ns3::Ipv4Address', 'address'), param('ns3::Ipv4Mask', 'mask')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMetric(uint32_t interface) const [member function]
cls.add_method('GetMetric',
'uint16_t',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMtu(uint32_t interface) const [member function]
cls.add_method('GetMtu',
'uint16_t',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNAddresses(uint32_t interface) const [member function]
cls.add_method('GetNAddresses',
'uint32_t',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNInterfaces() const [member function]
cls.add_method('GetNInterfaces',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4::GetNetDevice(uint32_t interface) [member function]
cls.add_method('GetNetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4::GetProtocol(int protocolNumber) const [member function]
cls.add_method('GetProtocol',
'ns3::Ptr< ns3::IpL4Protocol >',
[param('int', 'protocolNumber')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4::GetRoutingProtocol() const [member function]
cls.add_method('GetRoutingProtocol',
'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): static ns3::TypeId ns3::Ipv4::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
cls.add_method('Insert',
'void',
[param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
cls.add_method('IsDestinationAddress',
'bool',
[param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::IsForwarding(uint32_t interface) const [member function]
cls.add_method('IsForwarding',
'bool',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::IsUp(uint32_t interface) const [member function]
cls.add_method('IsUp',
'bool',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, uint32_t addressIndex) [member function]
cls.add_method('RemoveAddress',
'bool',
[param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
cls.add_method('RemoveAddress',
'bool',
[param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
cls.add_method('SelectSourceAddress',
'ns3::Ipv4Address',
[param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
cls.add_method('Send',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
cls.add_method('SendWithHeader',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetDown(uint32_t interface) [member function]
cls.add_method('SetDown',
'void',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetForwarding(uint32_t interface, bool val) [member function]
cls.add_method('SetForwarding',
'void',
[param('uint32_t', 'interface'), param('bool', 'val')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetMetric(uint32_t interface, uint16_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint32_t', 'interface'), param('uint16_t', 'metric')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
cls.add_method('SetRoutingProtocol',
'void',
[param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetUp(uint32_t interface) [member function]
cls.add_method('SetUp',
'void',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ipv4::IF_ANY [variable]
cls.add_static_attribute('IF_ANY', 'uint32_t const', is_const=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::GetIpForward() const [member function]
cls.add_method('GetIpForward',
'bool',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::GetWeakEsModel() const [member function]
cls.add_method('GetWeakEsModel',
'bool',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetIpForward(bool forward) [member function]
cls.add_method('SetIpForward',
'void',
[param('bool', 'forward')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetWeakEsModel(bool model) [member function]
cls.add_method('SetWeakEsModel',
'void',
[param('bool', 'model')],
is_pure_virtual=True, visibility='private', is_virtual=True)
return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Address const &', 'value')])
return
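# Illustrative usage sketch (editor's note, not generated output):
# Ipv4AddressValue is the attribute wrapper used when configuring
# Ipv4Address-typed attributes from Python.  Assuming ns.network bindings:
#
#   import ns.network
#   value = ns.network.Ipv4AddressValue(ns.network.Ipv4Address("10.1.1.1"))
#   addr = value.Get()                              # back to a plain Ipv4Address
#   value.Set(ns.network.Ipv4Address("10.1.1.2"))   # replace the held address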
def register_Ns3Ipv4Interface_methods(root_module, cls):
## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface::Ipv4Interface(ns3::Ipv4Interface const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Interface const &', 'arg0')])
## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface::Ipv4Interface() [constructor]
cls.add_constructor([])
## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::AddAddress(ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('AddAddress',
'bool',
[param('ns3::Ipv4InterfaceAddress', 'address')])
## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::GetAddress(uint32_t index) const [member function]
cls.add_method('GetAddress',
'ns3::Ipv4InterfaceAddress',
[param('uint32_t', 'index')],
is_const=True)
## ipv4-interface.h (module 'internet'): ns3::Ptr<ns3::ArpCache> ns3::Ipv4Interface::GetArpCache() const [member function]
cls.add_method('GetArpCache',
'ns3::Ptr< ns3::ArpCache >',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4Interface::GetDevice() const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): uint16_t ns3::Ipv4Interface::GetMetric() const [member function]
cls.add_method('GetMetric',
'uint16_t',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): uint32_t ns3::Ipv4Interface::GetNAddresses() const [member function]
cls.add_method('GetNAddresses',
'uint32_t',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): static ns3::TypeId ns3::Ipv4Interface::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsDown() const [member function]
cls.add_method('IsDown',
'bool',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsForwarding() const [member function]
cls.add_method('IsForwarding',
'bool',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsUp() const [member function]
cls.add_method('IsUp',
'bool',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::RemoveAddress(uint32_t index) [member function]
cls.add_method('RemoveAddress',
'ns3::Ipv4InterfaceAddress',
[param('uint32_t', 'index')])
## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::RemoveAddress(ns3::Ipv4Address address) [member function]
cls.add_method('RemoveAddress',
'ns3::Ipv4InterfaceAddress',
[param('ns3::Ipv4Address', 'address')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::Send(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Address dest) [member function]
cls.add_method('Send',
'void',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Address', 'dest')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetArpCache(ns3::Ptr<ns3::ArpCache> arpCache) [member function]
cls.add_method('SetArpCache',
'void',
[param('ns3::Ptr< ns3::ArpCache >', 'arpCache')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('SetDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetDown() [member function]
cls.add_method('SetDown',
'void',
[])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetForwarding(bool val) [member function]
cls.add_method('SetForwarding',
'void',
[param('bool', 'val')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetMetric(uint16_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint16_t', 'metric')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetUp() [member function]
cls.add_method('SetUp',
'void',
[])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3Ipv4L3Protocol_methods(root_module, cls):
## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::Ipv4L3Protocol() [constructor]
cls.add_constructor([])
## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::AddAddress(uint32_t i, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('AddAddress',
'bool',
[param('uint32_t', 'i'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('AddInterface',
'uint32_t',
[param('ns3::Ptr< ns3::NetDevice >', 'device')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4L3Protocol::CreateRawSocket() [member function]
cls.add_method('CreateRawSocket',
'ns3::Ptr< ns3::Socket >',
[],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
cls.add_method('DeleteRawSocket',
'void',
[param('ns3::Ptr< ns3::Socket >', 'socket')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4L3Protocol::GetAddress(uint32_t interfaceIndex, uint32_t addressIndex) const [member function]
cls.add_method('GetAddress',
'ns3::Ipv4InterfaceAddress',
[param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Interface> ns3::Ipv4L3Protocol::GetInterface(uint32_t i) const [member function]
cls.add_method('GetInterface',
'ns3::Ptr< ns3::Ipv4Interface >',
[param('uint32_t', 'i')],
is_const=True)
## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForAddress(ns3::Ipv4Address addr) const [member function]
cls.add_method('GetInterfaceForAddress',
'int32_t',
[param('ns3::Ipv4Address', 'addr')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
cls.add_method('GetInterfaceForDevice',
'int32_t',
[param('ns3::Ptr< ns3::NetDevice const >', 'device')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForPrefix(ns3::Ipv4Address addr, ns3::Ipv4Mask mask) const [member function]
cls.add_method('GetInterfaceForPrefix',
'int32_t',
[param('ns3::Ipv4Address', 'addr'), param('ns3::Ipv4Mask', 'mask')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMetric(uint32_t i) const [member function]
cls.add_method('GetMetric',
'uint16_t',
[param('uint32_t', 'i')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMtu(uint32_t i) const [member function]
cls.add_method('GetMtu',
'uint16_t',
[param('uint32_t', 'i')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNAddresses(uint32_t interface) const [member function]
cls.add_method('GetNAddresses',
'uint32_t',
[param('uint32_t', 'interface')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNInterfaces() const [member function]
cls.add_method('GetNInterfaces',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4L3Protocol::GetNetDevice(uint32_t i) [member function]
cls.add_method('GetNetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4L3Protocol::GetProtocol(int protocolNumber) const [member function]
cls.add_method('GetProtocol',
'ns3::Ptr< ns3::IpL4Protocol >',
[param('int', 'protocolNumber')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4L3Protocol::GetRoutingProtocol() const [member function]
cls.add_method('GetRoutingProtocol',
'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
[],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4L3Protocol::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
cls.add_method('Insert',
'void',
[param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
cls.add_method('IsDestinationAddress',
'bool',
[param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsForwarding(uint32_t i) const [member function]
cls.add_method('IsForwarding',
'bool',
[param('uint32_t', 'i')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsUp(uint32_t i) const [member function]
cls.add_method('IsUp',
'bool',
[param('uint32_t', 'i')],
is_const=True, is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Receive(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<ns3::Packet const> p, uint16_t protocol, ns3::Address const & from, ns3::Address const & to, ns3::NetDevice::PacketType packetType) [member function]
cls.add_method('Receive',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'p'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'from'), param('ns3::Address const &', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
cls.add_method('Remove',
'void',
[param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')])
## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interfaceIndex, uint32_t addressIndex) [member function]
cls.add_method('RemoveAddress',
'bool',
[param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
cls.add_method('RemoveAddress',
'bool',
[param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4L3Protocol::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
cls.add_method('SelectSourceAddress',
'ns3::Ipv4Address',
[param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
cls.add_method('Send',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
cls.add_method('SendWithHeader',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDefaultTtl(uint8_t ttl) [member function]
cls.add_method('SetDefaultTtl',
'void',
[param('uint8_t', 'ttl')])
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDown(uint32_t i) [member function]
cls.add_method('SetDown',
'void',
[param('uint32_t', 'i')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetForwarding(uint32_t i, bool val) [member function]
cls.add_method('SetForwarding',
'void',
[param('uint32_t', 'i'), param('bool', 'val')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetMetric(uint32_t i, uint16_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint32_t', 'i'), param('uint16_t', 'metric')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
cls.add_method('SetRoutingProtocol',
'void',
[param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetUp(uint32_t i) [member function]
cls.add_method('SetUp',
'void',
[param('uint32_t', 'i')],
is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::PROT_NUMBER [variable]
cls.add_static_attribute('PROT_NUMBER', 'uint16_t const', is_const=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetIpForward() const [member function]
cls.add_method('GetIpForward',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetWeakEsModel() const [member function]
cls.add_method('GetWeakEsModel',
'bool',
[],
is_const=True, visibility='private', is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetIpForward(bool forward) [member function]
cls.add_method('SetIpForward',
'void',
[param('bool', 'forward')],
visibility='private', is_virtual=True)
## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetWeakEsModel(bool model) [member function]
cls.add_method('SetWeakEsModel',
'void',
[param('bool', 'model')],
visibility='private', is_virtual=True)
return
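# Editor's note: Ipv4L3Protocol is the concrete implementation behind the
# abstract ns3::Ipv4 registered earlier; it is normally installed on a node by
# InternetStackHelper and reached through object aggregation.  A rough sketch
# of Python-side use, with the per-node lookup elided because it varies
# between ns-3 releases (assuming ns.internet bindings):
#
#   import ns.internet
#   # ipv4 = <per-node Ipv4L3Protocol instance, obtained via aggregation>
#   # ipv4.SetDefaultTtl(32)        # method registered above
#   # n_if = ipv4.GetNInterfaces()  # ditto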
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Mask const &', 'value')])
return
def register_Ns3Ipv4MulticastRoute_methods(root_module, cls):
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::Ipv4MulticastRoute(ns3::Ipv4MulticastRoute const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MulticastRoute const &', 'arg0')])
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::Ipv4MulticastRoute() [constructor]
cls.add_constructor([])
## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4MulticastRoute::GetGroup() const [member function]
cls.add_method('GetGroup',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4MulticastRoute::GetOrigin() const [member function]
cls.add_method('GetOrigin',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-route.h (module 'internet'): uint32_t ns3::Ipv4MulticastRoute::GetOutputTtl(uint32_t oif) [member function]
cls.add_method('GetOutputTtl',
'uint32_t',
[param('uint32_t', 'oif')],
deprecated=True)
## ipv4-route.h (module 'internet'): std::map<unsigned int, unsigned int, std::less<unsigned int>, std::allocator<std::pair<unsigned int const, unsigned int> > > ns3::Ipv4MulticastRoute::GetOutputTtlMap() const [member function]
cls.add_method('GetOutputTtlMap',
'std::map< unsigned int, unsigned int >',
[],
is_const=True)
## ipv4-route.h (module 'internet'): uint32_t ns3::Ipv4MulticastRoute::GetParent() const [member function]
cls.add_method('GetParent',
'uint32_t',
[],
is_const=True)
## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetGroup(ns3::Ipv4Address const group) [member function]
cls.add_method('SetGroup',
'void',
[param('ns3::Ipv4Address const', 'group')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetOrigin(ns3::Ipv4Address const origin) [member function]
cls.add_method('SetOrigin',
'void',
[param('ns3::Ipv4Address const', 'origin')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetOutputTtl(uint32_t oif, uint32_t ttl) [member function]
cls.add_method('SetOutputTtl',
'void',
[param('uint32_t', 'oif'), param('uint32_t', 'ttl')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetParent(uint32_t iif) [member function]
cls.add_method('SetParent',
'void',
[param('uint32_t', 'iif')])
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::MAX_INTERFACES [variable]
cls.add_static_attribute('MAX_INTERFACES', 'uint32_t const', is_const=True)
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::MAX_TTL [variable]
cls.add_static_attribute('MAX_TTL', 'uint32_t const', is_const=True)
return
def register_Ns3Ipv4Route_methods(root_module, cls):
cls.add_output_stream_operator()
## ipv4-route.h (module 'internet'): ns3::Ipv4Route::Ipv4Route(ns3::Ipv4Route const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Route const &', 'arg0')])
## ipv4-route.h (module 'internet'): ns3::Ipv4Route::Ipv4Route() [constructor]
cls.add_constructor([])
## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Route::GetDestination() const [member function]
cls.add_method('GetDestination',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Route::GetGateway() const [member function]
cls.add_method('GetGateway',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-route.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4Route::GetOutputDevice() const [member function]
cls.add_method('GetOutputDevice',
'ns3::Ptr< ns3::NetDevice >',
[],
is_const=True)
## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Route::GetSource() const [member function]
cls.add_method('GetSource',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetDestination(ns3::Ipv4Address dest) [member function]
cls.add_method('SetDestination',
'void',
[param('ns3::Ipv4Address', 'dest')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetGateway(ns3::Ipv4Address gw) [member function]
cls.add_method('SetGateway',
'void',
[param('ns3::Ipv4Address', 'gw')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetOutputDevice(ns3::Ptr<ns3::NetDevice> outputDevice) [member function]
cls.add_method('SetOutputDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'outputDevice')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetSource(ns3::Ipv4Address src) [member function]
cls.add_method('SetSource',
'void',
[param('ns3::Ipv4Address', 'src')])
return
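# Illustrative usage sketch (editor's note, not generated output): Ipv4Route
# is a plain value-style object, so the setters/getters registered above are
# usable directly from Python, e.g. inside a custom routing experiment
# (assuming ns.internet and ns.network bindings):
#
#   import ns.internet
#   import ns.network
#   route = ns.internet.Ipv4Route()
#   route.SetDestination(ns.network.Ipv4Address("10.1.1.2"))
#   route.SetGateway(ns.network.Ipv4Address("10.1.1.1"))
#   # route.SetOutputDevice(device)  # a Ptr<NetDevice> obtained elsewhere
#   print(route)                     # printable via add_output_stream_operator()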
def register_Ns3Ipv4RoutingProtocol_methods(root_module, cls):
## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol::Ipv4RoutingProtocol() [constructor]
cls.add_constructor([])
## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol::Ipv4RoutingProtocol(ns3::Ipv4RoutingProtocol const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4RoutingProtocol const &', 'arg0')])
## ipv4-routing-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4RoutingProtocol::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyAddAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyInterfaceDown(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceDown',
'void',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyInterfaceUp(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceUp',
'void',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyRemoveAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('PrintRoutingTable',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): bool ns3::Ipv4RoutingProtocol::RouteInput(ns3::Ptr<ns3::Packet const> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb, ns3::Callback<void,ns3::Ptr<ns3::Ipv4MulticastRoute>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> mcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,unsigned int,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> lcb, ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
cls.add_method('RouteInput',
'bool',
[param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Route> ns3::Ipv4RoutingProtocol::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
cls.add_method('RouteOutput',
'ns3::Ptr< ns3::Ipv4Route >',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
cls.add_method('SetIpv4',
'void',
[param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Address const &', 'value')])
return
def register_Ns3Ipv6Interface_methods(root_module, cls):
## ipv6-interface.h (module 'internet'): ns3::Ipv6Interface::Ipv6Interface(ns3::Ipv6Interface const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Interface const &', 'arg0')])
## ipv6-interface.h (module 'internet'): ns3::Ipv6Interface::Ipv6Interface() [constructor]
cls.add_constructor([])
## ipv6-interface.h (module 'internet'): bool ns3::Ipv6Interface::AddAddress(ns3::Ipv6InterfaceAddress iface) [member function]
cls.add_method('AddAddress',
'bool',
[param('ns3::Ipv6InterfaceAddress', 'iface')])
## ipv6-interface.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6Interface::GetAddress(uint32_t index) const [member function]
cls.add_method('GetAddress',
'ns3::Ipv6InterfaceAddress',
[param('uint32_t', 'index')],
is_const=True)
## ipv6-interface.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6Interface::GetAddressMatchingDestination(ns3::Ipv6Address dst) [member function]
cls.add_method('GetAddressMatchingDestination',
'ns3::Ipv6InterfaceAddress',
[param('ns3::Ipv6Address', 'dst')])
## ipv6-interface.h (module 'internet'): uint16_t ns3::Ipv6Interface::GetBaseReachableTime() const [member function]
cls.add_method('GetBaseReachableTime',
'uint16_t',
[],
is_const=True)
## ipv6-interface.h (module 'internet'): uint8_t ns3::Ipv6Interface::GetCurHopLimit() const [member function]
cls.add_method('GetCurHopLimit',
'uint8_t',
[],
is_const=True)
## ipv6-interface.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv6Interface::GetDevice() const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[],
is_const=True, is_virtual=True)
## ipv6-interface.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6Interface::GetLinkLocalAddress() const [member function]
cls.add_method('GetLinkLocalAddress',
'ns3::Ipv6InterfaceAddress',
[],
is_const=True)
## ipv6-interface.h (module 'internet'): uint16_t ns3::Ipv6Interface::GetMetric() const [member function]
cls.add_method('GetMetric',
'uint16_t',
[],
is_const=True)
## ipv6-interface.h (module 'internet'): uint32_t ns3::Ipv6Interface::GetNAddresses() const [member function]
cls.add_method('GetNAddresses',
'uint32_t',
[],
is_const=True)
## ipv6-interface.h (module 'internet'): uint16_t ns3::Ipv6Interface::GetReachableTime() const [member function]
cls.add_method('GetReachableTime',
'uint16_t',
[],
is_const=True)
## ipv6-interface.h (module 'internet'): uint16_t ns3::Ipv6Interface::GetRetransTimer() const [member function]
cls.add_method('GetRetransTimer',
'uint16_t',
[],
is_const=True)
## ipv6-interface.h (module 'internet'): static ns3::TypeId ns3::Ipv6Interface::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv6-interface.h (module 'internet'): bool ns3::Ipv6Interface::IsDown() const [member function]
cls.add_method('IsDown',
'bool',
[],
is_const=True)
## ipv6-interface.h (module 'internet'): bool ns3::Ipv6Interface::IsForwarding() const [member function]
cls.add_method('IsForwarding',
'bool',
[],
is_const=True)
## ipv6-interface.h (module 'internet'): bool ns3::Ipv6Interface::IsUp() const [member function]
cls.add_method('IsUp',
'bool',
[],
is_const=True)
## ipv6-interface.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6Interface::RemoveAddress(uint32_t index) [member function]
cls.add_method('RemoveAddress',
'ns3::Ipv6InterfaceAddress',
[param('uint32_t', 'index')])
## ipv6-interface.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6Interface::RemoveAddress(ns3::Ipv6Address address) [member function]
cls.add_method('RemoveAddress',
'ns3::Ipv6InterfaceAddress',
[param('ns3::Ipv6Address', 'address')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::Send(ns3::Ptr<ns3::Packet> p, ns3::Ipv6Address dest) [member function]
cls.add_method('Send',
'void',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv6Address', 'dest')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetBaseReachableTime(uint16_t baseReachableTime) [member function]
cls.add_method('SetBaseReachableTime',
'void',
[param('uint16_t', 'baseReachableTime')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetCurHopLimit(uint8_t curHopLimit) [member function]
cls.add_method('SetCurHopLimit',
'void',
[param('uint8_t', 'curHopLimit')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('SetDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetDown() [member function]
cls.add_method('SetDown',
'void',
[])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetForwarding(bool forward) [member function]
cls.add_method('SetForwarding',
'void',
[param('bool', 'forward')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetMetric(uint16_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint16_t', 'metric')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetNsDadUid(ns3::Ipv6Address address, uint32_t uid) [member function]
cls.add_method('SetNsDadUid',
'void',
[param('ns3::Ipv6Address', 'address'), param('uint32_t', 'uid')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetReachableTime(uint16_t reachableTime) [member function]
cls.add_method('SetReachableTime',
'void',
[param('uint16_t', 'reachableTime')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetRetransTimer(uint16_t retransTimer) [member function]
cls.add_method('SetRetransTimer',
'void',
[param('uint16_t', 'retransTimer')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetState(ns3::Ipv6Address address, ns3::Ipv6InterfaceAddress::State_e state) [member function]
cls.add_method('SetState',
'void',
[param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6InterfaceAddress::State_e', 'state')])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetUp() [member function]
cls.add_method('SetUp',
'void',
[])
## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Prefix',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Prefix const &', 'value')])
return
def register_Ns3LogNormalRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::LogNormalRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable::LogNormalRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetMu() const [member function]
cls.add_method('GetMu',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetSigma() const [member function]
cls.add_method('GetSigma',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue(double mu, double sigma) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'mu'), param('double', 'sigma')])
## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger(uint32_t mu, uint32_t sigma) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'mu'), param('uint32_t', 'sigma')])
## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
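# Note: NetDevice is registered as an abstract interface: every non-static
# method below is pure virtual, so Python code is expected to instantiate
# concrete device classes (wrapped elsewhere) rather than ns3::NetDevice itself.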
def register_Ns3NetDevice_methods(root_module, cls):
## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
cls.add_constructor([])
## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,const ns3::Address&,ns3::NetDevice::PacketType,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3NixVector_methods(root_module, cls):
cls.add_output_stream_operator()
## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
cls.add_constructor([])
## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [copy constructor]
cls.add_constructor([param('ns3::NixVector const &', 'o')])
## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
cls.add_method('AddNeighborIndex',
'void',
[param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
cls.add_method('BitCount',
'uint32_t',
[param('uint32_t', 'numberOfNeighbors')],
is_const=True)
## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
cls.add_method('ExtractNeighborIndex',
'uint32_t',
[param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
cls.add_method('GetRemainingBits',
'uint32_t',
[])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
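# Note: Node aggregates devices and applications; the bindings below also expose
# the protocol-handler and device-addition listener hooks used by the internet
# stack when wiring received packets up to the registered protocols.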
def register_Ns3Node_methods(root_module, cls):
## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Node const &', 'arg0')])
## node.h (module 'network'): ns3::Node::Node() [constructor]
cls.add_constructor([])
## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
cls.add_constructor([param('uint32_t', 'systemId')])
## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
cls.add_method('AddApplication',
'uint32_t',
[param('ns3::Ptr< ns3::Application >', 'application')])
## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('AddDevice',
'uint32_t',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
cls.add_method('ChecksumEnabled',
'bool',
[],
is_static=True)
## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
cls.add_method('GetApplication',
'ns3::Ptr< ns3::Application >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
cls.add_method('GetId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
cls.add_method('GetNApplications',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
cls.add_method('GetSystemId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('RegisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
cls.add_method('RegisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('UnregisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler) [member function]
cls.add_method('UnregisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## node.h (module 'network'): void ns3::Node::DoInitialize() [member function]
cls.add_method('DoInitialize',
'void',
[],
visibility='protected', is_virtual=True)
return
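# A minimal sketch (assuming the generated 'ns.network' Python module is built
# and importable) of how the classes wrapped in this file are typically driven
# from Python; kept as a comment so it does not affect module generation:
#
#   import ns.network
#   node = ns.network.Node()      # wrapped by register_Ns3Node_methods above
#   print(node.GetNDevices())     # -> 0 on a freshly created node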
def register_Ns3NormalRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::INFINITE_VALUE [variable]
cls.add_static_attribute('INFINITE_VALUE', 'double const', is_const=True)
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::NormalRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::NormalRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetMean() const [member function]
cls.add_method('GetMean',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetVariance() const [member function]
cls.add_method('GetVariance',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetBound() const [member function]
cls.add_method('GetBound',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue(double mean, double variance, double bound=ns3::NormalRandomVariable::INFINITE_VALUE) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'mean'), param('double', 'variance'), param('double', 'bound', default_value='ns3::NormalRandomVariable::INFINITE_VALUE')])
## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger(uint32_t mean, uint32_t variance, uint32_t bound) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'mean'), param('uint32_t', 'variance'), param('uint32_t', 'bound')])
## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
cls.add_method('Get',
'ns3::ObjectFactory',
[],
is_const=True)
## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::ObjectFactory const &', 'value')])
return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(ns3::OutputStreamWrapper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::OutputStreamWrapper const &', 'arg0')])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::string filename, std::_Ios_Openmode filemode) [constructor]
cls.add_constructor([param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode')])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::ostream * os) [constructor]
cls.add_constructor([param('std::ostream *', 'os')])
## output-stream-wrapper.h (module 'network'): std::ostream * ns3::OutputStreamWrapper::GetStream() [member function]
cls.add_method('GetStream',
'std::ostream *',
[])
return
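# Note: Packet is the network-module buffer abstraction; the registration below
# covers header/trailer add-remove, byte and packet tags, fragmentation
# (CreateFragment) and raw serialization/deserialization.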
def register_Ns3Packet_methods(root_module, cls):
cls.add_output_stream_operator()
## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
cls.add_constructor([])
## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [copy constructor]
cls.add_constructor([param('ns3::Packet const &', 'o')])
## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
cls.add_constructor([param('uint32_t', 'size')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddByteTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header')])
## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddPacketTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer')])
## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::Packet >',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
cls.add_method('EnablePrinting',
'void',
[],
is_static=True)
## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
cls.add_method('FindFirstMatchingByteTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
cls.add_method('GetByteTagIterator',
'ns3::ByteTagIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
cls.add_method('GetNixVector',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
cls.add_method('GetPacketTagIterator',
'ns3::PacketTagIterator',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet.h (module 'network'): uint8_t const * ns3::Packet::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
deprecated=True, is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
cls.add_method('PeekHeader',
'uint32_t',
[param('ns3::Header &', 'header')],
is_const=True)
## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
cls.add_method('PeekPacketTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('PeekTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
cls.add_method('PrintByteTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
cls.add_method('PrintPacketTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
cls.add_method('RemoveAllByteTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
cls.add_method('RemoveAllPacketTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
cls.add_method('RemoveHeader',
'uint32_t',
[param('ns3::Header &', 'header')])
## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
cls.add_method('RemovePacketTag',
'bool',
[param('ns3::Tag &', 'tag')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('RemoveTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function]
cls.add_method('ReplacePacketTag',
'bool',
[param('ns3::Tag &', 'tag')])
## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> arg0) [member function]
cls.add_method('SetNixVector',
'void',
[param('ns3::Ptr< ns3::NixVector >', 'arg0')])
return
def register_Ns3ParetoRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ParetoRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable::ParetoRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetMean() const [member function]
cls.add_method('GetMean',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetShape() const [member function]
cls.add_method('GetShape',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetBound() const [member function]
cls.add_method('GetBound',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue(double mean, double shape, double bound) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'mean'), param('double', 'shape'), param('double', 'bound')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger(uint32_t mean, uint32_t shape, uint32_t bound) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'mean'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3TimeValue_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
cls.add_constructor([param('ns3::Time const &', 'value')])
## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
cls.add_method('Get',
'ns3::Time',
[],
is_const=True)
## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Time const &', 'value')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3AddressChecker_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
return
def register_Ns3AddressValue_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
cls.add_constructor([param('ns3::Address const &', 'value')])
## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Address',
[],
is_const=True)
## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Address const &', 'value')])
return
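# Note: Ipv4ListRouting chains several IPv4 routing protocols by priority and
# delegates RouteInput/RouteOutput to them; it is the list to which protocols
# such as DSDV can be added via AddRoutingProtocol.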
def register_Ns3Ipv4ListRouting_methods(root_module, cls):
## ipv4-list-routing.h (module 'internet'): ns3::Ipv4ListRouting::Ipv4ListRouting(ns3::Ipv4ListRouting const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4ListRouting const &', 'arg0')])
## ipv4-list-routing.h (module 'internet'): ns3::Ipv4ListRouting::Ipv4ListRouting() [constructor]
cls.add_constructor([])
## ipv4-list-routing.h (module 'internet'): void ns3::Ipv4ListRouting::AddRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol, int16_t priority) [member function]
cls.add_method('AddRoutingProtocol',
'void',
[param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol'), param('int16_t', 'priority')],
is_virtual=True)
## ipv4-list-routing.h (module 'internet'): uint32_t ns3::Ipv4ListRouting::GetNRoutingProtocols() const [member function]
cls.add_method('GetNRoutingProtocols',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ipv4-list-routing.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4ListRouting::GetRoutingProtocol(uint32_t index, int16_t & priority) const [member function]
cls.add_method('GetRoutingProtocol',
'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
[param('uint32_t', 'index'), param('int16_t &', 'priority', direction=2)],
is_const=True, is_virtual=True)
## ipv4-list-routing.h (module 'internet'): static ns3::TypeId ns3::Ipv4ListRouting::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4-list-routing.h (module 'internet'): void ns3::Ipv4ListRouting::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyAddAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_virtual=True)
## ipv4-list-routing.h (module 'internet'): void ns3::Ipv4ListRouting::NotifyInterfaceDown(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceDown',
'void',
[param('uint32_t', 'interface')],
is_virtual=True)
## ipv4-list-routing.h (module 'internet'): void ns3::Ipv4ListRouting::NotifyInterfaceUp(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceUp',
'void',
[param('uint32_t', 'interface')],
is_virtual=True)
## ipv4-list-routing.h (module 'internet'): void ns3::Ipv4ListRouting::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyRemoveAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_virtual=True)
## ipv4-list-routing.h (module 'internet'): void ns3::Ipv4ListRouting::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('PrintRoutingTable',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_const=True, is_virtual=True)
## ipv4-list-routing.h (module 'internet'): bool ns3::Ipv4ListRouting::RouteInput(ns3::Ptr<ns3::Packet const> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb, ns3::Callback<void,ns3::Ptr<ns3::Ipv4MulticastRoute>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> mcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,unsigned int,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> lcb, ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
cls.add_method('RouteInput',
'bool',
[param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
is_virtual=True)
## ipv4-list-routing.h (module 'internet'): ns3::Ptr<ns3::Ipv4Route> ns3::Ipv4ListRouting::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
cls.add_method('RouteOutput',
'ns3::Ptr< ns3::Ipv4Route >',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')],
is_virtual=True)
## ipv4-list-routing.h (module 'internet'): void ns3::Ipv4ListRouting::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
cls.add_method('SetIpv4',
'void',
[param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
is_virtual=True)
## ipv4-list-routing.h (module 'internet'): void ns3::Ipv4ListRouting::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## ipv4-list-routing.h (module 'internet'): void ns3::Ipv4ListRouting::DoInitialize() [member function]
cls.add_method('DoInitialize',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3HashImplementation_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
cls.add_constructor([])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_pure_virtual=True, is_virtual=True)
## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
cls.add_method('clear',
'void',
[],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
cls.add_constructor([])
## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
cls.add_constructor([])
## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
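# Note: DsdvHeader carries the (destination, hop count, destination sequence
# number) triple advertised in DSDV route updates; the registration below
# exposes its field accessors together with the standard ns3::Header
# serialization hooks (Serialize/Deserialize/GetSerializedSize/Print).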
def register_Ns3DsdvDsdvHeader_methods(root_module, cls):
cls.add_output_stream_operator()
## dsdv-packet.h (module 'dsdv'): ns3::dsdv::DsdvHeader::DsdvHeader(ns3::dsdv::DsdvHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dsdv::DsdvHeader const &', 'arg0')])
## dsdv-packet.h (module 'dsdv'): ns3::dsdv::DsdvHeader::DsdvHeader(ns3::Ipv4Address dst=ns3::Ipv4Address(), uint32_t hopcount=0, uint32_t dstSeqNo=0) [constructor]
cls.add_constructor([param('ns3::Ipv4Address', 'dst', default_value='ns3::Ipv4Address()'), param('uint32_t', 'hopcount', default_value='0'), param('uint32_t', 'dstSeqNo', default_value='0')])
## dsdv-packet.h (module 'dsdv'): uint32_t ns3::dsdv::DsdvHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## dsdv-packet.h (module 'dsdv'): ns3::Ipv4Address ns3::dsdv::DsdvHeader::GetDst() const [member function]
cls.add_method('GetDst',
'ns3::Ipv4Address',
[],
is_const=True)
## dsdv-packet.h (module 'dsdv'): uint32_t ns3::dsdv::DsdvHeader::GetDstSeqno() const [member function]
cls.add_method('GetDstSeqno',
'uint32_t',
[],
is_const=True)
## dsdv-packet.h (module 'dsdv'): uint32_t ns3::dsdv::DsdvHeader::GetHopCount() const [member function]
cls.add_method('GetHopCount',
'uint32_t',
[],
is_const=True)
## dsdv-packet.h (module 'dsdv'): ns3::TypeId ns3::dsdv::DsdvHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## dsdv-packet.h (module 'dsdv'): uint32_t ns3::dsdv::DsdvHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## dsdv-packet.h (module 'dsdv'): static ns3::TypeId ns3::dsdv::DsdvHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## dsdv-packet.h (module 'dsdv'): void ns3::dsdv::DsdvHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## dsdv-packet.h (module 'dsdv'): void ns3::dsdv::DsdvHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## dsdv-packet.h (module 'dsdv'): void ns3::dsdv::DsdvHeader::SetDst(ns3::Ipv4Address destination) [member function]
cls.add_method('SetDst',
'void',
[param('ns3::Ipv4Address', 'destination')])
## dsdv-packet.h (module 'dsdv'): void ns3::dsdv::DsdvHeader::SetDstSeqno(uint32_t sequenceNumber) [member function]
cls.add_method('SetDstSeqno',
'void',
[param('uint32_t', 'sequenceNumber')])
## dsdv-packet.h (module 'dsdv'): void ns3::dsdv::DsdvHeader::SetHopCount(uint32_t hopCount) [member function]
cls.add_method('SetHopCount',
'void',
[param('uint32_t', 'hopCount')])
return
def register_Ns3DsdvPacketQueue_methods(root_module, cls):
## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::PacketQueue::PacketQueue(ns3::dsdv::PacketQueue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dsdv::PacketQueue const &', 'arg0')])
## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::PacketQueue::PacketQueue() [constructor]
cls.add_constructor([])
## dsdv-packet-queue.h (module 'dsdv'): bool ns3::dsdv::PacketQueue::Dequeue(ns3::Ipv4Address dst, ns3::dsdv::QueueEntry & entry) [member function]
cls.add_method('Dequeue',
'bool',
[param('ns3::Ipv4Address', 'dst'), param('ns3::dsdv::QueueEntry &', 'entry')])
## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::PacketQueue::DropPacketWithDst(ns3::Ipv4Address dst) [member function]
cls.add_method('DropPacketWithDst',
'void',
[param('ns3::Ipv4Address', 'dst')])
## dsdv-packet-queue.h (module 'dsdv'): bool ns3::dsdv::PacketQueue::Enqueue(ns3::dsdv::QueueEntry & entry) [member function]
cls.add_method('Enqueue',
'bool',
[param('ns3::dsdv::QueueEntry &', 'entry')])
## dsdv-packet-queue.h (module 'dsdv'): bool ns3::dsdv::PacketQueue::Find(ns3::Ipv4Address dst) [member function]
cls.add_method('Find',
'bool',
[param('ns3::Ipv4Address', 'dst')])
## dsdv-packet-queue.h (module 'dsdv'): uint32_t ns3::dsdv::PacketQueue::GetCountForPacketsWithDst(ns3::Ipv4Address dst) [member function]
cls.add_method('GetCountForPacketsWithDst',
'uint32_t',
[param('ns3::Ipv4Address', 'dst')])
## dsdv-packet-queue.h (module 'dsdv'): uint32_t ns3::dsdv::PacketQueue::GetMaxPacketsPerDst() const [member function]
cls.add_method('GetMaxPacketsPerDst',
'uint32_t',
[],
is_const=True)
## dsdv-packet-queue.h (module 'dsdv'): uint32_t ns3::dsdv::PacketQueue::GetMaxQueueLen() const [member function]
cls.add_method('GetMaxQueueLen',
'uint32_t',
[],
is_const=True)
## dsdv-packet-queue.h (module 'dsdv'): ns3::Time ns3::dsdv::PacketQueue::GetQueueTimeout() const [member function]
cls.add_method('GetQueueTimeout',
'ns3::Time',
[],
is_const=True)
## dsdv-packet-queue.h (module 'dsdv'): uint32_t ns3::dsdv::PacketQueue::GetSize() [member function]
cls.add_method('GetSize',
'uint32_t',
[])
## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::PacketQueue::SetMaxPacketsPerDst(uint32_t len) [member function]
cls.add_method('SetMaxPacketsPerDst',
'void',
[param('uint32_t', 'len')])
## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::PacketQueue::SetMaxQueueLen(uint32_t len) [member function]
cls.add_method('SetMaxQueueLen',
'void',
[param('uint32_t', 'len')])
## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::PacketQueue::SetQueueTimeout(ns3::Time t) [member function]
cls.add_method('SetQueueTimeout',
'void',
[param('ns3::Time', 't')])
return
def register_Ns3DsdvQueueEntry_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::QueueEntry::QueueEntry(ns3::dsdv::QueueEntry const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dsdv::QueueEntry const &', 'arg0')])
## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::QueueEntry::QueueEntry(ns3::Ptr<ns3::Packet const> pa=0, ns3::Ipv4Header const & h=ns3::Ipv4Header(), ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb=ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>(), ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb=ns3::Callback<void, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>()) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Packet const >', 'pa', default_value='0'), param('ns3::Ipv4Header const &', 'h', default_value='ns3::Ipv4Header()'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb', default_value='ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>()'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb', default_value='ns3::Callback<void, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>()')])
## dsdv-packet-queue.h (module 'dsdv'): ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ns3::dsdv::QueueEntry::GetErrorCallback() const [member function]
cls.add_method('GetErrorCallback',
'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## dsdv-packet-queue.h (module 'dsdv'): ns3::Time ns3::dsdv::QueueEntry::GetExpireTime() const [member function]
cls.add_method('GetExpireTime',
'ns3::Time',
[],
is_const=True)
## dsdv-packet-queue.h (module 'dsdv'): ns3::Ipv4Header ns3::dsdv::QueueEntry::GetIpv4Header() const [member function]
cls.add_method('GetIpv4Header',
'ns3::Ipv4Header',
[],
is_const=True)
## dsdv-packet-queue.h (module 'dsdv'): ns3::Ptr<ns3::Packet const> ns3::dsdv::QueueEntry::GetPacket() const [member function]
cls.add_method('GetPacket',
'ns3::Ptr< ns3::Packet const >',
[],
is_const=True)
## dsdv-packet-queue.h (module 'dsdv'): ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ns3::dsdv::QueueEntry::GetUnicastForwardCallback() const [member function]
cls.add_method('GetUnicastForwardCallback',
'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetErrorCallback(ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
cls.add_method('SetErrorCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')])
## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetExpireTime(ns3::Time exp) [member function]
cls.add_method('SetExpireTime',
'void',
[param('ns3::Time', 'exp')])
## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetIpv4Header(ns3::Ipv4Header h) [member function]
cls.add_method('SetIpv4Header',
'void',
[param('ns3::Ipv4Header', 'h')])
## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetPacket(ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('SetPacket',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'p')])
## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetUnicastForwardCallback(ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb) [member function]
cls.add_method('SetUnicastForwardCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb')])
return
def register_Ns3DsdvRoutingProtocol_methods(root_module, cls):
## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol::RoutingProtocol(ns3::dsdv::RoutingProtocol const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dsdv::RoutingProtocol const &', 'arg0')])
## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol::RoutingProtocol() [constructor]
cls.add_constructor([])
## dsdv-routing-protocol.h (module 'dsdv'): int64_t ns3::dsdv::RoutingProtocol::AssignStreams(int64_t stream) [member function]
cls.add_method('AssignStreams',
'int64_t',
[param('int64_t', 'stream')])
## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::GetEnableBufferFlag() const [member function]
cls.add_method('GetEnableBufferFlag',
'bool',
[],
is_const=True)
## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::GetEnableRAFlag() const [member function]
cls.add_method('GetEnableRAFlag',
'bool',
[],
is_const=True)
## dsdv-routing-protocol.h (module 'dsdv'): static ns3::TypeId ns3::dsdv::RoutingProtocol::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::GetWSTFlag() const [member function]
cls.add_method('GetWSTFlag',
'bool',
[],
is_const=True)
## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyAddAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_virtual=True)
## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyInterfaceDown(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceDown',
'void',
[param('uint32_t', 'interface')],
is_virtual=True)
## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyInterfaceUp(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceUp',
'void',
[param('uint32_t', 'interface')],
is_virtual=True)
## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyRemoveAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_virtual=True)
## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('PrintRoutingTable',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_const=True, is_virtual=True)
## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::RouteInput(ns3::Ptr<ns3::Packet const> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb, ns3::Callback<void,ns3::Ptr<ns3::Ipv4MulticastRoute>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> mcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,unsigned int,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> lcb, ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
cls.add_method('RouteInput',
'bool',
[param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
is_virtual=True)
## dsdv-routing-protocol.h (module 'dsdv'): ns3::Ptr<ns3::Ipv4Route> ns3::dsdv::RoutingProtocol::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
cls.add_method('RouteOutput',
'ns3::Ptr< ns3::Ipv4Route >',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')],
is_virtual=True)
## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetEnableBufferFlag(bool f) [member function]
cls.add_method('SetEnableBufferFlag',
'void',
[param('bool', 'f')])
## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetEnableRAFlag(bool f) [member function]
cls.add_method('SetEnableRAFlag',
'void',
[param('bool', 'f')])
## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
cls.add_method('SetIpv4',
'void',
[param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
is_virtual=True)
## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetWSTFlag(bool f) [member function]
cls.add_method('SetWSTFlag',
'void',
[param('bool', 'f')])
## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol::DSDV_PORT [variable]
cls.add_static_attribute('DSDV_PORT', 'uint32_t const', is_const=True)
return
def register_Ns3DsdvRoutingTable_methods(root_module, cls):
## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTable::RoutingTable(ns3::dsdv::RoutingTable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dsdv::RoutingTable const &', 'arg0')])
## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTable::RoutingTable() [constructor]
cls.add_constructor([])
## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::AddIpv4Event(ns3::Ipv4Address address, ns3::EventId id) [member function]
cls.add_method('AddIpv4Event',
'bool',
[param('ns3::Ipv4Address', 'address'), param('ns3::EventId', 'id')])
## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::AddRoute(ns3::dsdv::RoutingTableEntry & r) [member function]
cls.add_method('AddRoute',
'bool',
[param('ns3::dsdv::RoutingTableEntry &', 'r')])
## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::AnyRunningEvent(ns3::Ipv4Address address) [member function]
cls.add_method('AnyRunningEvent',
'bool',
[param('ns3::Ipv4Address', 'address')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::Clear() [member function]
cls.add_method('Clear',
'void',
[])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::DeleteAllRoutesFromInterface(ns3::Ipv4InterfaceAddress iface) [member function]
cls.add_method('DeleteAllRoutesFromInterface',
'void',
[param('ns3::Ipv4InterfaceAddress', 'iface')])
## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::DeleteIpv4Event(ns3::Ipv4Address address) [member function]
cls.add_method('DeleteIpv4Event',
'bool',
[param('ns3::Ipv4Address', 'address')])
## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::DeleteRoute(ns3::Ipv4Address dst) [member function]
cls.add_method('DeleteRoute',
'bool',
[param('ns3::Ipv4Address', 'dst')])
## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::ForceDeleteIpv4Event(ns3::Ipv4Address address) [member function]
cls.add_method('ForceDeleteIpv4Event',
'bool',
[param('ns3::Ipv4Address', 'address')])
## dsdv-rtable.h (module 'dsdv'): ns3::EventId ns3::dsdv::RoutingTable::GetEventId(ns3::Ipv4Address address) [member function]
cls.add_method('GetEventId',
'ns3::EventId',
[param('ns3::Ipv4Address', 'address')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::GetListOfAllRoutes(std::map<ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry, std::less<ns3::Ipv4Address>, std::allocator<std::pair<ns3::Ipv4Address const, ns3::dsdv::RoutingTableEntry> > > & allRoutes) [member function]
cls.add_method('GetListOfAllRoutes',
'void',
[param('std::map< ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry > &', 'allRoutes')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::GetListOfDestinationWithNextHop(ns3::Ipv4Address nxtHp, std::map<ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry, std::less<ns3::Ipv4Address>, std::allocator<std::pair<ns3::Ipv4Address const, ns3::dsdv::RoutingTableEntry> > > & dstList) [member function]
cls.add_method('GetListOfDestinationWithNextHop',
'void',
[param('ns3::Ipv4Address', 'nxtHp'), param('std::map< ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry > &', 'dstList')])
## dsdv-rtable.h (module 'dsdv'): ns3::Time ns3::dsdv::RoutingTable::Getholddowntime() const [member function]
cls.add_method('Getholddowntime',
'ns3::Time',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::LookupRoute(ns3::Ipv4Address dst, ns3::dsdv::RoutingTableEntry & rt) [member function]
cls.add_method('LookupRoute',
'bool',
[param('ns3::Ipv4Address', 'dst'), param('ns3::dsdv::RoutingTableEntry &', 'rt')])
## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::LookupRoute(ns3::Ipv4Address id, ns3::dsdv::RoutingTableEntry & rt, bool forRouteInput) [member function]
cls.add_method('LookupRoute',
'bool',
[param('ns3::Ipv4Address', 'id'), param('ns3::dsdv::RoutingTableEntry &', 'rt'), param('bool', 'forRouteInput')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::Print(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('Print',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::Purge(std::map<ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry, std::less<ns3::Ipv4Address>, std::allocator<std::pair<ns3::Ipv4Address const, ns3::dsdv::RoutingTableEntry> > > & removedAddresses) [member function]
cls.add_method('Purge',
'void',
[param('std::map< ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry > &', 'removedAddresses')])
## dsdv-rtable.h (module 'dsdv'): uint32_t ns3::dsdv::RoutingTable::RoutingTableSize() [member function]
cls.add_method('RoutingTableSize',
'uint32_t',
[])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::Setholddowntime(ns3::Time t) [member function]
cls.add_method('Setholddowntime',
'void',
[param('ns3::Time', 't')])
## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::Update(ns3::dsdv::RoutingTableEntry & rt) [member function]
cls.add_method('Update',
'bool',
[param('ns3::dsdv::RoutingTableEntry &', 'rt')])
return
def register_Ns3DsdvRoutingTableEntry_methods(root_module, cls):
## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTableEntry::RoutingTableEntry(ns3::dsdv::RoutingTableEntry const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dsdv::RoutingTableEntry const &', 'arg0')])
## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTableEntry::RoutingTableEntry(ns3::Ptr<ns3::NetDevice> dev=0, ns3::Ipv4Address dst=ns3::Ipv4Address(), uint32_t m_seqNo=0, ns3::Ipv4InterfaceAddress iface=ns3::Ipv4InterfaceAddress(), uint32_t hops=0, ns3::Ipv4Address nextHop=ns3::Ipv4Address(), ns3::Time lifetime=ns3::Simulator::Now( ), ns3::Time SettlingTime=ns3::Simulator::Now( ), bool changedEntries=false) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev', default_value='0'), param('ns3::Ipv4Address', 'dst', default_value='ns3::Ipv4Address()'), param('uint32_t', 'm_seqNo', default_value='0'), param('ns3::Ipv4InterfaceAddress', 'iface', default_value='ns3::Ipv4InterfaceAddress()'), param('uint32_t', 'hops', default_value='0'), param('ns3::Ipv4Address', 'nextHop', default_value='ns3::Ipv4Address()'), param('ns3::Time', 'lifetime', default_value='ns3::Simulator::Now( )'), param('ns3::Time', 'SettlingTime', default_value='ns3::Simulator::Now( )'), param('bool', 'changedEntries', default_value='false')])
## dsdv-rtable.h (module 'dsdv'): ns3::Ipv4Address ns3::dsdv::RoutingTableEntry::GetDestination() const [member function]
cls.add_method('GetDestination',
'ns3::Ipv4Address',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTableEntry::GetEntriesChanged() const [member function]
cls.add_method('GetEntriesChanged',
'bool',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RouteFlags ns3::dsdv::RoutingTableEntry::GetFlag() const [member function]
cls.add_method('GetFlag',
'ns3::dsdv::RouteFlags',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): uint32_t ns3::dsdv::RoutingTableEntry::GetHop() const [member function]
cls.add_method('GetHop',
'uint32_t',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): ns3::Ipv4InterfaceAddress ns3::dsdv::RoutingTableEntry::GetInterface() const [member function]
cls.add_method('GetInterface',
'ns3::Ipv4InterfaceAddress',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): ns3::Time ns3::dsdv::RoutingTableEntry::GetLifeTime() const [member function]
cls.add_method('GetLifeTime',
'ns3::Time',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): ns3::Ipv4Address ns3::dsdv::RoutingTableEntry::GetNextHop() const [member function]
cls.add_method('GetNextHop',
'ns3::Ipv4Address',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): ns3::Ptr<ns3::NetDevice> ns3::dsdv::RoutingTableEntry::GetOutputDevice() const [member function]
cls.add_method('GetOutputDevice',
'ns3::Ptr< ns3::NetDevice >',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): ns3::Ptr<ns3::Ipv4Route> ns3::dsdv::RoutingTableEntry::GetRoute() const [member function]
cls.add_method('GetRoute',
'ns3::Ptr< ns3::Ipv4Route >',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): uint32_t ns3::dsdv::RoutingTableEntry::GetSeqNo() const [member function]
cls.add_method('GetSeqNo',
'uint32_t',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): ns3::Time ns3::dsdv::RoutingTableEntry::GetSettlingTime() const [member function]
cls.add_method('GetSettlingTime',
'ns3::Time',
[],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::Print(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('Print',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_const=True)
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetEntriesChanged(bool entriesChanged) [member function]
cls.add_method('SetEntriesChanged',
'void',
[param('bool', 'entriesChanged')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetFlag(ns3::dsdv::RouteFlags flag) [member function]
cls.add_method('SetFlag',
'void',
[param('ns3::dsdv::RouteFlags', 'flag')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetHop(uint32_t hopCount) [member function]
cls.add_method('SetHop',
'void',
[param('uint32_t', 'hopCount')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetInterface(ns3::Ipv4InterfaceAddress iface) [member function]
cls.add_method('SetInterface',
'void',
[param('ns3::Ipv4InterfaceAddress', 'iface')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetLifeTime(ns3::Time lifeTime) [member function]
cls.add_method('SetLifeTime',
'void',
[param('ns3::Time', 'lifeTime')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetNextHop(ns3::Ipv4Address nextHop) [member function]
cls.add_method('SetNextHop',
'void',
[param('ns3::Ipv4Address', 'nextHop')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetOutputDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('SetOutputDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetRoute(ns3::Ptr<ns3::Ipv4Route> route) [member function]
cls.add_method('SetRoute',
'void',
[param('ns3::Ptr< ns3::Ipv4Route >', 'route')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetSeqNo(uint32_t sequenceNumber) [member function]
cls.add_method('SetSeqNo',
'void',
[param('uint32_t', 'sequenceNumber')])
## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetSettlingTime(ns3::Time settlingTime) [member function]
cls.add_method('SetSettlingTime',
'void',
[param('ns3::Time', 'settlingTime')])
return
def register_functions(root_module):
module = root_module
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)
register_functions_ns3_dsdv(module.get_submodule('dsdv'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_Hash(module, root_module):
register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)
return
def register_functions_ns3_Hash_Function(module, root_module):
return
def register_functions_ns3_dsdv(module, root_module):
return
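# Usage sketch (the invocation below is an assumption about how the ns-3
# build drives this generator, not something stated in this file): main()
# builds the pybindgen module description and writes the generated C++
# binding code to stdout, so the output is normally redirected to a file,
# roughly:
#
#   python modulegen__gcc_LP64.py > ns3module.cc
#
# The resulting .cc file is then compiled into the dsdv Python bindings.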
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
|
mkheirkhah/mptcp
|
src/dsdv/bindings/modulegen__gcc_LP64.py
|
Python
|
gpl-2.0
| 466,437 | 0.015177 |
#!/usr/bin/env python
"""
Echo the s3s3 configuration template.
"""
from pkg_resources import Requirement, resource_string
def try_resource(location):
"""
Try to get the resource in ``location``.
"""
try:
return resource_string(Requirement.parse('s3s3'), location)
except FileNotFoundError:
return ''
def echo():
"""
Echo the s3s3 configuration file template.
Use the pkg_resources module to locate the s3s3.ini.dist configuration
file; it lives in a different place depending on whether the install
was an sdist or a bdist_wheel.
"""
try:
conf = try_resource('extras/s3s3.ini.dist') # sdist
if not conf:
conf = try_resource('../../../extras/s3s3.ini.dist') # bdist
print(conf.decode('utf-8'))
return True
except Exception:
return False
def main():
if echo():
exit(0)
else:
exit(1)
if __name__ == "__main__":
main()
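# Usage sketch (the module path below is inferred from the file location and
# the entry-point name is an assumption; check setup.py for the real console
# script): redirecting the output bootstraps a local configuration file.
#
#   python -m s3s3.scripts.echo_s3s3_ini_template > s3s3.ini
#
# main() exits with status 0 when the s3s3.ini.dist template was found and
# echoed, and with status 1 otherwise.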
|
lsst-sqre/s3s3
|
s3s3/scripts/echo_s3s3_ini_template.py
|
Python
|
mit
| 978 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import applications.utils.captcha.utils
class Migration(migrations.Migration):
dependencies = [
('authModel', '0007_email_test'),
]
operations = [
migrations.AddField(
model_name='email',
name='hash',
field=models.CharField(default=applications.utils.captcha.utils.key_generator, max_length=16, verbose_name='Hash'),
),
]
|
AlexStarov/Shop
|
applications/authModel/migrations/0008_email_hash.py
|
Python
|
apache-2.0
| 508 | 0.001969 |
# -*- coding: utf-8 -*-
"""The zip file system implementation."""
import zipfile
# This is necessary to prevent a circular import.
import dfvfs.vfs.zip_file_entry
from dfvfs.lib import definitions
from dfvfs.lib import errors
from dfvfs.path import zip_path_spec
from dfvfs.resolver import resolver
from dfvfs.vfs import file_system
class ZipFileSystem(file_system.FileSystem):
"""File system that uses zipfile.
Attributes:
encoding (str): encoding of the file entry name.
"""
LOCATION_ROOT = u'/'
TYPE_INDICATOR = definitions.TYPE_INDICATOR_ZIP
def __init__(self, resolver_context, encoding=u'utf-8'):
"""Initializes a file system.
Args:
resolver_context (Context): the resolver context.
encoding (Optional[str]): encoding of the file entry name.
"""
super(ZipFileSystem, self).__init__(resolver_context)
self._file_object = None
self._zip_file = None
self.encoding = encoding
def _Close(self):
"""Closes the file system object.
Raises:
IOError: if the close failed.
"""
self._zip_file.close()
self._zip_file = None
self._file_object.close()
self._file_object = None
def _Open(self, path_spec, mode='rb'):
"""Opens the file system object defined by path specification.
Args:
path_spec (PathSpec): path specification of the file system.
mode (Optional[str]): file access mode.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file system object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid.
"""
if not path_spec.HasParent():
raise errors.PathSpecError(
u'Unsupported path specification without parent.')
file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
try:
zip_file = zipfile.ZipFile(file_object, 'r')
except:
file_object.close()
raise
self._file_object = file_object
self._zip_file = zip_file
def FileEntryExistsByPathSpec(self, path_spec):
"""Determines if a file entry for a path specification exists.
Args:
path_spec (PathSpec): path specification of the file entry.
Returns:
bool: True if the file entry exists.
"""
location = getattr(path_spec, u'location', None)
if (location is None or
not location.startswith(self.LOCATION_ROOT)):
return False
if len(location) == 1:
return True
try:
self._zip_file.getinfo(location[1:])
return True
except KeyError:
pass
# Check if location could be a virtual directory.
for name in iter(self._zip_file.namelist()):
# The ZIP info name does not have the leading path separator as
# the location string does.
if name.startswith(location[1:]):
return True
return False
def GetFileEntryByPathSpec(self, path_spec):
"""Retrieves a file entry for a path specification.
Args:
path_spec (PathSpec): path specification of the file entry.
Returns:
ZipFileEntry: a file entry or None.
"""
if not self.FileEntryExistsByPathSpec(path_spec):
return
location = getattr(path_spec, u'location', None)
if len(location) == 1:
return dfvfs.vfs.zip_file_entry.ZipFileEntry(
self._resolver_context, self, path_spec, is_root=True,
is_virtual=True)
kwargs = {}
try:
kwargs[u'zip_info'] = self._zip_file.getinfo(location[1:])
except KeyError:
kwargs[u'is_virtual'] = True
return dfvfs.vfs.zip_file_entry.ZipFileEntry(
self._resolver_context, self, path_spec, **kwargs)
def GetRootFileEntry(self):
"""Retrieves the root file entry.
Returns:
ZipFileEntry: a file entry or None.
"""
path_spec = zip_path_spec.ZipPathSpec(
location=self.LOCATION_ROOT, parent=self._path_spec.parent)
return self.GetFileEntryByPathSpec(path_spec)
def GetZipFile(self):
"""Retrieves the ZIP file object.
Returns:
zipfile.ZipFile: a ZIP file object or None.
"""
return self._zip_file
def GetZipInfoByPathSpec(self, path_spec):
"""Retrieves the ZIP info object for a path specification.
Args:
path_spec (PathSpec): a path specification.
Returns:
zipfile.ZipInfo: a ZIP info object or None if not available.
Raises:
PathSpecError: if the path specification is incorrect.
"""
location = getattr(path_spec, u'location', None)
if location is None:
raise errors.PathSpecError(u'Path specification missing location.')
if not location.startswith(self.LOCATION_ROOT):
raise errors.PathSpecError(u'Invalid location in path specification.')
if len(location) > 1:
return self._zip_file.getinfo(location[1:])
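# Usage sketch (the archive path is only an example; the calls follow the
# general dfvfs path-spec and resolver pattern rather than anything defined
# in this module):
#
#   from dfvfs.path import os_path_spec, zip_path_spec
#   from dfvfs.resolver import resolver
#
#   os_spec = os_path_spec.OSPathSpec(location=u'/tmp/example.zip')
#   zip_spec = zip_path_spec.ZipPathSpec(location=u'/', parent=os_spec)
#   file_system = resolver.Resolver.OpenFileSystem(zip_spec)
#   root_entry = file_system.GetRootFileEntry()
#   for sub_file_entry in root_entry.sub_file_entries:
#       print(sub_file_entry.name)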
|
dc3-plaso/dfvfs
|
dfvfs/vfs/zip_file_system.py
|
Python
|
apache-2.0
| 4,907 | 0.006521 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
import djangobmf.storage
import djangobmf.fields.file
import django.utils.timezone
import djangobmf.utils.generate_filename
class Migration(migrations.Migration):
replaces = [('djangobmf', '0001_squashed_0_2_0'), ('djangobmf', '0002_dashboard_update'), ('djangobmf', '0003_delete_workspace'), ('djangobmf', '0004_added_active_field'), ('djangobmf', '0005_added_unique_together'), ('djangobmf', '0006_report_settings'), ('djangobmf', '0007_update_renderer'), ('djangobmf', '0008_renderer_filefields'), ('djangobmf', '0009_notification_rename'), ('djangobmf', '0010_notification_db_optimization'), ('djangobmf', '0011_added_numberrange'), ('djangobmf', '0012_delete_dashboard'), ('djangobmf', '0013_update_document')]
dependencies = [
migrations.swappable_dependency(settings.BMF_CONTRIB_CUSTOMER),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.BMF_CONTRIB_PROJECT),
]
operations = [
migrations.CreateModel(
name='Activity',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
('topic', models.CharField(verbose_name='Topic', blank=True, null=True, max_length=100)),
('text', models.TextField(verbose_name='Text', blank=True, null=True)),
('action', models.PositiveSmallIntegerField(verbose_name='Action', default=1, choices=[(1, 'Comment'), (2, 'Created'), (3, 'Updated'), (4, 'Workflow'), (5, 'File')], editable=False, null=True)),
('template', models.CharField(verbose_name='Template', editable=False, null=True, max_length=100)),
('parent_id', models.PositiveIntegerField()),
('modified', models.DateTimeField(auto_now=True, verbose_name='Modified')),
('parent_ct', models.ForeignKey(related_name='bmf_history_parent', to='contenttypes.ContentType')),
('user', models.ForeignKey(null=True, blank=True, to=settings.AUTH_USER_MODEL)),
],
options={
'get_latest_by': 'modified',
'verbose_name': 'Activity',
'ordering': ('-modified',),
'verbose_name_plural': 'Activity',
'abstract': False,
},
),
migrations.CreateModel(
name='Configuration',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
('app_label', models.CharField(verbose_name='Application', editable=False, null=True, max_length=100)),
('field_name', models.CharField(verbose_name='Fieldname', editable=False, null=True, max_length=100)),
('value', models.TextField(verbose_name='Value', null=True)),
('active', models.BooleanField(verbose_name='Active', default=True)),
],
options={
'ordering': ['app_label', 'field_name'],
'verbose_name_plural': 'Configurations',
'abstract': False,
'verbose_name': 'Configuration',
'default_permissions': ('change',),
},
),
migrations.CreateModel(
name='Document',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
('name', models.CharField(verbose_name='Name', blank=True, null=True, max_length=120)),
('mimetype', models.CharField(verbose_name='Mimetype', editable=False, null=True, max_length=120)),
('encoding', models.CharField(verbose_name='Encoding', editable=False, null=True, max_length=60)),
('description', models.TextField(verbose_name='Description', blank=True, null=True)),
('file', models.FileField(upload_to=djangobmf.utils.generate_filename.generate_filename, storage=djangobmf.storage.Storage(), verbose_name='File')),
('size', models.PositiveIntegerField(editable=False, blank=True, null=True)),
('sha1', models.CharField(verbose_name='SHA1', editable=False, null=True, max_length=40)),
('is_static', models.BooleanField(editable=False, default=True)),
('file_exists', models.BooleanField(default=True)),
('content_id', models.PositiveIntegerField(editable=False, blank=True, null=True)),
('modified', models.DateTimeField(auto_now=True, verbose_name='Modified', null=True)),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Created', null=True)),
('content_type', models.ForeignKey(related_name='bmf_document', null=True, editable=False, on_delete=django.db.models.deletion.SET_NULL, blank=True, to='contenttypes.ContentType')),
('created_by', models.ForeignKey(related_name='+', null=True, editable=False, on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL)),
('customer', models.ForeignKey(related_name='documents', null=True, editable=False, on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.BMF_CONTRIB_CUSTOMER)),
('modified_by', models.ForeignKey(related_name='+', null=True, editable=False, on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL)),
('project', models.ForeignKey(related_name='documents', null=True, editable=False, on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.BMF_CONTRIB_PROJECT)),
],
options={
'get_latest_by': 'modified',
'verbose_name': 'Document',
'permissions': [('view_document', 'Can view documents')],
'verbose_name_plural': 'Documents',
'abstract': False,
},
),
migrations.CreateModel(
name='Notification',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
('watch_id', models.PositiveIntegerField(null=True, db_index=True)),
('triggered', models.BooleanField(verbose_name='Triggered', default=True, editable=False, db_index=True)),
('unread', models.BooleanField(verbose_name='Unread', default=True, editable=False, db_index=True)),
('last_seen_object', models.PositiveIntegerField(null=True)),
('new_entry', models.BooleanField(verbose_name='New entry', default=False, db_index=True)),
('comments', models.BooleanField(verbose_name='Comment written', default=False, db_index=True)),
('files', models.BooleanField(verbose_name='File added', default=False, db_index=True)),
('detectchanges', models.BooleanField(verbose_name='Object changed', default=False, db_index=True)),
('workflow', models.BooleanField(verbose_name='Workflowstate changed', default=False, db_index=True)),
('modified', models.DateTimeField(verbose_name='Modified', default=django.utils.timezone.now, editable=False, null=True)),
('user', models.ForeignKey(null=True, blank=True, to=settings.AUTH_USER_MODEL)),
('watch_ct', models.ForeignKey(to='contenttypes.ContentType', null=True)),
],
options={
'ordering': ('-modified',),
'verbose_name_plural': 'Watched activities',
'abstract': False,
'verbose_name': 'Watched activity',
'get_latest_by': 'modified',
'default_permissions': (),
},
),
migrations.CreateModel(
name='NumberRange',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
('period_start', models.DateField(null=True, db_index=True)),
('period_final', models.DateField(null=True, db_index=True)),
('counter', models.PositiveIntegerField(default=1, null=True)),
('ct', models.ForeignKey(related_name='+', null=True, editable=False, to='contenttypes.ContentType')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Renderer',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
('name', models.CharField(verbose_name='Name', max_length=20)),
('size', models.CharField(verbose_name='Size', default='A4/A', max_length=20)),
('letter', models.BooleanField(verbose_name='Letter', default=True)),
('extra', models.BooleanField(verbose_name='Extra', default=False)),
('letter_margin_right', models.PositiveIntegerField(verbose_name='Letter margin right', default=10)),
('letter_margin_bottom', models.PositiveIntegerField(verbose_name='Letter margin bottom', default=40)),
('letter_extra_right', models.PositiveIntegerField(verbose_name='Letter extra right', default=10)),
('letter_extra_top', models.PositiveIntegerField(verbose_name='Letter extra top', default=10)),
('letter_footer_right', models.PositiveIntegerField(verbose_name='Letter footer height', default=10)),
('page_margin_right', models.PositiveIntegerField(verbose_name='Letter margin right', default=10)),
('page_margin_bottom', models.PositiveIntegerField(verbose_name='Letter margin bottom', default=15)),
('page_margin_top', models.PositiveIntegerField(verbose_name='Letter margin top', default=20)),
('modified', models.DateTimeField(auto_now=True, verbose_name='Modified')),
('letter_background', djangobmf.fields.file.FileField(related_name='+', null=True, verbose_name='Letter background', blank=True, to='djangobmf.Document')),
('page_background', djangobmf.fields.file.FileField(related_name='+', null=True, verbose_name='Page background', blank=True, to='djangobmf.Document')),
],
options={
'verbose_name': 'Renderer',
'get_latest_by': 'modified',
'verbose_name_plural': 'Renderer',
'abstract': False,
},
),
migrations.CreateModel(
name='Report',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
('key', models.CharField(editable=False, null=True, db_index=True, verbose_name='Key', blank=True, max_length=255)),
('modified', models.DateTimeField(auto_now=True, verbose_name='Modified')),
('contenttype', models.ForeignKey(help_text='Connect a Report to an BMF-Model', related_name='bmf_report', null=True, editable=False, blank=True, to='contenttypes.ContentType')),
('renderer', models.ForeignKey(help_text='Connect a Report to an Renderer', related_name='reports', null=True, on_delete=django.db.models.deletion.SET_NULL, blank=True, to='djangobmf.Renderer')),
],
options={
'verbose_name': 'Report',
'get_latest_by': 'modified',
'verbose_name_plural': 'Reports',
'abstract': False,
},
),
migrations.AlterUniqueTogether(
name='configuration',
unique_together=set([('app_label', 'field_name')]),
),
migrations.AlterUniqueTogether(
name='numberrange',
unique_together=set([('ct', 'period_start', 'period_final')]),
),
migrations.AlterUniqueTogether(
name='notification',
unique_together=set([('user', 'watch_ct', 'watch_id')]),
),
]
|
django-bmf/django-bmf
|
djangobmf/migrations/0001_squashed_0_2_9.py
|
Python
|
bsd-3-clause
| 12,381 | 0.005169 |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from ardrone_autonomy/navdata_time.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import std_msgs.msg
class navdata_time(genpy.Message):
_md5sum = "8642a5656fdcc9310938a1807c46d608"
_type = "ardrone_autonomy/navdata_time"
_has_header = True #flag to mark the presence of a Header object
_full_text = """Header header
float64 drone_time
uint16 tag
uint16 size
uint32 time
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
"""
__slots__ = ['header','drone_time','tag','size','time']
_slot_types = ['std_msgs/Header','float64','uint16','uint16','uint32']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,drone_time,tag,size,time
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(navdata_time, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.drone_time is None:
self.drone_time = 0.
if self.tag is None:
self.tag = 0
if self.size is None:
self.size = 0
if self.time is None:
self.time = 0
else:
self.header = std_msgs.msg.Header()
self.drone_time = 0.
self.tag = 0
self.size = 0
self.time = 0
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_d2HI.pack(_x.drone_time, _x.tag, _x.size, _x.time))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 16
(_x.drone_time, _x.tag, _x.size, _x.time,) = _struct_d2HI.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_d2HI.pack(_x.drone_time, _x.tag, _x.size, _x.time))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 16
(_x.drone_time, _x.tag, _x.size, _x.time,) = _struct_d2HI.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_3I = struct.Struct("<3I")
_struct_d2HI = struct.Struct("<d2HI")
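# Construction sketch (field values are illustrative only; assumes the
# message package has been built so ardrone_autonomy.msg is importable):
#
#   from io import BytesIO
#   from ardrone_autonomy.msg import navdata_time
#
#   msg = navdata_time(drone_time=12.5, tag=0, size=0, time=0)
#   buff = BytesIO()
#   msg.serialize(buff)                # pack into the ROS wire format
#   copy = navdata_time()
#   copy.deserialize(buff.getvalue())  # round-trip back into a message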
|
aslab/rct
|
mrt/src/tum_simulator_ws/devel/lib/python2.7/dist-packages/ardrone_autonomy/msg/_navdata_time.py
|
Python
|
gpl-3.0
| 6,555 | 0.019222 |
#!/usr/bin/env python
"""
Framework to start a simulated vehicle and connect it to MAVProxy.
Peter Barker, April 2016
based on sim_vehicle.sh by Andrew Tridgell, October 2011
"""
import atexit
import getpass
import optparse
import os
import os.path
import signal
import subprocess
import sys
import tempfile
import time
import shlex
# List of open terminal windows for macosx
windowID = []
class CompatError(Exception):
"""A custom exception class to hold state if we encounter the parse error we are looking for"""
def __init__(self, error, opts, rargs):
Exception.__init__(self, error)
self.opts = opts
self.rargs = rargs
class CompatOptionParser(optparse.OptionParser):
"""An option parser which emulates the behaviour of the old sim_vehicle.sh; if passed -C, the first argument not understood starts a list of arguments that are passed straight to mavproxy"""
def __init__(self, *args, **kwargs):
optparse.OptionParser.__init__(self, *args, **kwargs)
def error(self, error):
"""Override default error handler called by optparse.OptionParser.parse_args when a parse error occurs; raise a detailed exception which can be caught"""
if error.find("no such option") != -1:
raise CompatError(error, self.values, self.rargs)
optparse.OptionParser.error(self, error)
def parse_args(self, args=None, values=None):
"""Wrap parse_args so we can catch the exception raised upon discovering the known parameter parsing error"""
try:
opts, args = optparse.OptionParser.parse_args(self)
except CompatError as e:
if not e.opts.sim_vehicle_sh_compatible:
print(e)
print("Perhaps you want --sim_vehicle_sh_compatible (-C)?")
sys.exit(1)
if e.opts.mavproxy_args:
print("--mavproxy-args not permitted in compat mode")
sys.exit(1)
args = []
opts = e.opts
mavproxy_args = [str(e)[16:]] # this trims "no such option" off
mavproxy_args.extend(e.rargs)
opts.ensure_value("mavproxy_args", " ".join(mavproxy_args))
return opts, args
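# Illustration of the compatibility behaviour above (flag names are only an
# example; which flags this script itself defines is not shown here). With
# -C, the first option sim_vehicle.py does not recognise and everything after
# it are collected into opts.mavproxy_args and handed to MAVProxy unchanged:
#
#   sim_vehicle.py -C --map --console
#     -> opts.mavproxy_args == "--map --console"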
def cygwin_pidof(proc_name):
""" Thanks to kata198 for this:
https://github.com/kata198/cygwin-ps-misc/blob/master/pidof
"""
pipe = subprocess.Popen("ps -ea | grep " + proc_name, shell=True, stdout=subprocess.PIPE)
output_lines = pipe.stdout.read().replace("\r", "").split("\n")
ret = pipe.wait()
pids = []
if ret != 0:
# No results
return []
for line in output_lines:
if not line:
continue
line_split = [item for item in line.split(' ') if item]
cmd = line_split[-1].split('/')[-1]
if cmd == proc_name:
try:
pid = int(line_split[0].strip())
except:
pid = int(line_split[1].strip())
if pid not in pids:
pids.append(pid)
return pids
def under_cygwin():
"""Return if Cygwin binary exist"""
return os.path.exists("/usr/bin/cygstart")
def under_macos():
"""Return True when running on macOS (Darwin)"""
return sys.platform == 'darwin'
def kill_tasks_cygwin(victims):
"""Shell out to ps -ea to find processes to kill"""
for victim in list(victims):
pids = cygwin_pidof(victim)
# progress("pids for (%s): %s" % (victim,",".join([ str(p) for p in pids])))
for apid in pids:
os.kill(apid, signal.SIGKILL)
def kill_tasks_macos():
"""Close any macOS Terminal windows this script opened"""
for window in windowID:
cmd = "osascript -e \'tell application \"Terminal\" to close (window(get index of window id %s))\'" % window
os.system(cmd)
def kill_tasks_psutil(victims):
"""Use the psutil module to kill tasks by name. Sadly, this module is not available on Windows, but when it is we should be able to *just* use this routine"""
import psutil
for proc in psutil.process_iter():
if proc.status == psutil.STATUS_ZOMBIE:
continue
if proc.name in victims:
proc.kill()
def kill_tasks_pkill(victims):
"""Shell out to pkill(1) to kill processed by name"""
for victim in victims: # pkill takes a single pattern, so iterate
cmd = ["pkill", victim]
run_cmd_blocking("pkill", cmd, quiet=True)
class BobException(Exception):
"""Handle Bob's Exceptions"""
pass
def kill_tasks():
"""Clean up stray processes by name. This is a somewhat shotgun approach"""
progress("Killing tasks")
try:
victim_names = {
'JSBSim',
'lt-JSBSim',
'ArduPlane.elf',
'ArduCopter.elf',
'APMrover2.elf',
'AntennaTracker.elf',
'JSBSIm.exe',
'MAVProxy.exe',
'runsim.py',
}
for frame in _options_for_frame.keys():
if "waf_target" not in _options_for_frame[frame]:
continue
exe_name = os.path.basename(_options_for_frame[frame]["waf_target"])
victim_names.add(exe_name)
if under_cygwin():
return kill_tasks_cygwin(victim_names)
if under_macos():
return kill_tasks_macos()
try:
kill_tasks_psutil(victim_names)
except ImportError:
kill_tasks_pkill(victim_names)
except Exception as e:
progress("kill_tasks failed: {}".format(str(e)))
def check_jsbsim_version():
"""Assert that the JSBSim we will run is the one we expect to run"""
jsbsim_cmd = ["JSBSim", "--version"]
progress_cmd("Get JSBSim version", jsbsim_cmd)
try:
jsbsim_version = subprocess.Popen(jsbsim_cmd, stdout=subprocess.PIPE).communicate()[0]
except OSError:
jsbsim_version = '' # this value will trigger the ".index"
# check below and produce a reasonable
# error message
try:
jsbsim_version.index("ArduPilot")
except ValueError:
print(r"""
=========================================================
You need the latest ArduPilot version of JSBSim installed
and in your $PATH
Please get it from git://github.com/tridge/jsbsim.git
See
http://ardupilot.org/dev/docs/setting-up-sitl-on-linux.html
for more details
=========================================================
""")
sys.exit(1)
def progress(text):
"""Display sim_vehicle progress text"""
print("SIM_VEHICLE: " + text)
def find_autotest_dir():
"""Return path to autotest directory"""
return os.path.dirname(os.path.realpath(__file__))
def find_root_dir():
"""Return path to root directory"""
return os.path.realpath(os.path.join(find_autotest_dir(), '../..'))
"""
make_target: option passed to make to create binaries. Usually sitl, and "-debug" may be appended if -D is passed to sim_vehicle.py
default_params_filename: filename of default parameters file. Taken to be relative to autotest dir.
extra_mavlink_cmds: extra commands that will be passed to mavproxy at startup
"""
_options_for_frame = {
"calibration": {
"extra_mavlink_cmds": "module load sitl_calibration;",
},
# COPTER
"+": {
"waf_target": "bin/arducopter-quad",
"default_params_filename": "default_params/copter.parm",
},
"quad": {
"model": "+",
"waf_target": "bin/arducopter-quad",
"default_params_filename": "default_params/copter.parm",
},
"X": {
"waf_target": "bin/arducopter-quad",
# this param set FRAME doesn't actually work because mavproxy
# won't set a parameter unless it knows of it, and the param fetch happens asynchronously
"default_params_filename": "default_params/copter.parm",
"extra_mavlink_cmds": "param fetch frame; param set FRAME 1;",
},
"hexa": {
"make_target": "sitl-hexa",
"waf_target": "bin/arducopter-hexa",
"default_params_filename": "default_params/copter.parm",
},
"octa": {
"make_target": "sitl-octa",
"waf_target": "bin/arducopter-octa",
"default_params_filename": "default_params/copter.parm",
},
"tri": {
"make_target": "sitl-tri",
"waf_target": "bin/arducopter-tri",
"default_params_filename": "default_params/copter-tri.parm",
},
"y6": {
"make_target": "sitl-y6",
"waf_target": "bin/arducopter-y6",
"default_params_filename": "default_params/copter-y6.parm",
},
# COPTER TYPES
"IrisRos": {
"waf_target": "bin/arducopter-quad",
"default_params_filename": "default_params/copter.parm",
},
"firefly": {
"waf_target": "bin/arducopter-firefly",
"default_params_filename": "default_params/firefly.parm",
},
# HELICOPTER
"heli": {
"make_target": "sitl-heli",
"waf_target": "bin/arducopter-heli",
"default_params_filename": "default_params/copter-heli.parm",
},
"heli-dual": {
"make_target": "sitl-heli-dual",
"waf_target": "bin/arducopter-coax", # is this correct? -pb201604301447
},
"heli-compound": {
"make_target": "sitl-heli-compound",
"waf_target": "bin/arducopter-coax", # is this correct? -pb201604301447
},
"singlecopter": {
"make_target": "sitl-single",
"waf_target": "bin/arducopter-single",
"default_params_filename": "default_params/copter-single.parm",
},
"coaxcopter": {
"make_target": "sitl-coax",
"waf_target": "bin/arducopter-coax",
"default_params_filename": "default_params/copter-coax.parm",
},
# PLANE
"quadplane-tilttri": {
"make_target": "sitl-tri",
"waf_target": "bin/arduplane-tri",
"default_params_filename": "default_params/quadplane-tilttri.parm",
},
"quadplane-tri": {
"make_target": "sitl-tri",
"waf_target": "bin/arduplane-tri",
"default_params_filename": "default_params/quadplane-tri.parm",
},
"quadplane": {
"waf_target": "bin/arduplane",
"default_params_filename": "default_params/quadplane.parm",
},
"plane-elevon": {
"waf_target": "bin/arduplane",
"default_params_filename": "default_params/plane-elevons.parm",
},
"plane-vtail": {
"waf_target": "bin/arduplane",
"default_params_filename": "default_params/plane-vtail.parm",
},
"plane": {
"waf_target": "bin/arduplane",
"default_params_filename": "default_params/plane.parm",
},
# ROVER
"rover": {
"waf_target": "bin/ardurover",
"default_params_filename": "default_params/rover.parm",
},
"rover-skid": {
"waf_target": "bin/ardurover",
"default_params_filename": "default_params/rover-skid.parm",
},
# SIM
"gazebo-iris": {
"waf_target": "bin/arducopter-quad",
"default_params_filename": "default_params/gazebo-iris.parm",
},
"gazebo-zephyr": {
"waf_target": "bin/arduplane",
"default_params_filename": "default_params/gazebo-zephyr.parm",
},
"last_letter": {
"waf_target": "bin/arduplane",
},
"CRRCSim": {
"waf_target": "bin/arduplane",
},
"jsbsim": {
"waf_target": "bin/arduplane",
"default_params_filename": "default_params/plane-jsbsim.parm",
},
}
_default_waf_target = {
"ArduPlane": "bin/arduplane",
"ArduCopter": "bin/arducopter-quad",
"APMrover2": "bin/ardurover",
"AntennaTracker": "bin/antennatracker",
}
def default_waf_target(vehicle):
"""Returns a waf target based on vehicle type, which is often determined by which directory the user is in"""
return _default_waf_target[vehicle]
def options_for_frame(frame, vehicle, opts):
"""Return informatiom about how to sitl for frame e.g. build-type==sitl"""
ret = None
if frame in _options_for_frame:
ret = _options_for_frame[frame]
else:
for p in ["octa", "tri", "y6", "firefly", "heli", "gazebo", "last_letter", "jsbsim", "quadplane", "plane-elevon", "plane-vtail", "plane"]:
if frame.startswith(p):
ret = _options_for_frame[p]
break
if ret is None:
if frame.endswith("-heli"):
ret = _options_for_frame["heli"]
if ret is None:
ret = {}
if "model" not in ret:
ret["model"] = frame
if "sitl-port" not in ret:
ret["sitl-port"] = True
if opts.model is not None:
ret["model"] = opts.model
if (ret["model"].find("xplane") != -1 or ret["model"].find("flightaxis") != -1):
ret["sitl-port"] = False
if "make_target" not in ret:
ret["make_target"] = "sitl"
if "waf_target" not in ret:
ret["waf_target"] = default_waf_target(vehicle)
if opts.build_target is not None:
ret["make_target"] = opts.build_target
ret["waf_target"] = opts.build_target
return ret
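# Illustrative sketch (not part of the original script): how a frame string
# resolves through the tables above. The opts object is hypothetical and only
# mimics the two attributes options_for_frame() reads.
#
#   class _FakeOpts(object):
#       model = None
#       build_target = None
#   info = options_for_frame("heli", "ArduCopter", _FakeOpts())
#   # info["make_target"] == "sitl-heli"
#   # info["waf_target"] == "bin/arducopter-heli"
#   # info["model"] == "heli", info["sitl-port"] is True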
def do_build_waf(opts, frame_options):
"""Build sitl using waf"""
progress("WAF build")
old_dir = os.getcwd()
root_dir = find_root_dir()
os.chdir(root_dir)
waf_light = os.path.join(root_dir, "modules/waf/waf-light")
cmd_configure = [waf_light, "configure", "--board", "sitl"]
if opts.debug:
cmd_configure.append("--debug")
pieces = [ shlex.split(x) for x in opts.waf_configure_args ]
for piece in pieces:
cmd_configure.extend(piece)
run_cmd_blocking("Configure waf", cmd_configure, check=True)
if opts.clean:
run_cmd_blocking("Building clean", [waf_light, "clean"])
cmd_build = [waf_light, "build", "--target", frame_options["waf_target"]]
if opts.jobs is not None:
cmd_build += ['-j', str(opts.jobs)]
pieces = [ shlex.split(x) for x in opts.waf_build_args ]
for piece in pieces:
cmd_build.extend(piece)
_, sts = run_cmd_blocking("Building", cmd_build)
if sts != 0: # build failed
if opts.rebuild_on_failure:
progress("Build failed; cleaning and rebuilding")
run_cmd_blocking("Building clean", [waf_light, "clean"])
_, sts = run_cmd_blocking("Building", cmd_build)
if sts != 0:
progress("Build failed")
sys.exit(1)
else:
progress("Build failed")
sys.exit(1)
os.chdir(old_dir)
def do_build(vehicledir, opts, frame_options):
"""Build build target (e.g. sitl) in directory vehicledir"""
if opts.build_system == 'waf':
return do_build_waf(opts, frame_options)
old_dir = os.getcwd()
os.chdir(vehicledir)
if opts.clean:
run_cmd_blocking("Building clean", ["make", "clean"])
build_target = frame_options["make_target"]
if opts.debug:
build_target += "-debug"
build_cmd = ["make", build_target]
if opts.jobs is not None:
build_cmd += ['-j', str(opts.jobs)]
_, sts = run_cmd_blocking("Building %s" % build_target, build_cmd)
if sts != 0:
progress("Build failed; cleaning and rebuilding")
run_cmd_blocking("Cleaning", ["make", "clean"])
_, sts = run_cmd_blocking("Building %s" % build_target, build_cmd)
if sts != 0:
progress("Build failed")
sys.exit(1)
os.chdir(old_dir)
def find_location_by_name(autotest, locname):
"""Search locations.txt for locname, return GPS coords"""
locations_filepath = os.path.join(autotest, "locations.txt")
for line in open(locations_filepath, 'r'):
line = line.rstrip("\n")
(name, loc) = line.split("=")
if name == locname:
return loc
print("Failed to find location (%s)" % cmd_opts.location)
sys.exit(1)
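# Each line of locations.txt pairs a name with a location string, for example
# (illustrative values only):
#
#   CMAC=-35.363261,149.165230,584,353
#
# find_location_by_name() returns everything to the right of the "=".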
def progress_cmd(what, cmd):
"""Print cmd in a way a user could cut-and-paste to get the same effect"""
progress(what)
shell_text = "%s" % (" ".join(['"%s"' % x for x in cmd]))
progress(shell_text)
def run_cmd_blocking(what, cmd, quiet=False, check=False, **kw):
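"""Run cmd and wait for it to exit; returns (pid, status) as from os.waitpid"""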
if not quiet:
progress_cmd(what, cmd)
p = subprocess.Popen(cmd, **kw)
ret = os.waitpid(p.pid, 0)
_, sts = ret
if check and sts != 0:
progress("(%s) exited with code %d" % (what,sts,))
sys.exit(1)
return ret
def run_in_terminal_window(autotest, name, cmd):
"""Execute the run_in_terminal_window.sh command for cmd"""
global windowID
runme = [os.path.join(autotest, "run_in_terminal_window.sh"), name]
runme.extend(cmd)
progress_cmd("Run " + name, runme)
if under_macos():
# on MacOS record the window IDs so we can close them later
out = subprocess.Popen(runme, stdout=subprocess.PIPE).communicate()[0]
import re
p = re.compile('tab 1 of window id (.*)')
windowID.append(p.findall(out)[0])
else:
p = subprocess.Popen(runme)
tracker_uarta = None # blemish
def start_antenna_tracker(autotest, opts):
"""Compile and run the AntennaTracker, add tracker to mavproxy"""
global tracker_uarta
progress("Preparing antenna tracker")
tracker_home = find_location_by_name(find_autotest_dir(), opts.tracker_location)
vehicledir = os.path.join(autotest, "../../" + "AntennaTracker")
tracker_frame_options = {
"waf_target": _default_waf_target["AntennaTracker"],
}
do_build(vehicledir, opts, tracker_frame_options)
tracker_instance = 1
os.chdir(vehicledir)
tracker_uarta = "tcp:127.0.0.1:" + str(5760 + 10 * tracker_instance)
exe = os.path.join(vehicledir, "AntennaTracker.elf")
run_in_terminal_window(autotest, "AntennaTracker", ["nice", exe, "-I" + str(tracker_instance), "--model=tracker", "--home=" + tracker_home])
def start_vehicle(binary, autotest, opts, stuff, loc):
"""Run the ArduPilot binary"""
cmd_name = opts.vehicle
cmd = []
if opts.valgrind:
cmd_name += " (valgrind)"
cmd.append("valgrind")
if opts.gdb:
cmd_name += " (gdb)"
cmd.append("gdb")
gdb_commands_file = tempfile.NamedTemporaryFile(delete=False)
atexit.register(os.unlink, gdb_commands_file.name)
for breakpoint in opts.breakpoint:
gdb_commands_file.write("b %s\n" % (breakpoint,))
gdb_commands_file.write("r\n")
gdb_commands_file.close()
cmd.extend(["-x", gdb_commands_file.name])
cmd.append("--args")
if opts.strace:
cmd_name += " (strace)"
cmd.append("strace")
strace_options = ['-o', binary + '.strace', '-s', '8000', '-ttt']
cmd.extend(strace_options)
cmd.append(binary)
cmd.append("-S")
cmd.append("-I" + str(opts.instance))
cmd.extend(["--home", loc])
if opts.wipe_eeprom:
cmd.append("-w")
cmd.extend(["--model", stuff["model"]])
cmd.extend(["--speedup", str(opts.speedup)])
if opts.sitl_instance_args:
cmd.extend(opts.sitl_instance_args.split(" ")) # this could be a lot better..
if opts.mavlink_gimbal:
cmd.append("--gimbal")
if "default_params_filename" in stuff:
path = os.path.join(autotest, stuff["default_params_filename"])
progress("Using defaults from (%s)" % (path,))
cmd.extend(["--defaults", path])
run_in_terminal_window(autotest, cmd_name, cmd)
def start_mavproxy(opts, stuff):
"""Run mavproxy"""
# FIXME: would be nice to e.g. "mavproxy.mavproxy(....).run" rather than shelling out
extra_cmd = ""
cmd = []
if under_cygwin():
cmd.append("/usr/bin/cygstart")
cmd.append("-w")
cmd.append("/cygdrive/c/Program Files (x86)/MAVProxy/mavproxy.exe")
else:
cmd.append("mavproxy.py")
if opts.hil:
cmd.extend(["--load-module", "HIL"])
else:
cmd.extend(["--master", mavlink_port])
if stuff["sitl-port"]:
cmd.extend(["--sitl", simout_port])
# If running inside of a vagrant guest, then we probably want to forward our mavlink out to the containing host OS
ports = [p + 10 * cmd_opts.instance for p in [14550,14551]]
for port in ports:
if getpass.getuser() == "vagrant":
cmd.extend(["--out", "10.0.2.2:" + str(port)])
else:
cmd.extend(["--out", "127.0.0.1:" + str(port)])
if opts.tracker:
cmd.extend(["--load-module", "tracker"])
global tracker_uarta
# tracker_uarta is set when we start the tracker...
extra_cmd += "module load map; tracker set port %s; tracker start; tracker arm;" % (tracker_uarta,)
if opts.mavlink_gimbal:
cmd.extend(["--load-module", "gimbal"])
if "extra_mavlink_cmds" in stuff:
extra_cmd += " " + stuff["extra_mavlink_cmds"]
if opts.mavproxy_args:
cmd.extend(opts.mavproxy_args.split(" ")) # this could be a lot better..
# compatibility pass-through parameters (for those that don't want
# to use -C :-)
for out in opts.out:
cmd.extend(['--out', out])
if opts.map:
cmd.append('--map')
if opts.console:
cmd.append('--console')
if opts.aircraft is not None:
cmd.extend(['--aircraft', opts.aircraft])
if len(extra_cmd):
cmd.extend(['--cmd', extra_cmd])
local_mp_modules_dir = os.path.abspath(
os.path.join(__file__, '..', '..', 'mavproxy_modules'))
env = dict(os.environ)
env['PYTHONPATH'] = local_mp_modules_dir + os.pathsep + env.get('PYTHONPATH', '')
run_cmd_blocking("Run MavProxy", cmd, env=env)
progress("MAVProxy exitted")
# define and run parser
parser = CompatOptionParser("sim_vehicle.py",
epilog="eeprom.bin in the starting directory contains the parameters for your " \
"simulated vehicle. Always start from the same directory. It is "\
"recommended that you start in the main vehicle directory for the vehicle" \
"you are simulating, for example, start in the ArduPlane directory to " \
"simulate ArduPlane")
parser.add_option("-v", "--vehicle", type='string', default=None, help="vehicle type (ArduPlane, ArduCopter or APMrover2)")
parser.add_option("-f", "--frame", type='string', default=None, help="""set aircraft frame type
for copters can choose +, X, quad or octa
for planes can choose elevon or vtail""")
parser.add_option("-C", "--sim_vehicle_sh_compatible", action='store_true', default=False, help="be compatible with the way sim_vehicle.sh works; make this the first option")
parser.add_option("-H", "--hil", action='store_true', default=False, help="start HIL")
group_build = optparse.OptionGroup(parser, "Build options")
group_build.add_option("-N", "--no-rebuild", action='store_true', default=False, help="don't rebuild before starting ardupilot")
group_build.add_option("-D", "--debug", action='store_true', default=False, help="build with debugging")
group_build.add_option("-c", "--clean", action='store_true', default=False, help="do a make clean before building")
group_build.add_option("-j", "--jobs", default=None, type='int', help="number of processors to use during build (default for waf : number of processor, for make : 1)")
group_build.add_option("-b", "--build-target", default=None, type='string', help="override SITL build target")
group_build.add_option("-s", "--build-system", default="waf", type='choice', choices=["make", "waf"], help="build system to use")
group_build.add_option("", "--rebuild-on-failure", dest="rebuild_on_failure", action='store_true', default=False, help="if build fails, do not clean and rebuild")
group_build.add_option("", "--waf-configure-arg", action="append", dest="waf_configure_args", type="string", default=[], help="extra arguments to pass to waf in its configure step")
group_build.add_option("", "--waf-build-arg", action="append", dest="waf_build_args", type="string", default=[], help="extra arguments to pass to waf in its build step")
parser.add_option_group(group_build)
group_sim = optparse.OptionGroup(parser, "Simulation options")
group_sim.add_option("-I", "--instance", default=0, type='int', help="instance of simulator")
group_sim.add_option("-V", "--valgrind", action='store_true', default=False, help="enable valgrind for memory access checking (very slow!)")
group_sim.add_option("-T", "--tracker", action='store_true', default=False, help="start an antenna tracker instance")
group_sim.add_option("-A", "--sitl-instance-args", type='string', default=None, help="pass arguments to SITL instance")
# group_sim.add_option("-R", "--reverse-throttle", action='store_true', default=False, help="reverse throttle in plane")
group_sim.add_option("-G", "--gdb", action='store_true', default=False, help="use gdb for debugging ardupilot")
group_sim.add_option("-g", "--gdb-stopped", action='store_true', default=False, help="use gdb for debugging ardupilot (no auto-start)")
group_sim.add_option("-d", "--delay-start", default=0, type='float', help="delays the start of mavproxy by the number of seconds")
group_sim.add_option("-B", "--breakpoint", type='string', action="append", default=[], help="add a breakpoint at given location in debugger")
group_sim.add_option("-M", "--mavlink-gimbal", action='store_true', default=False, help="enable MAVLink gimbal")
group_sim.add_option("-L", "--location", type='string', default='CMAC', help="select start location from Tools/autotest/locations.txt")
group_sim.add_option("-l", "--custom-location", type='string', default=None, help="set custom start location")
group_sim.add_option("-S", "--speedup", default=1, type='int', help="set simulation speedup (1 for wall clock time)")
group_sim.add_option("-t", "--tracker-location", default='CMAC_PILOTSBOX', type='string', help="set antenna tracker start location")
group_sim.add_option("-w", "--wipe-eeprom", action='store_true', default=False, help="wipe EEPROM and reload parameters")
group_sim.add_option("-m", "--mavproxy-args", default=None, type='string', help="additional arguments to pass to mavproxy.py")
group_sim.add_option("", "--strace", action='store_true', default=False, help="strace the ArduPilot binary")
group_sim.add_option("", "--model", type='string', default=None, help="Override simulation model to use")
parser.add_option_group(group_sim)
# special-cased parameters for mavproxy, because some people's fingers
# have long memories, and they don't want to use -C :-)
group = optparse.OptionGroup(parser, "Compatibility MAVProxy options (consider using --mavproxy-args instead)")
group.add_option("", "--out", default=[], type='string', action="append", help="create an additional mavlink output")
group.add_option("", "--map", default=False, action='store_true', help="load map module on startup")
group.add_option("", "--console", default=False, action='store_true', help="load console module on startup")
group.add_option("", "--aircraft", default=None, help="store state and logs in named directory")
parser.add_option_group(group)
cmd_opts, cmd_args = parser.parse_args()
# clean up processes at exit:
atexit.register(kill_tasks)
progress("Start")
if cmd_opts.sim_vehicle_sh_compatible and cmd_opts.jobs is None:
cmd_opts.jobs = 1
# validate parameters
if cmd_opts.hil:
if cmd_opts.valgrind:
print("May not use valgrind with hil")
sys.exit(1)
if cmd_opts.gdb or cmd_opts.gdb_stopped:
print("May not use gdb with hil")
sys.exit(1)
if cmd_opts.strace:
print("May not use strace with hil")
sys.exit(1)
if cmd_opts.valgrind and (cmd_opts.gdb or cmd_opts.gdb_stopped):
print("May not use valgrind with gdb")
sys.exit(1)
if cmd_opts.strace and (cmd_opts.gdb or cmd_opts.gdb_stopped):
print("May not use strace with gdb")
sys.exit(1)
if cmd_opts.strace and cmd_opts.valgrind:
print("valgrind and strace almost certainly not a good idea")
# magically determine vehicle type (if required):
if cmd_opts.vehicle is None:
cwd = os.getcwd()
cmd_opts.vehicle = os.path.basename(cwd)
# determine a frame type if not specified:
default_frame_for_vehicle = {
"APMrover2": "rover",
"ArduPlane": "jsbsim",
"ArduCopter": "quad",
"AntennaTracker": "tracker",
}
if cmd_opts.vehicle not in default_frame_for_vehicle:
# try in parent directories, useful for having config in subdirectories
cwd = os.getcwd()
while cwd:
bname = os.path.basename(cwd)
if not bname:
break
if bname in default_frame_for_vehicle:
cmd_opts.vehicle = bname
break
cwd = os.path.dirname(cwd)
# try to validate vehicle
if cmd_opts.vehicle not in default_frame_for_vehicle:
progress("** Is (%s) really your vehicle type? Try -v VEHICLETYPE if not, or be in the e.g. ArduCopter subdirectory" % (cmd_opts.vehicle,))
# determine frame options (e.g. build type might be "sitl")
if cmd_opts.frame is None:
cmd_opts.frame = default_frame_for_vehicle[cmd_opts.vehicle]
# setup ports for this instance
mavlink_port = "tcp:127.0.0.1:" + str(5760 + 10 * cmd_opts.instance)
simout_port = "127.0.0.1:" + str(5501 + 10 * cmd_opts.instance)
frame_infos = options_for_frame(cmd_opts.frame, cmd_opts.vehicle, cmd_opts)
if frame_infos["model"] == "jsbsim":
check_jsbsim_version()
vehicle_dir = os.path.realpath(os.path.join(find_root_dir(), cmd_opts.vehicle))
if not os.path.exists(vehicle_dir):
print("vehicle directory (%s) does not exist" % (vehicle_dir,))
sys.exit(1)
if not cmd_opts.hil:
if cmd_opts.instance == 0:
kill_tasks()
if cmd_opts.tracker:
start_antenna_tracker(find_autotest_dir(), cmd_opts)
if cmd_opts.custom_location:
location = cmd_opts.custom_location
progress("Starting up at %s" % (location,))
else:
location = find_location_by_name(find_autotest_dir(), cmd_opts.location)
progress("Starting up at %s (%s)" % (location, cmd_opts.location))
if cmd_opts.hil:
# (unlikely)
run_in_terminal_window(find_autotest_dir(), "JSBSim", [os.path.join(find_autotest_dir(), "jsb_sim/runsim.py"), "--home", location, "--speedup=" + str(cmd_opts.speedup)])
else:
if not cmd_opts.no_rebuild: # i.e. we should rebuild
do_build(vehicle_dir, cmd_opts, frame_infos)
if cmd_opts.build_system == "waf":
if cmd_opts.debug:
binary_basedir = "build/sitl-debug"
else:
binary_basedir = "build/sitl"
vehicle_binary = os.path.join(find_root_dir(), binary_basedir, frame_infos["waf_target"])
else:
vehicle_binary = os.path.join(vehicle_dir, cmd_opts.vehicle + ".elf")
if not os.path.exists(vehicle_binary):
print("Vehicle binary (%s) does not exist" % (vehicle_binary,))
sys.exit(1)
start_vehicle(vehicle_binary, find_autotest_dir(), cmd_opts, frame_infos, location)
if cmd_opts.delay_start:
progress("Sleeping for %f seconds" % (cmd_opts.delay_start,))
time.sleep(float(cmd_opts.delay_start))
start_mavproxy(cmd_opts, frame_infos)
sys.exit(0)
|
hsteinhaus/ardupilot
|
Tools/autotest/sim_vehicle.py
|
Python
|
gpl-3.0
| 31,053 | 0.00293 |
from pyIST import *
##############
terra_dir = '/Users/tohodson/Desktop/modis_download/MOST/MOD29E1D.005/'
aqua_dir = '/Users/tohodson/Desktop/modis_download/MOSA/MYD29E1D.005/'
hdf_file = '/Users/tohodson/Desktop/AQ_IST.hdf5'
DATAFIELD_NAME='Ice_Surface_Temperature_SP'
aqua_count = count_files(aqua_dir,'*.hdf')
terra_count = count_files(terra_dir,'*.hdf')
print aqua_count
print terra_count
print terra_count - aqua_count
day_count = 90 * 15 # 15 years with at most 90 days per year
################################################################################
# main
f = h5py.File(hdf_file,'r+') # turned off for safety
terra_set = f['/daily/MOST'] # remove XXX
terra_set.attrs.create('dates',MODIS_dates(terra_dir),dtype='S10') #remove
'''
#load terra files
######################
terra_set = f.create_dataset("/daily/MOST", (terra_count,4501,4501), dtype=np.uint16, compression="lzf", shuffle=True)
terra_set = f['/daily/MOST']
terra_set.attrs.create('dates',MODIS_dates(terra_dir),dtype='S10')
counter = 0
for filename in find_files(terra_dir, '*.hdf'):
hdf = SD(filename, SDC.READ)
# Read dataset.
data_raw = hdf.select(DATAFIELD_NAME)
    terra_set[counter] = data_raw[:,:]
counter = counter +1
print 'terra ' + str(counter)
#load aqua files
##################
aqua_set = f.create_dataset("/daily/MOSA", (aqua_count,4501,4501), dtype=np.uint16, compression="lzf", shuffle=True)
aqua_set.attrs.create('dates',MODIS_dates(aqua_dir),dtype='S10')
counter = terra_count - aqua_count #align daily records with terra
for filename in find_files(aqua_dir, '*.hdf'):
hdf = SD(filename, SDC.READ)
# Read dataset.
data_raw = hdf.select(DATAFIELD_NAME)
aqua_set[counter] = data_raw[:,:]
counter = counter +1
print 'aqua ' + str(counter)
'''
f.close()
|
tohodson/modis-hdf5
|
code/compile_hdf.py
|
Python
|
gpl-2.0
| 1,818 | 0.008801 |
#!/usr/bin/env python
from __future__ import print_function
## Automatically adapted for numpy.oldnumeric Sep 06, 2007 by alter_code1.py
# ImageD11_v0.4 Software for beamline ID11
# Copyright (C) 2005 Jon Wright
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
A script to convert edf images into bruker format
"""
import time
import numpy
from fabio.openimage import openimage
from fabio.brukerimage import brukerimage
from fabio import file_series
from pyFAI import detectors, _distortion
class darkflood:
""" apply dark and flood corrections """
def __init__(self,
darkfile = None,
darkoffset = None,
floodfile = None,
floodmultiplier = None,
splinefile = None,
border = None,
powfac = 1.0,
overflow= 65534,
detrend = None,
monitorcol = None,
monitorval = None,
maskfilename = None
):
self.overflow=overflow
self.darkfile = darkfile
self.darkoffset = darkoffset
self.floodfile = floodfile
self.splinefile = splinefile
self.distnorm = None
self.border = border
self.floodmultiplier = None
#
self.darkimage = None
self.floodimage = None
self.flmult = None
self.powfac = powfac
self.detrend = detrend
self.monitorcol = monitorcol
self.monitorval = monitorval
if maskfilename is not None:
self.mask = 1-openimage( maskfilename ).data
else:
self.mask = None
def readdark(self,darkfile):
""" read the dark"""
try:
self.darkdata = openimage(darkfile)
self.darkfile = darkfile
self.darkimage = self.darkdata.data.astype(numpy.float32)
if self.powfac != 1.0:
print("apply 0.96 to dark")
self.darkimage = numpy.power(self.darkimage, 0.96)
except:
print("No dark file")
self.darkdata = None
self.darkimage= None
self.darkfile = None
def readflood(self, floodfile):
""" read the flood """
try:
self.flooddata = openimage(floodfile)
self.floodfile = floodfile
self.floodimage = self.flooddata.data.astype(numpy.float32)
if self.floodmultiplier is None:
# centre = self.floodimage[100:-100,100:-100]
# npix = centre.shape[0]*centre.shape[1]
# self.floodmultiplier = numpy.sum(numpy.ravel(
# centre).astype(numpy.float32))/npix
self.flmult = 1 / (self.floodimage)
print("using flood from",floodfile)
except:
print("No flood file")
self.flooddata = None
self.floodimage= None
self.floodfile = None
self.floodmultiplier = None
def readspline(self, splinefile):
"""read the spline """
self.det = detectors.FReLoN(splinefile)
self.dis = _distortion.Distortion(self.det)
self.dis.calc_LUT_size()
self.dis.calc_LUT()
# remove the pixel size normalisation
im = numpy.ones((2048,2048),numpy.float32)
im2 = self.dis.correct(im)
im2 = numpy.where(im2<0.1,1,im2)
self.distnorm = 1/im2
def correct(self, dataobj, detrend = None):
""" correct the data """
tin = dataobj.data.dtype
# Start by copying
cor = dataobj.data.astype(numpy.float32).copy()
self.report = ""
msk = numpy.where( cor > self.overflow,
65534,
0 )
if self.powfac != 1.0:
self.report += "powfac %f;"%(self.powfac)
numpy.power(cor, self.powfac, cor)
if self.darkimage is not None:
numpy.subtract(cor, self.darkimage, cor)
self.report += "dark;"
if self.detrend is not None:
assert cor.dtype == numpy.float32, cor.dtype
cor = self.do_detrend( cor )
self.report += "detrend;"
if self.flmult is not None:
numpy.multiply(cor , self.flmult, cor)
self.report +="flood;"
if self.monitorcol is not None:
scal = self.monitorval / float( dataobj.header[self.monitorcol] )
numpy.multiply( cor, scal, cor )
self.report += '%s %.4f;'%(self.monitorcol, scal )
if self.border is not None:
# set the edges to zero
b=self.border
cor[:b,:]=0
cor[:,:b]=0
cor[-b:,:]=0
cor[:,-b:]=0
self.report += "border b(%d)=0;"%(self.border)
if self.splinefile is not None:
cor = self.dis.correct( cor )
numpy.multiply(cor, self.distnorm, cor)
self.report += "splinen;"
if self.darkoffset is not None:
# print "applying offset of",self.darkoffset,
numpy.add(cor, self.darkoffset, cor)
self.report += "+darkoffset %.2f;"%(self.darkoffset)
if self.mask is not None:
numpy.multiply(cor, self.mask, cor)
self.report += "fit2d style mask"
# Should we bother with this - yes please - noisy pixels overflow
cor = numpy.where(cor>0.1, cor, 0.) # truncate zero
self.report += ">0;"
cor = numpy.where(msk != 0 , msk, cor) # mask overflows
self.report += "msk>%.2f"%(self.overflow)
ret = cor.astype(tin)
return ret
def do_detrend( self, ar):
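"""Estimate a per-row background for each half of the image from its lowest
pixel values (skipping nb outliers) and subtract it, preserving the mean"""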
if self.detrend is None:
return ar
# print "detrending",
s = ar.copy()
np = ar.shape[1] // 2  # integer midpoint; // keeps this valid under Python 3 division
s[:,:np].sort()
s[:,np:].sort()
n = self.detrend
nb = 5 # bad pixels (eg negative outliers)
o1 = s[:,nb:(n+nb)].sum(axis=1)/n
o2 = s[:,(np+nb):(np+n+nb)].sum(axis=1)/n
s1 = ar.copy()
s1[:,:np] = (ar[:,:np].T - o1 + o1.mean() ).T
s1[:,np:] = (ar[:,np:].T - o2 + o2.mean() ).T
return s1
class edf2bruker:
def __init__(self,
dark,
flood,
template,
darkoffset = 100,
distance = 5.0,
border = None,
wvln = 0.5,
omegasign = 1.,
powfac = 1.0,
overflow=65534,
detrend = None,
monitorval = None,
monitorcol = None,
maskfilename=None,
splinefile = None,
omega_zero=0,
chi_zero=0):
""" converts edf (esrf id11 frelon) to bruker (esrf id11 smart6500)"""
self.distance = distance
self.overflow = overflow
self.omegasign = omegasign
self.wvln = wvln
self.powfac = powfac
self.darkflood = darkflood(darkoffset = darkoffset,
border = border,
powfac = self.powfac,
overflow = self.overflow,
splinefile = splinefile,
detrend = detrend,
monitorval = monitorval,
monitorcol = monitorcol,
maskfilename=maskfilename)
self.darkflood.readdark(dark)
self.darkflood.readflood(flood)
self.darkflood.readspline(splinefile)
self.templatefile = template
im = brukerimage()
im.readheader(template)
self.header = im.header
self.h = im.__headerstring__
self.last_time = time.time()
self.omega_zero = omega_zero
self.chi_zero = chi_zero
def putitem(self, TITLE, new):
""" put something in the header"""
if len(new)!=80:
print(new)
raise Exception(
"Sorry, you are about to corrupt the bruker header, len was %d"%(
len(new)))
h = self.h
p = h.find(TITLE)
if p!=-1:
self.h = h[:p]+new+h[p+80:]
else:
raise Exception(TITLE+" not found")
def tlog(self, msg):
new = time.time()
print("%4.2f %s"%(new-self.last_time, msg), end=' ')
self.last_time = new
def convert(self,filein,fileout):
""" convert a file """
# Read input file
data_in = openimage(filein)
#self.tlog("read")
# Apply dark and flood
corrected_image = self.darkflood.correct(data_in)
#self.tlog("corr")
# make new header
try:
sgn = self.omegasign
om = float(data_in.header["Omega"])*sgn
oms= float(data_in.header["OmegaStep"])*sgn
except:
om = 0.
oms = 0.
self.putitem("WAVELEN",
"WAVELEN:%14.7f%14.7f%14.7f"%(self.wvln,0,0)+\
" "*(80-14*3-8))
# angles are twotheta omega phi chi
try:
hphi = float(data_in.header["hphi"])
except:
hphi = 0.0
try:
hchi = float(data_in.header["hchi"])
except:
hchi = -45.0
if corrected_image.shape[0] != corrected_image.shape[1]:
# we expand to square and pad the centre
sz = max(corrected_image.shape[0], corrected_image.shape[1])
sh = corrected_image.shape
im = numpy.zeros((sz,sz),corrected_image.dtype)
numpy.add( im[0:sh[0],0:sh[1]], corrected_image[0:sh[0],0:sh[1]], im[0:sh[0],0:sh[1]] )
corrected_image = im
self.putitem("ANGLES",
"ANGLES :%14f%14f%14f%14f"%(0,om-self.omega_zero,hphi,hchi-self.chi_zero) +
" "*(80-14*4-8))
self.putitem("DISTANC",
"DISTANC:%14f"%(self.distance)+" "*(80-14-8))
self.putitem("RANGE",
"RANGE : %9f"%( abs(oms) ) + " "*58)
self.putitem("INCREME:",
"INCREME: %9f"%( oms ) + " "*58 )
self.putitem("START",
"START :%14f"%( om )+" "*(80-14-8))
self.putitem("ENDING",
"ENDING :%14f%14f%14f%14f"%(0,om+oms,hphi,hchi)+" "*(80-14*4-8))
self.putitem("NROWS",
"NROWS :%10d"%(corrected_image.shape[0])+" "*62)
self.putitem("NCOLS",
"NCOLS :%10d"%(corrected_image.shape[1])+" "*62)
self.putitem("AXIS",
"AXIS :%10d"%(2)+" "*62) # omega
# beam centre and sample to detector distance and wavelength ??
# x=hs.find("CENTER")
#self.tlog("header edit")
outf = open(fileout,"wb")
outf.write(self.h)
#self.tlog("write header")
outim = numpy.flipud(corrected_image)
outf.write( outim.astype(numpy.uint16).tostring() )
#self.tlog("write")
outf.close()
self.tlog("/s "+fileout + " " + self.darkflood.report + "\n")
if __name__=="__main__":
import sys
try:
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-e","--extn",action="store",type="string",
dest="extn",
default=".edf",
help="Filename extension, probably edf, might be cor")
parser.add_option("-n","--namestem",action="store",
type="string", dest="stem",
help="Name of the files up the digits part, eg mydata in mydata0000.edf" )
parser.add_option("-N","--newnamestem",action="store",
type="string", dest="newstem", default=None,
help="Name of the files up the digits part for output")
parser.add_option("-f","--first",action="store",
type="int", dest="first",default=0,
help="Number of first file to process, default=0")
parser.add_option("-l","--last",action="store", type="int",
dest="last",default=0,
help="Number of last file to process, default=0")
parser.add_option("-d","--dark",action="store", type="string",
dest="dark",
help="Dark current")
parser.add_option("-w","--wavelength",action="store",
type="float", dest="wvln",
default = 0.5,
help="Wavelength")
parser.add_option("-s","--sign of omega",action="store",
type="float", dest="omegasign",
default = 1.0,
help="Sign of ID11 omega rotation, +1 is right handed")
parser.add_option("-F","--Flood",action="store",
type="string", dest="flood",
default=None,
# default="/data/id11/inhouse/Frelon2K/Ags_mask0000.edf",
help="Flood field")
parser.add_option("-D","--distance",action="store",type="float",
dest="distance",
default=5.0,help="Sample to detector distance")
parser.add_option("-b","--border",action="store",type="int",
dest="border",
default=None,
help="Border of image to zero out")
parser.add_option("-p","--powfac",action="store",type="float",
dest="powfac",
default=1.0,
help="power to raise image to (1.0 or 0.96)")
parser.add_option("-t","--template",action="store", type="string",
dest="template",
default = "/data/id11/3dxrd/inhouse/Frelon2K/brukertemplate.0000")
parser.add_option("-o","--overflow",action="store",type="float",
dest="overflow",
default=65534,
help="Overflow value, greater than this set to 65534")
parser.add_option("-j","--jthreads",action="store",type="int",
dest="jthreads",
default=1,
help="Number of threads to use for processing")
parser.add_option("-R","--detRend",action="store",type="int",
dest="detrend",
default=None,
help="Number of pixels in background filter")
parser.add_option("-O","--darkoffset",action="store",type="float",
dest="darkoffset",
default=100.0,
help="Offset platform to add to image")
parser.add_option("-r","--run",action="store",type="int",
dest="run",
default=0,
help="Run number for bruker")
parser.add_option("--monitorcol", action="store", type="string",
dest="monitorcol",
default = None,
help="Header value for incident beam intensity")
parser.add_option("--monitorval", action="store", type="float",
dest="monitorval",
default = None,
help="Incident beam intensity value to normalise to")
parser.add_option("--maskfilename",
action="store", type="string",
dest="maskfilename",
default = None,
help="Apply a fit2d style mask to image")
parser.add_option("--splinefile",
action="store", type="string",
dest="splinefile",
default = None,
help="Apply a spatial distortion" )
parser.add_option("--omega_zero",
action="store", type="float",
dest="omega_zero",
default = 0.0,
help="Omega zero from smart")
parser.add_option("--chi_zero",
action="store", type="float",
dest="chi_zero",
default = 0.0,
help="chi zero from smart")
parser.add_option("--ndigits",
action="store", type="int",
dest="ndigits",
default = 4,
help="Number of digits in output name")
options, args = parser.parse_args()
if options.stem is None:
print("Fill -n option or use --help")
sys.exit()
from ImageD11.ImageD11_thread import ImageD11_thread
import ImageD11.ImageD11_thread
class threaded_converter(ImageD11_thread):
def __init__(self, files, c, name="converter"):
self.files = files
self.c = c
ImageD11_thread.__init__(self,myname=name)
def ImageD11_run(self):
for fin, fout in self.files:
self.c.convert(fin, fout)
if self.ImageD11_stop_now():
break
# Make jthreads converters
converters = [ edf2bruker(options.dark , options.flood ,
options.template,
darkoffset=options.darkoffset,
splinefile=options.splinefile,
distance=options.distance,
border=options.border,
wvln=options.wvln,
omegasign=options.omegasign,
powfac = options.powfac,
overflow = options.overflow,
detrend = options.detrend,
monitorval = options.monitorval,
monitorcol = options.monitorcol ,
maskfilename = options.maskfilename,
omega_zero = options.omega_zero,
chi_zero = options.chi_zero
)
for j in range(options.jthreads)]
tmp_in = file_series.numbered_file_series(
options.stem,
options.first,
options.last,
options.extn)
import os
files_in = []
for f in tmp_in:
if os.path.exists(f):
files_in.append(f)
else:
print("Missing image",f)
ostem = options.stem
if options.newstem is not None:
ostem = options.newstem
files_out = file_series.numbered_file_series(
ostem+"_%d."%(options.run),
options.first+1,
options.last+1,
"", # extn
options.ndigits )
allfiles = list(zip(files_in, files_out))
# Divide files over threads
fl = [ allfiles[j::options.jthreads] for j in range(options.jthreads) ]
# Create threads
my_threads = [ threaded_converter( f, c) for
c, f in zip(converters, fl) ]
# Off we go!
for t in my_threads:
t.start()
nalive = 1
while nalive > 0:
try:
nalive = 0
for t in my_threads:
if t.isAlive():
nalive += 1
time.sleep(1)
except KeyboardInterrupt:
ImageD11.ImageD11_thread.stop_now = True
print("Got your control-c - terminating threads")
for t in my_threads:
if t.isAlive():
t.join(timeout=10)
except:
parser.print_help()
raise
|
jonwright/ImageD11
|
scripts/huber2bruker.py
|
Python
|
gpl-2.0
| 20,959 | 0.015172 |
from dolfin import error, info
class Heart:
def __init__(self, cell_model):
self._cell_model = cell_model
# Mandatory stuff
def mesh(self):
error("Need to prescribe domain")
def conductivities(self):
error("Need to prescribe conducitivites")
# Optional stuff
def applied_current(self):
return None
def end_time(self):
info("Using default end time (T = 1.0)")
return 1.0
def essential_boundaries(self):
return None
def essential_boundary_values(self):
return None
def initial_conditions(self):
return None
def neumann_boundaries(self):
return None
def boundary_current(self):
return None
# Peculiar stuff (for now)
def is_dynamic(self):
return True
# Helper functions
def cell_model(self):
return self._cell_model
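# Minimal usage sketch (hypothetical, not part of this module): a concrete
# problem subclasses Heart and overrides at least mesh() and conductivities().
# The return forms shown are assumptions for illustration only.
#
#   from dolfin import UnitSquareMesh, Constant
#
#   class ExampleHeart(Heart):
#       def mesh(self):
#           return UnitSquareMesh(32, 32)
#       def conductivities(self):
#           # intracellular and extracellular conductivities (assumed form)
#           return Constant(1.0), Constant(1.0)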
|
Juanlu001/CBC.Solve
|
cbc/beat/heart.py
|
Python
|
gpl-3.0
| 897 | 0.001115 |
from __future__ import absolute_import
import six
from collections import defaultdict
from django.conf import settings
from sentry.app import env
from sentry.api.serializers import Serializer, register
from sentry.models import AuthIdentity, Authenticator, User, UserAvatar, UserOption
from sentry.utils.avatar import get_gravatar_url
@register(User)
class UserSerializer(Serializer):
def _get_identities(self, item_list, user):
if not (env.request and env.request.is_superuser()):
item_list = [x for x in item_list if x == user]
queryset = AuthIdentity.objects.filter(
user__in=item_list,
).select_related('auth_provider', 'auth_provider__organization')
results = {i.id: [] for i in item_list}
for item in queryset:
results[item.user_id].append(item)
return results
def get_attrs(self, item_list, user):
avatars = {
a.user_id: a
for a in UserAvatar.objects.filter(
user__in=item_list
)
}
identities = self._get_identities(item_list, user)
authenticators = Authenticator.objects.bulk_users_have_2fa([i.id for i in item_list])
data = {}
for item in item_list:
data[item] = {
'avatar': avatars.get(item.id),
'identities': identities.get(item.id),
'has2fa': authenticators[item.id],
}
return data
def serialize(self, obj, attrs, user):
d = {
'id': six.text_type(obj.id),
'name': obj.get_display_name(),
'username': obj.username,
'email': obj.email,
'avatarUrl': get_gravatar_url(obj.email, size=32),
'isActive': obj.is_active,
'isManaged': obj.is_managed,
'dateJoined': obj.date_joined,
'lastLogin': obj.last_login,
'has2fa': attrs['has2fa'],
}
if obj == user:
options = {
o.key: o.value
for o in UserOption.objects.filter(
user=user,
project__isnull=True,
)
}
stacktrace_order = int(options.get('stacktrace_order', -1) or -1)
if stacktrace_order == -1:
stacktrace_order = 'default'
elif stacktrace_order == 2:
stacktrace_order = 'newestFirst'
elif stacktrace_order == 1:
stacktrace_order = 'newestLast'
d['options'] = {
'language': options.get('language') or 'en',
'stacktraceOrder': stacktrace_order,
'timezone': options.get('timezone') or settings.SENTRY_DEFAULT_TIME_ZONE,
'clock24Hours': options.get('clock_24_hours') or False,
}
if attrs.get('avatar'):
avatar = {
'avatarType': attrs['avatar'].get_avatar_type_display(),
'avatarUuid': attrs['avatar'].ident if attrs['avatar'].file else None
}
else:
avatar = {'avatarType': 'letter_avatar', 'avatarUuid': None}
d['avatar'] = avatar
if attrs['identities'] is not None:
d['identities'] = [{
'id': six.text_type(i.id),
'name': i.ident,
'organization': {
'slug': i.auth_provider.organization.slug,
'name': i.auth_provider.organization.name,
},
'provider': {
'id': i.auth_provider.provider,
'name': i.auth_provider.get_provider().name,
},
'dateSynced': i.last_synced,
'dateVerified': i.last_verified,
} for i in attrs['identities']]
return d
class DetailedUserSerializer(UserSerializer):
def get_attrs(self, item_list, user):
attrs = super(DetailedUserSerializer, self).get_attrs(item_list, user)
authenticators = defaultdict(list)
queryset = Authenticator.objects.filter(
user__in=item_list,
)
for auth in queryset:
# ignore things that aren't user controlled (like recovery codes)
if auth.interface.is_backup_interface:
continue
authenticators[auth.user_id].append(auth)
for item in item_list:
attrs[item]['authenticators'] = authenticators[item.id]
return attrs
def serialize(self, obj, attrs, user):
d = super(DetailedUserSerializer, self).serialize(obj, attrs, user)
d['authenticators'] = [{
'id': six.text_type(a.id),
'type': a.interface.interface_id,
'name': a.interface.name,
'dateCreated': a.created_at,
'dateUsed': a.last_used_at,
} for a in attrs['authenticators']]
return d
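# Usage sketch (assumption, for illustration): registered serializers are
# normally invoked through the serialize() helper rather than instantiated by
# hand, e.g.
#
#   from sentry.api.serializers import serialize
#   data = serialize(request.user, request.user, DetailedUserSerializer())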
|
alexm92/sentry
|
src/sentry/api/serializers/models/user.py
|
Python
|
bsd-3-clause
| 4,943 | 0.000809 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import time
import netaddr
from neutron_lib import constants
from oslo_log import log as logging
import six
from neutron.agent.common import ovs_lib
from neutron.agent.linux import ip_lib
from neutron.common import constants as n_const
from neutron.common import exceptions
LOG = logging.getLogger(__name__)
def _get_veth(name1, name2, namespace2):
return (ip_lib.IPDevice(name1),
ip_lib.IPDevice(name2, namespace=namespace2))
@six.add_metaclass(abc.ABCMeta)
class LinuxInterfaceDriver(object):
DEV_NAME_LEN = n_const.LINUX_DEV_LEN
DEV_NAME_PREFIX = constants.TAP_DEVICE_PREFIX
def __init__(self, conf):
self.conf = conf
self._mtu_update_warn_logged = False
@property
def use_gateway_ips(self):
"""Whether to use gateway IPs instead of unique IP allocations.
In each place where the DHCP agent runs, and for each subnet for
which DHCP is handling out IP addresses, the DHCP port needs -
at the Linux level - to have an IP address within that subnet.
Generally this needs to be a unique Neutron-allocated IP
address, because the subnet's underlying L2 domain is bridged
across multiple compute hosts and network nodes, and for HA
there may be multiple DHCP agents running on that same bridged
L2 domain.
However, if the DHCP ports - on multiple compute/network nodes
but for the same network - are _not_ bridged to each other,
they do not need each to have a unique IP address. Instead
they can all share the same address from the relevant subnet.
This works, without creating any ambiguity, because those
ports are not all present on the same L2 domain, and because
no data within the network is ever sent to that address.
(DHCP requests are broadcast, and it is the network's job to
ensure that such a broadcast will reach at least one of the
available DHCP servers. DHCP responses will be sent _from_
the DHCP port address.)
Specifically, for networking backends where it makes sense,
the DHCP agent allows all DHCP ports to use the subnet's
gateway IP address, and thereby to completely avoid any unique
IP address allocation. This behaviour is selected by running
the DHCP agent with a configured interface driver whose
'use_gateway_ips' property is True.
When an operator deploys Neutron with an interface driver that
makes use_gateway_ips True, they should also ensure that a
gateway IP address is defined for each DHCP-enabled subnet,
and that the gateway IP address doesn't change during the
subnet's lifetime.
"""
return False
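# Illustrative sketch (not part of upstream code): a backend whose DHCP
# ports are not bridged to each other could opt in by overriding this
# property in its interface driver, e.g.
#
#   class UnbridgedInterfaceDriver(NullDriver):
#       @property
#       def use_gateway_ips(self):
#           return True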
def init_l3(self, device_name, ip_cidrs, namespace=None,
preserve_ips=None, clean_connections=False):
"""Set the L3 settings for the interface using data from the port.
ip_cidrs: list of 'X.X.X.X/YY' strings
preserve_ips: list of ip cidrs that should not be removed from device
clean_connections: Boolean to indicate if we should clean up connections
associated with removed ips
"""
preserve_ips = preserve_ips or []
device = ip_lib.IPDevice(device_name, namespace=namespace)
# The LLA generated by the operating system is not known to
# Neutron, so it would be deleted if we added it to the 'previous'
# list here
default_ipv6_lla = ip_lib.get_ipv6_lladdr(device.link.address)
cidrs = set()
remove_ips = set()
# normalize all the IP addresses first
for ip_cidr in ip_cidrs:
net = netaddr.IPNetwork(ip_cidr)
# Convert to compact IPv6 address because the return values of
# "ip addr list" are compact.
if net.version == 6:
ip_cidr = str(net)
cidrs.add(ip_cidr)
# Determine the addresses that must be added and removed
for address in device.addr.list():
cidr = address['cidr']
dynamic = address['dynamic']
# skip the IPv6 link-local
if cidr == default_ipv6_lla:
# it's already configured, leave it alone
cidrs.discard(cidr)
continue
if cidr in preserve_ips:
continue
# Statically created addresses are OK, dynamically created
# addresses must be removed and replaced
if cidr in cidrs and not dynamic:
cidrs.remove(cidr)
continue
remove_ips.add(cidr)
# Clean up any old addresses. This must be done first since there
# could be a dynamic address being replaced with a static one.
for ip_cidr in remove_ips:
if clean_connections:
device.delete_addr_and_conntrack_state(ip_cidr)
else:
device.addr.delete(ip_cidr)
# add any new addresses
for ip_cidr in cidrs:
device.addr.add(ip_cidr)
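# Example call shape for the arguments documented above (device name,
# namespace and addresses are hypothetical):
#
#   driver.init_l3("tap0123abcd-45",
#                  ["192.0.2.10/24", "2001:db8::10/64"],
#                  namespace="qrouter-<uuid>",
#                  preserve_ips=["192.0.2.1/24"],
#                  clean_connections=True)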
def init_router_port(self,
device_name,
ip_cidrs,
namespace,
preserve_ips=None,
extra_subnets=None,
clean_connections=False):
"""Set the L3 settings for a router interface using data from the port.
ip_cidrs: list of 'X.X.X.X/YY' strings
preserve_ips: list of ip cidrs that should not be removed from device
clean_connections: Boolean to indicate if we should clean up connections
associated with removed ips
extra_subnets: An iterable of cidrs to add as routes without address
"""
LOG.debug("init_router_port: device_name(%s), namespace(%s)",
device_name, namespace)
self.init_l3(device_name=device_name,
ip_cidrs=ip_cidrs,
namespace=namespace,
preserve_ips=preserve_ips or [],
clean_connections=clean_connections)
device = ip_lib.IPDevice(device_name, namespace=namespace)
# Manage on-link routes (routes without an associated address)
new_onlink_cidrs = set(s['cidr'] for s in extra_subnets or [])
v4_onlink = device.route.list_onlink_routes(constants.IP_VERSION_4)
v6_onlink = device.route.list_onlink_routes(constants.IP_VERSION_6)
existing_onlink_cidrs = set(r['cidr'] for r in v4_onlink + v6_onlink)
for route in new_onlink_cidrs - existing_onlink_cidrs:
LOG.debug("adding onlink route(%s)", route)
device.route.add_onlink_route(route)
for route in (existing_onlink_cidrs - new_onlink_cidrs -
set(preserve_ips or [])):
LOG.debug("deleting onlink route(%s)", route)
device.route.delete_onlink_route(route)
def add_ipv6_addr(self, device_name, v6addr, namespace, scope='global'):
device = ip_lib.IPDevice(device_name,
namespace=namespace)
net = netaddr.IPNetwork(v6addr)
device.addr.add(str(net), scope)
def delete_ipv6_addr(self, device_name, v6addr, namespace):
device = ip_lib.IPDevice(device_name,
namespace=namespace)
device.delete_addr_and_conntrack_state(v6addr)
def delete_ipv6_addr_with_prefix(self, device_name, prefix, namespace):
"""Delete the first listed IPv6 address that falls within a given
prefix.
"""
device = ip_lib.IPDevice(device_name, namespace=namespace)
net = netaddr.IPNetwork(prefix)
for address in device.addr.list(scope='global', filters=['permanent']):
ip_address = netaddr.IPNetwork(address['cidr'])
if ip_address in net:
device.delete_addr_and_conntrack_state(address['cidr'])
break
def get_ipv6_llas(self, device_name, namespace):
device = ip_lib.IPDevice(device_name,
namespace=namespace)
return device.addr.list(scope='link', ip_version=6)
def check_bridge_exists(self, bridge):
if not ip_lib.device_exists(bridge):
raise exceptions.BridgeDoesNotExist(bridge=bridge)
def get_device_name(self, port):
return (self.DEV_NAME_PREFIX + port.id)[:self.DEV_NAME_LEN]
def remove_vlan_tag(self, bridge, interface_name):
"""Remove vlan tag from given interface.
This method is necessary only for the case when deprecated
option 'external_network_bridge' is used in L3 agent as
external gateway port is then created in this external bridge
directly and it will have DEAD_VLAN_TAG added by default.
"""
# TODO(slaweq): remove this when the external_network_bridge option is
# removed
@staticmethod
def configure_ipv6_ra(namespace, dev_name, value):
"""Configure handling of IPv6 Router Advertisements on an
interface. See common/constants.py for possible values.
"""
cmd = ['net.ipv6.conf.%(dev)s.accept_ra=%(value)s' % {'dev': dev_name,
'value': value}]
ip_lib.sysctl(cmd, namespace=namespace)
@staticmethod
def configure_ipv6_forwarding(namespace, dev_name, enabled):
"""Configure IPv6 forwarding on an interface."""
cmd = ['net.ipv6.conf.%(dev)s.forwarding=%(enabled)s' %
{'dev': dev_name, 'enabled': int(enabled)}]
ip_lib.sysctl(cmd, namespace=namespace)
@abc.abstractmethod
def plug_new(self, network_id, port_id, device_name, mac_address,
bridge=None, namespace=None, prefix=None, mtu=None):
"""Plug in the interface only for new devices that don't exist yet."""
def plug(self, network_id, port_id, device_name, mac_address,
bridge=None, namespace=None, prefix=None, mtu=None):
if not ip_lib.device_exists(device_name,
namespace=namespace):
self.plug_new(network_id, port_id, device_name, mac_address,
bridge, namespace, prefix, mtu)
else:
LOG.info("Device %s already exists", device_name)
if mtu:
self.set_mtu(
device_name, mtu, namespace=namespace, prefix=prefix)
else:
LOG.warning("No MTU configured for port %s", port_id)
@abc.abstractmethod
def unplug(self, device_name, bridge=None, namespace=None, prefix=None):
"""Unplug the interface."""
@property
def bridged(self):
"""Whether the DHCP port is bridged to the VM TAP interfaces.
When the DHCP port is bridged to the TAP interfaces for the
VMs for which it is providing DHCP service - as is the case
for most Neutron network implementations - the DHCP server
only needs to listen on the DHCP port, and will still receive
DHCP requests from all the relevant VMs.
If the DHCP port is not bridged to the relevant VM TAP
interfaces, the DHCP server needs to listen explicitly on
those TAP interfaces, and to treat those as aliases of the
DHCP port where the IP subnet is defined.
"""
return True
def set_mtu(self, device_name, mtu, namespace=None, prefix=None):
"""Set MTU on the interface."""
if not self._mtu_update_warn_logged:
LOG.warning("Interface driver cannot update MTU for ports")
self._mtu_update_warn_logged = True
class NullDriver(LinuxInterfaceDriver):
def plug_new(self, network_id, port_id, device_name, mac_address,
bridge=None, namespace=None, prefix=None, mtu=None):
pass
def unplug(self, device_name, bridge=None, namespace=None, prefix=None):
pass
class OVSInterfaceDriver(LinuxInterfaceDriver):
"""Driver for creating an internal interface on an OVS bridge."""
DEV_NAME_PREFIX = constants.TAP_DEVICE_PREFIX
def __init__(self, conf):
super(OVSInterfaceDriver, self).__init__(conf)
if self.conf.ovs_use_veth:
self.DEV_NAME_PREFIX = 'ns-'
def _get_tap_name(self, dev_name, prefix=None):
if self.conf.ovs_use_veth:
dev_name = dev_name.replace(prefix or self.DEV_NAME_PREFIX,
constants.TAP_DEVICE_PREFIX)
return dev_name
def _ovs_add_port(self, bridge, device_name, port_id, mac_address,
internal=True):
attrs = [('external_ids', {'iface-id': port_id,
'iface-status': 'active',
'attached-mac': mac_address})]
if internal:
attrs.insert(0, ('type', 'internal'))
ovs = ovs_lib.OVSBridge(bridge)
ovs.replace_port(device_name, *attrs)
def remove_vlan_tag(self, bridge, interface):
ovs = ovs_lib.OVSBridge(bridge)
ovs.clear_db_attribute("Port", interface, "tag")
def plug_new(self, network_id, port_id, device_name, mac_address,
bridge=None, namespace=None, prefix=None, mtu=None):
"""Plug in the interface."""
if not bridge:
bridge = self.conf.ovs_integration_bridge
self.check_bridge_exists(bridge)
ip = ip_lib.IPWrapper()
tap_name = self._get_tap_name(device_name, prefix)
if self.conf.ovs_use_veth:
# Create ns_dev in a namespace if one is configured.
root_dev, ns_dev = ip.add_veth(tap_name,
device_name,
namespace2=namespace)
root_dev.disable_ipv6()
else:
ns_dev = ip.device(device_name)
internal = not self.conf.ovs_use_veth
self._ovs_add_port(bridge, tap_name, port_id, mac_address,
internal=internal)
for i in range(9):
# workaround for the OVS shy port syndrome. ports sometimes
# hide for a bit right after they are first created.
# see bug/1618987
try:
ns_dev.link.set_address(mac_address)
break
except RuntimeError as e:
LOG.warning("Got error trying to set mac, retrying: %s",
str(e))
time.sleep(1)
else:
# didn't break, we give it one last shot without catching
ns_dev.link.set_address(mac_address)
# Add an interface created by ovs to the namespace.
if not self.conf.ovs_use_veth and namespace:
namespace_obj = ip.ensure_namespace(namespace)
namespace_obj.add_device_to_namespace(ns_dev)
# NOTE(ihrachys): the order here is significant: we must set MTU after
# the device is moved into a namespace, otherwise OVS bridge does not
# allow to set MTU that is higher than the least of all device MTUs on
# the bridge
if mtu:
self.set_mtu(device_name, mtu, namespace=namespace, prefix=prefix)
else:
LOG.warning("No MTU configured for port %s", port_id)
ns_dev.link.set_up()
if self.conf.ovs_use_veth:
root_dev.link.set_up()
def unplug(self, device_name, bridge=None, namespace=None, prefix=None):
"""Unplug the interface."""
if not bridge:
bridge = self.conf.ovs_integration_bridge
tap_name = self._get_tap_name(device_name, prefix)
self.check_bridge_exists(bridge)
ovs = ovs_lib.OVSBridge(bridge)
try:
ovs.delete_port(tap_name)
if self.conf.ovs_use_veth:
device = ip_lib.IPDevice(device_name, namespace=namespace)
device.link.delete()
LOG.debug("Unplugged interface '%s'", device_name)
except RuntimeError:
LOG.error("Failed unplugging interface '%s'",
device_name)
def set_mtu(self, device_name, mtu, namespace=None, prefix=None):
if self.conf.ovs_use_veth:
tap_name = self._get_tap_name(device_name, prefix)
root_dev, ns_dev = _get_veth(
tap_name, device_name, namespace2=namespace)
root_dev.link.set_mtu(mtu)
else:
ns_dev = ip_lib.IPWrapper(namespace=namespace).device(device_name)
ns_dev.link.set_mtu(mtu)
class BridgeInterfaceDriver(LinuxInterfaceDriver):
"""Driver for creating bridge interfaces."""
DEV_NAME_PREFIX = 'ns-'
def plug_new(self, network_id, port_id, device_name, mac_address,
bridge=None, namespace=None, prefix=None, mtu=None):
"""Plugin the interface."""
ip = ip_lib.IPWrapper()
# Enable agent to define the prefix
tap_name = device_name.replace(prefix or self.DEV_NAME_PREFIX,
constants.TAP_DEVICE_PREFIX)
# Create ns_veth in a namespace if one is configured.
root_veth, ns_veth = ip.add_veth(tap_name, device_name,
namespace2=namespace)
root_veth.disable_ipv6()
ns_veth.link.set_address(mac_address)
if mtu:
self.set_mtu(device_name, mtu, namespace=namespace, prefix=prefix)
else:
LOG.warning("No MTU configured for port %s", port_id)
root_veth.link.set_up()
ns_veth.link.set_up()
def unplug(self, device_name, bridge=None, namespace=None, prefix=None):
"""Unplug the interface."""
device = ip_lib.IPDevice(device_name, namespace=namespace)
try:
device.link.delete()
LOG.debug("Unplugged interface '%s'", device_name)
except RuntimeError:
LOG.error("Failed unplugging interface '%s'",
device_name)
def set_mtu(self, device_name, mtu, namespace=None, prefix=None):
tap_name = device_name.replace(prefix or self.DEV_NAME_PREFIX,
constants.TAP_DEVICE_PREFIX)
root_dev, ns_dev = _get_veth(
tap_name, device_name, namespace2=namespace)
root_dev.link.set_mtu(mtu)
ns_dev.link.set_mtu(mtu)
|
huntxu/neutron
|
neutron/agent/linux/interface.py
|
Python
|
apache-2.0
| 19,111 | 0 |
#!/bin/env python3
# -*- coding: utf-8 -*-
#
# Hid replay / hid.py: table of hid usages and definitions
#
# Copyright (c) 2012-2017 Benjamin Tissoires <benjamin.tissoires@gmail.com>
# Copyright (c) 2012-2017 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import parse_hut
hid_items = {
"Main": {
"Input" : 0b10000000,
"Output" : 0b10010000,
"Feature" : 0b10110000,
"Collection" : 0b10100000,
"End Collection" : 0b11000000,
},
"Global": {
"Usage Page" : 0b00000100,
"Logical Minimum" : 0b00010100,
"Logical Maximum" : 0b00100100,
"Physical Minimum" : 0b00110100,
"Physical Maximum" : 0b01000100,
"Unit Exponent" : 0b01010100,
"Unit" : 0b01100100,
"Report Size" : 0b01110100,
"Report ID" : 0b10000100,
"Report Count" : 0b10010100,
"Push" : 0b10100100,
"Pop" : 0b10110100,
},
"Local": {
"Usage" : 0b00001000,
"Usage Minimum" : 0b00011000,
"Usage Maximum" : 0b00101000,
"Designator Index" : 0b00111000,
"Designator Minimum" : 0b01001000,
"Designator Maximum" : 0b01011000,
"String Index" : 0b01111000,
"String Minimum" : 0b10001000,
"String Maximum" : 0b10011000,
"Delimiter" : 0b10101000,
},
}
collections = {
'PHYSICAL' : 0,
'APPLICATION' : 1,
'LOGICAL' : 2,
}
sensor_mods = {
0x00: 'Mod None',
0x10: 'Mod Change Sensitivity Abs',
0x20: 'Mod Max',
0x30: 'Mod Min',
0x40: 'Mod Accuracy',
0x50: 'Mod Resolution',
0x60: 'Mod Threshold High',
0x70: 'Mod Threshold Low',
0x80: 'Mod Calibration Offset',
0x90: 'Mod Calibration Multiplier',
0xa0: 'Mod Report Interval',
0xb0: 'Mod Frequency Max',
0xc0: 'Mod Period Max',
0xd0: 'Mod Change Sensitivity Range Percent',
0xe0: 'Mod Change Sensitivity Rel Percent',
0xf0: 'Mod Vendor Reserved',
}
inv_hid = {}
hid_type = {}
for type, items in list(hid_items.items()):
for k, v in list(items.items()):
inv_hid[v] = k
hid_type[k] = type
usages = parse_hut.parse()
usage_pages = {}
inv_usage_pages = {}
inv_usages = {}
for usage, (name, filename, usage_list) in usages.items():
inv_usage_pages[usage] = name
usage_pages[name] = usage
for k, v in list(usage_list.items()):
inv_usages[(usage << 16) | k] = v
inv_collections = dict([(v, k) for k, v in list(collections.items())])
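# --- Illustrative sketch, not part of the original file ---
# The tables above make it easy to decode the one-byte prefix of a short HID
# report-descriptor item: the low two bits give the payload size, the
# remaining bits select the entry in inv_hid / hid_type. Example with the
# prefix 0x05 ("Usage Page" followed by one data byte):
if __name__ == '__main__':
    prefix = 0x05
    tag = prefix & 0xfc   # item tag + type bits, as stored in hid_items
    size = prefix & 0x03  # number of data bytes that follow the prefix
    print(inv_hid[tag], '(%s item)' % hid_type[inv_hid[tag]],
          'with', size, 'data byte(s)')
    # -> Usage Page (Global item) with 1 data byte(s)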
|
bentiss/hid-replay
|
tools/hid.py
|
Python
|
gpl-2.0
| 3,108 | 0.009653 |
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import functools
import os.path
import mock
import netaddr
from oslo_config import cfg
from oslo_log import log as logging
import testtools
import webob
import webob.dec
import webob.exc
from neutron.agent.common import config as agent_config
from neutron.agent.common import ovs_lib
from neutron.agent.l3 import agent as neutron_l3_agent
from neutron.agent.l3 import dvr_snat_ns
from neutron.agent.l3 import namespace_manager
from neutron.agent.l3 import namespaces
from neutron.agent import l3_agent as l3_agent_main
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.agent.linux import ip_lib
from neutron.agent.linux import utils
from neutron.callbacks import events
from neutron.callbacks import manager
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import config as common_config
from neutron.common import constants as l3_constants
from neutron.common import utils as common_utils
from neutron.openstack.common import uuidutils
from neutron.tests.common import net_helpers
from neutron.tests.functional.agent.linux import base
from neutron.tests.functional.agent.linux import helpers
from neutron.tests.unit.agent.l3 import test_agent as test_l3_agent
LOG = logging.getLogger(__name__)
_uuid = uuidutils.generate_uuid
METADATA_REQUEST_TIMEOUT = 60
def get_ovs_bridge(br_name):
return ovs_lib.OVSBridge(br_name)
class L3AgentTestFramework(base.BaseLinuxTestCase):
def setUp(self):
super(L3AgentTestFramework, self).setUp()
mock.patch('neutron.agent.l3.agent.L3PluginApi').start()
# TODO(pcm): Move this to BaseTestCase, if we find that more tests
# use this mechanism.
self._callback_manager = manager.CallbacksManager()
mock.patch.object(registry, '_get_callback_manager',
return_value=self._callback_manager).start()
self.agent = self._configure_agent('agent1')
def _get_config_opts(self):
config = cfg.ConfigOpts()
config.register_opts(common_config.core_opts)
config.register_opts(common_config.core_cli_opts)
logging.register_options(config)
agent_config.register_process_monitor_opts(config)
return config
def _configure_agent(self, host):
conf = self._get_config_opts()
l3_agent_main.register_opts(conf)
cfg.CONF.set_override('debug', False)
agent_config.setup_logging()
conf.set_override(
'interface_driver',
'neutron.agent.linux.interface.OVSInterfaceDriver')
conf.set_override('router_delete_namespaces', True)
br_int = self.useFixture(net_helpers.OVSBridgeFixture()).bridge
br_ex = self.useFixture(net_helpers.OVSBridgeFixture()).bridge
conf.set_override('ovs_integration_bridge', br_int.br_name)
conf.set_override('external_network_bridge', br_ex.br_name)
temp_dir = self.get_new_temp_dir()
get_temp_file_path = functools.partial(self.get_temp_file_path,
root=temp_dir)
conf.set_override('state_path', temp_dir.path)
conf.set_override('metadata_proxy_socket',
get_temp_file_path('metadata_proxy'))
conf.set_override('ha_confs_path',
get_temp_file_path('ha_confs'))
conf.set_override('external_pids',
get_temp_file_path('external/pids'))
conf.set_override('host', host)
agent = neutron_l3_agent.L3NATAgentWithStateReport(host, conf)
mock.patch.object(ip_lib, '_arping').start()
return agent
def generate_router_info(self, enable_ha, ip_version=4, extra_routes=True,
enable_fip=True, enable_snat=True,
dual_stack=False, v6_ext_gw_with_sub=True):
if ip_version == 6 and not dual_stack:
enable_snat = False
enable_fip = False
extra_routes = False
if not v6_ext_gw_with_sub:
self.agent.conf.set_override('ipv6_gateway',
'fe80::f816:3eff:fe2e:1')
return test_l3_agent.prepare_router_data(ip_version=ip_version,
enable_snat=enable_snat,
enable_floating_ip=enable_fip,
enable_ha=enable_ha,
extra_routes=extra_routes,
dual_stack=dual_stack,
v6_ext_gw_with_sub=(
v6_ext_gw_with_sub))
def manage_router(self, agent, router):
self.addCleanup(self._delete_router, agent, router['id'])
ri = self._create_router(agent, router)
return ri
def _create_router(self, agent, router):
agent._process_added_router(router)
return agent.router_info[router['id']]
def _delete_router(self, agent, router_id):
agent._router_removed(router_id)
def _add_fip(self, router, fip_address, fixed_address='10.0.0.2',
host=None):
fip = {'id': _uuid(),
'port_id': _uuid(),
'floating_ip_address': fip_address,
'fixed_ip_address': fixed_address,
'host': host}
router.router[l3_constants.FLOATINGIP_KEY].append(fip)
def _add_internal_interface_by_subnet(self, router, count=1,
ip_version=4,
ipv6_subnet_modes=None,
interface_id=None):
return test_l3_agent.router_append_subnet(router, count,
ip_version, ipv6_subnet_modes, interface_id)
def _namespace_exists(self, namespace):
ip = ip_lib.IPWrapper(namespace=namespace)
return ip.netns.exists(namespace)
def _metadata_proxy_exists(self, conf, router):
pm = external_process.ProcessManager(
conf,
router.router_id,
router.ns_name)
return pm.active
def device_exists_with_ips_and_mac(self, expected_device, name_getter,
namespace):
ip_cidrs = common_utils.fixed_ip_cidrs(expected_device['fixed_ips'])
return ip_lib.device_exists_with_ips_and_mac(
name_getter(expected_device['id']), ip_cidrs,
expected_device['mac_address'], namespace)
@staticmethod
def _port_first_ip_cidr(port):
fixed_ip = port['fixed_ips'][0]
return common_utils.ip_to_cidr(fixed_ip['ip_address'],
fixed_ip['prefixlen'])
def get_device_mtu(self, target_device, name_getter, namespace):
device = ip_lib.IPDevice(name_getter(target_device), namespace)
return device.link.mtu
def get_expected_keepalive_configuration(self, router):
router_id = router.router_id
ha_device_name = router.get_ha_device_name()
ha_device_cidr = self._port_first_ip_cidr(router.ha_port)
external_port = router.get_ex_gw_port()
ex_port_ipv6 = ip_lib.get_ipv6_lladdr(external_port['mac_address'])
external_device_name = router.get_external_device_name(
external_port['id'])
external_device_cidr = self._port_first_ip_cidr(external_port)
internal_port = router.router[l3_constants.INTERFACE_KEY][0]
int_port_ipv6 = ip_lib.get_ipv6_lladdr(internal_port['mac_address'])
internal_device_name = router.get_internal_device_name(
internal_port['id'])
internal_device_cidr = self._port_first_ip_cidr(internal_port)
floating_ip_cidr = common_utils.ip_to_cidr(
router.get_floating_ips()[0]['floating_ip_address'])
default_gateway_ip = external_port['subnets'][0].get('gateway_ip')
return """vrrp_instance VR_1 {
state BACKUP
interface %(ha_device_name)s
virtual_router_id 1
priority 50
nopreempt
advert_int 2
track_interface {
%(ha_device_name)s
}
virtual_ipaddress {
169.254.0.1/24 dev %(ha_device_name)s
}
virtual_ipaddress_excluded {
%(floating_ip_cidr)s dev %(external_device_name)s
%(external_device_cidr)s dev %(external_device_name)s
%(internal_device_cidr)s dev %(internal_device_name)s
%(ex_port_ipv6)s dev %(external_device_name)s scope link
%(int_port_ipv6)s dev %(internal_device_name)s scope link
}
virtual_routes {
0.0.0.0/0 via %(default_gateway_ip)s dev %(external_device_name)s
8.8.8.0/24 via 19.4.4.4
}
}""" % {
'router_id': router_id,
'ha_device_name': ha_device_name,
'ha_device_cidr': ha_device_cidr,
'external_device_name': external_device_name,
'external_device_cidr': external_device_cidr,
'internal_device_name': internal_device_name,
'internal_device_cidr': internal_device_cidr,
'floating_ip_cidr': floating_ip_cidr,
'default_gateway_ip': default_gateway_ip,
'int_port_ipv6': int_port_ipv6,
'ex_port_ipv6': ex_port_ipv6
}
def _get_rule(self, iptables_manager, table, chain, predicate):
rules = iptables_manager.get_chain(table, chain)
result = next(rule for rule in rules if predicate(rule))
return result
def _assert_router_does_not_exist(self, router):
# If the namespace assertion succeeds
        # then the devices and iptables rules have also been deleted,
# so there's no need to check that explicitly.
self.assertFalse(self._namespace_exists(router.ns_name))
utils.wait_until_true(
lambda: not self._metadata_proxy_exists(self.agent.conf, router))
def _assert_snat_chains(self, router):
self.assertFalse(router.iptables_manager.is_chain_empty(
'nat', 'snat'))
self.assertFalse(router.iptables_manager.is_chain_empty(
'nat', 'POSTROUTING'))
def _assert_floating_ip_chains(self, router):
self.assertFalse(router.iptables_manager.is_chain_empty(
'nat', 'float-snat'))
def _assert_metadata_chains(self, router):
metadata_port_filter = lambda rule: (
str(self.agent.conf.metadata_port) in rule.rule)
self.assertTrue(self._get_rule(router.iptables_manager,
'nat',
'PREROUTING',
metadata_port_filter))
self.assertTrue(self._get_rule(router.iptables_manager,
'filter',
'INPUT',
metadata_port_filter))
def _assert_internal_devices(self, router):
internal_devices = router.router[l3_constants.INTERFACE_KEY]
self.assertTrue(len(internal_devices))
for device in internal_devices:
self.assertTrue(self.device_exists_with_ips_and_mac(
device, router.get_internal_device_name, router.ns_name))
def _assert_extra_routes(self, router):
routes = ip_lib.get_routing_table(namespace=router.ns_name)
routes = [{'nexthop': route['nexthop'],
'destination': route['destination']} for route in routes]
for extra_route in router.router['routes']:
self.assertIn(extra_route, routes)
def _assert_interfaces_deleted_from_ovs(self):
def assert_ovs_bridge_empty(bridge_name):
bridge = ovs_lib.OVSBridge(bridge_name)
self.assertFalse(bridge.get_port_name_list())
assert_ovs_bridge_empty(self.agent.conf.ovs_integration_bridge)
assert_ovs_bridge_empty(self.agent.conf.external_network_bridge)
def floating_ips_configured(self, router):
floating_ips = router.router[l3_constants.FLOATINGIP_KEY]
external_port = router.get_ex_gw_port()
return len(floating_ips) and all(
ip_lib.device_exists_with_ips_and_mac(
router.get_external_device_name(external_port['id']),
['%s/32' % fip['floating_ip_address']],
external_port['mac_address'],
namespace=router.ns_name) for fip in floating_ips)
def fail_ha_router(self, router):
device_name = router.get_ha_device_name()
ha_device = ip_lib.IPDevice(device_name, router.ns_name)
ha_device.link.set_down()
class L3AgentTestCase(L3AgentTestFramework):
def test_keepalived_state_change_notification(self):
enqueue_mock = mock.patch.object(
self.agent, 'enqueue_state_change').start()
router_info = self.generate_router_info(enable_ha=True)
router = self.manage_router(self.agent, router_info)
utils.wait_until_true(lambda: router.ha_state == 'master')
self.fail_ha_router(router)
utils.wait_until_true(lambda: router.ha_state == 'backup')
utils.wait_until_true(lambda: enqueue_mock.call_count == 3)
calls = [args[0] for args in enqueue_mock.call_args_list]
self.assertEqual((router.router_id, 'backup'), calls[0])
self.assertEqual((router.router_id, 'master'), calls[1])
self.assertEqual((router.router_id, 'backup'), calls[2])
def _expected_rpc_report(self, expected):
calls = (args[0][1] for args in
self.agent.plugin_rpc.update_ha_routers_states.call_args_list)
# Get the last state reported for each router
actual_router_states = {}
for call in calls:
for router_id, state in call.iteritems():
actual_router_states[router_id] = state
return actual_router_states == expected
def test_keepalived_state_change_bulk_rpc(self):
router_info = self.generate_router_info(enable_ha=True)
router1 = self.manage_router(self.agent, router_info)
self.fail_ha_router(router1)
router_info = self.generate_router_info(enable_ha=True)
router2 = self.manage_router(self.agent, router_info)
utils.wait_until_true(lambda: router1.ha_state == 'backup')
utils.wait_until_true(lambda: router2.ha_state == 'master')
utils.wait_until_true(
lambda: self._expected_rpc_report(
{router1.router_id: 'standby', router2.router_id: 'active'}))
def test_agent_notifications_for_router_events(self):
"""Test notifications for router create, update, and delete.
Make sure that when the agent sends notifications of router events
for router create, update, and delete, that the correct handler is
called with the right resource, event, and router information.
"""
event_handler = mock.Mock()
registry.subscribe(event_handler,
resources.ROUTER, events.BEFORE_CREATE)
registry.subscribe(event_handler,
resources.ROUTER, events.AFTER_CREATE)
registry.subscribe(event_handler,
resources.ROUTER, events.BEFORE_UPDATE)
registry.subscribe(event_handler,
resources.ROUTER, events.AFTER_UPDATE)
registry.subscribe(event_handler,
resources.ROUTER, events.BEFORE_DELETE)
registry.subscribe(event_handler,
resources.ROUTER, events.AFTER_DELETE)
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
self.agent._process_updated_router(router.router)
self._delete_router(self.agent, router.router_id)
expected_calls = [
mock.call('router', 'before_create', self.agent, router=router),
mock.call('router', 'after_create', self.agent, router=router),
mock.call('router', 'before_update', self.agent, router=router),
mock.call('router', 'after_update', self.agent, router=router),
mock.call('router', 'before_delete', self.agent, router=router),
mock.call('router', 'after_delete', self.agent, router=router)]
event_handler.assert_has_calls(expected_calls)
def test_legacy_router_lifecycle(self):
self._router_lifecycle(enable_ha=False, dual_stack=True)
def test_legacy_router_lifecycle_with_no_gateway_subnet(self):
self._router_lifecycle(enable_ha=False, dual_stack=True,
v6_ext_gw_with_sub=False)
def test_ha_router_lifecycle(self):
self._router_lifecycle(enable_ha=True)
def test_conntrack_disassociate_fip(self):
        '''Test that conntrack immediately drops a stateful connection
        that uses a floating IP once it is disassociated.
'''
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
port = helpers.get_free_namespace_port(router.ns_name)
client_address = '19.4.4.3'
server_address = '35.4.0.4'
def clean_fips(router):
router.router[l3_constants.FLOATINGIP_KEY] = []
clean_fips(router)
self._add_fip(router, client_address, fixed_address=server_address)
router.process(self.agent)
router_ns = ip_lib.IPWrapper(namespace=router.ns_name)
netcat = helpers.NetcatTester(router_ns, router_ns,
server_address, port,
client_address=client_address,
run_as_root=True,
udp=False)
self.addCleanup(netcat.stop_processes)
def assert_num_of_conntrack_rules(n):
out = router_ns.netns.execute(["conntrack", "-L",
"--orig-src", client_address])
self.assertEqual(
n, len([line for line in out.strip().split('\n') if line]))
with self.assert_max_execution_time(100):
assert_num_of_conntrack_rules(0)
self.assertTrue(netcat.test_connectivity())
assert_num_of_conntrack_rules(1)
clean_fips(router)
router.process(self.agent)
assert_num_of_conntrack_rules(0)
with testtools.ExpectedException(RuntimeError):
netcat.test_connectivity()
def test_ipv6_ha_router_lifecycle(self):
self._router_lifecycle(enable_ha=True, ip_version=6)
def test_keepalived_configuration(self):
router_info = self.generate_router_info(enable_ha=True)
router = self.manage_router(self.agent, router_info)
expected = self.get_expected_keepalive_configuration(router)
self.assertEqual(expected,
router.keepalived_manager.get_conf_on_disk())
# Add a new FIP and change the GW IP address
router.router = copy.deepcopy(router.router)
existing_fip = '19.4.4.2'
new_fip = '19.4.4.3'
self._add_fip(router, new_fip)
subnet_id = _uuid()
fixed_ips = [{'ip_address': '19.4.4.10',
'prefixlen': 24,
'subnet_id': subnet_id}]
subnets = [{'id': subnet_id,
'cidr': '19.4.4.0/24',
'gateway_ip': '19.4.4.5'}]
router.router['gw_port']['subnets'] = subnets
router.router['gw_port']['fixed_ips'] = fixed_ips
router.process(self.agent)
# Get the updated configuration and assert that both FIPs are in,
# and that the GW IP address was updated.
new_config = router.keepalived_manager.config.get_config_str()
old_gw = '0.0.0.0/0 via 19.4.4.1'
new_gw = '0.0.0.0/0 via 19.4.4.5'
old_external_device_ip = '19.4.4.4'
new_external_device_ip = '19.4.4.10'
self.assertIn(existing_fip, new_config)
self.assertIn(new_fip, new_config)
self.assertNotIn(old_gw, new_config)
self.assertIn(new_gw, new_config)
external_port = router.get_ex_gw_port()
external_device_name = router.get_external_device_name(
external_port['id'])
self.assertNotIn('%s/24 dev %s' %
(old_external_device_ip, external_device_name),
new_config)
self.assertIn('%s/24 dev %s' %
(new_external_device_ip, external_device_name),
new_config)
def test_periodic_sync_routers_task(self):
routers_to_keep = []
routers_to_delete = []
ns_names_to_retrieve = set()
for i in range(2):
routers_to_keep.append(self.generate_router_info(False))
self.manage_router(self.agent, routers_to_keep[i])
ns_names_to_retrieve.add(namespaces.NS_PREFIX +
routers_to_keep[i]['id'])
for i in range(2):
routers_to_delete.append(self.generate_router_info(False))
self.manage_router(self.agent, routers_to_delete[i])
ns_names_to_retrieve.add(namespaces.NS_PREFIX +
routers_to_delete[i]['id'])
        # Mock the plugin RPC API to simulate a situation where the agent
        # was handling the 4 routers created above, went down, and, after
        # starting up again, found that two of the routers had been deleted
        # via the API
mocked_get_routers = (
neutron_l3_agent.L3PluginApi.return_value.get_routers)
mocked_get_routers.return_value = routers_to_keep
        # Synchronize the agent with the plug-in
with mock.patch.object(namespace_manager.NamespaceManager, 'list_all',
return_value=ns_names_to_retrieve):
self.agent.periodic_sync_routers_task(self.agent.context)
# Mock the plugin RPC API so a known external network id is returned
# when the router updates are processed by the agent
external_network_id = _uuid()
mocked_get_external_network_id = (
neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
mocked_get_external_network_id.return_value = external_network_id
# Plug external_gateway_info in the routers that are not going to be
# deleted by the agent when it processes the updates. Otherwise,
# _process_router_if_compatible in the agent fails
for i in range(2):
routers_to_keep[i]['external_gateway_info'] = {'network_id':
external_network_id}
# Have the agent process the update from the plug-in and verify
# expected behavior
for _ in routers_to_keep + routers_to_delete:
self.agent._process_router_update()
for i in range(2):
self.assertIn(routers_to_keep[i]['id'], self.agent.router_info)
self.assertTrue(self._namespace_exists(namespaces.NS_PREFIX +
routers_to_keep[i]['id']))
for i in range(2):
self.assertNotIn(routers_to_delete[i]['id'],
self.agent.router_info)
self.assertFalse(self._namespace_exists(
namespaces.NS_PREFIX + routers_to_delete[i]['id']))
def _router_lifecycle(self, enable_ha, ip_version=4,
dual_stack=False, v6_ext_gw_with_sub=True):
router_info = self.generate_router_info(enable_ha, ip_version,
dual_stack=dual_stack,
v6_ext_gw_with_sub=(
v6_ext_gw_with_sub))
router = self.manage_router(self.agent, router_info)
# Add multiple-IPv6-prefix internal router port
slaac = l3_constants.IPV6_SLAAC
slaac_mode = {'ra_mode': slaac, 'address_mode': slaac}
subnet_modes = [slaac_mode] * 2
self._add_internal_interface_by_subnet(router.router, count=2,
ip_version=6, ipv6_subnet_modes=subnet_modes)
router.process(self.agent)
if enable_ha:
port = router.get_ex_gw_port()
interface_name = router.get_external_device_name(port['id'])
self._assert_no_ip_addresses_on_interface(router.ns_name,
interface_name)
utils.wait_until_true(lambda: router.ha_state == 'master')
# Keepalived notifies of a state transition when it starts,
# not when it ends. Thus, we have to wait until keepalived finishes
# configuring everything. We verify this by waiting until the last
# device has an IP address.
device = router.router[l3_constants.INTERFACE_KEY][-1]
device_exists = functools.partial(
self.device_exists_with_ips_and_mac,
device,
router.get_internal_device_name,
router.ns_name)
utils.wait_until_true(device_exists)
self.assertTrue(self._namespace_exists(router.ns_name))
utils.wait_until_true(
lambda: self._metadata_proxy_exists(self.agent.conf, router))
self._assert_internal_devices(router)
self._assert_external_device(router)
if not (enable_ha and (ip_version == 6 or dual_stack)):
# Note(SridharG): enable the assert_gateway for IPv6 once
# keepalived on Ubuntu14.04 (i.e., check-neutron-dsvm-functional
# platform) is updated to 1.2.10 (or above).
# For more details: https://review.openstack.org/#/c/151284/
self._assert_gateway(router, v6_ext_gw_with_sub)
self.assertTrue(self.floating_ips_configured(router))
self._assert_snat_chains(router)
self._assert_floating_ip_chains(router)
self._assert_extra_routes(router)
self._assert_metadata_chains(router)
if enable_ha:
self._assert_ha_device(router)
self.assertTrue(router.keepalived_manager.get_process().active)
self._delete_router(self.agent, router.router_id)
self._assert_interfaces_deleted_from_ovs()
self._assert_router_does_not_exist(router)
if enable_ha:
self.assertFalse(router.keepalived_manager.get_process().active)
def _assert_external_device(self, router):
external_port = router.get_ex_gw_port()
self.assertTrue(self.device_exists_with_ips_and_mac(
external_port, router.get_external_device_name,
router.ns_name))
def _assert_gateway(self, router, v6_ext_gw_with_sub=True):
external_port = router.get_ex_gw_port()
external_device_name = router.get_external_device_name(
external_port['id'])
external_device = ip_lib.IPDevice(external_device_name,
namespace=router.ns_name)
for subnet in external_port['subnets']:
self._gateway_check(subnet['gateway_ip'], external_device)
if not v6_ext_gw_with_sub:
self._gateway_check(self.agent.conf.ipv6_gateway,
external_device)
def _gateway_check(self, gateway_ip, external_device):
expected_gateway = gateway_ip
ip_vers = netaddr.IPAddress(expected_gateway).version
existing_gateway = (external_device.route.get_gateway(
ip_version=ip_vers).get('gateway'))
self.assertEqual(expected_gateway, existing_gateway)
def _assert_ha_device(self, router):
def ha_router_dev_name_getter(not_used):
return router.get_ha_device_name()
self.assertTrue(self.device_exists_with_ips_and_mac(
router.router[l3_constants.HA_INTERFACE_KEY],
ha_router_dev_name_getter, router.ns_name))
@classmethod
def _get_addresses_on_device(cls, namespace, interface):
return [address['cidr'] for address in
ip_lib.IPDevice(interface, namespace=namespace).addr.list()]
def _assert_no_ip_addresses_on_interface(self, namespace, interface):
self.assertEqual(
[], self._get_addresses_on_device(namespace, interface))
def test_ha_router_conf_on_restarted_agent(self):
router_info = self.generate_router_info(enable_ha=True)
router1 = self.manage_router(self.agent, router_info)
self._add_fip(router1, '192.168.111.12')
restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
self.agent.host, self.agent.conf)
self._create_router(restarted_agent, router1.router)
utils.wait_until_true(lambda: self.floating_ips_configured(router1))
self.assertIn(
router1._get_primary_vip(),
self._get_addresses_on_device(
router1.ns_name,
router1.get_ha_device_name()))
def test_fip_connection_from_same_subnet(self):
        '''Test connection to a floating IP that is associated with a
        fixed_ip on the same subnet as the source fixed_ip.
        In other words, it confirms that return packets really do
        go through the router.
'''
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
router_ip_cidr = self._port_first_ip_cidr(router.internal_ports[0])
router_ip = router_ip_cidr.partition('/')[0]
src_ip_cidr = net_helpers.increment_ip_cidr(router_ip_cidr)
dst_ip_cidr = net_helpers.increment_ip_cidr(src_ip_cidr)
dst_ip = dst_ip_cidr.partition('/')[0]
dst_fip = '19.4.4.10'
router.router[l3_constants.FLOATINGIP_KEY] = []
self._add_fip(router, dst_fip, fixed_address=dst_ip)
router.process(self.agent)
br_int = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)
# FIXME(cbrandily): temporary, will be replaced by fake machines
src_ns = self._create_namespace(prefix='test-src-')
src_port = self.useFixture(
net_helpers.OVSPortFixture(br_int, src_ns.namespace)).port
src_port.addr.add(src_ip_cidr)
net_helpers.set_namespace_gateway(src_port, router_ip)
dst_ns = self._create_namespace(prefix='test-dst-')
dst_port = self.useFixture(
net_helpers.OVSPortFixture(br_int, dst_ns.namespace)).port
dst_port.addr.add(dst_ip_cidr)
net_helpers.set_namespace_gateway(dst_port, router_ip)
protocol_port = helpers.get_free_namespace_port(dst_ns)
# client sends to fip
netcat = helpers.NetcatTester(src_ns, dst_ns, dst_ip,
protocol_port,
client_address=dst_fip,
run_as_root=True,
udp=False)
self.addCleanup(netcat.stop_processes)
self.assertTrue(netcat.test_connectivity())
class L3HATestFramework(L3AgentTestFramework):
NESTED_NAMESPACE_SEPARATOR = '@'
def setUp(self):
super(L3HATestFramework, self).setUp()
self.failover_agent = self._configure_agent('agent2')
br_int_1 = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)
br_int_2 = get_ovs_bridge(
self.failover_agent.conf.ovs_integration_bridge)
veth1, veth2 = self.create_veth()
br_int_1.add_port(veth1.name)
br_int_2.add_port(veth2.name)
def test_ha_router_failover(self):
router_info = self.generate_router_info(enable_ha=True)
ns_name = "%s%s%s" % (
namespaces.RouterNamespace._get_ns_name(router_info['id']),
self.NESTED_NAMESPACE_SEPARATOR, self.agent.host)
mock.patch.object(namespaces.RouterNamespace, '_get_ns_name',
return_value=ns_name).start()
router1 = self.manage_router(self.agent, router_info)
router_info_2 = copy.deepcopy(router_info)
router_info_2[l3_constants.HA_INTERFACE_KEY] = (
test_l3_agent.get_ha_interface(ip='169.254.192.2',
mac='22:22:22:22:22:22'))
ns_name = "%s%s%s" % (
namespaces.RouterNamespace._get_ns_name(router_info_2['id']),
self.NESTED_NAMESPACE_SEPARATOR, self.failover_agent.host)
mock.patch.object(namespaces.RouterNamespace, '_get_ns_name',
return_value=ns_name).start()
router2 = self.manage_router(self.failover_agent, router_info_2)
utils.wait_until_true(lambda: router1.ha_state == 'master')
utils.wait_until_true(lambda: router2.ha_state == 'backup')
device_name = router1.get_ha_device_name()
ha_device = ip_lib.IPDevice(device_name, namespace=router1.ns_name)
ha_device.link.set_down()
utils.wait_until_true(lambda: router2.ha_state == 'master')
utils.wait_until_true(lambda: router1.ha_state == 'backup')
class MetadataFakeProxyHandler(object):
def __init__(self, status):
self.status = status
@webob.dec.wsgify()
def __call__(self, req):
return webob.Response(status=self.status)
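# --- Illustrative sketch, not part of the original file ---
# webob.dec.wsgify wraps __call__ so each MetadataFakeProxyHandler instance is
# itself a WSGI application: it is handed a webob.Request and the returned
# webob.Response becomes the WSGI reply, which is what the fake metadata
# server below serves on its Unix-domain socket.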
class MetadataL3AgentTestCase(L3AgentTestFramework):
SOCKET_MODE = 0o644
def _create_metadata_fake_server(self, status):
server = utils.UnixDomainWSGIServer('metadata-fake-server')
self.addCleanup(server.stop)
# NOTE(cbrandily): TempDir fixture creates a folder with 0o700
# permissions but metadata_proxy_socket folder must be readable by all
# users
self.useFixture(
helpers.RecursivePermDirFixture(
os.path.dirname(self.agent.conf.metadata_proxy_socket), 0o555))
server.start(MetadataFakeProxyHandler(status),
self.agent.conf.metadata_proxy_socket,
workers=0, backlog=4096, mode=self.SOCKET_MODE)
def test_access_to_metadata_proxy(self):
"""Test access to the l3-agent metadata proxy.
The test creates:
         * An l3-agent metadata service:
* A router (which creates a metadata proxy in the router namespace),
* A fake metadata server
* A "client" namespace (simulating a vm) with a port on router
internal subnet.
The test queries from the "client" namespace the metadata proxy on
http://169.254.169.254 and asserts that the metadata proxy added
the X-Forwarded-For and X-Neutron-Router-Id headers to the request
and forwarded the http request to the fake metadata server and the
response to the "client" namespace.
"""
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
self._create_metadata_fake_server(webob.exc.HTTPOk.code)
# Create and configure client namespace
client_ns = self._create_namespace()
router_ip_cidr = self._port_first_ip_cidr(router.internal_ports[0])
ip_cidr = net_helpers.increment_ip_cidr(router_ip_cidr)
br_int = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)
# FIXME(cbrandily): temporary, will be replaced by a fake machine
port = self.useFixture(
net_helpers.OVSPortFixture(br_int, client_ns.namespace)).port
port.addr.add(ip_cidr)
net_helpers.set_namespace_gateway(port,
router_ip_cidr.partition('/')[0])
# Query metadata proxy
url = 'http://%(host)s:%(port)s' % {'host': dhcp.METADATA_DEFAULT_IP,
'port': dhcp.METADATA_PORT}
cmd = 'curl', '--max-time', METADATA_REQUEST_TIMEOUT, '-D-', url
try:
raw_headers = client_ns.netns.execute(cmd)
except RuntimeError:
self.fail('metadata proxy unreachable on %s before timeout' % url)
# Check status code
firstline = raw_headers.splitlines()[0]
self.assertIn(str(webob.exc.HTTPOk.code), firstline.split())
class UnprivilegedUserMetadataL3AgentTestCase(MetadataL3AgentTestCase):
"""Test metadata proxy with least privileged user.
    The least privileged user has uid=65534 and is commonly, but not always,
    named 'nobody'; that's why we use its uid.
"""
SOCKET_MODE = 0o664
def setUp(self):
super(UnprivilegedUserMetadataL3AgentTestCase, self).setUp()
self.agent.conf.set_override('metadata_proxy_user', '65534')
self.agent.conf.set_override('metadata_proxy_watch_log', False)
class UnprivilegedUserGroupMetadataL3AgentTestCase(MetadataL3AgentTestCase):
"""Test metadata proxy with least privileged user/group.
    The least privileged user has uid=65534 and is commonly, but not always,
    named 'nobody'; that's why we use its uid.
    Its group has gid=65534 and is commonly named 'nobody' or 'nogroup';
    that's why we use its gid.
"""
SOCKET_MODE = 0o666
def setUp(self):
super(UnprivilegedUserGroupMetadataL3AgentTestCase, self).setUp()
self.agent.conf.set_override('metadata_proxy_user', '65534')
self.agent.conf.set_override('metadata_proxy_group', '65534')
self.agent.conf.set_override('metadata_proxy_watch_log', False)
class TestDvrRouter(L3AgentTestFramework):
def test_dvr_router_lifecycle_without_ha_without_snat_with_fips(self):
self._dvr_router_lifecycle(enable_ha=False, enable_snat=False)
def test_dvr_router_lifecycle_without_ha_with_snat_with_fips(self):
self._dvr_router_lifecycle(enable_ha=False, enable_snat=True)
def _helper_create_dvr_router_fips_for_ext_network(
self, agent_mode, **dvr_router_kwargs):
self.agent.conf.agent_mode = agent_mode
router_info = self.generate_dvr_router_info(**dvr_router_kwargs)
mocked_ext_net_id = (
neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
mocked_ext_net_id.return_value = (
router_info['_floatingips'][0]['floating_network_id'])
router = self.manage_router(self.agent, router_info)
fip_ns = router.fip_ns.get_name()
return router, fip_ns
def _validate_fips_for_external_network(self, router, fip_ns):
self.assertTrue(self._namespace_exists(router.ns_name))
self.assertTrue(self._namespace_exists(fip_ns))
self._assert_dvr_floating_ips(router)
self._assert_snat_namespace_does_not_exist(router)
def test_dvr_router_fips_for_multiple_ext_networks(self):
agent_mode = 'dvr'
# Create the first router fip with external net1
dvr_router1_kwargs = {'ip_address': '19.4.4.3',
'subnet_cidr': '19.4.4.0/24',
'gateway_ip': '19.4.4.1',
'gateway_mac': 'ca:fe:de:ab:cd:ef'}
router1, fip1_ns = (
self._helper_create_dvr_router_fips_for_ext_network(
agent_mode, **dvr_router1_kwargs))
# Validate the fip with external net1
self._validate_fips_for_external_network(router1, fip1_ns)
# Create the second router fip with external net2
dvr_router2_kwargs = {'ip_address': '19.4.5.3',
'subnet_cidr': '19.4.5.0/24',
'gateway_ip': '19.4.5.1',
'gateway_mac': 'ca:fe:de:ab:cd:fe'}
router2, fip2_ns = (
self._helper_create_dvr_router_fips_for_ext_network(
agent_mode, **dvr_router2_kwargs))
# Validate the fip with external net2
self._validate_fips_for_external_network(router2, fip2_ns)
def _dvr_router_lifecycle(self, enable_ha=False, enable_snat=False,
custom_mtu=2000):
'''Test dvr router lifecycle
:param enable_ha: sets the ha value for the router.
:param enable_snat: the value of enable_snat is used
to set the agent_mode.
'''
# The value of agent_mode can be dvr, dvr_snat, or legacy.
# Since by definition this is a dvr (distributed = true)
# only dvr and dvr_snat are applicable
self.agent.conf.agent_mode = 'dvr_snat' if enable_snat else 'dvr'
self.agent.conf.network_device_mtu = custom_mtu
# We get the router info particular to a dvr router
router_info = self.generate_dvr_router_info(
enable_ha, enable_snat)
# We need to mock the get_agent_gateway_port return value
# because the whole L3PluginApi is mocked and we need the port
# gateway_port information before the l3_agent will create it.
# The port returned needs to have the same information as
# router_info['gw_port']
mocked_gw_port = (
neutron_l3_agent.L3PluginApi.return_value.get_agent_gateway_port)
mocked_gw_port.return_value = router_info['gw_port']
# We also need to mock the get_external_network_id method to
# get the correct fip namespace.
mocked_ext_net_id = (
neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
mocked_ext_net_id.return_value = (
router_info['_floatingips'][0]['floating_network_id'])
# With all that set we can now ask the l3_agent to
# manage the router (create it, create namespaces,
# attach interfaces, etc...)
router = self.manage_router(self.agent, router_info)
self.assertTrue(self._namespace_exists(router.ns_name))
self.assertTrue(self._metadata_proxy_exists(self.agent.conf, router))
self._assert_internal_devices(router)
self._assert_dvr_external_device(router)
self._assert_dvr_gateway(router)
self._assert_dvr_floating_ips(router)
self._assert_snat_chains(router)
self._assert_floating_ip_chains(router)
self._assert_metadata_chains(router)
self._assert_extra_routes(router)
self._assert_rfp_fpr_mtu(router, custom_mtu)
self._delete_router(self.agent, router.router_id)
self._assert_interfaces_deleted_from_ovs()
self._assert_router_does_not_exist(router)
def generate_dvr_router_info(
self, enable_ha=False, enable_snat=False, **kwargs):
router = test_l3_agent.prepare_router_data(
enable_snat=enable_snat,
enable_floating_ip=True,
enable_ha=enable_ha,
**kwargs)
internal_ports = router.get(l3_constants.INTERFACE_KEY, [])
router['distributed'] = True
router['gw_port_host'] = self.agent.conf.host
router['gw_port']['binding:host_id'] = self.agent.conf.host
floating_ip = router['_floatingips'][0]
floating_ip['floating_network_id'] = router['gw_port']['network_id']
floating_ip['host'] = self.agent.conf.host
floating_ip['port_id'] = internal_ports[0]['id']
floating_ip['status'] = 'ACTIVE'
self._add_snat_port_info_to_router(router, internal_ports)
# FIP has a dependency on external gateway. So we need to create
# the snat_port info and fip_agent_gw_port_info irrespective of
# the agent type the dvr supports. The namespace creation is
# dependent on the agent_type.
external_gw_port = router['gw_port']
self._add_fip_agent_gw_port_info_to_router(router, external_gw_port)
return router
def _add_fip_agent_gw_port_info_to_router(self, router, external_gw_port):
# Add fip agent gateway port information to the router_info
fip_gw_port_list = router.get(
l3_constants.FLOATINGIP_AGENT_INTF_KEY, [])
if not fip_gw_port_list and external_gw_port:
# Get values from external gateway port
fixed_ip = external_gw_port['fixed_ips'][0]
float_subnet = external_gw_port['subnets'][0]
port_ip = fixed_ip['ip_address']
# Pick an ip address which is not the same as port_ip
fip_gw_port_ip = str(netaddr.IPAddress(port_ip) + 5)
# Add floatingip agent gateway port info to router
prefixlen = netaddr.IPNetwork(float_subnet['cidr']).prefixlen
router[l3_constants.FLOATINGIP_AGENT_INTF_KEY] = [
{'subnets': [
{'cidr': float_subnet['cidr'],
'gateway_ip': float_subnet['gateway_ip'],
'id': fixed_ip['subnet_id']}],
'network_id': external_gw_port['network_id'],
'device_owner': 'network:floatingip_agent_gateway',
'mac_address': 'fa:16:3e:80:8d:89',
'binding:host_id': self.agent.conf.host,
'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
'ip_address': fip_gw_port_ip,
'prefixlen': prefixlen}],
'id': _uuid(),
'device_id': _uuid()}
]
def _add_snat_port_info_to_router(self, router, internal_ports):
# Add snat port information to the router
snat_port_list = router.get(l3_constants.SNAT_ROUTER_INTF_KEY, [])
if not snat_port_list and internal_ports:
# Get values from internal port
port = internal_ports[0]
fixed_ip = port['fixed_ips'][0]
snat_subnet = port['subnets'][0]
port_ip = fixed_ip['ip_address']
# Pick an ip address which is not the same as port_ip
snat_ip = str(netaddr.IPAddress(port_ip) + 5)
# Add the info to router as the first snat port
# in the list of snat ports
prefixlen = netaddr.IPNetwork(snat_subnet['cidr']).prefixlen
router[l3_constants.SNAT_ROUTER_INTF_KEY] = [
{'subnets': [
{'cidr': snat_subnet['cidr'],
'gateway_ip': snat_subnet['gateway_ip'],
'id': fixed_ip['subnet_id']}],
'network_id': port['network_id'],
'device_owner': 'network:router_centralized_snat',
'mac_address': 'fa:16:3e:80:8d:89',
'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
'ip_address': snat_ip,
'prefixlen': prefixlen}],
'id': _uuid(),
'device_id': _uuid()}
]
def _assert_dvr_external_device(self, router):
external_port = router.get_ex_gw_port()
snat_ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
# if the agent is in dvr_snat mode, then we have to check
# that the correct ports and ip addresses exist in the
# snat_ns_name namespace
if self.agent.conf.agent_mode == 'dvr_snat':
self.assertTrue(self.device_exists_with_ips_and_mac(
external_port, router.get_external_device_name,
snat_ns_name))
# if the agent is in dvr mode then the snat_ns_name namespace
# should not be present at all:
elif self.agent.conf.agent_mode == 'dvr':
self.assertFalse(
self._namespace_exists(snat_ns_name),
"namespace %s was found but agent is in dvr mode not dvr_snat"
% (str(snat_ns_name))
)
# if the agent is anything else the test is misconfigured
# we force a test failure with message
else:
self.assertTrue(False, " agent not configured for dvr or dvr_snat")
def _assert_dvr_gateway(self, router):
gateway_expected_in_snat_namespace = (
self.agent.conf.agent_mode == 'dvr_snat'
)
if gateway_expected_in_snat_namespace:
self._assert_dvr_snat_gateway(router)
snat_namespace_should_not_exist = (
self.agent.conf.agent_mode == 'dvr'
)
if snat_namespace_should_not_exist:
self._assert_snat_namespace_does_not_exist(router)
def _assert_dvr_snat_gateway(self, router):
namespace = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
external_port = router.get_ex_gw_port()
external_device_name = router.get_external_device_name(
external_port['id'])
external_device = ip_lib.IPDevice(external_device_name,
namespace=namespace)
existing_gateway = (
external_device.route.get_gateway().get('gateway'))
expected_gateway = external_port['subnets'][0]['gateway_ip']
self.assertEqual(expected_gateway, existing_gateway)
def _assert_snat_namespace_does_not_exist(self, router):
namespace = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
self.assertFalse(self._namespace_exists(namespace))
def _assert_dvr_floating_ips(self, router):
# in the fip namespace:
# Check that the fg-<port-id> (floatingip_agent_gateway)
# is created with the ip address of the external gateway port
floating_ips = router.router[l3_constants.FLOATINGIP_KEY]
self.assertTrue(floating_ips)
# We need to fetch the floatingip agent gateway port info
# from the router_info
floating_agent_gw_port = (
router.router[l3_constants.FLOATINGIP_AGENT_INTF_KEY])
self.assertTrue(floating_agent_gw_port)
external_gw_port = floating_agent_gw_port[0]
fip_ns = self.agent.get_fip_ns(floating_ips[0]['floating_network_id'])
fip_ns_name = fip_ns.get_name()
fg_port_created_successfully = ip_lib.device_exists_with_ips_and_mac(
fip_ns.get_ext_device_name(external_gw_port['id']),
[self._port_first_ip_cidr(external_gw_port)],
external_gw_port['mac_address'],
namespace=fip_ns_name)
self.assertTrue(fg_port_created_successfully)
# Check fpr-router device has been created
device_name = fip_ns.get_int_device_name(router.router_id)
fpr_router_device_created_successfully = ip_lib.device_exists(
device_name, namespace=fip_ns_name)
self.assertTrue(fpr_router_device_created_successfully)
# In the router namespace
# Check rfp-<router-id> is created correctly
for fip in floating_ips:
device_name = fip_ns.get_rtr_ext_device_name(router.router_id)
self.assertTrue(ip_lib.device_exists(
device_name, namespace=router.ns_name))
def test_dvr_router_rem_fips_on_restarted_agent(self):
self.agent.conf.agent_mode = 'dvr_snat'
router_info = self.generate_dvr_router_info()
router1 = self._create_router(self.agent, router_info)
        self._add_fip(router1, '192.168.111.12', host=self.agent.conf.host)
fip_ns = router1.fip_ns.get_name()
restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
self.agent.host, self.agent.conf)
router1.router[l3_constants.FLOATINGIP_KEY] = []
self._create_router(restarted_agent, router1.router)
self._assert_dvr_snat_gateway(router1)
self.assertFalse(self._namespace_exists(fip_ns))
def test_dvr_router_add_internal_network_set_arp_cache(self):
# Check that, when the router is set up and there are
        # existing ports on the uplinked subnet, the ARP
# cache is properly populated.
self.agent.conf.agent_mode = 'dvr_snat'
router_info = test_l3_agent.prepare_router_data()
router_info['distributed'] = True
expected_neighbor = '35.4.1.10'
port_data = {
'fixed_ips': [{'ip_address': expected_neighbor}],
'mac_address': 'fa:3e:aa:bb:cc:dd',
'device_owner': 'compute:None'
}
self.agent.plugin_rpc.get_ports_by_subnet.return_value = [port_data]
router1 = self._create_router(self.agent, router_info)
internal_device = router1.get_internal_device_name(
router_info['_interfaces'][0]['id'])
neighbors = ip_lib.IPDevice(internal_device, router1.ns_name).neigh
self.assertEqual(expected_neighbor, neighbors.show().split()[0])
def _assert_rfp_fpr_mtu(self, router, expected_mtu=1500):
dev_mtu = self.get_device_mtu(
router.router_id, router.fip_ns.get_rtr_ext_device_name,
router.ns_name)
self.assertEqual(expected_mtu, dev_mtu)
dev_mtu = self.get_device_mtu(
router.router_id, router.fip_ns.get_int_device_name,
router.fip_ns.get_name())
self.assertEqual(expected_mtu, dev_mtu)
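# --- Illustrative sketch, not part of the original file ---
# These are functional tests: they create real network namespaces, veth pairs
# and OVS bridges, so they need root privileges and a working Open vSwitch.
# A typical (assumed) invocation is through the project's functional tox
# environment, e.g.:
#   tox -e dsvm-functional neutron.tests.functional.agent.test_l3_agent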
|
pnavarro/neutron
|
neutron/tests/functional/agent/test_l3_agent.py
|
Python
|
apache-2.0
| 53,684 | 0.000112 |
#!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from codecs import open
from setuptools import setup
VERSION = "3.0.1+dev"
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: MIT License',
]
with open('README.rst', 'r', encoding='utf-8') as f:
README = f.read()
with open('HISTORY.rst', 'r', encoding='utf-8') as f:
HISTORY = f.read()
setup(
name='azure-cli-nspkg',
version=VERSION,
description='Microsoft Azure CLI Namespace Package',
long_description=README + '\n\n' + HISTORY,
license='MIT',
author='Microsoft Corporation',
author_email='azpycli@microsoft.com',
url='https://github.com/Azure/azure-cli',
zip_safe=False,
classifiers=CLASSIFIERS,
install_requires=[
'azure-nspkg>=2.0.0'
],
packages=[
'azure',
'azure.cli',
],
)
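# --- Illustrative sketch, not part of the original file ---
# Building the namespace package locally (assumed invocation; bdist_wheel
# additionally requires the 'wheel' package to be installed):
#   python setup.py sdist bdist_wheel
# The resulting azure-cli-nspkg archives land in ./dist and contain only the
# azure / azure.cli namespace packages declared above.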
|
samedder/azure-cli
|
src/azure-cli-nspkg/setup.py
|
Python
|
mit
| 1,582 | 0.000632 |
from __future__ import print_function, unicode_literals
import sys
from resources.lib.kodiutils import params as decode
class Params:
handle = int(sys.argv[1]) if len(sys.argv) > 1 else -1
orig_args = sys.argv[2] if len(sys.argv) > 2 else ''
args = decode(sys.argv[2]) if len(sys.argv) > 2 else {}
resume = sys.argv[3][7:] != 'false' if len(sys.argv) > 3 else False
url = None
params = Params()
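# --- Illustrative sketch, not part of the original file ---
# Typical consumption elsewhere in the add-on (hypothetical caller): Kodi
# invokes the plugin with argv like
#   ['plugin://...', '<handle>', '?action=play&id=42', 'resume:false']
# so callers can simply do:
#   from resources.lib.params import params
#   if params.handle != -1:
#       action = params.args.get('action')   # decoded query-string parameters
#       do_resume = params.resume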
|
bbaronSVK/plugin.video.stream-cinema
|
resources/lib/params.py
|
Python
|
gpl-3.0
| 420 | 0 |
"""
Common tests shared by test_str, test_unicode, test_userstring and test_string.
"""
import unittest, string, sys, struct
from test import support
from collections import UserList
class Sequence:
def __init__(self, seq='wxyz'): self.seq = seq
def __len__(self): return len(self.seq)
def __getitem__(self, i): return self.seq[i]
class BadSeq1(Sequence):
def __init__(self): self.seq = [7, 'hello', 123]
def __str__(self): return '{0} {1} {2}'.format(*self.seq)
class BadSeq2(Sequence):
def __init__(self): self.seq = ['a', 'b', 'c']
def __len__(self): return 8
class BaseTest(unittest.TestCase):
# These tests are for buffers of values (bytes) and not
# specific to character interpretation, used for bytes objects
# and various string implementations
# The type to be tested
    # Change in subclasses to change the behaviour of fixtype()
    type2test = None
    # All tests pass their arguments to the testing methods
    # as str objects. fixtype() can be used to propagate
# these arguments to the appropriate type
def fixtype(self, obj):
if isinstance(obj, str):
return self.__class__.type2test(obj)
elif isinstance(obj, list):
return [self.fixtype(x) for x in obj]
elif isinstance(obj, tuple):
return tuple([self.fixtype(x) for x in obj])
elif isinstance(obj, dict):
return dict([
(self.fixtype(key), self.fixtype(value))
for (key, value) in obj.items()
])
else:
return obj
# check that obj.method(*args) returns result
def checkequal(self, result, obj, methodname, *args):
result = self.fixtype(result)
obj = self.fixtype(obj)
args = self.fixtype(args)
realresult = getattr(obj, methodname)(*args)
self.assertEqual(
result,
realresult
)
# if the original is returned make sure that
# this doesn't happen with subclasses
if obj is realresult:
try:
class subtype(self.__class__.type2test):
pass
except TypeError:
pass # Skip this if we can't subclass
else:
obj = subtype(obj)
realresult = getattr(obj, methodname)(*args)
self.assertIsNot(obj, realresult)
# check that obj.method(*args) raises exc
def checkraises(self, exc, obj, methodname, *args):
obj = self.fixtype(obj)
args = self.fixtype(args)
self.assertRaises(
exc,
getattr(obj, methodname),
*args
)
# call obj.method(*args) without any checks
def checkcall(self, obj, methodname, *args):
obj = self.fixtype(obj)
args = self.fixtype(args)
getattr(obj, methodname)(*args)
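    # --- Illustrative sketch, not part of the original file ---
    # A concrete test case only needs to point type2test at the type under
    # test; the check* helpers then coerce every str argument through
    # fixtype(), e.g. (hypothetical subclass):
    #
    #   class StrTest(BaseTest):
    #       type2test = str
    #
    # so checkequal(3, 'aaa', 'count', 'a') verifies str('aaa').count('a') == 3.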
def test_count(self):
self.checkequal(3, 'aaa', 'count', 'a')
self.checkequal(0, 'aaa', 'count', 'b')
self.checkequal(3, 'aaa', 'count', 'a')
self.checkequal(0, 'aaa', 'count', 'b')
self.checkequal(3, 'aaa', 'count', 'a')
self.checkequal(0, 'aaa', 'count', 'b')
self.checkequal(0, 'aaa', 'count', 'b')
self.checkequal(2, 'aaa', 'count', 'a', 1)
self.checkequal(0, 'aaa', 'count', 'a', 10)
self.checkequal(1, 'aaa', 'count', 'a', -1)
self.checkequal(3, 'aaa', 'count', 'a', -10)
self.checkequal(1, 'aaa', 'count', 'a', 0, 1)
self.checkequal(3, 'aaa', 'count', 'a', 0, 10)
self.checkequal(2, 'aaa', 'count', 'a', 0, -1)
self.checkequal(0, 'aaa', 'count', 'a', 0, -10)
self.checkequal(3, 'aaa', 'count', '', 1)
self.checkequal(1, 'aaa', 'count', '', 3)
self.checkequal(0, 'aaa', 'count', '', 10)
self.checkequal(2, 'aaa', 'count', '', -1)
self.checkequal(4, 'aaa', 'count', '', -10)
self.checkequal(1, '', 'count', '')
self.checkequal(0, '', 'count', '', 1, 1)
self.checkequal(0, '', 'count', '', sys.maxsize, 0)
self.checkequal(0, '', 'count', 'xx')
self.checkequal(0, '', 'count', 'xx', 1, 1)
self.checkequal(0, '', 'count', 'xx', sys.maxsize, 0)
self.checkraises(TypeError, 'hello', 'count')
self.checkraises(TypeError, 'hello', 'count', 42)
# For a variety of combinations,
# verify that str.count() matches an equivalent function
# replacing all occurrences and then differencing the string lengths
charset = ['', 'a', 'b']
digits = 7
base = len(charset)
teststrings = set()
for i in range(base ** digits):
entry = []
for j in range(digits):
i, m = divmod(i, base)
entry.append(charset[m])
teststrings.add(''.join(entry))
teststrings = [self.fixtype(ts) for ts in teststrings]
for i in teststrings:
n = len(i)
for j in teststrings:
r1 = i.count(j)
if j:
r2, rem = divmod(n - len(i.replace(j, self.fixtype(''))),
len(j))
else:
r2, rem = len(i)+1, 0
if rem or r1 != r2:
self.assertEqual(rem, 0, '%s != 0 for %s' % (rem, i))
self.assertEqual(r1, r2, '%s != %s for %s' % (r1, r2, i))
def test_find(self):
self.checkequal(0, 'abcdefghiabc', 'find', 'abc')
self.checkequal(9, 'abcdefghiabc', 'find', 'abc', 1)
self.checkequal(-1, 'abcdefghiabc', 'find', 'def', 4)
self.checkequal(0, 'abc', 'find', '', 0)
self.checkequal(3, 'abc', 'find', '', 3)
self.checkequal(-1, 'abc', 'find', '', 4)
# to check the ability to pass None as defaults
self.checkequal( 2, 'rrarrrrrrrrra', 'find', 'a')
self.checkequal(12, 'rrarrrrrrrrra', 'find', 'a', 4)
self.checkequal(-1, 'rrarrrrrrrrra', 'find', 'a', 4, 6)
self.checkequal(12, 'rrarrrrrrrrra', 'find', 'a', 4, None)
self.checkequal( 2, 'rrarrrrrrrrra', 'find', 'a', None, 6)
self.checkraises(TypeError, 'hello', 'find')
self.checkraises(TypeError, 'hello', 'find', 42)
self.checkequal(0, '', 'find', '')
self.checkequal(-1, '', 'find', '', 1, 1)
self.checkequal(-1, '', 'find', '', sys.maxsize, 0)
self.checkequal(-1, '', 'find', 'xx')
self.checkequal(-1, '', 'find', 'xx', 1, 1)
self.checkequal(-1, '', 'find', 'xx', sys.maxsize, 0)
# issue 7458
self.checkequal(-1, 'ab', 'find', 'xxx', sys.maxsize + 1, 0)
# For a variety of combinations,
# verify that str.find() matches __contains__
# and that the found substring is really at that location
charset = ['', 'a', 'b', 'c']
digits = 5
base = len(charset)
teststrings = set()
for i in range(base ** digits):
entry = []
for j in range(digits):
i, m = divmod(i, base)
entry.append(charset[m])
teststrings.add(''.join(entry))
teststrings = [self.fixtype(ts) for ts in teststrings]
for i in teststrings:
for j in teststrings:
loc = i.find(j)
r1 = (loc != -1)
r2 = j in i
self.assertEqual(r1, r2)
if loc != -1:
self.assertEqual(i[loc:loc+len(j)], j)
def test_rfind(self):
self.checkequal(9, 'abcdefghiabc', 'rfind', 'abc')
self.checkequal(12, 'abcdefghiabc', 'rfind', '')
self.checkequal(0, 'abcdefghiabc', 'rfind', 'abcd')
self.checkequal(-1, 'abcdefghiabc', 'rfind', 'abcz')
self.checkequal(3, 'abc', 'rfind', '', 0)
self.checkequal(3, 'abc', 'rfind', '', 3)
self.checkequal(-1, 'abc', 'rfind', '', 4)
# to check the ability to pass None as defaults
self.checkequal(12, 'rrarrrrrrrrra', 'rfind', 'a')
self.checkequal(12, 'rrarrrrrrrrra', 'rfind', 'a', 4)
self.checkequal(-1, 'rrarrrrrrrrra', 'rfind', 'a', 4, 6)
self.checkequal(12, 'rrarrrrrrrrra', 'rfind', 'a', 4, None)
self.checkequal( 2, 'rrarrrrrrrrra', 'rfind', 'a', None, 6)
self.checkraises(TypeError, 'hello', 'rfind')
self.checkraises(TypeError, 'hello', 'rfind', 42)
# For a variety of combinations,
# verify that str.rfind() matches __contains__
# and that the found substring is really at that location
charset = ['', 'a', 'b', 'c']
digits = 5
base = len(charset)
teststrings = set()
for i in range(base ** digits):
entry = []
for j in range(digits):
i, m = divmod(i, base)
entry.append(charset[m])
teststrings.add(''.join(entry))
teststrings = [self.fixtype(ts) for ts in teststrings]
for i in teststrings:
for j in teststrings:
loc = i.rfind(j)
r1 = (loc != -1)
r2 = j in i
self.assertEqual(r1, r2)
if loc != -1:
self.assertEqual(i[loc:loc+len(j)], j)
# issue 7458
self.checkequal(-1, 'ab', 'rfind', 'xxx', sys.maxsize + 1, 0)
def test_index(self):
self.checkequal(0, 'abcdefghiabc', 'index', '')
self.checkequal(3, 'abcdefghiabc', 'index', 'def')
self.checkequal(0, 'abcdefghiabc', 'index', 'abc')
self.checkequal(9, 'abcdefghiabc', 'index', 'abc', 1)
self.checkraises(ValueError, 'abcdefghiabc', 'index', 'hib')
self.checkraises(ValueError, 'abcdefghiab', 'index', 'abc', 1)
self.checkraises(ValueError, 'abcdefghi', 'index', 'ghi', 8)
self.checkraises(ValueError, 'abcdefghi', 'index', 'ghi', -1)
# to check the ability to pass None as defaults
self.checkequal( 2, 'rrarrrrrrrrra', 'index', 'a')
self.checkequal(12, 'rrarrrrrrrrra', 'index', 'a', 4)
self.checkraises(ValueError, 'rrarrrrrrrrra', 'index', 'a', 4, 6)
self.checkequal(12, 'rrarrrrrrrrra', 'index', 'a', 4, None)
self.checkequal( 2, 'rrarrrrrrrrra', 'index', 'a', None, 6)
self.checkraises(TypeError, 'hello', 'index')
self.checkraises(TypeError, 'hello', 'index', 42)
def test_rindex(self):
self.checkequal(12, 'abcdefghiabc', 'rindex', '')
self.checkequal(3, 'abcdefghiabc', 'rindex', 'def')
self.checkequal(9, 'abcdefghiabc', 'rindex', 'abc')
self.checkequal(0, 'abcdefghiabc', 'rindex', 'abc', 0, -1)
self.checkraises(ValueError, 'abcdefghiabc', 'rindex', 'hib')
self.checkraises(ValueError, 'defghiabc', 'rindex', 'def', 1)
self.checkraises(ValueError, 'defghiabc', 'rindex', 'abc', 0, -1)
self.checkraises(ValueError, 'abcdefghi', 'rindex', 'ghi', 0, 8)
self.checkraises(ValueError, 'abcdefghi', 'rindex', 'ghi', 0, -1)
# to check the ability to pass None as defaults
self.checkequal(12, 'rrarrrrrrrrra', 'rindex', 'a')
self.checkequal(12, 'rrarrrrrrrrra', 'rindex', 'a', 4)
self.checkraises(ValueError, 'rrarrrrrrrrra', 'rindex', 'a', 4, 6)
self.checkequal(12, 'rrarrrrrrrrra', 'rindex', 'a', 4, None)
self.checkequal( 2, 'rrarrrrrrrrra', 'rindex', 'a', None, 6)
self.checkraises(TypeError, 'hello', 'rindex')
self.checkraises(TypeError, 'hello', 'rindex', 42)
def test_lower(self):
self.checkequal('hello', 'HeLLo', 'lower')
self.checkequal('hello', 'hello', 'lower')
self.checkraises(TypeError, 'hello', 'lower', 42)
def test_upper(self):
self.checkequal('HELLO', 'HeLLo', 'upper')
self.checkequal('HELLO', 'HELLO', 'upper')
self.checkraises(TypeError, 'hello', 'upper', 42)
def test_expandtabs(self):
self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs')
self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs', 8)
self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs', 4)
self.checkequal('abc\r\nab def\ng hi', 'abc\r\nab\tdef\ng\thi', 'expandtabs', 4)
self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs')
self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs', 8)
self.checkequal('abc\r\nab\r\ndef\ng\r\nhi', 'abc\r\nab\r\ndef\ng\r\nhi', 'expandtabs', 4)
self.checkequal(' a\n b', ' \ta\n\tb', 'expandtabs', 1)
self.checkraises(TypeError, 'hello', 'expandtabs', 42, 42)
# This test is only valid when sizeof(int) == sizeof(void*) == 4.
if sys.maxsize < (1 << 32) and struct.calcsize('P') == 4:
self.checkraises(OverflowError,
'\ta\n\tb', 'expandtabs', sys.maxsize)
def test_split(self):
# by a char
self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|')
self.checkequal(['a|b|c|d'], 'a|b|c|d', 'split', '|', 0)
self.checkequal(['a', 'b|c|d'], 'a|b|c|d', 'split', '|', 1)
self.checkequal(['a', 'b', 'c|d'], 'a|b|c|d', 'split', '|', 2)
self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|', 3)
self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|', 4)
self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'split', '|',
sys.maxsize-2)
self.checkequal(['a|b|c|d'], 'a|b|c|d', 'split', '|', 0)
self.checkequal(['a', '', 'b||c||d'], 'a||b||c||d', 'split', '|', 2)
self.checkequal(['endcase ', ''], 'endcase |', 'split', '|')
self.checkequal(['', ' startcase'], '| startcase', 'split', '|')
self.checkequal(['', 'bothcase', ''], '|bothcase|', 'split', '|')
self.checkequal(['a', '', 'b\x00c\x00d'], 'a\x00\x00b\x00c\x00d', 'split', '\x00', 2)
self.checkequal(['a']*20, ('a|'*20)[:-1], 'split', '|')
self.checkequal(['a']*15 +['a|a|a|a|a'],
('a|'*20)[:-1], 'split', '|', 15)
# by string
self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//')
self.checkequal(['a', 'b//c//d'], 'a//b//c//d', 'split', '//', 1)
self.checkequal(['a', 'b', 'c//d'], 'a//b//c//d', 'split', '//', 2)
self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//', 3)
self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//', 4)
self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'split', '//',
sys.maxsize-10)
self.checkequal(['a//b//c//d'], 'a//b//c//d', 'split', '//', 0)
self.checkequal(['a', '', 'b////c////d'], 'a////b////c////d', 'split', '//', 2)
self.checkequal(['endcase ', ''], 'endcase test', 'split', 'test')
self.checkequal(['', ' begincase'], 'test begincase', 'split', 'test')
self.checkequal(['', ' bothcase ', ''], 'test bothcase test',
'split', 'test')
self.checkequal(['a', 'bc'], 'abbbc', 'split', 'bb')
self.checkequal(['', ''], 'aaa', 'split', 'aaa')
self.checkequal(['aaa'], 'aaa', 'split', 'aaa', 0)
self.checkequal(['ab', 'ab'], 'abbaab', 'split', 'ba')
self.checkequal(['aaaa'], 'aaaa', 'split', 'aab')
self.checkequal([''], '', 'split', 'aaa')
self.checkequal(['aa'], 'aa', 'split', 'aaa')
self.checkequal(['A', 'bobb'], 'Abbobbbobb', 'split', 'bbobb')
self.checkequal(['A', 'B', ''], 'AbbobbBbbobb', 'split', 'bbobb')
self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'split', 'BLAH')
self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'split', 'BLAH', 19)
self.checkequal(['a']*18 + ['aBLAHa'], ('aBLAH'*20)[:-4],
'split', 'BLAH', 18)
# argument type
self.checkraises(TypeError, 'hello', 'split', 42, 42, 42)
# null case
self.checkraises(ValueError, 'hello', 'split', '')
self.checkraises(ValueError, 'hello', 'split', '', 0)
def test_rsplit(self):
# by a char
self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|')
self.checkequal(['a|b|c', 'd'], 'a|b|c|d', 'rsplit', '|', 1)
self.checkequal(['a|b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|', 2)
self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|', 3)
self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|', 4)
self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|',
sys.maxsize-100)
self.checkequal(['a|b|c|d'], 'a|b|c|d', 'rsplit', '|', 0)
self.checkequal(['a||b||c', '', 'd'], 'a||b||c||d', 'rsplit', '|', 2)
self.checkequal(['', ' begincase'], '| begincase', 'rsplit', '|')
self.checkequal(['endcase ', ''], 'endcase |', 'rsplit', '|')
self.checkequal(['', 'bothcase', ''], '|bothcase|', 'rsplit', '|')
self.checkequal(['a\x00\x00b', 'c', 'd'], 'a\x00\x00b\x00c\x00d', 'rsplit', '\x00', 2)
self.checkequal(['a']*20, ('a|'*20)[:-1], 'rsplit', '|')
self.checkequal(['a|a|a|a|a']+['a']*15,
('a|'*20)[:-1], 'rsplit', '|', 15)
# by string
self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//')
self.checkequal(['a//b//c', 'd'], 'a//b//c//d', 'rsplit', '//', 1)
self.checkequal(['a//b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', 2)
self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', 3)
self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//', 4)
self.checkequal(['a', 'b', 'c', 'd'], 'a//b//c//d', 'rsplit', '//',
sys.maxsize-5)
self.checkequal(['a//b//c//d'], 'a//b//c//d', 'rsplit', '//', 0)
self.checkequal(['a////b////c', '', 'd'], 'a////b////c////d', 'rsplit', '//', 2)
self.checkequal(['', ' begincase'], 'test begincase', 'rsplit', 'test')
self.checkequal(['endcase ', ''], 'endcase test', 'rsplit', 'test')
self.checkequal(['', ' bothcase ', ''], 'test bothcase test',
'rsplit', 'test')
self.checkequal(['ab', 'c'], 'abbbc', 'rsplit', 'bb')
self.checkequal(['', ''], 'aaa', 'rsplit', 'aaa')
self.checkequal(['aaa'], 'aaa', 'rsplit', 'aaa', 0)
self.checkequal(['ab', 'ab'], 'abbaab', 'rsplit', 'ba')
self.checkequal(['aaaa'], 'aaaa', 'rsplit', 'aab')
self.checkequal([''], '', 'rsplit', 'aaa')
self.checkequal(['aa'], 'aa', 'rsplit', 'aaa')
self.checkequal(['bbob', 'A'], 'bbobbbobbA', 'rsplit', 'bbobb')
self.checkequal(['', 'B', 'A'], 'bbobbBbbobbA', 'rsplit', 'bbobb')
self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'rsplit', 'BLAH')
self.checkequal(['a']*20, ('aBLAH'*20)[:-4], 'rsplit', 'BLAH', 19)
self.checkequal(['aBLAHa'] + ['a']*18, ('aBLAH'*20)[:-4],
'rsplit', 'BLAH', 18)
# argument type
self.checkraises(TypeError, 'hello', 'rsplit', 42, 42, 42)
# null case
self.checkraises(ValueError, 'hello', 'rsplit', '')
self.checkraises(ValueError, 'hello', 'rsplit', '', 0)
def test_replace(self):
EQ = self.checkequal
# Operations on the empty string
EQ("", "", "replace", "", "")
EQ("A", "", "replace", "", "A")
EQ("", "", "replace", "A", "")
EQ("", "", "replace", "A", "A")
EQ("", "", "replace", "", "", 100)
EQ("", "", "replace", "", "", sys.maxsize)
# interleave (from=="", 'to' gets inserted everywhere)
EQ("A", "A", "replace", "", "")
EQ("*A*", "A", "replace", "", "*")
EQ("*1A*1", "A", "replace", "", "*1")
EQ("*-#A*-#", "A", "replace", "", "*-#")
EQ("*-A*-A*-", "AA", "replace", "", "*-")
EQ("*-A*-A*-", "AA", "replace", "", "*-", -1)
EQ("*-A*-A*-", "AA", "replace", "", "*-", sys.maxsize)
EQ("*-A*-A*-", "AA", "replace", "", "*-", 4)
EQ("*-A*-A*-", "AA", "replace", "", "*-", 3)
EQ("*-A*-A", "AA", "replace", "", "*-", 2)
EQ("*-AA", "AA", "replace", "", "*-", 1)
EQ("AA", "AA", "replace", "", "*-", 0)
# single character deletion (from=="A", to=="")
EQ("", "A", "replace", "A", "")
EQ("", "AAA", "replace", "A", "")
EQ("", "AAA", "replace", "A", "", -1)
EQ("", "AAA", "replace", "A", "", sys.maxsize)
EQ("", "AAA", "replace", "A", "", 4)
EQ("", "AAA", "replace", "A", "", 3)
EQ("A", "AAA", "replace", "A", "", 2)
EQ("AA", "AAA", "replace", "A", "", 1)
EQ("AAA", "AAA", "replace", "A", "", 0)
EQ("", "AAAAAAAAAA", "replace", "A", "")
EQ("BCD", "ABACADA", "replace", "A", "")
EQ("BCD", "ABACADA", "replace", "A", "", -1)
EQ("BCD", "ABACADA", "replace", "A", "", sys.maxsize)
EQ("BCD", "ABACADA", "replace", "A", "", 5)
EQ("BCD", "ABACADA", "replace", "A", "", 4)
EQ("BCDA", "ABACADA", "replace", "A", "", 3)
EQ("BCADA", "ABACADA", "replace", "A", "", 2)
EQ("BACADA", "ABACADA", "replace", "A", "", 1)
EQ("ABACADA", "ABACADA", "replace", "A", "", 0)
EQ("BCD", "ABCAD", "replace", "A", "")
EQ("BCD", "ABCADAA", "replace", "A", "")
EQ("BCD", "BCD", "replace", "A", "")
EQ("*************", "*************", "replace", "A", "")
EQ("^A^", "^"+"A"*1000+"^", "replace", "A", "", 999)
# substring deletion (from=="the", to=="")
EQ("", "the", "replace", "the", "")
EQ("ater", "theater", "replace", "the", "")
EQ("", "thethe", "replace", "the", "")
EQ("", "thethethethe", "replace", "the", "")
EQ("aaaa", "theatheatheathea", "replace", "the", "")
EQ("that", "that", "replace", "the", "")
EQ("thaet", "thaet", "replace", "the", "")
EQ("here and re", "here and there", "replace", "the", "")
EQ("here and re and re", "here and there and there",
"replace", "the", "", sys.maxsize)
EQ("here and re and re", "here and there and there",
"replace", "the", "", -1)
EQ("here and re and re", "here and there and there",
"replace", "the", "", 3)
EQ("here and re and re", "here and there and there",
"replace", "the", "", 2)
EQ("here and re and there", "here and there and there",
"replace", "the", "", 1)
EQ("here and there and there", "here and there and there",
"replace", "the", "", 0)
EQ("here and re and re", "here and there and there", "replace", "the", "")
EQ("abc", "abc", "replace", "the", "")
EQ("abcdefg", "abcdefg", "replace", "the", "")
# substring deletion (from=="bob", to=="")
EQ("bob", "bbobob", "replace", "bob", "")
EQ("bobXbob", "bbobobXbbobob", "replace", "bob", "")
EQ("aaaaaaa", "aaaaaaabob", "replace", "bob", "")
EQ("aaaaaaa", "aaaaaaa", "replace", "bob", "")
# single character replace in place (len(from)==len(to)==1)
EQ("Who goes there?", "Who goes there?", "replace", "o", "o")
EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O")
EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", sys.maxsize)
EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", -1)
EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", 3)
EQ("WhO gOes there?", "Who goes there?", "replace", "o", "O", 2)
EQ("WhO goes there?", "Who goes there?", "replace", "o", "O", 1)
EQ("Who goes there?", "Who goes there?", "replace", "o", "O", 0)
EQ("Who goes there?", "Who goes there?", "replace", "a", "q")
EQ("who goes there?", "Who goes there?", "replace", "W", "w")
EQ("wwho goes there?ww", "WWho goes there?WW", "replace", "W", "w")
EQ("Who goes there!", "Who goes there?", "replace", "?", "!")
EQ("Who goes there!!", "Who goes there??", "replace", "?", "!")
EQ("Who goes there?", "Who goes there?", "replace", ".", "!")
# substring replace in place (len(from)==len(to) > 1)
EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**")
EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", sys.maxsize)
EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", -1)
EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", 4)
EQ("Th** ** a t**sue", "This is a tissue", "replace", "is", "**", 3)
EQ("Th** ** a tissue", "This is a tissue", "replace", "is", "**", 2)
EQ("Th** is a tissue", "This is a tissue", "replace", "is", "**", 1)
EQ("This is a tissue", "This is a tissue", "replace", "is", "**", 0)
EQ("cobob", "bobob", "replace", "bob", "cob")
EQ("cobobXcobocob", "bobobXbobobob", "replace", "bob", "cob")
EQ("bobob", "bobob", "replace", "bot", "bot")
# replace single character (len(from)==1, len(to)>1)
EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK")
EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK", -1)
EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK", sys.maxsize)
EQ("ReyKKjaviKK", "Reykjavik", "replace", "k", "KK", 2)
EQ("ReyKKjavik", "Reykjavik", "replace", "k", "KK", 1)
EQ("Reykjavik", "Reykjavik", "replace", "k", "KK", 0)
EQ("A----B----C----", "A.B.C.", "replace", ".", "----")
EQ("Reykjavik", "Reykjavik", "replace", "q", "KK")
# replace substring (len(from)>1, len(to)!=len(from))
EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam",
"replace", "spam", "ham")
EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam",
"replace", "spam", "ham", sys.maxsize)
EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam",
"replace", "spam", "ham", -1)
EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam",
"replace", "spam", "ham", 4)
EQ("ham, ham, eggs and ham", "spam, spam, eggs and spam",
"replace", "spam", "ham", 3)
EQ("ham, ham, eggs and spam", "spam, spam, eggs and spam",
"replace", "spam", "ham", 2)
EQ("ham, spam, eggs and spam", "spam, spam, eggs and spam",
"replace", "spam", "ham", 1)
EQ("spam, spam, eggs and spam", "spam, spam, eggs and spam",
"replace", "spam", "ham", 0)
EQ("bobob", "bobobob", "replace", "bobob", "bob")
EQ("bobobXbobob", "bobobobXbobobob", "replace", "bobob", "bob")
EQ("BOBOBOB", "BOBOBOB", "replace", "bob", "bobby")
# XXX Commented out. Is there any reason to support buffer objects
# as arguments for str.replace()? GvR
## ba = bytearray('a')
## bb = bytearray('b')
## EQ("bbc", "abc", "replace", ba, bb)
## EQ("aac", "abc", "replace", bb, ba)
#
self.checkequal('one@two!three!', 'one!two!three!', 'replace', '!', '@', 1)
self.checkequal('onetwothree', 'one!two!three!', 'replace', '!', '')
self.checkequal('one@two@three!', 'one!two!three!', 'replace', '!', '@', 2)
self.checkequal('one@two@three@', 'one!two!three!', 'replace', '!', '@', 3)
self.checkequal('one@two@three@', 'one!two!three!', 'replace', '!', '@', 4)
self.checkequal('one!two!three!', 'one!two!three!', 'replace', '!', '@', 0)
self.checkequal('one@two@three@', 'one!two!three!', 'replace', '!', '@')
self.checkequal('one!two!three!', 'one!two!three!', 'replace', 'x', '@')
self.checkequal('one!two!three!', 'one!two!three!', 'replace', 'x', '@', 2)
self.checkequal('-a-b-c-', 'abc', 'replace', '', '-')
self.checkequal('-a-b-c', 'abc', 'replace', '', '-', 3)
self.checkequal('abc', 'abc', 'replace', '', '-', 0)
self.checkequal('', '', 'replace', '', '')
self.checkequal('abc', 'abc', 'replace', 'ab', '--', 0)
self.checkequal('abc', 'abc', 'replace', 'xy', '--')
# Next three for SF bug 422088: [OSF1 alpha] string.replace(); died with
# MemoryError due to empty result (platform malloc issue when requesting
# 0 bytes).
self.checkequal('', '123', 'replace', '123', '')
self.checkequal('', '123123', 'replace', '123', '')
self.checkequal('x', '123x123', 'replace', '123', '')
self.checkraises(TypeError, 'hello', 'replace')
self.checkraises(TypeError, 'hello', 'replace', 42)
self.checkraises(TypeError, 'hello', 'replace', 42, 'h')
self.checkraises(TypeError, 'hello', 'replace', 'h', 42)
def test_replace_overflow(self):
# Check for overflow checking on 32 bit machines
if sys.maxsize != 2147483647 or struct.calcsize("P") > 4:
return
A2_16 = "A" * (2**16)
self.checkraises(OverflowError, A2_16, "replace", "", A2_16)
self.checkraises(OverflowError, A2_16, "replace", "A", A2_16)
self.checkraises(OverflowError, A2_16, "replace", "AA", A2_16+A2_16)
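# A minimal sketch of the checkequal/checkraises helpers the tests above rely
# on (they are defined earlier in this file, outside this excerpt; the real
# versions also handle argument fix-ups and subclass checks):
#
#     def checkequal(self, result, obj, methodname, *args):
#         realresult = getattr(self.fixtype(obj), methodname)(*args)
#         self.assertEqual(result, realresult)
#
#     def checkraises(self, exc, obj, methodname, *args):
#         with self.assertRaises(exc):
#             getattr(self.fixtype(obj), methodname)(*args)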
class CommonTest(BaseTest):
# This testcase contains tests that can be used in all
# stringlike classes. Currently this is str, unicode,
# UserString and the string module.
def test_hash(self):
# SF bug 1054139: += optimization was not invalidating cached hash value
a = self.type2test('DNSSEC')
b = self.type2test('')
for c in a:
b += c
hash(b)
self.assertEqual(hash(a), hash(b))
def test_capitalize(self):
self.checkequal(' hello ', ' hello ', 'capitalize')
self.checkequal('Hello ', 'Hello ','capitalize')
self.checkequal('Hello ', 'hello ','capitalize')
self.checkequal('Aaaa', 'aaaa', 'capitalize')
self.checkequal('Aaaa', 'AaAa', 'capitalize')
self.checkraises(TypeError, 'hello', 'capitalize', 42)
def test_lower(self):
self.checkequal('hello', 'HeLLo', 'lower')
self.checkequal('hello', 'hello', 'lower')
self.checkraises(TypeError, 'hello', 'lower', 42)
def test_upper(self):
self.checkequal('HELLO', 'HeLLo', 'upper')
self.checkequal('HELLO', 'HELLO', 'upper')
self.checkraises(TypeError, 'hello', 'upper', 42)
def test_expandtabs(self):
self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs')
self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs', 8)
self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs', 4)
self.checkequal('abc\r\nab def\ng hi', 'abc\r\nab\tdef\ng\thi', 'expandtabs', 4)
self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs')
self.checkequal('abc\rab def\ng hi', 'abc\rab\tdef\ng\thi', 'expandtabs', 8)
self.checkequal('abc\r\nab\r\ndef\ng\r\nhi', 'abc\r\nab\r\ndef\ng\r\nhi', 'expandtabs', 4)
self.checkraises(TypeError, 'hello', 'expandtabs', 42, 42)
def test_additional_split(self):
self.checkequal(['this', 'is', 'the', 'split', 'function'],
'this is the split function', 'split')
# by whitespace
self.checkequal(['a', 'b', 'c', 'd'], 'a b c d ', 'split')
self.checkequal(['a', 'b c d'], 'a b c d', 'split', None, 1)
self.checkequal(['a', 'b', 'c d'], 'a b c d', 'split', None, 2)
self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'split', None, 3)
self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'split', None, 4)
self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'split', None,
sys.maxsize-1)
self.checkequal(['a b c d'], 'a b c d', 'split', None, 0)
self.checkequal(['a b c d'], ' a b c d', 'split', None, 0)
self.checkequal(['a', 'b', 'c d'], 'a b c d', 'split', None, 2)
self.checkequal([], ' ', 'split')
self.checkequal(['a'], ' a ', 'split')
self.checkequal(['a', 'b'], ' a b ', 'split')
self.checkequal(['a', 'b '], ' a b ', 'split', None, 1)
self.checkequal(['a', 'b c '], ' a b c ', 'split', None, 1)
self.checkequal(['a', 'b', 'c '], ' a b c ', 'split', None, 2)
self.checkequal(['a', 'b'], '\n\ta \t\r b \v ', 'split')
aaa = ' a '*20
self.checkequal(['a']*20, aaa, 'split')
self.checkequal(['a'] + [aaa[4:]], aaa, 'split', None, 1)
self.checkequal(['a']*19 + ['a '], aaa, 'split', None, 19)
# mixed use of str and unicode
self.checkequal(['a', 'b', 'c d'], 'a b c d', 'split', ' ', 2)
def test_additional_rsplit(self):
self.checkequal(['this', 'is', 'the', 'rsplit', 'function'],
'this is the rsplit function', 'rsplit')
# by whitespace
self.checkequal(['a', 'b', 'c', 'd'], 'a b c d ', 'rsplit')
self.checkequal(['a b c', 'd'], 'a b c d', 'rsplit', None, 1)
self.checkequal(['a b', 'c', 'd'], 'a b c d', 'rsplit', None, 2)
self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit', None, 3)
self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit', None, 4)
self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit', None,
sys.maxsize-20)
self.checkequal(['a b c d'], 'a b c d', 'rsplit', None, 0)
self.checkequal(['a b c d'], 'a b c d ', 'rsplit', None, 0)
self.checkequal(['a b', 'c', 'd'], 'a b c d', 'rsplit', None, 2)
self.checkequal([], ' ', 'rsplit')
self.checkequal(['a'], ' a ', 'rsplit')
self.checkequal(['a', 'b'], ' a b ', 'rsplit')
self.checkequal([' a', 'b'], ' a b ', 'rsplit', None, 1)
self.checkequal([' a b','c'], ' a b c ', 'rsplit',
None, 1)
self.checkequal([' a', 'b', 'c'], ' a b c ', 'rsplit',
None, 2)
self.checkequal(['a', 'b'], '\n\ta \t\r b \v ', 'rsplit', None, 88)
aaa = ' a '*20
self.checkequal(['a']*20, aaa, 'rsplit')
self.checkequal([aaa[:-4]] + ['a'], aaa, 'rsplit', None, 1)
self.checkequal([' a a'] + ['a']*18, aaa, 'rsplit', None, 18)
# mixed use of str and unicode
self.checkequal(['a b', 'c', 'd'], 'a b c d', 'rsplit', ' ', 2)
def test_strip(self):
self.checkequal('hello', ' hello ', 'strip')
self.checkequal('hello ', ' hello ', 'lstrip')
self.checkequal(' hello', ' hello ', 'rstrip')
self.checkequal('hello', 'hello', 'strip')
# strip/lstrip/rstrip with None arg
self.checkequal('hello', ' hello ', 'strip', None)
self.checkequal('hello ', ' hello ', 'lstrip', None)
self.checkequal(' hello', ' hello ', 'rstrip', None)
self.checkequal('hello', 'hello', 'strip', None)
# strip/lstrip/rstrip with str arg
self.checkequal('hello', 'xyzzyhelloxyzzy', 'strip', 'xyz')
self.checkequal('helloxyzzy', 'xyzzyhelloxyzzy', 'lstrip', 'xyz')
self.checkequal('xyzzyhello', 'xyzzyhelloxyzzy', 'rstrip', 'xyz')
self.checkequal('hello', 'hello', 'strip', 'xyz')
self.checkraises(TypeError, 'hello', 'strip', 42, 42)
self.checkraises(TypeError, 'hello', 'lstrip', 42, 42)
self.checkraises(TypeError, 'hello', 'rstrip', 42, 42)
def test_ljust(self):
self.checkequal('abc ', 'abc', 'ljust', 10)
self.checkequal('abc ', 'abc', 'ljust', 6)
self.checkequal('abc', 'abc', 'ljust', 3)
self.checkequal('abc', 'abc', 'ljust', 2)
self.checkequal('abc*******', 'abc', 'ljust', 10, '*')
self.checkraises(TypeError, 'abc', 'ljust')
def test_rjust(self):
self.checkequal(' abc', 'abc', 'rjust', 10)
self.checkequal(' abc', 'abc', 'rjust', 6)
self.checkequal('abc', 'abc', 'rjust', 3)
self.checkequal('abc', 'abc', 'rjust', 2)
self.checkequal('*******abc', 'abc', 'rjust', 10, '*')
self.checkraises(TypeError, 'abc', 'rjust')
def test_center(self):
self.checkequal(' abc ', 'abc', 'center', 10)
self.checkequal(' abc ', 'abc', 'center', 6)
self.checkequal('abc', 'abc', 'center', 3)
self.checkequal('abc', 'abc', 'center', 2)
self.checkequal('***abc****', 'abc', 'center', 10, '*')
self.checkraises(TypeError, 'abc', 'center')
def test_swapcase(self):
self.checkequal('hEllO CoMPuTErS', 'HeLLo cOmpUteRs', 'swapcase')
self.checkraises(TypeError, 'hello', 'swapcase', 42)
def test_zfill(self):
self.checkequal('123', '123', 'zfill', 2)
self.checkequal('123', '123', 'zfill', 3)
self.checkequal('0123', '123', 'zfill', 4)
self.checkequal('+123', '+123', 'zfill', 3)
self.checkequal('+123', '+123', 'zfill', 4)
self.checkequal('+0123', '+123', 'zfill', 5)
self.checkequal('-123', '-123', 'zfill', 3)
self.checkequal('-123', '-123', 'zfill', 4)
self.checkequal('-0123', '-123', 'zfill', 5)
self.checkequal('000', '', 'zfill', 3)
self.checkequal('34', '34', 'zfill', 1)
self.checkequal('0034', '34', 'zfill', 4)
self.checkraises(TypeError, '123', 'zfill')
class MixinStrUnicodeUserStringTest:
# additional tests that only work for
# stringlike objects, i.e. str, unicode, UserString
# (but not the string module)
def test_islower(self):
self.checkequal(False, '', 'islower')
self.checkequal(True, 'a', 'islower')
self.checkequal(False, 'A', 'islower')
self.checkequal(False, '\n', 'islower')
self.checkequal(True, 'abc', 'islower')
self.checkequal(False, 'aBc', 'islower')
self.checkequal(True, 'abc\n', 'islower')
self.checkraises(TypeError, 'abc', 'islower', 42)
def test_isupper(self):
self.checkequal(False, '', 'isupper')
self.checkequal(False, 'a', 'isupper')
self.checkequal(True, 'A', 'isupper')
self.checkequal(False, '\n', 'isupper')
self.checkequal(True, 'ABC', 'isupper')
self.checkequal(False, 'AbC', 'isupper')
self.checkequal(True, 'ABC\n', 'isupper')
self.checkraises(TypeError, 'abc', 'isupper', 42)
def test_istitle(self):
self.checkequal(False, '', 'istitle')
self.checkequal(False, 'a', 'istitle')
self.checkequal(True, 'A', 'istitle')
self.checkequal(False, '\n', 'istitle')
self.checkequal(True, 'A Titlecased Line', 'istitle')
self.checkequal(True, 'A\nTitlecased Line', 'istitle')
self.checkequal(True, 'A Titlecased, Line', 'istitle')
self.checkequal(False, 'Not a capitalized String', 'istitle')
self.checkequal(False, 'Not\ta Titlecase String', 'istitle')
self.checkequal(False, 'Not--a Titlecase String', 'istitle')
self.checkequal(False, 'NOT', 'istitle')
self.checkraises(TypeError, 'abc', 'istitle', 42)
def test_isspace(self):
self.checkequal(False, '', 'isspace')
self.checkequal(False, 'a', 'isspace')
self.checkequal(True, ' ', 'isspace')
self.checkequal(True, '\t', 'isspace')
self.checkequal(True, '\r', 'isspace')
self.checkequal(True, '\n', 'isspace')
self.checkequal(True, ' \t\r\n', 'isspace')
self.checkequal(False, ' \t\r\na', 'isspace')
self.checkraises(TypeError, 'abc', 'isspace', 42)
def test_isalpha(self):
self.checkequal(False, '', 'isalpha')
self.checkequal(True, 'a', 'isalpha')
self.checkequal(True, 'A', 'isalpha')
self.checkequal(False, '\n', 'isalpha')
self.checkequal(True, 'abc', 'isalpha')
self.checkequal(False, 'aBc123', 'isalpha')
self.checkequal(False, 'abc\n', 'isalpha')
self.checkraises(TypeError, 'abc', 'isalpha', 42)
def test_isalnum(self):
self.checkequal(False, '', 'isalnum')
self.checkequal(True, 'a', 'isalnum')
self.checkequal(True, 'A', 'isalnum')
self.checkequal(False, '\n', 'isalnum')
self.checkequal(True, '123abc456', 'isalnum')
self.checkequal(True, 'a1b3c', 'isalnum')
self.checkequal(False, 'aBc000 ', 'isalnum')
self.checkequal(False, 'abc\n', 'isalnum')
self.checkraises(TypeError, 'abc', 'isalnum', 42)
def test_isdigit(self):
self.checkequal(False, '', 'isdigit')
self.checkequal(False, 'a', 'isdigit')
self.checkequal(True, '0', 'isdigit')
self.checkequal(True, '0123456789', 'isdigit')
self.checkequal(False, '0123456789a', 'isdigit')
self.checkraises(TypeError, 'abc', 'isdigit', 42)
def test_title(self):
self.checkequal(' Hello ', ' hello ', 'title')
self.checkequal('Hello ', 'hello ', 'title')
self.checkequal('Hello ', 'Hello ', 'title')
self.checkequal('Format This As Title String', "fOrMaT thIs aS titLe String", 'title')
self.checkequal('Format,This-As*Title;String', "fOrMaT,thIs-aS*titLe;String", 'title', )
self.checkequal('Getint', "getInt", 'title')
self.checkraises(TypeError, 'hello', 'title', 42)
def test_splitlines(self):
self.checkequal(['abc', 'def', '', 'ghi'], "abc\ndef\n\rghi", 'splitlines')
self.checkequal(['abc', 'def', '', 'ghi'], "abc\ndef\n\r\nghi", 'splitlines')
self.checkequal(['abc', 'def', 'ghi'], "abc\ndef\r\nghi", 'splitlines')
self.checkequal(['abc', 'def', 'ghi'], "abc\ndef\r\nghi\n", 'splitlines')
self.checkequal(['abc', 'def', 'ghi', ''], "abc\ndef\r\nghi\n\r", 'splitlines')
self.checkequal(['', 'abc', 'def', 'ghi', ''], "\nabc\ndef\r\nghi\n\r", 'splitlines')
self.checkequal(['\n', 'abc\n', 'def\r\n', 'ghi\n', '\r'], "\nabc\ndef\r\nghi\n\r", 'splitlines', 1)
self.checkraises(TypeError, 'abc', 'splitlines', 42, 42)
def test_startswith(self):
self.checkequal(True, 'hello', 'startswith', 'he')
self.checkequal(True, 'hello', 'startswith', 'hello')
self.checkequal(False, 'hello', 'startswith', 'hello world')
self.checkequal(True, 'hello', 'startswith', '')
self.checkequal(False, 'hello', 'startswith', 'ello')
self.checkequal(True, 'hello', 'startswith', 'ello', 1)
self.checkequal(True, 'hello', 'startswith', 'o', 4)
self.checkequal(False, 'hello', 'startswith', 'o', 5)
self.checkequal(True, 'hello', 'startswith', '', 5)
self.checkequal(False, 'hello', 'startswith', 'lo', 6)
self.checkequal(True, 'helloworld', 'startswith', 'lowo', 3)
self.checkequal(True, 'helloworld', 'startswith', 'lowo', 3, 7)
self.checkequal(False, 'helloworld', 'startswith', 'lowo', 3, 6)
# test negative indices
self.checkequal(True, 'hello', 'startswith', 'he', 0, -1)
self.checkequal(True, 'hello', 'startswith', 'he', -53, -1)
self.checkequal(False, 'hello', 'startswith', 'hello', 0, -1)
self.checkequal(False, 'hello', 'startswith', 'hello world', -1, -10)
self.checkequal(False, 'hello', 'startswith', 'ello', -5)
self.checkequal(True, 'hello', 'startswith', 'ello', -4)
self.checkequal(False, 'hello', 'startswith', 'o', -2)
self.checkequal(True, 'hello', 'startswith', 'o', -1)
self.checkequal(True, 'hello', 'startswith', '', -3, -3)
self.checkequal(False, 'hello', 'startswith', 'lo', -9)
self.checkraises(TypeError, 'hello', 'startswith')
self.checkraises(TypeError, 'hello', 'startswith', 42)
# test tuple arguments
self.checkequal(True, 'hello', 'startswith', ('he', 'ha'))
self.checkequal(False, 'hello', 'startswith', ('lo', 'llo'))
self.checkequal(True, 'hello', 'startswith', ('hellox', 'hello'))
self.checkequal(False, 'hello', 'startswith', ())
self.checkequal(True, 'helloworld', 'startswith', ('hellowo',
'rld', 'lowo'), 3)
self.checkequal(False, 'helloworld', 'startswith', ('hellowo', 'ello',
'rld'), 3)
self.checkequal(True, 'hello', 'startswith', ('lo', 'he'), 0, -1)
self.checkequal(False, 'hello', 'startswith', ('he', 'hel'), 0, 1)
self.checkequal(True, 'hello', 'startswith', ('he', 'hel'), 0, 2)
self.checkraises(TypeError, 'hello', 'startswith', (42,))
def test_endswith(self):
self.checkequal(True, 'hello', 'endswith', 'lo')
self.checkequal(False, 'hello', 'endswith', 'he')
self.checkequal(True, 'hello', 'endswith', '')
self.checkequal(False, 'hello', 'endswith', 'hello world')
self.checkequal(False, 'helloworld', 'endswith', 'worl')
self.checkequal(True, 'helloworld', 'endswith', 'worl', 3, 9)
self.checkequal(True, 'helloworld', 'endswith', 'world', 3, 12)
self.checkequal(True, 'helloworld', 'endswith', 'lowo', 1, 7)
self.checkequal(True, 'helloworld', 'endswith', 'lowo', 2, 7)
self.checkequal(True, 'helloworld', 'endswith', 'lowo', 3, 7)
self.checkequal(False, 'helloworld', 'endswith', 'lowo', 4, 7)
self.checkequal(False, 'helloworld', 'endswith', 'lowo', 3, 8)
self.checkequal(False, 'ab', 'endswith', 'ab', 0, 1)
self.checkequal(False, 'ab', 'endswith', 'ab', 0, 0)
# test negative indices
self.checkequal(True, 'hello', 'endswith', 'lo', -2)
self.checkequal(False, 'hello', 'endswith', 'he', -2)
self.checkequal(True, 'hello', 'endswith', '', -3, -3)
self.checkequal(False, 'hello', 'endswith', 'hello world', -10, -2)
self.checkequal(False, 'helloworld', 'endswith', 'worl', -6)
self.checkequal(True, 'helloworld', 'endswith', 'worl', -5, -1)
self.checkequal(True, 'helloworld', 'endswith', 'worl', -5, 9)
self.checkequal(True, 'helloworld', 'endswith', 'world', -7, 12)
self.checkequal(True, 'helloworld', 'endswith', 'lowo', -99, -3)
self.checkequal(True, 'helloworld', 'endswith', 'lowo', -8, -3)
self.checkequal(True, 'helloworld', 'endswith', 'lowo', -7, -3)
self.checkequal(False, 'helloworld', 'endswith', 'lowo', 3, -4)
self.checkequal(False, 'helloworld', 'endswith', 'lowo', -8, -2)
self.checkraises(TypeError, 'hello', 'endswith')
self.checkraises(TypeError, 'hello', 'endswith', 42)
# test tuple arguments
self.checkequal(False, 'hello', 'endswith', ('he', 'ha'))
self.checkequal(True, 'hello', 'endswith', ('lo', 'llo'))
self.checkequal(True, 'hello', 'endswith', ('hellox', 'hello'))
self.checkequal(False, 'hello', 'endswith', ())
self.checkequal(True, 'helloworld', 'endswith', ('hellowo',
'rld', 'lowo'), 3)
self.checkequal(False, 'helloworld', 'endswith', ('hellowo', 'ello',
'rld'), 3, -1)
self.checkequal(True, 'hello', 'endswith', ('hell', 'ell'), 0, -1)
self.checkequal(False, 'hello', 'endswith', ('he', 'hel'), 0, 1)
self.checkequal(True, 'hello', 'endswith', ('he', 'hell'), 0, 4)
self.checkraises(TypeError, 'hello', 'endswith', (42,))
def test___contains__(self):
self.checkequal(True, '', '__contains__', '')
self.checkequal(True, 'abc', '__contains__', '')
self.checkequal(False, 'abc', '__contains__', '\0')
self.checkequal(True, '\0abc', '__contains__', '\0')
self.checkequal(True, 'abc\0', '__contains__', '\0')
self.checkequal(True, '\0abc', '__contains__', 'a')
self.checkequal(True, 'asdf', '__contains__', 'asdf')
self.checkequal(False, 'asd', '__contains__', 'asdf')
self.checkequal(False, '', '__contains__', 'asdf')
def test_subscript(self):
self.checkequal('a', 'abc', '__getitem__', 0)
self.checkequal('c', 'abc', '__getitem__', -1)
self.checkequal('a', 'abc', '__getitem__', 0)
self.checkequal('abc', 'abc', '__getitem__', slice(0, 3))
self.checkequal('abc', 'abc', '__getitem__', slice(0, 1000))
self.checkequal('a', 'abc', '__getitem__', slice(0, 1))
self.checkequal('', 'abc', '__getitem__', slice(0, 0))
self.checkraises(TypeError, 'abc', '__getitem__', 'def')
def test_slice(self):
self.checkequal('abc', 'abc', '__getitem__', slice(0, 1000))
self.checkequal('abc', 'abc', '__getitem__', slice(0, 3))
self.checkequal('ab', 'abc', '__getitem__', slice(0, 2))
self.checkequal('bc', 'abc', '__getitem__', slice(1, 3))
self.checkequal('b', 'abc', '__getitem__', slice(1, 2))
self.checkequal('', 'abc', '__getitem__', slice(2, 2))
self.checkequal('', 'abc', '__getitem__', slice(1000, 1000))
self.checkequal('', 'abc', '__getitem__', slice(2000, 1000))
self.checkequal('', 'abc', '__getitem__', slice(2, 1))
self.checkraises(TypeError, 'abc', '__getitem__', 'def')
def test_extended_getslice(self):
# Test extended slicing by comparing with list slicing.
s = string.ascii_letters + string.digits
indices = (0, None, 1, 3, 41, -1, -2, -37)
for start in indices:
for stop in indices:
# Skip step 0 (invalid)
for step in indices[1:]:
L = list(s)[start:stop:step]
self.checkequal("".join(L), s, '__getitem__',
slice(start, stop, step))
def test_mul(self):
self.checkequal('', 'abc', '__mul__', -1)
self.checkequal('', 'abc', '__mul__', 0)
self.checkequal('abc', 'abc', '__mul__', 1)
self.checkequal('abcabcabc', 'abc', '__mul__', 3)
self.checkraises(TypeError, 'abc', '__mul__')
self.checkraises(TypeError, 'abc', '__mul__', '')
# XXX: on a 64-bit system, this doesn't raise an overflow error,
# but either raises a MemoryError, or succeeds (if you have 54TiB)
#self.checkraises(OverflowError, 10000*'abc', '__mul__', 2000000000)
def test_join(self):
# join now works with any sequence type
# moved here, because the argument order is
# different in string.join (see the test in
# test.test_string.StringTest.test_join)
self.checkequal('a b c d', ' ', 'join', ['a', 'b', 'c', 'd'])
self.checkequal('abcd', '', 'join', ('a', 'b', 'c', 'd'))
self.checkequal('bd', '', 'join', ('', 'b', '', 'd'))
self.checkequal('ac', '', 'join', ('a', '', 'c', ''))
self.checkequal('w x y z', ' ', 'join', Sequence())
self.checkequal('abc', 'a', 'join', ('abc',))
self.checkequal('z', 'a', 'join', UserList(['z']))
self.checkequal('a.b.c', '.', 'join', ['a', 'b', 'c'])
self.assertRaises(TypeError, '.'.join, ['a', 'b', 3])
for i in [5, 25, 125]:
self.checkequal(((('a' * i) + '-') * i)[:-1], '-', 'join',
['a' * i] * i)
self.checkequal(((('a' * i) + '-') * i)[:-1], '-', 'join',
('a' * i,) * i)
#self.checkequal(str(BadSeq1()), ' ', 'join', BadSeq1())
self.checkequal('a b c', ' ', 'join', BadSeq2())
self.checkraises(TypeError, ' ', 'join')
self.checkraises(TypeError, ' ', 'join', 7)
self.checkraises(TypeError, ' ', 'join', [1, 2, bytes()])
try:
def f():
yield 4 + ""
self.fixtype(' ').join(f())
except TypeError as e:
if '+' not in str(e):
self.fail('join() ate exception message')
else:
self.fail('exception not raised')
def test_formatting(self):
self.checkequal('+hello+', '+%s+', '__mod__', 'hello')
self.checkequal('+10+', '+%d+', '__mod__', 10)
self.checkequal('a', "%c", '__mod__', "a")
self.checkequal('a', "%c", '__mod__', "a")
self.checkequal('"', "%c", '__mod__', 34)
self.checkequal('$', "%c", '__mod__', 36)
self.checkequal('10', "%d", '__mod__', 10)
self.checkequal('\x7f', "%c", '__mod__', 0x7f)
for ordinal in (-100, 0x200000):
# unicode raises ValueError, str raises OverflowError
self.checkraises((ValueError, OverflowError), '%c', '__mod__', ordinal)
longvalue = sys.maxsize + 10
slongvalue = str(longvalue)
self.checkequal(' 42', '%3ld', '__mod__', 42)
self.checkequal('42', '%d', '__mod__', 42.0)
self.checkequal(slongvalue, '%d', '__mod__', longvalue)
self.checkcall('%d', '__mod__', float(longvalue))
self.checkequal('0042.00', '%07.2f', '__mod__', 42)
self.checkequal('0042.00', '%07.2F', '__mod__', 42)
self.checkraises(TypeError, 'abc', '__mod__')
self.checkraises(TypeError, '%(foo)s', '__mod__', 42)
self.checkraises(TypeError, '%s%s', '__mod__', (42,))
self.checkraises(TypeError, '%c', '__mod__', (None,))
self.checkraises(ValueError, '%(foo', '__mod__', {})
self.checkraises(TypeError, '%(foo)s %(bar)s', '__mod__', ('foo', 42))
self.checkraises(TypeError, '%d', '__mod__', "42") # not numeric
self.checkraises(TypeError, '%d', '__mod__', (42+0j)) # no int conversion provided
# argument names with properly nested brackets are supported
self.checkequal('bar', '%((foo))s', '__mod__', {'(foo)': 'bar'})
# 100 is a magic number in PyUnicode_Format, this forces a resize
self.checkequal(103*'a'+'x', '%sx', '__mod__', 103*'a')
self.checkraises(TypeError, '%*s', '__mod__', ('foo', 'bar'))
self.checkraises(TypeError, '%10.*f', '__mod__', ('foo', 42.))
self.checkraises(ValueError, '%10', '__mod__', (42,))
def test_floatformatting(self):
# float formatting
for prec in range(100):
format = '%%.%if' % prec
value = 0.01
for x in range(60):
value = value * 3.14159265359 / 3.0 * 10.0
self.checkcall(format, "__mod__", value)
def test_inplace_rewrites(self):
# Check that strings don't copy and modify cached single-character strings
self.checkequal('a', 'A', 'lower')
self.checkequal(True, 'A', 'isupper')
self.checkequal('A', 'a', 'upper')
self.checkequal(True, 'a', 'islower')
self.checkequal('a', 'A', 'replace', 'A', 'a')
self.checkequal(True, 'A', 'isupper')
self.checkequal('A', 'a', 'capitalize')
self.checkequal(True, 'a', 'islower')
self.checkequal('A', 'a', 'swapcase')
self.checkequal(True, 'a', 'islower')
self.checkequal('A', 'a', 'title')
self.checkequal(True, 'a', 'islower')
def test_partition(self):
self.checkequal(('this is the par', 'ti', 'tion method'),
'this is the partition method', 'partition', 'ti')
# from raymond's original specification
S = 'http://www.python.org'
self.checkequal(('http', '://', 'www.python.org'), S, 'partition', '://')
self.checkequal(('http://www.python.org', '', ''), S, 'partition', '?')
self.checkequal(('', 'http://', 'www.python.org'), S, 'partition', 'http://')
self.checkequal(('http://www.python.', 'org', ''), S, 'partition', 'org')
self.checkraises(ValueError, S, 'partition', '')
self.checkraises(TypeError, S, 'partition', None)
def test_rpartition(self):
self.checkequal(('this is the rparti', 'ti', 'on method'),
'this is the rpartition method', 'rpartition', 'ti')
# from raymond's original specification
S = 'http://www.python.org'
self.checkequal(('http', '://', 'www.python.org'), S, 'rpartition', '://')
self.checkequal(('', '', 'http://www.python.org'), S, 'rpartition', '?')
self.checkequal(('', 'http://', 'www.python.org'), S, 'rpartition', 'http://')
self.checkequal(('http://www.python.', 'org', ''), S, 'rpartition', 'org')
self.checkraises(ValueError, S, 'rpartition', '')
self.checkraises(TypeError, S, 'rpartition', None)
class MixinStrUnicodeTest:
# Additional tests that only work with str and unicode.
def test_bug1001011(self):
# Make sure join returns a NEW object for single item sequences
# involving a subclass.
# Make sure that it is of the appropriate type.
# Check the optimisation still occurs for standard objects.
t = self.type2test
class subclass(t):
pass
s1 = subclass("abcd")
s2 = t().join([s1])
self.assertIsNot(s1, s2)
self.assertIs(type(s2), t)
s1 = t("abcd")
s2 = t().join([s1])
self.assertIs(s1, s2)
# Should also test mixed-type join.
if t is str:
s1 = subclass("abcd")
s2 = "".join([s1])
self.assertIsNot(s1, s2)
self.assertIs(type(s2), t)
s1 = t("abcd")
s2 = "".join([s1])
self.assertIs(s1, s2)
## elif t is str8:
## s1 = subclass("abcd")
## s2 = "".join([s1])
## self.assertIsNot(s1, s2)
## self.assertIs(type(s2), str) # promotes!
## s1 = t("abcd")
## s2 = "".join([s1])
## self.assertIsNot(s1, s2)
## self.assertIs(type(s2), str) # promotes!
else:
self.fail("unexpected type for MixinStrUnicodeTest %r" % t)
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.2/Lib/test/string_tests.py | Python | mit | 58,093 | 0.001635 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# This is a simple example of how to do animations using graph-tool. Here we do a
# simple simulation of an S->I->R->S epidemic model, where each vertex can be in
# one of the following states: Susceptible (S), infected (I), recovered (R). A
# vertex in the S state becomes infected either spontaneously with a probability
# 'x' or because a neighbour is infected. An infected node becomes recovered
# with probability 'r', and a recovered vertex becomes again susceptible with
# probability 's'.
# DISCLAIMER: The following code is definitely not the most efficient approach
# if you want to simulate this dynamics for very large networks, and/or for very
# long times. The main purpose is simply to highlight the animation capabilities
# of graph-tool.
from graph_tool.all import *
from numpy.random import *
import sys, os, os.path
import cairo
seed(42)
seed_rng(42)
# We need some Gtk and gobject functions
from gi.repository import Gtk, Gdk, GdkPixbuf, GObject
# We will use the karate-club network
g = collection.data["karate"]
pos = g.vp["pos"] # layout positions
# We will filter out vertices which are in the "Recovered" state, by masking
# them using a property map.
removed = g.new_vertex_property("bool")
# SIRS dynamics parameters:
x = 0.001 # spontaneous outbreak probability
r = 0.1 # I->R probability
s = 0.01 # R->S probability
# (Note that the S->I transition happens simultaneously for every vertex with a
# probability equal to the fraction of non-recovered neighbours which are
# infected.)
S = 0
I = 1
R = 2
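# For reference, the per-vertex transition rule described in the comments above
# can be sketched as a stand-alone helper (illustrative only -- the simulation
# below works directly on graph-tool property maps and does not call this):
def sirs_step(current_state, neighbour_states):
    # S -> I: spontaneously with probability x, or by copying the infection
    # from one randomly chosen neighbour; I -> R with probability r;
    # R -> S with probability s.
    if current_state == S:
        if random() < x:
            return I
        if len(neighbour_states) > 0:
            w = neighbour_states[randint(0, len(neighbour_states))]
            if w == I:
                return I
        return S
    if current_state == I:
        return R if random() < r else I
    return S if random() < s else R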
# Initialize all vertices to the S state
state = g.new_vertex_property("int")
state.a = S
# Images used to draw the nodes. They need to be loaded as cairo surfaces.
Simg = cairo.ImageSurface.create_from_png("face-grin.png")
Simg_fear = cairo.ImageSurface.create_from_png("face-surprise.png")
Iimg = cairo.ImageSurface.create_from_png("zombie.png")
vertex_sfcs = g.new_vertex_property("object")
for v in g.vertices():
vertex_sfcs[v] = Simg
# Newly infected nodes will be highlighted in red
newly_infected = g.new_vertex_property("bool")
# If True, the frames will be dumped to disk as images.
offscreen = sys.argv[1] == "offscreen" if len(sys.argv) > 1 else False
max_count = 500
if offscreen and not os.path.exists("./frames"):
os.mkdir("./frames")
# This creates a GTK+ window with the initial graph layout
if not offscreen:
win = GraphWindow(g, pos, geometry=(500, 400),
vertex_size=42,
vertex_anchor=0,
edge_color=[0.6, 0.6, 0.6, 1],
edge_sloppy=True,
vertex_surface=vertex_sfcs,
vertex_halo=newly_infected,
vertex_halo_size=1.2,
vertex_halo_color=[0.8, 0, 0, 0.6])
else:
count = 0
win = Gtk.OffscreenWindow()
win.set_default_size(500, 400)
win.graph = GraphWidget(g, pos,
vertex_size=42,
vertex_anchor=0,
edge_sloppy=True,
edge_color=[0.6, 0.6, 0.6, 1],
vertex_surface=vertex_sfcs,
vertex_halo=newly_infected,
vertex_halo_color=[0.8, 0, 0, 0.6])
win.add(win.graph)
# This function will be called repeatedly by the GTK+ main loop, and we use it
# to update the state according to the SIRS dynamics.
def update_state():
newly_infected.a = False
removed.a = False
# visit the nodes in random order
vs = list(g.vertices())
shuffle(vs)
for v in vs:
if state[v] == I:
if random() < r:
state[v] = R
elif state[v] == S:
if random() < x:
state[v] = I
else:
ns = list(v.out_neighbours())
if len(ns) > 0:
w = ns[randint(0, len(ns))] # choose a random neighbour
if state[w] == I:
state[v] = I
newly_infected[v] = True
elif random() < s:
state[v] = S
if state[v] == R:
removed[v] = True
if state[v] == S:
if I in [state[w] for w in v.out_neighbours()]:
vertex_sfcs[v] = Simg_fear
else:
vertex_sfcs[v] = Simg
else:
vertex_sfcs[v] = Iimg
# Filter out the recovered vertices
g.set_vertex_filter(removed, inverted=True)
# The following will force the re-drawing of the graph, and issue a
# re-drawing of the GTK window.
win.graph.regenerate_surface()
win.graph.queue_draw()
# if doing an offscreen animation, dump frame to disk
if offscreen:
global count
pixbuf = win.get_pixbuf()
pixbuf.savev(r'./frames/zombies%06d.png' % count, 'png', [], [])
if count > max_count:
sys.exit(0)
count += 1
# We need to return True so that the main loop will call this function more
# than once.
return True
# Bind the function above as an 'idle' callback.
cid = GObject.idle_add(update_state)
# We will give the user the ability to stop the program by closing the window.
win.connect("delete_event", Gtk.main_quit)
# Actually show the window, and start the main loop.
win.show_all()
Gtk.main()
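# Usage sketch (assuming the PNG assets referenced above sit in the working
# directory):
#
#     python animation_zombies.py            # interactive GTK+ window
#     python animation_zombies.py offscreen  # dump up to max_count frames to ./frames/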
| jim-pansn/graph-tool | doc/demos/animation_zombies.py | Python | gpl-3.0 | 5,436 | 0.001288 |
## <hr>Calculates the tangents and bitangents for the imported meshes.
#
# Does nothing if a mesh does not have normals. You might want this post
# processing step to be executed if you plan to use tangent space calculations
# such as normal mapping applied to the meshes. There's a config setting,
# <tt>#AI_CONFIG_PP_CT_MAX_SMOOTHING_ANGLE<tt>, which allows you to specify
# a maximum smoothing angle for the algorithm. However, usually you'll
# want to leave it at the default value.
#
aiProcess_CalcTangentSpace = 0x1
## <hr>Identifies and joins identical vertex data sets within all
# imported meshes.
#
# After this step is run, each mesh contains unique vertices,
# so a vertex may be used by multiple faces. You usually want
# to use this post processing step. If your application deals with
# indexed geometry, this step is compulsory or you'll just waste rendering
# time. <b>If this flag is not specified<b>, no vertices are referenced by
# more than one face and <b>no index buffer is required<b> for rendering.
#
aiProcess_JoinIdenticalVertices = 0x2
## <hr>Converts all the imported data to a left-handed coordinate space.
#
# By default the data is returned in a right-handed coordinate space (which
# OpenGL prefers). In this space, +X points to the right,
# +Z points towards the viewer, and +Y points upwards. In the DirectX
# coordinate space +X points to the right, +Y points upwards, and +Z points
# away from the viewer.
#
# You'll probably want to consider this flag if you use Direct3D for
# rendering. The #aiProcess_ConvertToLeftHanded flag supersedes this
# setting and bundles all conversions typically required for D3D-based
# applications.
#
aiProcess_MakeLeftHanded = 0x4
## <hr>Triangulates all faces of all meshes.
#
# By default the imported mesh data might contain faces with more than 3
# indices. For rendering you'll usually want all faces to be triangles.
# This post processing step splits up faces with more than 3 indices into
# triangles. Line and point primitives are #not# modified! If you want
# 'triangles only' with no other kinds of primitives, try the following
# solution:
# <ul>
# <li>Specify both #aiProcess_Triangulate and #aiProcess_SortByPType <li>
# <li>Ignore all point and line meshes when you process assimp's output<li>
# <ul>
#
aiProcess_Triangulate = 0x8
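# Illustrative note (not part of the original module): the post-processing
# flags are plain integers, so the Triangulate/SortByPType combination
# recommended above is expressed with a bitwise OR before being handed to the
# importer, e.g.
#
#     flags = aiProcess_Triangulate | aiProcess_SortByPType
#
# (aiProcess_SortByPType is defined further down in this module.)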
## <hr>Removes some parts of the data structure (animations, materials,
# light sources, cameras, textures, vertex components).
#
# The components to be removed are specified in a separate
# configuration option, <tt>#AI_CONFIG_PP_RVC_FLAGS<tt>. This is quite useful
# if you don't need all parts of the output structure. Vertex colors
# are rarely used today for example... Calling this step to remove unneeded
# data from the pipeline as early as possible results in increased
# performance and a more optimized output data structure.
# This step is also useful if you want to force Assimp to recompute
# normals or tangents. The corresponding steps don't recompute them if
# they're already there (loaded from the source asset). By using this
# step you can make sure they are NOT there.
#
# This flag is a poor one, mainly because its purpose is usually
# misunderstood. Consider the following case: a 3D model has been exported
# from a CAD app, and it has per-face vertex colors. Vertex positions can't be
# shared, thus the #aiProcess_JoinIdenticalVertices step fails to
# optimize the data because of these nasty little vertex colors.
# Most apps don't even process them, so it's all for nothing. By using
# this step, unneeded components are excluded as early as possible
# thus opening more room for internal optimizations.
#
aiProcess_RemoveComponent = 0x10
## <hr>Generates normals for all faces of all meshes.
#
# This is ignored if normals are already there at the time this flag
# is evaluated. Model importers try to load them from the source file, so
# they're usually already there. Face normals are shared between all points
# of a single face, so a single point can have multiple normals, which
# forces the library to duplicate vertices in some cases.
# #aiProcess_JoinIdenticalVertices is #senseless# then.
#
# This flag may not be specified together with #aiProcess_GenSmoothNormals.
#
aiProcess_GenNormals = 0x20
## <hr>Generates smooth normals for all vertices in the mesh.
#
# This is ignored if normals are already there at the time this flag
# is evaluated. Model importers try to load them from the source file, so
# they're usually already there.
#
# This flag may not be specified together with
# #aiProcess_GenNormals. There's a configuration option,
# <tt>#AI_CONFIG_PP_GSN_MAX_SMOOTHING_ANGLE<tt> which allows you to specify
# an angle maximum for the normal smoothing algorithm. Normals exceeding
# this limit are not smoothed, resulting in a 'hard' seam between two faces.
# Using a decent angle here (e.g. 80 degrees) results in very good visual
# appearance.
#
aiProcess_GenSmoothNormals = 0x40
## <hr>Splits large meshes into smaller sub-meshes.
#
# This is quite useful for real-time rendering, where the number of triangles
# which can be maximally processed in a single draw-call is limited
# by the video driver/hardware. The maximum vertex buffer is usually limited
# too. Both requirements can be met with this step: you may specify both a
# triangle and vertex limit for a single mesh.
#
# The split limits can (and should!) be set through the
# <tt>#AI_CONFIG_PP_SLM_VERTEX_LIMIT<tt> and <tt>#AI_CONFIG_PP_SLM_TRIANGLE_LIMIT<tt>
# settings. The default values are <tt>#AI_SLM_DEFAULT_MAX_VERTICES<tt> and
# <tt>#AI_SLM_DEFAULT_MAX_TRIANGLES<tt>.
#
# Note that splitting is generally a time-consuming task, but only if there's
# something to split. The use of this step is recommended for most users.
#
aiProcess_SplitLargeMeshes = 0x80
## <hr>Removes the node graph and pre-transforms all vertices with
# the local transformation matrices of their nodes.
#
# The output scene still contains nodes, however there is only a
# root node with children, each one referencing only one mesh,
# and each mesh referencing one material. For rendering, you can
# simply render all meshes in order - you don't need to pay
# attention to local transformations and the node hierarchy.
# Animations are removed during this step.
# This step is intended for applications without a scenegraph.
# The step CAN cause some problems: if e.g. a mesh of the asset
# contains normals and another, using the same material index, does not,
# they will be brought together, but the first meshes's part of
# the normal list is zeroed. However, these artifacts are rare.
# @note The <tt>#AI_CONFIG_PP_PTV_NORMALIZE<tt> configuration property
# can be set to normalize the scene's spatial dimension to the -1...1
# range.
#
aiProcess_PreTransformVertices = 0x100
## <hr>Limits the number of bones simultaneously affecting a single vertex
# to a maximum value.
#
# If any vertex is affected by more than the maximum number of bones, the least
# important vertex weights are removed and the remaining vertex weights are
# renormalized so that the weights still sum up to 1.
# The default bone weight limit is 4 (defined as <tt>#AI_LMW_MAX_WEIGHTS<tt> in
# config.h), but you can use the <tt>#AI_CONFIG_PP_LBW_MAX_WEIGHTS<tt> setting to
# supply your own limit to the post processing step.
#
# If you intend to perform the skinning in hardware, this post processing
# step might be of interest to you.
#
aiProcess_LimitBoneWeights = 0x200
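# Rough sketch (not part of pyassimp) of the renormalisation described above:
# keep the N most important weights and rescale them so they sum to 1.
#
#     def limit_bone_weights(weights, max_weights=4):
#         kept = sorted(weights, reverse=True)[:max_weights]
#         total = sum(kept)
#         return [w / total for w in kept] if total else kept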
## <hr>Validates the imported scene data structure.
# This makes sure that all indices are valid, all animations and
# bones are linked correctly, all material references are correct .. etc.
#
# It is recommended that you capture Assimp's log output if you use this flag,
# so you can easily find out what's wrong if a file fails the
# validation. The validator is quite strict and will find #all#
# inconsistencies in the data structure... It is recommended that plugin
# developers use it to debug their loaders. There are two types of
# validation failures:
# <ul>
# <li>Error: There's something wrong with the imported data. Further
# postprocessing is not possible and the data is not usable at all.
# The import fails. #Importer::GetErrorString() or #aiGetErrorString()
# carry the error message around.<li>
# <li>Warning: There are some minor issues (e.g. 1000000 animation
# keyframes with the same time), but further postprocessing and use
# of the data structure is still safe. Warning details are written
# to the log file, <tt>#AI_SCENE_FLAGS_VALIDATION_WARNING<tt> is set
# in #aiScene::mFlags<li>
# <ul>
#
# This post-processing step is not time-consuming. Its use is not
# compulsory, but recommended.
#
aiProcess_ValidateDataStructure = 0x400
## <hr>Reorders triangles for better vertex cache locality.
#
# The step tries to improve the ACMR (average post-transform vertex cache
# miss ratio) for all meshes. The implementation runs in O(n) and is
# roughly based on the 'tipsify' algorithm (see <a href="
# http:www.cs.princeton.edugfxpubsSander_2007_%3ETRtipsy.pdf">this
# paper<a>).
#
# If you intend to render huge models in hardware, this step might
# be of interest to you. The <tt>#AI_CONFIG_PP_ICL_PTCACHE_SIZE<tt>config
# setting can be used to fine-tune the cache optimization.
#
aiProcess_ImproveCacheLocality = 0x800
## <hr>Searches for redundant/unreferenced materials and removes them.
#
# This is especially useful in combination with the
# #aiProcess_PretransformVertices and #aiProcess_OptimizeMeshes flags.
# Both join small meshes with equal characteristics, but they can't do
# their work if two meshes have different materials. Because several
# material settings are lost during Assimp's import filters,
# (and because many exporters don't check for redundant materials), huge
# models often have materials which are defined several times with
# exactly the same settings.
#
# Several material settings not contributing to the final appearance of
# a surface are ignored in all comparisons (e.g. the material name).
# So, if you're passing additional information through the
# content pipeline (probably using #magic# material names), don't
# specify this flag. Alternatively take a look at the
# <tt>#AI_CONFIG_PP_RRM_EXCLUDE_LIST<tt> setting.
#
aiProcess_RemoveRedundantMaterials = 0x1000
## <hr>This step tries to determine which meshes have normal vectors
# that are facing inwards and inverts them.
#
# The algorithm is simple but effective:
# the bounding box of all vertices + their normals is compared against
# the volume of the bounding box of all vertices without their normals.
# This works well for most objects, problems might occur with planar
# surfaces. However, the step tries to filter such cases.
# The step inverts all in-facing normals. Generally it is recommended
# to enable this step, although the result is not always correct.
#
aiProcess_FixInfacingNormals = 0x2000
## <hr>This step splits meshes with more than one primitive type in
# homogeneous sub-meshes.
#
# The step is executed after the triangulation step. After the step
# returns, just one bit is set in aiMesh::mPrimitiveTypes. This is
# especially useful for real-time rendering where point and line
# primitives are often ignored or rendered separately.
# You can use the <tt>#AI_CONFIG_PP_SBP_REMOVE<tt> option to specify which
# primitive types you need. This can be used to easily exclude
# lines and points, which are rarely used, from the import.
#
aiProcess_SortByPType = 0x8000
## <hr>This step searches all meshes for degenerate primitives and
# converts them to proper lines or points.
#
# A face is 'degenerate' if one or more of its points are identical.
# To have the degenerate stuff not only detected and collapsed but
# removed, try one of the following procedures:
# <br><b>1.<b> (if you support lines and points for rendering but don't
# want the degenerates)<br>
# <ul>
# <li>Specify the #aiProcess_FindDegenerates flag.
# <li>
# <li>Set the <tt>AI_CONFIG_PP_FD_REMOVE<tt> option to 1. This will
# cause the step to remove degenerate triangles from the import
# as soon as they're detected. They won't pass any further
# pipeline steps.
# <li>
# <ul>
# <br><b>2.<b>(if you don't support lines and points at all)<br>
# <ul>
# <li>Specify the #aiProcess_FindDegenerates flag.
# <li>
# <li>Specify the #aiProcess_SortByPType flag. This moves line and
# point primitives to separate meshes.
# <li>
# <li>Set the <tt>AI_CONFIG_PP_SBP_REMOVE<tt> option to
# @code aiPrimitiveType_POINTS | aiPrimitiveType_LINES
# @endcode to cause SortByPType to reject point
# and line meshes from the scene.
# <li>
# <ul>
# @note Degenerate polygons are not necessarily evil and that's why
# they're not removed by default. There are several file formats which
# don't support lines or points, and some exporters bypass the
# format specification and write them as degenerate triangles instead.
#
aiProcess_FindDegenerates = 0x10000
## <hr>This step searches all meshes for invalid data, such as zeroed
# normal vectors or invalid UV coords and removes/fixes them. This is
# intended to get rid of some common exporter errors.
#
# This is especially useful for normals. If they are invalid, and
# the step recognizes this, they will be removed and can later
# be recomputed, i.e. by the #aiProcess_GenSmoothNormals flag.<br>
# The step will also remove meshes that are infinitely small and reduce
# animation tracks consisting of hundreds if redundant keys to a single
# key. The <tt>AI_CONFIG_PP_FID_ANIM_ACCURACY<tt> config property decides
# the accuracy of the check for duplicate animation tracks.
#
aiProcess_FindInvalidData = 0x20000
## <hr>This step converts non-UV mappings (such as spherical or
# cylindrical mapping) to proper texture coordinate channels.
#
# Most applications will support UV mapping only, so you will
# probably want to specify this step in every case. Note that Assimp is not
# always able to match the original mapping implementation of the
# 3D app which produced a model perfectly. It's always better to let the
# modelling app compute the UV channels - 3ds max, Maya, Blender,
# LightWave, and Modo do this for example.
#
# @note If this step is not requested, you'll need to process the
# <tt>#AI_MATKEY_MAPPING<tt> material property in order to display all assets
# properly.
#
aiProcess_GenUVCoords = 0x40000
## <hr>This step applies per-texture UV transformations and bakes
# them into stand-alone texture coordinate channels.
#
# UV transformations are specified per-texture - see the
# <tt>#AI_MATKEY_UVTRANSFORM<tt> material key for more information.
# This step processes all textures with
# transformed input UV coordinates and generates a new (pre-transformed) UV channel
# which replaces the old channel. Most applications won't support UV
# transformations, so you will probably want to specify this step.
#
# @note UV transformations are usually implemented in real-time apps by
# transforming texture coordinates at vertex shader stage with a 3x3
# (homogeneous) transformation matrix.
#
aiProcess_TransformUVCoords = 0x80000
## <hr>This step searches for duplicate meshes and replaces them
# with references to the first mesh.
#
# This step takes a while, so don't use it if speed is a concern.
# Its main purpose is to workaround the fact that many export
# file formats don't support instanced meshes, so exporters need to
# duplicate meshes. This step removes the duplicates again. Please
# note that Assimp does not currently support per-node material
# assignment to meshes, which means that identical meshes with
# different materials are currently #not# joined, although this is
# planned for future versions.
#
aiProcess_FindInstances = 0x100000
## <hr>A postprocessing step to reduce the number of meshes.
#
# This will, in fact, reduce the number of draw calls.
#
# This is a very effective optimization and is recommended to be used
# together with #aiProcess_OptimizeGraph, if possible. The flag is fully
# compatible with both #aiProcess_SplitLargeMeshes and #aiProcess_SortByPType.
#
aiProcess_OptimizeMeshes = 0x200000
## <hr>A postprocessing step to optimize the scene hierarchy.
#
# Nodes without animations, bones, lights or cameras assigned are
# collapsed and joined.
#
# Node names can be lost during this step. If you use special 'tag nodes'
# to pass additional information through your content pipeline, use the
# <tt>#AI_CONFIG_PP_OG_EXCLUDE_LIST</tt> setting to specify a list of node
# names you want to be kept. Nodes matching one of the names in this list won't
# be touched or modified.
#
# Use this flag with caution. Most simple files will be collapsed to a
# single node, so complex hierarchies are usually completely lost. This is not
# useful for editor environments, but probably a very effective
# optimization if you just want to get the model data, convert it to your
# own format, and render it as fast as possible.
#
# This flag is designed to be used with #aiProcess_OptimizeMeshes for best
# results.
#
# @note 'Crappy' scenes with thousands of extremely small meshes packed
# in deeply nested nodes exist for almost all file formats.
# #aiProcess_OptimizeMeshes in combination with #aiProcess_OptimizeGraph
# usually fixes them all and makes them renderable.
#
aiProcess_OptimizeGraph = 0x400000
## <hr>This step flips all UV coordinates along the y-axis and adjusts
# material settings and bitangents accordingly.
#
# <b>Output UV coordinate system:</b>
# @code
# 0y|0y ---------- 1x|0y
# | |
# | |
# | |
# 0x|1y ---------- 1x|1y
# @endcode
#
# You'll probably want to consider this flag if you use Direct3D for
# rendering. The #aiProcess_ConvertToLeftHanded flag supersedes this
# setting and bundles all conversions typically required for D3D-based
# applications.
#
aiProcess_FlipUVs = 0x800000
## <hr>This step adjusts the output face winding order to be CW.
#
# The default face winding order is counter clockwise (CCW).
#
# <b>Output face order:</b>
# @code
# x2
#
# x0
# x1
# @endcode
#
aiProcess_FlipWindingOrder = 0x1000000
## <hr>This step splits meshes with many bones into sub-meshes so that each
# sub-mesh has fewer or as many bones as a given limit.
#
aiProcess_SplitByBoneCount = 0x2000000
## <hr>This step removes bones losslessly or according to some threshold.
#
# In some cases (i.e. formats that require it) exporters are forced to
# assign dummy bone weights to otherwise static meshes assigned to
# animated meshes. Full, weight-based skinning is expensive while
# animating nodes is extremely cheap, so this step is offered to clean up
# the data in that regard.
#
# Use <tt>#AI_CONFIG_PP_DB_THRESHOLD</tt> to control this.
# Use <tt>#AI_CONFIG_PP_DB_ALL_OR_NONE</tt> if you want bones removed if and
# only if all bones within the scene qualify for removal.
#
aiProcess_Debone = 0x4000000
aiProcess_GenEntityMeshes = 0x100000
aiProcess_OptimizeAnimations = 0x200000
aiProcess_FixTexturePaths = 0x200000
## @def aiProcess_ConvertToLeftHanded
# @brief Shortcut flag for Direct3D-based applications.
#
# Supersedes the #aiProcess_MakeLeftHanded and #aiProcess_FlipUVs and
# #aiProcess_FlipWindingOrder flags.
# The output data matches Direct3D's conventions: left-handed geometry, upper-left
# origin for UV coordinates and finally clockwise face order, suitable for CCW culling.
#
# @deprecated
#
aiProcess_ConvertToLeftHanded = ( \
aiProcess_MakeLeftHanded | \
aiProcess_FlipUVs | \
aiProcess_FlipWindingOrder | \
0 )
## @def aiProcessPreset_TargetRealtimeUse_Fast
# @brief Default postprocess configuration optimizing the data for real-time rendering.
#
# Applications would want to use this preset to load models on end-user PCs,
# maybe for direct use in game.
#
# If you're using DirectX, don't forget to combine this value with
# the #aiProcess_ConvertToLeftHanded step. If you don't support UV transformations
# in your application apply the #aiProcess_TransformUVCoords step, too.
# @note Please take the time to read the docs for the steps enabled by this preset.
# Some of them offer further configurable properties, while some of them might not be of
# use for you so it might be better to not specify them.
#
aiProcessPreset_TargetRealtime_Fast = ( \
aiProcess_CalcTangentSpace | \
aiProcess_GenNormals | \
aiProcess_JoinIdenticalVertices | \
aiProcess_Triangulate | \
aiProcess_GenUVCoords | \
aiProcess_SortByPType | \
0 )
## @def aiProcessPreset_TargetRealtime_Quality
# @brief Default postprocess configuration optimizing the data for real-time rendering.
#
# Unlike #aiProcessPreset_TargetRealtime_Fast, this configuration
# performs some extra optimizations to improve rendering speed and
# to minimize memory usage. It could be a good choice for a level editor
# environment where import speed is not so important.
#
# If you're using DirectX, don't forget to combine this value with
# the #aiProcess_ConvertToLeftHanded step. If you don't support UV transformations
# in your application apply the #aiProcess_TransformUVCoords step, too.
# @note Please take the time to read the docs for the steps enabled by this preset.
# Some of them offer further configurable properties, while some of them might not be
# of use for you so it might be better to not specify them.
#
aiProcessPreset_TargetRealtime_Quality = ( \
aiProcess_CalcTangentSpace | \
aiProcess_GenSmoothNormals | \
aiProcess_JoinIdenticalVertices | \
aiProcess_ImproveCacheLocality | \
aiProcess_LimitBoneWeights | \
aiProcess_RemoveRedundantMaterials | \
aiProcess_SplitLargeMeshes | \
aiProcess_Triangulate | \
aiProcess_GenUVCoords | \
aiProcess_SortByPType | \
aiProcess_FindDegenerates | \
aiProcess_FindInvalidData | \
0 )
## @def aiProcessPreset_TargetRealtime_MaxQuality
# @brief Default postprocess configuration optimizing the data for real-time rendering.
#
# This preset enables almost every optimization step to achieve perfectly
# optimized data. It's your choice for level editor environments where import speed
# is not important.
#
# If you're using DirectX, don't forget to combine this value with
# the #aiProcess_ConvertToLeftHanded step. If you don't support UV transformations
# in your application, apply the #aiProcess_TransformUVCoords step, too.
# @note Please take the time to read the docs for the steps enabled by this preset.
# Some of them offer further configurable properties, while some of them might not be
# of use for you so it might be better to not specify them.
#
aiProcessPreset_TargetRealtime_MaxQuality = ( \
aiProcessPreset_TargetRealtime_Quality | \
aiProcess_FindInstances | \
aiProcess_ValidateDataStructure | \
aiProcess_OptimizeMeshes | \
0 )
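# A minimal usage sketch (not part of the original module; it assumes the
# regular pyassimp loader API with its optional 'processing' argument):
#
#   from pyassimp import load, release
#   from pyassimp.postprocess import aiProcessPreset_TargetRealtime_Quality
#
#   scene = load("model.obj", processing=aiProcessPreset_TargetRealtime_Quality)
#   # ... work with scene.meshes, scene.materials, scene.rootnode ...
#   release(scene)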
|
ivansoban/ILEngine
|
thirdparty/assimp/port/PyAssimp/pyassimp/postprocess.py
|
Python
|
mit
| 23,509 | 0.012676 |
import re
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, FormRequest, HtmlResponse
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc
from scrapy.http import FormRequest
from productloader import load_product
class CartridgeSave(BaseSpider):
name = 'cartridgesave.co.uk'
allowed_domains = ['cartridgesave.co.uk', 'www.cartridgesave.co.uk']
start_urls = ('http://www.cartridgesave.co.uk',)
def __init__(self, *args, **kwargs):
super(CartridgeSave, self).__init__(*args, **kwargs)
self.URL_BASE = 'http://www.cartridgesave.co.uk'
self.product_name_re = re.compile('.*/(.*?)\.html')
def parse_product(self, response):
if not isinstance(response, HtmlResponse):
return
hxs = HtmlXPathSelector(response)
res = {}
try:
# name = hxs.select('//div[@id="specification"]/ul/li[position()=1]').re('.* \((.*)\)')[0]
url = response.url
name = self.product_name_re.search(url).groups()[0]
price = hxs.select('.//span[@class="ex_vat_price"]/text()').re('\xa3(.*)')[0]
res['url'] = url
res['description'] = name
res['price'] = price
res['sku'] = res['description']
yield load_product(res, response)
except IndexError:
return
def parse(self, response):
if not isinstance(response, HtmlResponse):
return
#categories
hxs = HtmlXPathSelector(response)
# printer brands
printers_brands = hxs.select('//div[@id="manufacturers"]//li/a/@href').extract()
for url in printers_brands:
url = urljoin_rfc(self.URL_BASE, url)
yield Request(url)
# printer list
printers_list = hxs.select('//ul[@class="printer_list"]//li/a/@href').extract()
for url in printers_list:
url = urljoin_rfc(self.URL_BASE, url)
yield Request(url)
# next page
# next_page =
# if next_page:
# url = urljoin_rfc(URL_BASE, next_page[0])
# yield Request(url)
# products
products = hxs.select('//div[@class="group_products"]//li/a[not(@class="lowest_price info")]/@href').extract()
for product in products:
product = urljoin_rfc(self.URL_BASE, product)
yield Request(product, callback=self.parse_product)
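# Note (an assumption about the surrounding Scrapy project, not part of the
# original file): the spider would normally be launched from the project root
# via the standard Scrapy CLI, e.g.
#
#   scrapy crawl cartridgesave.co.uk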
|
0--key/lib
|
portfolio/Python/scrapy/inkshop/cartridgesavecouk.py
|
Python
|
apache-2.0
| 2,521 | 0.004363 |
#!/usr/bin/python3
"""
A script to get the public ip address from http://checkip.dyndns.org
"""
import urllib.error
import urllib.request
import re
import time
def contact_server():
"""
Try to get public ip address
"""
ipv4_address_pattern=re.compile(r'[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
ip_addresses=[]
try:
try:
with urllib.request.urlopen("http://checkip.dyndns.org") as urlobject:
ip_addresses=[addr for addr in ipv4_address_pattern.findall(str(urlobject.read()))]
return ip_addresses
except ConnectionResetError:
return None
except urllib.error.URLError:
return None
def get_ip():
"""
Keep running in a loop until an ip_address is obtained
"""
result=None
while not result:
time.sleep(0.1)
result=contact_server()
return result[0]
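# A minimal usage sketch (not part of the original script): when run directly,
# the module would simply print the detected public address.
if __name__ == "__main__":
    print(get_ip())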
|
Bolt64/my_code
|
twitter_bot/get_ip.py
|
Python
|
mit
| 881 | 0.010216 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2019 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
from __future__ import absolute_import
from builtins import range
from .swig_test_utils import *
from fife.extensions.serializers.xmlanimation import loadXMLAnimation
class ActionTests(unittest.TestCase):
def setUp(self):
template = 'tests/data/wolf_walk/wolf_walk_%s.xml'
dirnames = ['e', 'ne', 'n', 'nw', 'w', 'sw', 's', 'se']
files = [template % dirname for dirname in dirnames]
self.engine = getEngine()
self.map = self.engine.getModel().createMap("map001")
self.grid = self.engine.getModel().getCellGrid("square")
self.layer = self.map.createLayer("Layer001", self.grid)
self.layer.setWalkable(True)
self.layer.createCellCache()
self.target = fife.Location(self.layer)
self.obj = fife.Object("object001", 'plaa')
fife.ObjectVisual.create(self.obj)
self.pather = fife.RoutePather()
self.obj.setPather(self.pather)
self.action = self.obj.createAction('walk')
fife.ActionVisual.create(self.action)
for index, direction in enumerate(dirnames):
degree = 45 * index
self.action.get2dGfxVisual().addAnimation(degree, loadXMLAnimation(self.engine, files[index]))
self.ground = fife.Object("ground", 'plaa')
image = self.engine.getImageManager().load('tests/data/earth_1.png')
fife.ObjectVisual.create(self.ground)
self.ground.get2dGfxVisual().addStaticImage(0, image.getHandle())
self.ground.img = self.engine.getImageManager().get(image.getHandle())
for y in range(-2,3):
for x in range(-2,3):
inst = self.layer.createInstance(self.ground, fife.ModelCoordinate(x,y))
fife.InstanceVisual.create(inst)
self.inst = self.layer.createInstance(self.obj, fife.ModelCoordinate(-2,-2))
fife.InstanceVisual.create(self.inst)
def tearDown(self):
self.engine.destroy()
def _testWalkingAction(self):
self.inst.move('walk', self.target, 0.05)
self.engine.initializePumping()
backend = self.engine.renderBackend
for i in range(360):
self.inst.getLocation().getLayerCoordinates()
self.target.getLayerCoordinates()
if self.inst.getLocation().getLayerCoordinates() == self.target.getLayerCoordinates():
break
self.inst.update()
action = self.inst.currentAction
angle = 0 #self.inst.orientation
animation = action.getAnimationByAngle(angle)
timestamp = self.inst.actionRuntime % animation.duration
image = animation.getFrameByTimestamp( timestamp )
if image:
image.render(fife.Rect(0,0,image.width,image.height),255)
self.engine.pump()
self.engine.finalizePumping()
def testWalkAround(self):
rb = self.engine.getRenderBackend()
viewport = fife.Rect(0, 0, rb.getWidth(), rb.getHeight())
cam = self.map.addCamera("foo", viewport)
cam.setCellImageDimensions(self.ground.img.getWidth(), self.ground.img.getHeight())
cam.setRotation(45)
cam.setTilt(40)
renderer = fife.InstanceRenderer.getInstance(cam)
renderer.activateAllLayers(self.map)
self.engine.initializePumping()
self.target.setLayerCoordinates(fife.ModelCoordinate(2,-2))
self.inst.move('walk', self.target, 0.9)
targets = (
(2,0), (2,-1), (2,-2), (1,-2),
(0,-2), (-1,-2), (-2,-2), (-2,-1),
(-2,0), (-2,1), (-2,2), (-1,2),
(0,2), (1,2), (2,2), (2,1))
for target in targets:
l = self.inst.getLocation()
l.setLayerCoordinates(fife.ModelCoordinate(0,0))
self.inst.setLocation(l)
self.target.setLayerCoordinates(fife.ModelCoordinate(*target))
self.inst.move('walk', self.target, 0.9)
for i in range(10):
self.engine.pump()
self.engine.finalizePumping()
self.map.removeCamera("foo")
TEST_CLASSES = [ActionTests]
if __name__ == '__main__':
unittest.main()
|
fifengine/fifengine
|
tests/swig_tests/action_tests.py
|
Python
|
lgpl-2.1
| 4,655 | 0.02986 |
# on success, nothing is printed
import simplexorrequestor
# I'm keeping some of these datastructures tiny in order to make the output
# more readable if an error is discovered
mirrorinfolist = [{'name':'mirror1'}, {'name':'mirror2'}, {'name':'mirror3'}, {'name':'mirror4'}, {'name':'mirror5'}]
blocklist = [12,34]
# this would usually be richer, but I only need these fields
manifestdict = {'blockcount':64, 'hashalgorithm':'noop',
'blockhashlist':['']*64}
rxgobj = simplexorrequestor.RandomXORRequestor(mirrorinfolist, blocklist, manifestdict, 2)
request1 = rxgobj.get_next_xorrequest()
request2 = rxgobj.get_next_xorrequest()
# success!
rxgobj.notify_success(request1,'a')
request3 = rxgobj.get_next_xorrequest()
# so request1 and request3 should be for the same mirror...
assert(request1[0] == request3[0])
# failure..
rxgobj.notify_failure(request2)
request4 = rxgobj.get_next_xorrequest()
assert(request2[0] != request4[0])
# so request2 and request4 should be for different mirrors...
# success!
rxgobj.notify_success(request3,'b')
# we're out of blocks to request from the first mirror...
rxgobj.notify_success(request4,chr(2))
# we're out of blocks to request from the first mirror...
request5 = rxgobj.get_next_xorrequest()
assert(request5[0] != request3[0])
assert(request5[0] == request4[0])
# we should have requested from the same mirror we tried before
rxgobj.notify_success(request5,chr(4))
# this should be ()
request6 = rxgobj.get_next_xorrequest()
assert(request6 == ())
# okay, now it's time to see if we get the right answer... 'a' ^ chr(2) == 'c'
answer1 = rxgobj.return_block(12)
# 'b' ^ chr(4) == 'f'
answer2 = rxgobj.return_block(34)
assert(answer1 == 'c')
assert(answer2 == 'f')
# Now let's try this where we chew through all of the mirrors to ensure we get
# the right exception
mirrorinfolist = [{'name':'mirror1'}, {'name':'mirror2'}, {'name':'mirror3'}]
rxgobj = simplexorrequestor.RandomXORRequestor(mirrorinfolist, blocklist, manifestdict, 2)
request1 = rxgobj.get_next_xorrequest()
request2 = rxgobj.get_next_xorrequest()
rxgobj.notify_failure(request1)
try:
rxgobj.notify_failure(request2)
except simplexorrequestor.InsufficientMirrors:
pass
else:
print "Should be notified of insufficient mirrors!"
|
scotfu/uppir
|
test_simplexorrequestor.py
|
Python
|
mit
| 2,272 | 0.011884 |
import datetime
from django.apps import apps
from django.core.mail import EmailMessage, EmailMultiAlternatives
import olympia.core.logger
from olympia import amo
from olympia.activity.models import ActivityLog
from olympia.amo.celery import task
from olympia.amo.utils import get_email_backend
from olympia.bandwagon.models import Collection
log = olympia.core.logger.getLogger('z.task')
@task
def send_email(recipient, subject, message, from_email=None,
html_message=None, attachments=None, real_email=False,
cc=None, headers=None, max_retries=3, reply_to=None,
**kwargs):
backend = EmailMultiAlternatives if html_message else EmailMessage
connection = get_email_backend(real_email)
result = backend(subject, message, from_email, to=recipient, cc=cc,
connection=connection, headers=headers,
attachments=attachments, reply_to=reply_to)
if html_message:
result.attach_alternative(html_message, 'text/html')
try:
result.send()
return True
except Exception as e:
log.exception('send_mail() failed with error: %s, retrying' % e)
return send_email.retry(exc=e, max_retries=max_retries)
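# For illustration only (hypothetical values, not in the original module): as a
# Celery task, send_email would typically be queued asynchronously, e.g.
#   send_email.delay(['user@example.com'], 'Subject line', 'Body text')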
@task
def set_modified_on_object(app_label, model_name, pk, **kw):
"""Sets modified on one object at a time."""
model = apps.get_model(app_label, model_name)
obj = model.objects.get(pk=pk)
try:
log.info('Setting modified on object: %s, %s' % (model_name, pk))
obj.update(modified=datetime.datetime.now(), **kw)
except Exception as e:
log.error('Failed to set modified on: %s, %s - %s' %
(model_name, pk, e))
@task
def delete_logs(items, **kw):
log.info('[%s@%s] Deleting logs' % (len(items), delete_logs.rate_limit))
ActivityLog.objects.filter(pk__in=items).exclude(
action__in=amo.LOG_KEEP).delete()
@task
def delete_anonymous_collections(items, **kw):
log.info('[%s@%s] Deleting anonymous collections' %
(len(items), delete_anonymous_collections.rate_limit))
Collection.objects.filter(type=amo.COLLECTION_ANONYMOUS,
pk__in=items).delete()
|
lavish205/olympia
|
src/olympia/amo/tasks.py
|
Python
|
bsd-3-clause
| 2,217 | 0 |
from distutils.core import setup
setup(
name='MetCalcs',
version='0.1.1',
author='Peter D. Willetts',
author_email='peterwilletts24@gmail.com',
packages=['metcalcs',],
#scripts=[],
license='LICENSE.txt',
description='Some functions for calculating met parameters from sounding variables, and from parcel ascents',
long_description=open('README.txt').read(),
install_requires=[
"numpy",
"scipy",]
)
|
peterwilletts24/MetCalcs
|
setup.py
|
Python
|
gpl-3.0
| 459 | 0.010893 |
'''
Mepinta
Copyright (c) 2011-2012, Joaquin G. Duo, mepinta@joaquinduo.com.ar
This file is part of Mepinta under GPL 3
'''
import unittest
from mepinta.pipelineview.actiontree.data_model import ActionTree
from mepinta.pipelineview.actiontree.ActionTreeManager import ActionTreeManager
from pprint import pprint, pformat
from mepinta.context.MepintaContext import MepintaContext
from pipeline_backend.logging.logging import LOG_DEBUG
class Test(unittest.TestCase):
def print_verify(self, answers, tree):
print '='*8
print tree
count = max(answers.keys()) if answers else 1
answers[count + 1] = tree.buildTree(string=True)
def test_adding_actions(self):
import plugins.python.processors.actiontree.UndoableGraph.generator.EmptyGraph as EmptyGraph
answers = {}
tree = ActionTree()
mngr = ActionTreeManager()
mngr.addAction(tree, EmptyGraph)
print tree.actions_graph.pline.getTopology()
def test_tree(self):
answers = {}
tree = ActionTree()
mngr = ActionTreeManager()
self.print_verify(answers, tree)
mngr.undoAction(tree)
self.print_verify(answers, tree)
mngr.redoAction(tree)
self.print_verify(answers, tree)
mngr.addAction(tree, 'actiontree.UndoableGraph.generator.EmptyGraph')
self.print_verify(answers, tree)
mngr.addAction(tree, 'actiontree.UndoableGraph.generator.EmptyGraph')
self.print_verify(answers, tree)
mngr.addAction(tree, 'actiontree.UndoableGraph.generator.EmptyGraph')
act3 = tree.current_action
self.print_verify(answers, tree)
mngr.undoAction(tree)
self.print_verify(answers, tree)
mngr.redoAction(tree)
self.print_verify(answers, tree)
mngr.undoAction(tree)
self.print_verify(answers, tree)
mngr.addAction(tree, 'actiontree.UndoableGraph.generator.EmptyGraph')
act4 = tree.current_action
self.print_verify(answers, tree)
print mngr.setCurrentAction(tree, act3)
mngr.addAction(tree, 'actiontree.UndoableGraph.generator.EmptyGraph')
self.print_verify(answers, tree)
# print mngr.setCurrentAction(tree, act3)
mngr.addAction(tree, 'actiontree.UndoableGraph.generator.EmptyGraph')
self.print_verify(answers, tree)
# print mngr.setCurrentAction(tree, act3)
mngr.addAction(tree, 'actiontree.UndoableGraph.generator.EmptyGraph')
self.print_verify(answers, tree)
print mngr.setCurrentAction(tree, act4)
mngr.eval(tree)
if __name__ == "__main__":
MepintaContext('python', log_level=LOG_DEBUG)
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
joaduo/mepinta
|
core/python_core/mepinta/pipelineview/actiontree/tests/test_ActionTreeManager.py
|
Python
|
gpl-3.0
| 2,762 | 0.00181 |
import lxml.html
from html import escape
import chevron
import math
import prairielearn as pl
import numpy as np
import random
WEIGHT_DEFAULT = 1
CORRECT_ANSWER_DEFAULT = None
LABEL_DEFAULT = None
SUFFIX_DEFAULT = None
DISPLAY_DEFAULT = 'inline'
def prepare(element_html, data):
element = lxml.html.fragment_fromstring(element_html)
required_attribs = ['answers-name']
optional_attribs = ['weight', 'correct-answer', 'label', 'suffix', 'display']
pl.check_attribs(element, required_attribs, optional_attribs)
name = pl.get_string_attrib(element, 'answers-name')
correct_answer = pl.get_integer_attrib(element, 'correct-answer', CORRECT_ANSWER_DEFAULT)
if correct_answer is not None:
if name in data['correct_answers']:
raise Exception('duplicate correct_answers variable name: %s' % name)
data['correct_answers'][name] = correct_answer
def render(element_html, data):
element = lxml.html.fragment_fromstring(element_html)
name = pl.get_string_attrib(element, 'answers-name')
label = pl.get_string_attrib(element, 'label', LABEL_DEFAULT)
suffix = pl.get_string_attrib(element, 'suffix', SUFFIX_DEFAULT)
display = pl.get_string_attrib(element, 'display', DISPLAY_DEFAULT)
if data['panel'] == 'question':
editable = data['editable']
raw_submitted_answer = data['raw_submitted_answers'].get(name, None)
# Get info strings
info_params = {'format': True}
with open('pl-integer-input.mustache', 'r', encoding='utf-8') as f:
info = chevron.render(f, info_params).strip()
with open('pl-integer-input.mustache', 'r', encoding='utf-8') as f:
info_params.pop('format', None)
info_params['shortformat'] = True
shortinfo = chevron.render(f, info_params).strip()
html_params = {
'question': True,
'name': name,
'label': label,
'suffix': suffix,
'editable': editable,
'info': info,
'shortinfo': shortinfo,
'uuid': pl.get_uuid()
}
partial_score = data['partial_scores'].get(name, {'score': None})
score = partial_score.get('score', None)
if score is not None:
try:
score = float(score)
if score >= 1:
html_params['correct'] = True
elif score > 0:
html_params['partial'] = math.floor(score * 100)
else:
html_params['incorrect'] = True
except Exception:
                raise ValueError('invalid score: ' + str(score))
if display == 'inline':
html_params['inline'] = True
elif display == 'block':
html_params['block'] = True
else:
raise ValueError('method of display "%s" is not valid (must be "inline" or "block")' % display)
if raw_submitted_answer is not None:
html_params['raw_submitted_answer'] = escape(raw_submitted_answer)
with open('pl-integer-input.mustache', 'r', encoding='utf-8') as f:
html = chevron.render(f, html_params).strip()
elif data['panel'] == 'submission':
parse_error = data['format_errors'].get(name, None)
html_params = {
'submission': True,
'label': label,
'parse_error': parse_error,
'uuid': pl.get_uuid()
}
if parse_error is None:
# Get submitted answer, raising an exception if it does not exist
a_sub = data['submitted_answers'].get(name, None)
if a_sub is None:
raise Exception('submitted answer is None')
# If answer is in a format generated by pl.to_json, convert it
# back to a standard type (otherwise, do nothing)
a_sub = pl.from_json(a_sub)
html_params['suffix'] = suffix
html_params['a_sub'] = '{:d}'.format(a_sub)
else:
raw_submitted_answer = data['raw_submitted_answers'].get(name, None)
if raw_submitted_answer is not None:
html_params['raw_submitted_answer'] = escape(raw_submitted_answer)
partial_score = data['partial_scores'].get(name, {'score': None})
score = partial_score.get('score', None)
if score is not None:
try:
score = float(score)
if score >= 1:
html_params['correct'] = True
elif score > 0:
html_params['partial'] = math.floor(score * 100)
else:
html_params['incorrect'] = True
except Exception:
                raise ValueError('invalid score: ' + str(score))
with open('pl-integer-input.mustache', 'r', encoding='utf-8') as f:
html = chevron.render(f, html_params).strip()
elif data['panel'] == 'answer':
a_tru = pl.from_json(data['correct_answers'].get(name, None))
if a_tru is not None:
html_params = {'answer': True, 'label': label, 'a_tru': '{:d}'.format(a_tru), 'suffix': suffix}
with open('pl-integer-input.mustache', 'r', encoding='utf-8') as f:
html = chevron.render(f, html_params).strip()
else:
html = ''
else:
raise Exception('Invalid panel type: %s' % data['panel'])
return html
def parse(element_html, data):
element = lxml.html.fragment_fromstring(element_html)
name = pl.get_string_attrib(element, 'answers-name')
# Get submitted answer or return parse_error if it does not exist
a_sub = data['submitted_answers'].get(name, None)
if a_sub is None:
data['format_errors'][name] = 'No submitted answer.'
data['submitted_answers'][name] = None
return
# Convert to integer
try:
a_sub_parsed = pl.string_to_integer(a_sub)
if a_sub_parsed is None:
raise ValueError('invalid submitted answer (wrong type)')
if not np.isfinite(a_sub_parsed):
raise ValueError('invalid submitted answer (not finite)')
data['submitted_answers'][name] = pl.to_json(a_sub_parsed)
except Exception:
data['format_errors'][name] = 'Invalid format. The submitted answer was not an integer.'
data['submitted_answers'][name] = None
def grade(element_html, data):
element = lxml.html.fragment_fromstring(element_html)
name = pl.get_string_attrib(element, 'answers-name')
# Get weight
weight = pl.get_integer_attrib(element, 'weight', WEIGHT_DEFAULT)
# Get true answer (if it does not exist, create no grade - leave it
# up to the question code)
a_tru = pl.from_json(data['correct_answers'].get(name, None))
if a_tru is None:
return
# Get submitted answer (if it does not exist, score is zero)
a_sub = data['submitted_answers'].get(name, None)
if a_sub is None:
data['partial_scores'][name] = {'score': 0, 'weight': weight}
return
# If submitted answer is in a format generated by pl.to_json, convert it
# back to a standard type (otherwise, do nothing)
a_sub = pl.from_json(a_sub)
# Cast both submitted and true answers as integers.
a_tru = int(a_tru)
a_sub = int(a_sub)
if a_tru == a_sub:
data['partial_scores'][name] = {'score': 1, 'weight': weight}
else:
data['partial_scores'][name] = {'score': 0, 'weight': weight}
def test(element_html, data):
element = lxml.html.fragment_fromstring(element_html)
name = pl.get_string_attrib(element, 'answers-name')
weight = pl.get_integer_attrib(element, 'weight', WEIGHT_DEFAULT)
# Get correct answer
a_tru = data['correct_answers'][name]
# If correct answer is in a format generated by pl.to_json, convert it
# back to a standard type (otherwise, do nothing)
a_tru = pl.from_json(a_tru)
result = random.choices(['correct', 'incorrect', 'invalid'], [5, 5, 1])[0]
if result == 'correct':
data['raw_submitted_answers'][name] = str(a_tru)
data['partial_scores'][name] = {'score': 1, 'weight': weight}
elif result == 'incorrect':
data['raw_submitted_answers'][name] = str(a_tru + (random.randint(1, 11) * random.choice([-1, 1])))
data['partial_scores'][name] = {'score': 0, 'weight': weight}
elif result == 'invalid':
# FIXME: add more invalid expressions, make text of format_errors
# correct, and randomize
if random.choice([True, False]):
data['raw_submitted_answers'][name] = '1 + 2'
else:
data['raw_submitted_answers'][name] = '3.4'
data['format_errors'][name] = 'invalid'
else:
raise Exception('invalid result: %s' % result)
|
mwest1066/PrairieLearn
|
elements/pl-integer-input/pl-integer-input.py
|
Python
|
agpl-3.0
| 8,803 | 0.001022 |
import unittest
from highest_number_cubed import highest_number_cubed
class TestHighestNumberCubed(unittest.TestCase):
def test_three(self):
self.assertEqual(highest_number_cubed(30), 3)
def test_two(self):
self.assertEqual(highest_number_cubed(12), 2)
def test_one(self):
self.assertEqual(highest_number_cubed(3), 1)
def test_big(self):
self.assertEqual(highest_number_cubed(12000), 22)
|
rmotr-group-projects/itp-w1-highest-number-cubed
|
tests/test_main.py
|
Python
|
mit
| 443 | 0 |
""" Defines the ScalesTickGenerator class.
"""
from numpy import array
from traits.api import Any
from enable.font_metrics_provider import font_metrics_provider
from ticks import AbstractTickGenerator
# Use the new scales/ticks library
from scales.api import ScaleSystem
class ScalesTickGenerator(AbstractTickGenerator):
scale = Any #Instance(ScaleSystem, args=())
font = Any
def _scale_default(self):
return ScaleSystem()
def get_ticks(self, data_low, data_high, bounds_low, bounds_high, interval,
use_endpoints=False, scale=None):
if interval != "auto":
ticks = self.scale.ticks(data_low, data_high, (data_high - data_low) / interval)
else:
ticks = self.scale.ticks(data_low, data_high)
return ticks
def get_ticks_and_labels(self, data_low, data_high, bounds_low, bounds_high,
orientation = "h"):
# TODO: add support for Interval
# TODO: add support for vertical labels
metrics = font_metrics_provider()
if self.font is not None and hasattr(metrics, "set_font"):
metrics.set_font(self.font)
test_str = "0123456789-+"
charsize = metrics.get_full_text_extent(test_str)[0] / len(test_str)
numchars = (bounds_high - bounds_low) / charsize
tmp = zip(*self.scale.labels(data_low, data_high, numlabels=8, char_width=numchars))
# Check to make sure we actually have labels/ticks to show before
# unpacking the return tuple into (tick_array, labels).
if len(tmp) == 0:
return array([]), []
else:
return array(tmp[0]), tmp[1]
|
tommy-u/chaco
|
chaco/scales_tick_generator.py
|
Python
|
bsd-3-clause
| 1,682 | 0.004162 |
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant l'éditeur 'shedit'.
Si des redéfinitions de contexte-éditeur standard doivent être faites, elles
seront placées dans ce package
"""
from textwrap import dedent
from primaires.interpreteur.editeur.presentation import Presentation
from primaires.interpreteur.editeur.description import Description
from primaires.interpreteur.editeur.entier import Entier
from primaires.interpreteur.editeur.flag import Flag
from primaires.interpreteur.editeur.tableau import Tableau
from primaires.interpreteur.editeur.uniligne import Uniligne
from .edt_carte import EdtCarte
class EdtShedit(Presentation):
"""Classe définissant l'éditeur de modèle de navires 'shedit'.
"""
nom = "shedit"
def __init__(self, personnage, modele):
"""Constructeur de l'éditeur"""
if personnage:
instance_connexion = personnage.instance_connexion
else:
instance_connexion = None
Presentation.__init__(self, instance_connexion, modele)
if personnage and modele:
self.construire(modele)
def __getnewargs__(self):
return (None, None)
def construire(self, modele):
"""Construction de l'éditeur"""
# nom
nom = self.ajouter_choix("nom", "n", Uniligne, modele, "nom")
nom.parent = self
nom.prompt = "Nom du navire : "
nom.apercu = "{objet.nom}"
nom.aide_courte = \
"Entrez le |ent|nom|ff| du navire ou |cmd|/|ff| pour revenir " \
"à la fenêtre parente.\n\nNom actuel : |bc|{objet.nom}|ff|"
# Description
description = self.ajouter_choix("description", "d",
Description, modele, "description")
description.parent = self
description.apercu = "{objet.description." \
"paragraphes_indentes}"
description.aide_courte = \
"| |tit|" + "Description du navire {}".format(
modele.cle).ljust(76) + "|ff||\n" + self.opts.separateur
# Description à la vente
description_vente = self.ajouter_choix("description à la vente", "ve",
Description, modele, "description_vente")
description_vente.parent = self
description_vente.apercu = "{objet.description_vente." \
"paragraphes_indentes}"
description_vente.aide_courte = \
"| |tit|" + "Description en vente du navire {}".format(
modele.cle).ljust(76) + "|ff||\n" + self.opts.separateur
# Genre masculin
masculin = self.ajouter_choix("genre masculin", "g", Flag, modele,
"masculin")
masculin.parent = self
# Descriptions indépendantes
independantes = self.ajouter_choix("descriptions indépendantes", "in", Flag,
modele, "descriptions_independantes")
independantes.parent = self
# Facteurs d'orientation
facteurs = self.ajouter_choix("facteurs d'orientation", "f", Tableau,
modele, "facteurs_orientations",
((("Allure", ["vent debout", "au près", "bon plein",
"largue", "grand largue", "vent arrière"]),
("Facteur", "flottant"))))
facteurs.parent = self
facteurs.apercu = "{valeur}"
facteurs.aide_courte = dedent("""
Entrez |cmd|/|ff| pour revenir à la fenêtre parente.
Entrez une allure, un signe |cmd|/|ff| et son facteur influençant
la vitesse. Si ce facteur est négatif, le navire va "culer".
Si ce facteur est positif, la vitesse (dépendant du vent)
sera multipliée par le facteur de l'allure précisé. Vent
debout est une allure qui fait culer le navire. Le facteur
augmente ensuite, restant faible (ou nul) pour le près, davantage
pour le bon plein puis le largue, qui est souvent la
meilleure allure. Le facteur diminue ensuite généralement
pour le grand largue et le vent arrière.
{valeur}""".strip("\n"))
# Peut conquérir
peut_conquerir = self.ajouter_choix("peut conquérir", "co", Flag,
modele, "peut_conquerir")
peut_conquerir.parent = self
# Niveau
niveau = self.ajouter_choix("niveau", "i", Entier, modele, "niveau")
niveau.parent = self
niveau.apercu = "{objet.niveau}"
niveau.prompt = "Niveau du navire : "
niveau.aide_courte = \
"Entrez |ent|le niveau|ff| du navire ou |cmd|/|ff| pour " \
"revenir à la fenêtre parente.\n\n" \
"Le niveau détermine le nombre d'XP données en cas de " \
"récompense (quand\nun personnage conquit le navire par " \
"exemple). Une fraction du niveau\nest donné en XP secondaire " \
"du niveau navigation.\n\n" \
"Niveau actuel : {objet.niveau}"
# Prix
prix = self.ajouter_choix("prix unitaire", "u", Entier, modele,
"m_valeur", 1)
prix.parent = self
prix.apercu = "{objet.m_valeur}"
prix.prompt = "Prix unitaire du navire : "
prix.aide_courte = \
"Entrez |ent|le prix unitaire|ff| du navire ou |cmd|/|ff| pour " \
"revenir à la fenêtre parente.\n\n" \
"Prix unitaire actuel : |bc|{objet.m_valeur}|ff|"
# Durée de construction
duree = self.ajouter_choix("durée de construction", "r", Entier,
modele, "duree_construction", 1)
duree.parent = self
duree.apercu = "{objet.duree_construction} minute(s)"
duree.prompt = "Durée de construction du nagvire (en minutes) : "
duree.aide_courte = \
"Entrez |ent|la durée de construction|ff| du navire ou " \
"|cmd|/|ff| pour\nrevenir à la fenêtre parente.\n\n" \
"Cette durée, exprimée en minutes, est celle qui doit " \
"s'écouler entre le\nmoment ou un personnage achète le navire " \
"dans un chantier naval et le moment\noù le chantier naval " \
"place son nouveau navire dans le port.\n\n" \
"Durée de construction actuelle : " \
"|bc|{objet.duree_construction}|ff| minute(s)"
# Poids max
poids_max = self.ajouter_choix("poids maximum", "p", Entier, modele,
"poids_max", 1)
poids_max.parent = self
poids_max.apercu = "{objet.poids_max} kg"
poids_max.prompt = "Poids maximum avant de sombrer (en kg) : "
poids_max.aide_courte = \
"Entrez |ent|le poids maximum|ff| du navire avant qu'il ne " \
"sombre ou |cmd|/|ff| pour revenir\nà la fenêtre parente.\n\n" \
"Poids maximum actuel : {objet.poids_max} kg"
# Poids max dans la cale
cale_max = self.ajouter_choix("poids maximum dans la cale", "ca",
Entier, modele, "cale_max", 1)
cale_max.parent = self
cale_max.apercu = "{objet.cale_max} kg"
cale_max.prompt = "Poids maximum de la cale (en kg) : "
cale_max.aide_courte = \
"Entrez |ent|le poids maximum|ff| de la cale du navire " \
"ou |cmd|/|ff| pour revenir\nà la fenêtre parente.\n\n" \
"Poids maximum actuel de la cale : {objet.cale_max} kg"
# Tirant d'eau
tirant = self.ajouter_choix("tirant d'eau", "ti", Entier, modele,
"tirant_eau", 1)
tirant.parent = self
tirant.apercu = "{valeur} brasses"
tirant.prompt = "Tirant d'eau en brasses : "
tirant.aide_courte = \
"Entrez |ent|le tirant d'eau|ff| ou |cmd|/|ff| pour revenir " \
"à la fenêtre parente.\n\nLe tirant d'eau du navire est " \
"la distance verticale qui sépare la flotaison\nde la quille " \
": c'est donc la partie immergée. Un navire avec un tirant\n" \
"d'eau trop important ne porura pas s'approcher des terres. " \
"En fonction\nde la profondeur de l'étendue, également " \
"spécifiée en brasses, tous\nles navires ne pourront pas " \
"s'y aventurer.\n\nTirant d'eau actuel : {valeur} brasses"
# Fond plat
plat = self.ajouter_choix("fond plat", "pl", Flag, modele, "fond_plat")
plat.parent = self
# Carte
carte = self.ajouter_choix("carte", "c", EdtCarte, modele)
carte.parent = self
|
vlegoff/tsunami
|
src/secondaires/navigation/editeurs/shedit/__init__.py
|
Python
|
bsd-3-clause
| 10,053 | 0.001802 |
"""
XML handler for calibrations
"""
# Standard library modules.
# Third party modules.
# Local modules.
from pyhmsa.spec.condition.calibration import \
(CalibrationConstant, CalibrationLinear,
CalibrationPolynomial, CalibrationExplicit)
from pyhmsa.fileformat.xmlhandler.condition.condition import _ConditionXMLHandler
# Globals and constants variables.
class CalibrationConstantXMLHandler(_ConditionXMLHandler):
def __init__(self, version):
super().__init__(CalibrationConstant, version)
class CalibrationLinearXMLHandler(_ConditionXMLHandler):
def __init__(self, version):
super().__init__(CalibrationLinear, version)
class CalibrationPolynomialXMLHandler(_ConditionXMLHandler):
def __init__(self, version):
super().__init__(CalibrationPolynomial, version)
class CalibrationExplicitXMLHandler(_ConditionXMLHandler):
def __init__(self, version):
super().__init__(CalibrationExplicit, version)
|
pyhmsa/pyhmsa
|
pyhmsa/fileformat/xmlhandler/condition/calibration.py
|
Python
|
mit
| 965 | 0.006218 |
from .features import Dictionary, RegexMatches, Stemmed, Stopwords
name = "dutch"
try:
import enchant
dictionary = enchant.Dict("nl")
except enchant.errors.DictNotFoundError:
raise ImportError("No enchant-compatible dictionary found for 'nl'. " +
"Consider installing 'myspell-nl'.")
dictionary = Dictionary(name + ".dictionary", dictionary.check)
"""
:class:`~revscoring.languages.features.Dictionary` features via
:class:`enchant.Dict` "nl". Provided by `myspell-nl`
"""
try:
from nltk.corpus import stopwords as nltk_stopwords
stopwords = set(nltk_stopwords.words('dutch'))
except LookupError:
raise ImportError("Could not load stopwords for {0}. ".format(__name__) +
"You may need to install the nltk 'stopwords' " +
"corpora. See http://www.nltk.org/data.html")
stopwords = Stopwords(name + ".stopwords", stopwords)
"""
:class:`~revscoring.languages.features.Stopwords` features provided by
:func:`nltk.corpus.stopwords` "dutch"
"""
try:
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer("dutch")
except ValueError:
raise ImportError("Could not load stemmer for {0}. ".format(__name__))
stemmed = Stemmed(name + ".stemmed", stemmer.stem)
"""
:class:`~revscoring.languages.features.Stemmed` word features via
:class:`nltk.stem.snowball.SnowballStemmer` "dutch"
"""
badword_regexes = [
r"aars",
r"an(aal|us)\w*",
r"balhaar",
r"drol(len)?",
r"fack(en|ing|s)?", "facking",
r"flikkers?",
r"focking",
r"ge(ile?|lul)",
r"geneukt",
r"hoer(en?)?",
r"homos?",
r"kaka?",
r"kak(hoofd|ken)",
r"k[ae]nker",
r"klootzak(ken)?",
r"klote",
r"kont(gat|je)?",
r"pedo",
r"penis(sen)?",
r"peop",
r"piemels?",
r"pijpen",
r"pik",
r"pimel",
r"pipi",
r"poep(chinees?|en|hoofd)?",
r"poep(ie|je|sex|te?)s?",
r"porno?",
r"neuke?",
r"neuken(de)?",
r"neukt(en?)?",
r"stron(d|t)",
r"suck(s|t)?",
r"zuigt",
r"sukkels?",
r"ter(ing|ten)", "tetten",
r"tieten",
r"vagina",
r"verekte",
r"verkracht",
r"dikzak",
r"dildo",
r"mon?g(olen|ool)?", "mooiboy",
r"negers?",
r"shit",
r"sperma",
r"kut(jes?)?",
r"stelletje",
r"losers?",
r"lul(len)?",
r"reet",
r"scheet", "scheten", r"schijt",
r"diaree",
r"slet",
r"lekkerding",
r"likken"
]
badwords = RegexMatches(name + ".badwords", badword_regexes)
"""
:class:`~revscoring.languages.features.RegexMatches` features via a list of
badword detecting regexes.
"""
informal_regexes = [
r"aap(jes)?",
r"banaan",
r"bent",
r"boe(it)?",
r"doei"
r"dombo",
r"domme",
r"eigelijk",
r"godverdomme",
r"groetjes",
r"gwn",
r"hoi",
r"hal+o+",
r"heb",
r"hee+[jyl]", r"heee+?l",
r"houd?",
r"(?:hoi+)+",
r"hoor",
r"izan",
r"jij",
r"jou",
r"jullie",
r"kaas",
r"klopt",
r"kots",
r"kusjes",
r"le?kke?re?",
r"maarja",
r"mama",
r"nou",
r"oma",
r"ofzo",
r"oke",
r"sexy?",
r"snap",
r"stink(en|t)",
r"stoer",
r"swag",
r"swek",
r"vies", "vieze",
r"vind",
r"vuile",
r"xxx",
r"yeah",
r"zielig",
r"zooi",
r"yolo",
r"zeg"
]
informals = RegexMatches(name + ".informals", informal_regexes)
"""
:class:`~revscoring.languages.features.RegexMatches` features via a list of
informal word detecting regexes.
"""
|
yafeunteun/wikipedia-spam-classifier
|
revscoring/revscoring/languages/dutch.py
|
Python
|
mit
| 3,534 | 0 |
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
from views import HomeView, SimpleChainView, MultipleChainView, ModelChainView, EditCarView, DeleteCarView, \
AjaxChainedNames, AjaxChainedCountries, AjaxChainedCities, AjaxChainedModels, AjaxChainedColors
urlpatterns = patterns('',
# Examples:
url(r'^ajax/chained-names/$', AjaxChainedNames.as_view(), name='ajax_chained_names'),
url(r'^ajax/chained-countries/$', AjaxChainedCountries.as_view(), name='ajax_chained_countries'),
url(r'^ajax/chained-cities/$', AjaxChainedCities.as_view(), name='ajax_chained_cities'),
url(r'^ajax/chained-brand-models/$', AjaxChainedModels.as_view(), name='ajax_chained_models'),
url(r'^ajax/chained-colors/$', AjaxChainedColors.as_view(), name='ajax_chained_colors'),
url(r'^simple-chain/$', SimpleChainView.as_view(), name='simple_chain'),
url(r'^multiple-chain/$', MultipleChainView.as_view(), name='multiple_chain'),
url(r'^model-chain/$', ModelChainView.as_view(), name='model_chain'),
url(r'^edit-car/(?P<pk>[-\d]+)/$', EditCarView.as_view(), name='edit_car'),
url(r'^delete-car/(?P<pk>[-\d]+)/$', DeleteCarView.as_view(), name='delete_car'),
url(r'^$', HomeView.as_view(), name='home'),
# url(r'^example/', include('example.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
PragmaticMates/django-clever-selects
|
example/example/urls.py
|
Python
|
mit
| 1,638 | 0.007326 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-23 19:08
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('deposit', '0007_remove_urls_and_add_oairecord'),
]
operations = [
migrations.CreateModel(
name='UserPreferences',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('last_repository', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_used_by', to='deposit.Repository')),
('preferred_repository', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='preferrend_by', to='deposit.Repository')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
wetneb/dissemin
|
deposit/migrations/0008_userpreferences.py
|
Python
|
agpl-3.0
| 1,117 | 0.003581 |
##########################################################################
#
# This file is part of Lilith
# made by J. Bernon and B. Dumont
#
# Web page: http://lpsc.in2p3.fr/projects-th/lilith/
#
# In case of questions email bernon@lpsc.in2p3.fr dum33@ibs.re.kr
#
#
# Lilith is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Lilith is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Lilith. If not, see <http://www.gnu.org/licenses/>.
#
##########################################################################
from ..errors import EffectiveMuError
from warnings import warn
def compute_effective_mu(user_mu, mu, alpha):
"""Compute mu_eff(mu,alpha) based on the general template"""
effective_mu = {}
mutemplate = mu["mutemplate"]
prod_modes = ["ggH", "VBF", "WH", "ZH", "ttH"]
alpha_names = alpha.keys()
template_names = []
for template in mutemplate:
template_names.append(template["extra"]["syst"])
try:
alpha_names.index(template["extra"]["syst"])
except ValueError:
raise EffectiveMuError(
'systematic uncertainty '+template["extra"]["syst"]+' not provided in user input file')
# for name in [name for name in alpha_names if name not in template_names]:
# warn('systematic uncertainty '+name+
# ' in user input has no experimental equivalent: will be ignored',Warning,stacklevel=3)
for prod, decay in user_mu:
mu_eff = user_mu[prod,decay]
for template in mutemplate:
alpha_i = alpha[template["extra"]["syst"]]["val"]
alpha_0 = template["alpha0"]
mu_eff += template["phi"]*(alpha_i-alpha_0)
for pprime in prod_modes:
mu_eff += user_mu[pprime,decay]*template[prod,pprime]*(alpha_i-alpha_0)
effective_mu[prod,decay] = mu_eff
return effective_mu
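# Illustrative (hypothetical) shapes of the arguments, inferred from how they
# are indexed above:
#   user_mu = {("ggH", "gammagamma"): 1.0, ("VBF", "gammagamma"): 1.2, ...}
#   mu      = {"mutemplate": [{"extra": {"syst": "lumi"}, "alpha0": 0., "phi": 0.02,
#                              ("ggH", "ggH"): 0.01, ...}, ...]}
#   alpha   = {"lumi": {"val": 0.05}}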
|
jeremy-bernon/Lilith
|
lilith/internal/effectivemu.py
|
Python
|
gpl-3.0
| 2,350 | 0.00383 |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import uuid
import mock
import yaml
from mistralclient.api.v2 import action_executions
from mistralclient.api.v2 import executions
from mistralclient.api.v2 import workflows
from oslo_config import cfg
# XXX: actionsensor import depends on config being setup.
import st2tests.config as tests_config
tests_config.parse_args()
from mistral_v2 import MistralRunner
import st2common
from st2common.bootstrap import actionsregistrar
from st2common.bootstrap import policiesregistrar
from st2common.bootstrap import runnersregistrar
from st2common.constants import action as action_constants
from st2common.models.db.liveaction import LiveActionDB
from st2common.persistence.liveaction import LiveAction
from st2common.persistence.policy import Policy
from st2common.runners import base as runners
from st2common.services import action as action_service
from st2common.transport.liveaction import LiveActionPublisher
from st2common.transport.publishers import CUDPublisher
from st2common.util import loader
from st2tests import DbTestCase
from st2tests import fixturesloader
from st2tests.mocks.liveaction import MockLiveActionPublisher
MISTRAL_RUNNER_NAME = 'mistral_v2'
TEST_PACK = 'mistral_tests'
TEST_PACK_PATH = fixturesloader.get_fixtures_packs_base_path() + '/' + TEST_PACK
PACKS = [
TEST_PACK_PATH,
fixturesloader.get_fixtures_packs_base_path() + '/core'
]
# Non-workbook with a single workflow
WF1_META_FILE_NAME = 'workflow_v2.yaml'
WF1_META_FILE_PATH = TEST_PACK_PATH + '/actions/' + WF1_META_FILE_NAME
WF1_META_CONTENT = loader.load_meta_file(WF1_META_FILE_PATH)
WF1_NAME = WF1_META_CONTENT['pack'] + '.' + WF1_META_CONTENT['name']
WF1_ENTRY_POINT = TEST_PACK_PATH + '/actions/' + WF1_META_CONTENT['entry_point']
WF1_ENTRY_POINT_X = WF1_ENTRY_POINT.replace(WF1_META_FILE_NAME, 'xformed_' + WF1_META_FILE_NAME)
WF1_SPEC = yaml.safe_load(MistralRunner.get_workflow_definition(WF1_ENTRY_POINT_X))
WF1_YAML = yaml.safe_dump(WF1_SPEC, default_flow_style=False)
WF1 = workflows.Workflow(None, {'name': WF1_NAME, 'definition': WF1_YAML})
MISTRAL_EXECUTION = {'id': str(uuid.uuid4()), 'state': 'RUNNING', 'workflow_name': WF1_NAME}
WF1_EXEC = copy.deepcopy(MISTRAL_EXECUTION)
@mock.patch.object(
CUDPublisher,
'publish_update',
mock.MagicMock(return_value=None))
@mock.patch.object(
CUDPublisher,
'publish_create',
mock.MagicMock(side_effect=MockLiveActionPublisher.publish_create))
@mock.patch.object(
LiveActionPublisher,
'publish_state',
mock.MagicMock(side_effect=MockLiveActionPublisher.publish_state))
class MistralRunnerPolicyTest(DbTestCase):
@classmethod
def setUpClass(cls):
super(MistralRunnerPolicyTest, cls).setUpClass()
# Override the retry configuration here otherwise st2tests.config.parse_args
# in DbTestCase.setUpClass will reset these overrides.
cfg.CONF.set_override('retry_exp_msec', 100, group='mistral')
cfg.CONF.set_override('retry_exp_max_msec', 200, group='mistral')
cfg.CONF.set_override('retry_stop_max_msec', 200, group='mistral')
cfg.CONF.set_override('api_url', 'http://0.0.0.0:9101', group='auth')
def setUp(self):
super(MistralRunnerPolicyTest, self).setUp()
# Start with a clean database for each test.
self._establish_connection_and_re_create_db()
# Register runners.
runnersregistrar.register_runners()
actions_registrar = actionsregistrar.ActionsRegistrar(
use_pack_cache=False,
fail_on_failure=True
)
for pack in PACKS:
actions_registrar.register_from_pack(pack)
# Register policies required for the tests.
policiesregistrar.register_policy_types(st2common)
policies_registrar = policiesregistrar.PolicyRegistrar(
use_pack_cache=False,
fail_on_failure=True
)
for pack in PACKS:
policies_registrar.register_from_pack(pack)
@classmethod
def get_runner_class(cls, runner_name):
return runners.get_runner(runner_name).__class__
def _drop_all_other_policies(self, test_policy):
policy_dbs = [policy_db for policy_db in Policy.get_all() if policy_db.ref != test_policy]
for policy_db in policy_dbs:
Policy.delete(policy_db, publish=False)
@mock.patch.object(
workflows.WorkflowManager, 'list',
mock.MagicMock(return_value=[]))
@mock.patch.object(
workflows.WorkflowManager, 'get',
mock.MagicMock(return_value=WF1))
@mock.patch.object(
workflows.WorkflowManager, 'create',
mock.MagicMock(return_value=[WF1]))
@mock.patch.object(
executions.ExecutionManager, 'create',
mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC)))
@mock.patch.object(
action_executions.ActionExecutionManager, 'update',
mock.MagicMock(return_value=None))
def test_cancel_on_task_action_concurrency(self):
# Delete other policies in the test pack to avoid conflicts.
required_policy = 'mistral_tests.cancel_on_concurrency'
self._drop_all_other_policies(required_policy)
# Get threshold from the policy.
policy = Policy.get_by_ref(required_policy)
threshold = policy.parameters.get('threshold', 0)
self.assertGreater(threshold, 0)
# Launch instances of the workflow up to threshold.
for i in range(0, threshold):
liveaction = LiveActionDB(action=WF1_NAME, parameters={'friend': 'friend' + str(i)})
liveaction, execution1 = action_service.request(liveaction)
liveaction = LiveAction.get_by_id(str(liveaction.id))
self.assertEqual(liveaction.status, action_constants.LIVEACTION_STATUS_RUNNING)
# Check number of running instances
running = LiveAction.count(
action=WF1_NAME, status=action_constants.LIVEACTION_STATUS_RUNNING)
self.assertEqual(running, threshold)
# Mock the mistral runner cancel method to assert cancel is called.
mistral_runner_cls = self.get_runner_class('mistral_v2')
with mock.patch.object(mistral_runner_cls, 'cancel', mock.MagicMock(return_value=None)):
# Launch another instance of the workflow with mistral callback defined
# to indicate that this is executed under a workflow.
callback = {
'source': MISTRAL_RUNNER_NAME,
'url': 'http://127.0.0.1:8989/v2/action_executions/12345'
}
params = {'friend': 'grande animalerie'}
liveaction2 = LiveActionDB(action=WF1_NAME, parameters=params, callback=callback)
liveaction2, execution2 = action_service.request(liveaction2)
action_executions.ActionExecutionManager.update.assert_called_once_with(
'12345',
output='{"error": "Execution canceled by user."}',
state='CANCELLED'
)
liveaction2 = LiveAction.get_by_id(str(liveaction2.id))
self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_CANCELED)
# Assert cancel has been called.
mistral_runner_cls.cancel.assert_called_once_with()
@mock.patch.object(
workflows.WorkflowManager, 'list',
mock.MagicMock(return_value=[]))
@mock.patch.object(
workflows.WorkflowManager, 'get',
mock.MagicMock(return_value=WF1))
@mock.patch.object(
workflows.WorkflowManager, 'create',
mock.MagicMock(return_value=[WF1]))
@mock.patch.object(
executions.ExecutionManager, 'create',
mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC)))
@mock.patch.object(
action_executions.ActionExecutionManager, 'update',
mock.MagicMock(return_value=None))
def test_cancel_on_task_action_concurrency_by_attr(self):
# Delete other policies in the test pack to avoid conflicts.
required_policy = 'mistral_tests.cancel_on_concurrency_by_attr'
self._drop_all_other_policies(required_policy)
# Get threshold from the policy.
policy = Policy.get_by_ref(required_policy)
threshold = policy.parameters.get('threshold', 0)
self.assertGreater(threshold, 0)
params = {'friend': 'grande animalerie'}
# Launch instances of the workflow up to threshold.
for i in range(0, threshold):
liveaction = LiveActionDB(action=WF1_NAME, parameters=params)
liveaction, execution1 = action_service.request(liveaction)
liveaction = LiveAction.get_by_id(str(liveaction.id))
self.assertEqual(liveaction.status, action_constants.LIVEACTION_STATUS_RUNNING)
# Check number of running instances
running = LiveAction.count(
action=WF1_NAME, status=action_constants.LIVEACTION_STATUS_RUNNING,
parameters__friend=params['friend'])
self.assertEqual(running, threshold)
# Mock the mistral runner cancel method to assert cancel is called.
mistral_runner_cls = self.get_runner_class('mistral_v2')
with mock.patch.object(mistral_runner_cls, 'cancel', mock.MagicMock(return_value=None)):
# Launch another instance of the workflow with mistral callback defined
# to indicate that this is executed under a workflow.
callback = {
'source': MISTRAL_RUNNER_NAME,
'url': 'http://127.0.0.1:8989/v2/action_executions/12345'
}
liveaction2 = LiveActionDB(action=WF1_NAME, parameters=params, callback=callback)
liveaction2, execution2 = action_service.request(liveaction2)
action_executions.ActionExecutionManager.update.assert_called_once_with(
'12345',
output='{"error": "Execution canceled by user."}',
state='CANCELLED'
)
liveaction2 = LiveAction.get_by_id(str(liveaction2.id))
self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_CANCELED)
# Assert cancel has been called.
mistral_runner_cls.cancel.assert_called_once_with()
| tonybaloney/st2 | contrib/runners/mistral_v2/tests/unit/test_mistral_v2_policy.py | Python | apache-2.0 | 11,045 | 0.00335 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('hack_plot', '0009_auto_20150703_2236'),
]
operations = [
migrations.CreateModel(
name='SshHackLocation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('longitude', models.DecimalField(max_digits=10, decimal_places=6)),
('latitude', models.DecimalField(max_digits=10, decimal_places=6)),
],
options={
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='sshhacklocation',
unique_together=set([('longitude', 'latitude')]),
),
migrations.RemoveField(
model_name='sshhackip',
name='latitude',
),
migrations.RemoveField(
model_name='sshhackip',
name='longitude',
),
migrations.AddField(
model_name='sshhackip',
name='location',
field=models.ForeignKey(default=1, to='hack_plot.SshHackLocation'),
preserve_default=False,
),
]
| hellsgate1001/graphs | hack_plot/migrations/0010_auto_20150705_2020.py | Python | mit | 1,309 | 0.002292 |
#!/usr/bin/env python
# The function declaration of Boost::Python objects are hardcoded
# as string. This Script should show changes/update this strings.
#
# (Example of such issue in Civ4:BTS is CyPlayer::initUnit.)
#
import os
import glob
import pdb
# pdb.set_trace()
REWRITE_FILES = True
VERBOSE = 1
DEBUG_WRITE = False # Write into /run/shm/[CvFoobar].tmp.cpp
file_cache = dict()
def get_function(line):
if "python::init" in line: # Constructor line is without name.
return None
b = line.find("&")
if b == -1: raise Exception("No begin of function name found.")
e = line.find(",", b)
if e == -1: e = line.find(")", b)
if e == -1: raise Exception("No end of function name found.")
return (line[b+1:e], b+1, e)
def get_doc(line):
# Restrict on substring after second ','
c1 = line.find(",")
c2 = line.find(",", c1+1)
if c2 == -1: c2 = line.find(")", c1+1)
b = line.rfind(", \"", c2)
if b == -1: return (", (\"\"", c2+1, c2+1)
e = line.find("\"", b+3)
if e == -1: return (", (\"\"", c2+1, c2+1)
return (line[b+2:e+1], b+2, e+1)
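# Illustrative example (not part of the original script): for a line such as
#   '    .def("getID", &CyArea::getID, "int ()")'
# get_function() returns ('CyArea::getID', begin, end) and get_doc() returns
# ('"int ()"', begin, end), where begin/end are the slice bounds of the
# matched substring; update() below uses those bounds to splice in the
# regenerated doc string.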
def get_cpp_filename(func_name):
""" Return filename WITHOUT extension. """
return func_name[:func_name.find(":")]
def get_sub_funcname(func_name):
""" Return (right most) substring [b] of [a]::[b]. """
return func_name[func_name.rfind("::")+2:]
def get_cpp_file(fname, explizit_fname=None):
if fname in file_cache:
return file_cache[fname]
if VERBOSE: print("\tLoad %s..." % (fname))
try:
with open(fname+".cpp", "r") as f:
file_cache[fname] = f.readlines()
except IOError:
if fname == "CvInfos": # Prevent rekursion
raise Exception("CvInfos.cpp missing")
info = get_cpp_file("CvInfos")
file_cache[fname] = info
return file_cache[fname]
def clean_args(sArgs):
""" Convert boost::python::list& into list, etc. """
sArgs = sArgs.replace("&", "")
colon_pos = sArgs.find("::")
while colon_pos > -1:
b = max([
sArgs.rfind(" ", 0, colon_pos),
sArgs.rfind("(", 0, colon_pos)])
lArgs = [c for c in sArgs]
# print("Remove %i %i %s" % (b+1, colon_pos+2, lArgs[b+1:colon_pos+2]))
lArgs[b+1:colon_pos+2] = []
sArgs = "".join(lArgs)
colon_pos = sArgs.find("::")
sArgs = sArgs.replace(" /*", " (")
sArgs = sArgs.replace("*/ ", ") ")
return sArgs
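# Illustrative examples (not part of the original script):
#   clean_args("(boost::python::list& kTriggers)")  ->  "(list kTriggers)"
#   clean_args("int /*AreaAITypes*/ eAreaAI")       ->  "int (AreaAITypes) eAreaAI"
# i.e. references and namespace qualifiers are stripped, and /* */ type hints
# become parenthesised annotations.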
def get_new_doc(func_name):
cpp_file = get_cpp_file(get_cpp_filename(func_name))
loc_func_name = get_sub_funcname(func_name)
search_pat = "::%s" % (loc_func_name)
if VERBOSE: print("Search %s" % (search_pat))
for iLine in range(len(cpp_file)):
line = cpp_file[iLine]
if search_pat in line:
# Check if declaration is in one line
n = 1
while n < 5 and len(line.split("(")) != len(line.split(")")):
                line = line[:-1]  # Remove trailing '\n' (strings do not support item assignment)
line += cpp_file[iLine+n]
n += 1
args = line[line.find("("):line.find(")")+1]
# ret_arg = line[:line.find(" ")]
ret_arg = line[:line.rfind(" ", 0, line.find("::"))]
args = clean_args(args)
ret_arg = clean_args(ret_arg)
return "%s %s" % (ret_arg, args)
# "Function not found"
return None
def save_comment_string(old_doc, new_doc):
""" Transfer comment after ' - ' string in old doc string. """
com = " - "
if com in old_doc and com not in new_doc:
new_doc = new_doc + old_doc[old_doc.find(com):old_doc.rfind("\"")]
return new_doc
def update(line):
if VERBOSE: print(line)
tFunc = get_function(line)
if tFunc is None:
return line
tDoc = get_doc(line)
sOldDoc = tDoc[0]
sNewDoc = get_new_doc(tFunc[0])
if sNewDoc is None:
if VERBOSE: print("\t Function not found: %s" %(tFunc[0]))
return line
sNewDoc = save_comment_string(sOldDoc, sNewDoc)
# print("----- %s\n%s\n\"%s\"\n" % (tFunc[0], sOldDoc, sNewDoc))
# Replace ')' with ', ' if line without doc string get new one.
end = line[tDoc[1]-1]
end2 = ""
if end == ")":
end = ", "
end2 = ")"
newLine = "%s%s\"%s\"%s%s\r\n" % (line[:tDoc[1]-1], end, sNewDoc,
end2, line[tDoc[2]])
return newLine
if __name__ == "__main__":
interfaces = glob.glob("Cy*Interface*.cpp")
# interfaces = ['CyAreaInterface.cpp']
for interface in interfaces:
print("Handle " + interface)
with open(interface, "r") as f:
txt = f.readlines()
new_txt = []
for line in txt:
if ".def(" in line:
line = update(line)
new_txt.append(line)
if new_txt != txt:
# print(new_txt)
out = "".join(new_txt)
out = out.replace('\r\r', '\r') # Source unknown.
if REWRITE_FILES:
print("Detect change... and rewrite file " + interface)
with open(interface, "w") as f:
f.write(out)
if DEBUG_WRITE:
dfile = "/run/shm/" + interface.replace(".", ".tmp.")
print("Detect change... and write file " + dfile)
with open(dfile, "w") as f:
f.write(out)
| Zulan/PBStats | CvGameCoreDLL/update_interface_docstrings.py | Python | gpl-2.0 | 5,421 | 0.00535 |
"""Support for RainMachine devices."""
import asyncio
from datetime import timedelta
import logging
from regenmaschine import Client
from regenmaschine.errors import RainMachineError
import voluptuous as vol
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.service import verify_domain_control
from .const import (
CONF_ZONE_RUN_TIME,
DATA_CLIENT,
DATA_PROGRAMS,
DATA_PROVISION_SETTINGS,
DATA_RESTRICTIONS_CURRENT,
DATA_RESTRICTIONS_UNIVERSAL,
DATA_ZONES,
DATA_ZONES_DETAILS,
DEFAULT_ZONE_RUN,
DOMAIN,
PROGRAM_UPDATE_TOPIC,
SENSOR_UPDATE_TOPIC,
ZONE_UPDATE_TOPIC,
)
_LOGGER = logging.getLogger(__name__)
CONF_PROGRAM_ID = "program_id"
CONF_SECONDS = "seconds"
CONF_ZONE_ID = "zone_id"
DATA_LISTENER = "listener"
DEFAULT_ATTRIBUTION = "Data provided by Green Electronics LLC"
DEFAULT_ICON = "mdi:water"
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
DEFAULT_SSL = True
SERVICE_ALTER_PROGRAM = vol.Schema({vol.Required(CONF_PROGRAM_ID): cv.positive_int})
SERVICE_ALTER_ZONE = vol.Schema({vol.Required(CONF_ZONE_ID): cv.positive_int})
SERVICE_PAUSE_WATERING = vol.Schema({vol.Required(CONF_SECONDS): cv.positive_int})
SERVICE_START_PROGRAM_SCHEMA = vol.Schema(
{vol.Required(CONF_PROGRAM_ID): cv.positive_int}
)
SERVICE_START_ZONE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ZONE_ID): cv.positive_int,
vol.Optional(CONF_ZONE_RUN_TIME, default=DEFAULT_ZONE_RUN): cv.positive_int,
}
)
SERVICE_STOP_PROGRAM_SCHEMA = vol.Schema(
{vol.Required(CONF_PROGRAM_ID): cv.positive_int}
)
SERVICE_STOP_ZONE_SCHEMA = vol.Schema({vol.Required(CONF_ZONE_ID): cv.positive_int})
CONFIG_SCHEMA = cv.deprecated(DOMAIN, invalidation_version="0.119")
async def async_setup(hass, config):
"""Set up the RainMachine component."""
hass.data[DOMAIN] = {DATA_CLIENT: {}, DATA_LISTENER: {}}
return True
async def async_setup_entry(hass, config_entry):
"""Set up RainMachine as config entry."""
entry_updates = {}
if not config_entry.unique_id:
# If the config entry doesn't already have a unique ID, set one:
entry_updates["unique_id"] = config_entry.data[CONF_IP_ADDRESS]
if CONF_ZONE_RUN_TIME in config_entry.data:
# If a zone run time exists in the config entry's data, pop it and move it to
# options:
data = {**config_entry.data}
entry_updates["data"] = data
entry_updates["options"] = {
**config_entry.options,
CONF_ZONE_RUN_TIME: data.pop(CONF_ZONE_RUN_TIME),
}
if entry_updates:
hass.config_entries.async_update_entry(config_entry, **entry_updates)
_verify_domain_control = verify_domain_control(hass, DOMAIN)
websession = aiohttp_client.async_get_clientsession(hass)
client = Client(session=websession)
try:
await client.load_local(
config_entry.data[CONF_IP_ADDRESS],
config_entry.data[CONF_PASSWORD],
port=config_entry.data[CONF_PORT],
ssl=config_entry.data.get(CONF_SSL, DEFAULT_SSL),
)
except RainMachineError as err:
_LOGGER.error("An error occurred: %s", err)
raise ConfigEntryNotReady from err
else:
# regenmaschine can load multiple controllers at once, but we only grab the one
# we loaded above:
controller = next(iter(client.controllers.values()))
rainmachine = RainMachine(hass, config_entry, controller)
# Update the data object, which at this point (prior to any sensors registering
# "interest" in the API), will focus on grabbing the latest program and zone data:
await rainmachine.async_update()
hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id] = rainmachine
for component in ("binary_sensor", "sensor", "switch"):
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
@_verify_domain_control
async def disable_program(call):
"""Disable a program."""
await rainmachine.controller.programs.disable(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def disable_zone(call):
"""Disable a zone."""
await rainmachine.controller.zones.disable(call.data[CONF_ZONE_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def enable_program(call):
"""Enable a program."""
await rainmachine.controller.programs.enable(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def enable_zone(call):
"""Enable a zone."""
await rainmachine.controller.zones.enable(call.data[CONF_ZONE_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def pause_watering(call):
"""Pause watering for a set number of seconds."""
await rainmachine.controller.watering.pause_all(call.data[CONF_SECONDS])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def start_program(call):
"""Start a particular program."""
await rainmachine.controller.programs.start(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def start_zone(call):
"""Start a particular zone for a certain amount of time."""
await rainmachine.controller.zones.start(
call.data[CONF_ZONE_ID], call.data[CONF_ZONE_RUN_TIME]
)
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def stop_all(call):
"""Stop all watering."""
await rainmachine.controller.watering.stop_all()
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def stop_program(call):
"""Stop a program."""
await rainmachine.controller.programs.stop(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def stop_zone(call):
"""Stop a zone."""
await rainmachine.controller.zones.stop(call.data[CONF_ZONE_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def unpause_watering(call):
"""Unpause watering."""
await rainmachine.controller.watering.unpause_all()
await rainmachine.async_update_programs_and_zones()
for service, method, schema in [
("disable_program", disable_program, SERVICE_ALTER_PROGRAM),
("disable_zone", disable_zone, SERVICE_ALTER_ZONE),
("enable_program", enable_program, SERVICE_ALTER_PROGRAM),
("enable_zone", enable_zone, SERVICE_ALTER_ZONE),
("pause_watering", pause_watering, SERVICE_PAUSE_WATERING),
("start_program", start_program, SERVICE_START_PROGRAM_SCHEMA),
("start_zone", start_zone, SERVICE_START_ZONE_SCHEMA),
("stop_all", stop_all, {}),
("stop_program", stop_program, SERVICE_STOP_PROGRAM_SCHEMA),
("stop_zone", stop_zone, SERVICE_STOP_ZONE_SCHEMA),
("unpause_watering", unpause_watering, {}),
]:
hass.services.async_register(DOMAIN, service, method, schema=schema)
hass.data[DOMAIN][DATA_LISTENER] = config_entry.add_update_listener(
async_reload_entry
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload an OpenUV config entry."""
hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)
cancel_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
cancel_listener()
tasks = [
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in ("binary_sensor", "sensor", "switch")
]
await asyncio.gather(*tasks)
return True
async def async_reload_entry(hass, config_entry):
"""Handle an options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
class RainMachine:
"""Define a generic RainMachine object."""
def __init__(self, hass, config_entry, controller):
"""Initialize."""
self._async_cancel_time_interval_listener = None
self.config_entry = config_entry
self.controller = controller
self.data = {}
self.device_mac = controller.mac
self.hass = hass
self._api_category_count = {
DATA_PROVISION_SETTINGS: 0,
DATA_RESTRICTIONS_CURRENT: 0,
DATA_RESTRICTIONS_UNIVERSAL: 0,
}
self._api_category_locks = {
DATA_PROVISION_SETTINGS: asyncio.Lock(),
DATA_RESTRICTIONS_CURRENT: asyncio.Lock(),
DATA_RESTRICTIONS_UNIVERSAL: asyncio.Lock(),
}
async def _async_update_listener_action(self, now):
"""Define an async_track_time_interval action to update data."""
await self.async_update()
@callback
def async_deregister_sensor_api_interest(self, api_category):
"""Decrement the number of entities with data needs from an API category."""
# If this deregistration should leave us with no registration at all, remove the
# time interval:
if sum(self._api_category_count.values()) == 0:
if self._async_cancel_time_interval_listener:
self._async_cancel_time_interval_listener()
self._async_cancel_time_interval_listener = None
return
self._api_category_count[api_category] -= 1
async def async_fetch_from_api(self, api_category):
"""Execute the appropriate coroutine to fetch particular data from the API."""
if api_category == DATA_PROGRAMS:
data = await self.controller.programs.all(include_inactive=True)
elif api_category == DATA_PROVISION_SETTINGS:
data = await self.controller.provisioning.settings()
elif api_category == DATA_RESTRICTIONS_CURRENT:
data = await self.controller.restrictions.current()
elif api_category == DATA_RESTRICTIONS_UNIVERSAL:
data = await self.controller.restrictions.universal()
elif api_category == DATA_ZONES:
data = await self.controller.zones.all(include_inactive=True)
elif api_category == DATA_ZONES_DETAILS:
# This API call needs to be separate from the DATA_ZONES one above because,
# maddeningly, the DATA_ZONES_DETAILS API call doesn't include the current
# state of the zone:
data = await self.controller.zones.all(details=True, include_inactive=True)
self.data[api_category] = data
async def async_register_sensor_api_interest(self, api_category):
"""Increment the number of entities with data needs from an API category."""
# If this is the first registration we have, start a time interval:
if not self._async_cancel_time_interval_listener:
self._async_cancel_time_interval_listener = async_track_time_interval(
self.hass,
self._async_update_listener_action,
DEFAULT_SCAN_INTERVAL,
)
self._api_category_count[api_category] += 1
# If a sensor registers interest in a particular API call and the data doesn't
# exist for it yet, make the API call and grab the data:
async with self._api_category_locks[api_category]:
if api_category not in self.data:
await self.async_fetch_from_api(api_category)
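    # Illustrative sketch (not part of the original integration): a sensor
    # entity would typically pair these calls in its add/remove lifecycle
    # hooks, e.g.
    #
    #   async def async_added_to_hass(self):
    #       await self.rainmachine.async_register_sensor_api_interest(
    #           DATA_PROVISION_SETTINGS)
    #
    #   async def async_will_remove_from_hass(self):
    #       self.rainmachine.async_deregister_sensor_api_interest(
    #           DATA_PROVISION_SETTINGS)
    #
    # so the shared time-interval listener only runs while at least one
    # entity needs one of the polled API categories.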
async def async_update(self):
"""Update all RainMachine data."""
tasks = [self.async_update_programs_and_zones(), self.async_update_sensors()]
await asyncio.gather(*tasks)
async def async_update_sensors(self):
"""Update sensor/binary sensor data."""
_LOGGER.debug("Updating sensor data for RainMachine")
# Fetch an API category if there is at least one interested entity:
tasks = {}
for category, count in self._api_category_count.items():
if count == 0:
continue
tasks[category] = self.async_fetch_from_api(category)
results = await asyncio.gather(*tasks.values(), return_exceptions=True)
for api_category, result in zip(tasks, results):
if isinstance(result, RainMachineError):
_LOGGER.error(
"There was an error while updating %s: %s", api_category, result
)
continue
async_dispatcher_send(self.hass, SENSOR_UPDATE_TOPIC)
async def async_update_programs_and_zones(self):
"""Update program and zone data.
Program and zone updates always go together because of how linked they are:
programs affect zones and certain combinations of zones affect programs.
Note that this call does not take into account interested entities when making
the API calls; we make the reasonable assumption that switches will always be
enabled.
"""
_LOGGER.debug("Updating program and zone data for RainMachine")
tasks = {
DATA_PROGRAMS: self.async_fetch_from_api(DATA_PROGRAMS),
DATA_ZONES: self.async_fetch_from_api(DATA_ZONES),
DATA_ZONES_DETAILS: self.async_fetch_from_api(DATA_ZONES_DETAILS),
}
results = await asyncio.gather(*tasks.values(), return_exceptions=True)
for api_category, result in zip(tasks, results):
if isinstance(result, RainMachineError):
_LOGGER.error(
"There was an error while updating %s: %s", api_category, result
)
async_dispatcher_send(self.hass, PROGRAM_UPDATE_TOPIC)
async_dispatcher_send(self.hass, ZONE_UPDATE_TOPIC)
class RainMachineEntity(Entity):
"""Define a generic RainMachine entity."""
def __init__(self, rainmachine):
"""Initialize."""
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._device_class = None
self._name = None
self.rainmachine = rainmachine
@property
def device_class(self):
"""Return the device class."""
return self._device_class
@property
def device_info(self):
"""Return device registry information for this entity."""
return {
"identifiers": {(DOMAIN, self.rainmachine.controller.mac)},
"name": self.rainmachine.controller.name,
"manufacturer": "RainMachine",
"model": (
f"Version {self.rainmachine.controller.hardware_version} "
f"(API: {self.rainmachine.controller.api_version})"
),
"sw_version": self.rainmachine.controller.software_version,
}
@property
def device_state_attributes(self) -> dict:
"""Return the state attributes."""
return self._attrs
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
@property
def should_poll(self):
"""Disable polling."""
return False
@callback
def _update_state(self):
"""Update the state."""
self.update_from_latest_data()
self.async_write_ha_state()
@callback
def update_from_latest_data(self):
"""Update the entity."""
raise NotImplementedError
| balloob/home-assistant | homeassistant/components/rainmachine/__init__.py | Python | apache-2.0 | 15,955 | 0.001818 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import posixpath
import unittest
from oslo.config import cfg
from six.moves.urllib import error
from cloudbaseinit.metadata.services import base
from cloudbaseinit.metadata.services import ec2service
CONF = cfg.CONF
class EC2ServiceTest(unittest.TestCase):
def setUp(self):
CONF.set_override('retry_count_interval', 0)
self._service = ec2service.EC2Service()
@mock.patch('cloudbaseinit.utils.network.check_metadata_ip_route')
@mock.patch('cloudbaseinit.metadata.services.ec2service.EC2Service'
'.get_host_name')
def _test_load(self, mock_get_host_name, mock_check_metadata_ip_route,
side_effect):
mock_get_host_name.side_effect = [side_effect]
response = self._service.load()
mock_check_metadata_ip_route.assert_called_once_with(
CONF.ec2_metadata_base_url)
mock_get_host_name.assert_called_once()
if side_effect is Exception:
self.assertFalse(response)
else:
self.assertTrue(response)
def test_load(self):
self._test_load(side_effect=None)
def test_load_exception(self):
self._test_load(side_effect=Exception)
@mock.patch('six.moves.urllib.request.urlopen')
def _test_get_response(self, mock_urlopen, ret_value):
req = mock.MagicMock()
mock_urlopen.side_effect = [ret_value]
is_instance = isinstance(ret_value, error.HTTPError)
if is_instance and ret_value.code == 404:
self.assertRaises(base.NotExistingMetadataException,
self._service._get_response, req)
elif is_instance and ret_value.code != 404:
self.assertRaises(error.HTTPError,
self._service._get_response, req)
else:
response = self._service._get_response(req)
self.assertEqual(ret_value, response)
mock_urlopen.assert_called_once_with(req)
def test_get_response(self):
self._test_get_response(ret_value=None)
def test_get_response_error_404(self):
err = error.HTTPError("http://169.254.169.254/", 404,
'test error 404', {}, None)
self._test_get_response(ret_value=err)
def test_get_response_error_other(self):
err = error.HTTPError("http://169.254.169.254/", 409,
'test error 409', {}, None)
self._test_get_response(ret_value=err)
@mock.patch('six.moves.urllib.request.Request')
@mock.patch('cloudbaseinit.metadata.services.ec2service.EC2Service'
'._get_response')
def test_get_data(self, mock_get_response, mock_Request):
response = self._service._get_data('fake')
fake_path = posixpath.join(CONF.ec2_metadata_base_url, 'fake')
mock_Request.assert_called_once_with(fake_path)
mock_get_response.assert_called_once_with(mock_Request())
self.assertEqual(mock_get_response.return_value.read.return_value,
response)
@mock.patch('cloudbaseinit.metadata.services.ec2service.EC2Service'
'._get_cache_data')
def test_get_host_name(self, mock_get_cache_data):
response = self._service.get_host_name()
mock_get_cache_data.assert_called_once_with(
'%s/meta-data/local-hostname' % self._service._metadata_version)
self.assertEqual(mock_get_cache_data.return_value, response)
@mock.patch('cloudbaseinit.metadata.services.ec2service.EC2Service'
'._get_cache_data')
def test_get_instance_id(self, mock_get_cache_data):
response = self._service.get_instance_id()
mock_get_cache_data.assert_called_once_with(
'%s/meta-data/instance-id' % self._service._metadata_version)
self.assertEqual(mock_get_cache_data.return_value, response)
@mock.patch('cloudbaseinit.metadata.services.ec2service.EC2Service'
'._get_cache_data')
def test_get_public_keys(self, mock_get_cache_data):
mock_get_cache_data.side_effect = ['key=info', 'fake key']
response = self._service.get_public_keys()
expected = [
mock.call('%s/meta-data/public-keys' %
self._service._metadata_version),
mock.call('%(version)s/meta-data/public-keys/%('
'idx)s/openssh-key' %
{'version': self._service._metadata_version,
'idx': 'key'})]
self.assertEqual(expected, mock_get_cache_data.call_args_list)
self.assertEqual(['fake key'], response)
| cernops/cloudbase-init | cloudbaseinit/tests/metadata/services/test_ec2service.py | Python | apache-2.0 | 5,279 | 0 |
import pkg_resources
from datetime import datetime
import re
from inbox.api.validation import (
get_recipients, get_attachments, get_thread, get_message)
from inbox.api.err import InputError
from inbox.contacts.process_mail import update_contacts_from_message
from inbox.models import Message, Part
from inbox.models.action_log import schedule_action
from inbox.sqlalchemy_ext.util import generate_public_id
VERSION = pkg_resources.get_distribution('inbox-sync').version
class SendMailException(Exception):
"""
Raised when sending fails.
Parameters
----------
message: string
A descriptive error message.
http_code: int
An appropriate HTTP error code for the particular type of failure.
server_error: string, optional
The error returned by the mail server.
failures: dict, optional
If sending only failed for some recipients, information on the specific
failures.
"""
def __init__(self, message, http_code, server_error=None, failures=None):
self.message = message
self.http_code = http_code
self.server_error = server_error
self.failures = failures
super(SendMailException, self).__init__(
message, http_code, server_error, failures)
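# Illustrative example (not part of the original module): a sendmail backend
# could report a partial failure using the fields documented above, e.g.
#
#   raise SendMailException(
#       'Sending to at least one recipient failed.',
#       http_code=402,
#       server_error='550 5.1.1 User unknown',
#       failures={'nobody@example.com': 'Invalid recipient address'})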
def get_sendmail_client(account):
from inbox.sendmail import module_registry
sendmail_mod = module_registry.get(account.provider)
sendmail_cls = getattr(sendmail_mod, sendmail_mod.SENDMAIL_CLS)
sendmail_client = sendmail_cls(account)
return sendmail_client
def create_draft_from_mime(account, raw_mime, db_session):
our_uid = generate_public_id() # base-36 encoded string
new_headers = ('X-INBOX-ID: {0}-0\r\n'
'Message-Id: <{0}-0@mailer.nylas.com>\r\n'
'User-Agent: NylasMailer/{1}\r\n').format(our_uid, VERSION)
new_body = new_headers + raw_mime
with db_session.no_autoflush:
msg = Message.create_from_synced(account, '', '',
datetime.utcnow(), new_body)
if msg.from_addr and len(msg.from_addr) > 1:
raise InputError("from_addr field can have at most one item")
if msg.reply_to and len(msg.reply_to) > 1:
raise InputError("reply_to field can have at most one item")
if msg.subject is not None and not \
isinstance(msg.subject, basestring):
raise InputError('"subject" should be a string')
if not isinstance(msg.body, basestring):
raise InputError('"body" should be a string')
if msg.references or msg.in_reply_to:
msg.is_reply = True
thread_cls = account.thread_cls
msg.thread = thread_cls(
subject=msg.subject,
recentdate=msg.received_date,
namespace=account.namespace,
subjectdate=msg.received_date)
msg.is_created = True
msg.is_sent = True
msg.is_draft = False
msg.is_read = True
db_session.add(msg)
db_session.flush()
return msg
def block_to_part(block, message, namespace):
inline_image_uri = r'cid:{}'.format(block.public_id)
is_inline = re.search(inline_image_uri, message.body) is not None
# Create a new Part object to associate to the message object.
# (You can't just set block.message, because if block is an
# attachment on an existing message, that would dissociate it from
# the existing message.)
part = Part(block=block)
part.content_id = block.public_id if is_inline else None
part.namespace_id = namespace.id
part.content_disposition = 'inline' if is_inline else 'attachment'
part.is_inboxapp_attachment = True
return part
def create_message_from_json(data, namespace, db_session, is_draft):
""" Construct a Message instance from `data`, a dictionary representing the
POST body of an API request. All new objects are added to the session, but
not committed."""
# Validate the input and get referenced objects (thread, attachments)
# as necessary.
to_addr = get_recipients(data.get('to'), 'to')
cc_addr = get_recipients(data.get('cc'), 'cc')
bcc_addr = get_recipients(data.get('bcc'), 'bcc')
from_addr = get_recipients(data.get('from'), 'from')
reply_to = get_recipients(data.get('reply_to'), 'reply_to')
if from_addr and len(from_addr) > 1:
raise InputError("from_addr field can have at most one item")
if reply_to and len(reply_to) > 1:
raise InputError("reply_to field can have at most one item")
subject = data.get('subject')
if subject is not None and not isinstance(subject, basestring):
raise InputError('"subject" should be a string')
body = data.get('body', '')
if not isinstance(body, basestring):
raise InputError('"body" should be a string')
blocks = get_attachments(data.get('file_ids'), namespace.id, db_session)
reply_to_thread = get_thread(data.get('thread_id'), namespace.id,
db_session)
reply_to_message = get_message(data.get('reply_to_message_id'),
namespace.id, db_session)
if reply_to_message is not None and reply_to_thread is not None:
if reply_to_message not in reply_to_thread.messages:
raise InputError('Message {} is not in thread {}'.
format(reply_to_message.public_id,
reply_to_thread.public_id))
with db_session.no_autoflush:
account = namespace.account
dt = datetime.utcnow()
uid = generate_public_id()
to_addr = to_addr or []
cc_addr = cc_addr or []
bcc_addr = bcc_addr or []
blocks = blocks or []
if subject is None:
# If this is a reply with no explicitly specified subject, set the
# subject from the prior message/thread by default.
# TODO(emfree): Do we want to allow changing the subject on a reply
# at all?
if reply_to_message is not None:
subject = reply_to_message.subject
elif reply_to_thread is not None:
subject = reply_to_thread.subject
subject = subject or ''
message = Message()
message.namespace = namespace
message.is_created = True
message.is_draft = is_draft
message.from_addr = from_addr if from_addr else \
[(account.name, account.email_address)]
# TODO(emfree): we should maybe make received_date nullable, so its
# value doesn't change in the case of a drafted-and-later-reconciled
# message.
message.received_date = dt
message.subject = subject
message.body = body
message.to_addr = to_addr
message.cc_addr = cc_addr
message.bcc_addr = bcc_addr
message.reply_to = reply_to
# TODO(emfree): this is different from the normal 'size' value of a
# message, which is the size of the entire MIME message.
message.size = len(body)
message.is_read = True
message.is_sent = False
message.public_id = uid
message.version = 0
message.regenerate_nylas_uid()
# Set the snippet
message.snippet = message.calculate_html_snippet(body)
# Associate attachments to the draft message
for block in blocks:
message.parts.append(block_to_part(block, message, namespace))
update_contacts_from_message(db_session, message, namespace)
if reply_to_message is not None:
message.is_reply = True
_set_reply_headers(message, reply_to_message)
thread = reply_to_message.thread
message.reply_to_message = reply_to_message
elif reply_to_thread is not None:
message.is_reply = True
thread = reply_to_thread
# Construct the in-reply-to and references headers from the last
# message currently in the thread.
previous_messages = [m for m in thread.messages if not m.is_draft]
if previous_messages:
last_message = previous_messages[-1]
message.reply_to_message = last_message
_set_reply_headers(message, last_message)
else:
# If this isn't a reply to anything, create a new thread object for
# the draft. We specialize the thread class so that we can, for
# example, add the g_thrid for Gmail later if we reconcile a synced
# message with this one. This is a huge hack, but works.
message.is_reply = False
thread_cls = account.thread_cls
thread = thread_cls(
subject=message.subject,
recentdate=message.received_date,
namespace=namespace,
subjectdate=message.received_date)
message.thread = thread
db_session.add(message)
if is_draft:
schedule_action('save_draft', message, namespace.id, db_session,
version=message.version)
db_session.flush()
return message
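# Illustrative example (not part of the original module): `data` above mirrors
# the JSON POST body of the drafts API. Only keys actually read by
# create_message_from_json are shown; the recipient dict shape is an
# assumption for illustration.
#
#   data = {
#       'to': [{'name': 'Ben Bitdiddle', 'email': 'ben.bitdiddle@example.com'}],
#       'subject': 'Hello',
#       'body': '<html><body>Hi there</body></html>',
#       'file_ids': [],            # public ids of previously uploaded files
#       'thread_id': None,         # or an existing thread public id for replies
#       'reply_to_message_id': None,
#   }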
def update_draft(db_session, account, draft, to_addr=None,
subject=None, body=None, blocks=None, cc_addr=None,
bcc_addr=None, from_addr=None, reply_to=None):
"""
Update draft with new attributes.
"""
def update(attr, value=None):
if value is not None:
setattr(draft, attr, value)
if attr == 'body':
# Update size, snippet too
draft.size = len(value)
draft.snippet = draft.calculate_html_snippet(
value)
update('to_addr', to_addr)
update('cc_addr', cc_addr)
update('bcc_addr', bcc_addr)
update('reply_to', reply_to)
update('from_addr', from_addr)
update('subject', subject if subject else None)
update('body', body if body else None)
update('received_date', datetime.utcnow())
# Remove any attachments that aren't specified
new_block_ids = [b.id for b in blocks]
for part in filter(lambda x: x.block_id not in new_block_ids,
draft.parts):
draft.parts.remove(part)
db_session.delete(part)
# Parts require special handling
for block in blocks:
# Don't re-add attachments that are already attached
if block.id in [p.block_id for p in draft.parts]:
continue
draft.parts.append(block_to_part(block, draft, account.namespace))
thread = draft.thread
if len(thread.messages) == 1:
# If there are no prior messages on the thread, update its subject and
# dates to match the draft.
thread.subject = draft.subject
thread.subjectdate = draft.received_date
thread.recentdate = draft.received_date
# Remove previous message-contact associations, and create new ones.
draft.contacts = []
update_contacts_from_message(db_session, draft, account.namespace)
# The draft we're updating may or may not be one authored through the API:
# - Ours: is_created = True, Message-Id = public_id+version
# - Not Ours: is_created = False, Message-Id = ???
# Mark that the draft is now created by us
draft.is_created = True
# Save the current Message-Id so we know which draft to delete in syncback
old_message_id_header = draft.message_id_header
# Increment version and rebuild the message ID header.
draft.version += 1
draft.regenerate_nylas_uid()
# Sync to remote
schedule_action('update_draft', draft, draft.namespace.id, db_session,
version=draft.version,
old_message_id_header=old_message_id_header)
db_session.commit()
return draft
def delete_draft(db_session, account, draft):
""" Delete the given draft. """
thread = draft.thread
assert draft.is_draft
# Delete remotely.
schedule_action('delete_draft', draft, draft.namespace.id, db_session,
nylas_uid=draft.nylas_uid,
message_id_header=draft.message_id_header)
db_session.delete(draft)
# Delete the thread if it would now be empty.
if not thread.messages:
db_session.delete(thread)
db_session.commit()
def generate_attachments(message, blocks):
attachment_dicts = []
for block in blocks:
content_disposition = 'attachment'
for part in block.parts:
if part.message_id == message.id and part.content_disposition == 'inline':
content_disposition = 'inline'
break
attachment_dicts.append({
'block_id': block.public_id,
'filename': block.filename,
'data': block.data,
'content_type': block.content_type,
'content_disposition': content_disposition,
})
return attachment_dicts
def _set_reply_headers(new_message, previous_message):
"""When creating a draft in reply to a thread, set the In-Reply-To and
References headers appropriately, if possible."""
if previous_message.message_id_header:
new_message.in_reply_to = previous_message.message_id_header
if previous_message.references:
new_message.references = (previous_message.references +
[previous_message.message_id_header])
else:
new_message.references = [previous_message.message_id_header]
| nylas/sync-engine | inbox/sendmail/base.py | Python | agpl-3.0 | 13,544 | 0.000074 |
"""
Helper for keeping processes singletons
"""
__license__ = """
This file is part of RAPD
Copyright (C) 2016-2018 Cornell University
All rights reserved.
RAPD is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, version 3.
RAPD is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__created__ = "2016-03-02"
__maintainer__ = "Frank Murphy"
__email__ = "fmurphy@anl.gov"
__status__ = "Development"
# Standard imports
import fcntl
import os
def lock_file(file_path):
"""
Method to make sure only one instance is running on this machine.
    If file_path is False, no locking will occur and False is returned
    If file_path is not False and is already locked, True is returned
    If file_path is not False and can be locked, False is returned
Keyword arguments
file_path -- potential file for maintaining lock
"""
# If file_path is a path, try to lock
if file_path:
# Create the directory for file_path if it does not exist
if not os.path.exists(os.path.dirname(file_path)):
os.makedirs(os.path.dirname(file_path))
global _file_handle
_file_handle = open(file_path, "w")
try:
fcntl.lockf(_file_handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
return False
except IOError:
#raise Exception("%s is already locked, unable to run" % file_path)
return True
    # If file_path is False, locking is disabled, so always return False
    else:
        return False
def close_lock_file():
"""Close the _file_handle handle."""
_file_handle.close()
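# Illustrative usage sketch (not part of the original module):
#
#   import sys
#   from utils.lock import lock_file, close_lock_file
#
#   if lock_file("/tmp/rapd/rapd_control.lock"):
#       sys.exit("Another instance is already running")
#   try:
#       pass  # run the singleton process here
#   finally:
#       close_lock_file()
#
# lock_file() returns False when the lock was acquired (or locking is
# disabled) and True when another process already holds it.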
| RAPD/RAPD | src/utils/lock.py | Python | agpl-3.0 | 1,987 | 0.002013 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015, 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import os.path
import fixtures
from snapcraft.main import main
from snapcraft import tests
class PrimeCommandTestCase(tests.TestCase):
yaml_template = """name: prime-test
version: 1.0
summary: test prime
description: if the prime is successful the state file will be updated
confinement: strict
parts:
{parts}"""
yaml_part = """ prime{:d}:
plugin: nil"""
def make_snapcraft_yaml(self, n=1):
parts = '\n'.join([self.yaml_part.format(i) for i in range(n)])
super().make_snapcraft_yaml(self.yaml_template.format(parts=parts))
parts = []
for i in range(n):
part_dir = os.path.join(self.parts_dir, 'prime{}'.format(i))
state_dir = os.path.join(part_dir, 'state')
parts.append({
'part_dir': part_dir,
'state_dir': state_dir,
})
return parts
def test_prime_invalid_part(self):
fake_logger = fixtures.FakeLogger(level=logging.ERROR)
self.useFixture(fake_logger)
self.make_snapcraft_yaml()
with self.assertRaises(SystemExit) as raised:
main(['prime', 'no-prime', ])
self.assertEqual(1, raised.exception.code)
self.assertEqual(
fake_logger.output,
"The part named 'no-prime' is not defined in 'snapcraft.yaml'\n")
def test_prime_defaults(self):
fake_logger = fixtures.FakeLogger(level=logging.ERROR)
self.useFixture(fake_logger)
parts = self.make_snapcraft_yaml()
main(['prime'])
self.assertTrue(os.path.exists(self.snap_dir),
'Expected a prime directory')
self.assertTrue(
os.path.exists(
os.path.join(self.snap_dir, 'meta', 'snap.yaml')),
'Expected a snap.yaml')
self.assertTrue(os.path.exists(self.stage_dir),
'Expected a stage directory')
self.assertTrue(os.path.exists(self.parts_dir),
'Expected a parts directory')
self.assertTrue(os.path.exists(parts[0]['part_dir']),
'Expected a part directory for the build0 part')
self.verify_state('build0', parts[0]['state_dir'], 'prime')
def test_prime_one_part_only_from_3(self):
fake_logger = fixtures.FakeLogger(level=logging.ERROR)
self.useFixture(fake_logger)
parts = self.make_snapcraft_yaml(n=3)
main(['prime', 'prime1'])
self.assertFalse(
os.path.exists(
os.path.join(self.snap_dir, 'meta', 'snap.yaml')),
'There should not be a snap.yaml')
self.assertTrue(os.path.exists(self.snap_dir),
'Expected a prime directory')
self.assertTrue(os.path.exists(self.stage_dir),
'Expected a stage directory')
self.assertTrue(os.path.exists(self.parts_dir),
'Expected a parts directory')
self.assertTrue(os.path.exists(parts[1]['part_dir']),
'Expected a part directory for the prime1 part')
self.verify_state('prime1', parts[1]['state_dir'], 'prime')
for i in [0, 2]:
self.assertFalse(os.path.exists(parts[i]['part_dir']),
'Pulled wrong part')
self.assertFalse(os.path.exists(parts[i]['state_dir']),
'Expected for only to be a state file for build1')
def test_prime_ran_twice_is_a_noop(self):
fake_logger = fixtures.FakeLogger(level=logging.INFO)
self.useFixture(fake_logger)
parts = self.make_snapcraft_yaml()
main(['prime'])
self.assertEqual(
'Preparing to pull prime0 \n'
'Pulling prime0 \n'
'Preparing to build prime0 \n'
'Building prime0 \n'
'Staging prime0 \n'
'Priming prime0 \n',
fake_logger.output)
self.assertTrue(os.path.exists(self.stage_dir),
'Expected a stage directory')
self.assertTrue(os.path.exists(self.parts_dir),
'Expected a parts directory')
self.assertTrue(os.path.exists(parts[0]['part_dir']),
'Expected a part directory for the build0 part')
self.verify_state('build0', parts[0]['state_dir'], 'prime')
fake_logger = fixtures.FakeLogger(level=logging.INFO)
self.useFixture(fake_logger)
main(['prime'])
self.assertEqual(
'Skipping pull prime0 (already ran)\n'
'Skipping build prime0 (already ran)\n'
'Skipping stage prime0 (already ran)\n'
'Skipping prime prime0 (already ran)\n',
fake_logger.output)
| jocave/snapcraft | snapcraft/tests/test_commands_prime.py | Python | gpl-3.0 | 5,460 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-16 10:06
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('foxy_player_api', '0009_auto_20170415_2318'),
]
operations = [
migrations.AlterUniqueTogether(
name='playlisttracks',
unique_together=set([]),
),
]
| TheCheshireFox/foxy-player | foxy_player/foxy_player_api/migrations/0010_auto_20170416_1306.py | Python | apache-2.0 | 419 | 0 |
# Copyright (c) 2019-2021 by Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Isolate Python 2.6 and 2.7 version-specific semantic actions here.
"""
from uncompyle6.semantics.consts import TABLE_DIRECT
def customize_for_version26_27(self, version):
########################################
# Python 2.6+
# except <condition> as <var>
# vs. older:
# except <condition> , <var>
#
# For 2.6 we use the older syntax which
# matches how we parse this in bytecode
########################################
if version > (2, 6):
TABLE_DIRECT.update({
'except_cond2': ( '%|except %c as %c:\n', 1, 5 ),
# When a generator is a single parameter of a function,
# it doesn't need the surrounding parenethesis.
'call_generator': ('%c%P', 0, (1, -1, ', ', 100)),
})
else:
TABLE_DIRECT.update({
'testtrue_then': ( 'not %p', (0, 22) ),
})
# FIXME: this should be a transformation
def n_call(node):
mapping = self._get_mapping(node)
key = node
for i in mapping[1:]:
key = key[i]
pass
if key.kind == 'CALL_FUNCTION_1':
# A function with one argument. If this is a generator,
# no parenthesis is needed.
args_node = node[-2]
if args_node == 'expr':
n = args_node[0]
if n == 'generator_exp':
node.kind = 'call_generator'
pass
pass
self.default(node)
self.n_call = n_call
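# Illustrative example (not part of the original module): with the 2.7 table
# entry above, an exception handler decompiles as
#     except ValueError as err:
# while the 2.6 path keeps the older comma form
#     except ValueError, err:
# matching how 2.6 bytecode is parsed.  The call_generator entry likewise lets
# a call whose single argument is a generator expression be rendered as
#     any(x > 0 for x in xs)
# without an extra pair of parentheses around the generator.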
| rocky/python-uncompyle6 | uncompyle6/semantics/customize26_27.py | Python | gpl-3.0 | 2,225 | 0.002247 |
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class ComputerSystem100SystemType(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
ComputerSystem100SystemType - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
}
self.attribute_map = {
}
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| jlongever/redfish-client-python | on_http_redfish_1_0/models/computer_system_1_0_0_system_type.py | Python | apache-2.0 | 2,513 | 0.002388 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PerlFontTtf(PerlPackage):
"""Perl module for TrueType Font hacking"""
homepage = "http://search.cpan.org/~bhallissy/Font-TTF-1.06/lib/Font/TTF.pm"
url = "http://search.cpan.org/CPAN/authors/id/B/BH/BHALLISSY/Font-TTF-1.06.tar.gz"
version('1.06', '241b59310ad4450e6e050d5e790f1b21')
| krafczyk/spack | var/spack/repos/builtin/packages/perl-font-ttf/package.py | Python | lgpl-2.1 | 1,568 | 0.001913 |
#-*- coding: utf-8 -*-
'''
Created on 24 Aug. 2010
@author: ivan
'''
from gi.repository import Gtk
import logging
from foobnix.fc.fc import FC
from foobnix.helpers.image import ImageBase
from foobnix.util.const import SITE_LOCALE, ICON_FOOBNIX
from foobnix.util.localization import foobnix_localization
from foobnix.gui.service.path_service import get_foobnix_resourse_path_by_name
foobnix_localization()
def responseToDialog(entry, dialog, response):
dialog.response(response)
def file_selection_dialog(title, current_folder=None):
chooser = Gtk.FileSelection(title)
chooser.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
chooser.set_default_response(Gtk.ResponseType.OK)
chooser.set_select_multiple(True)
paths = None
if current_folder:
chooser.set_current_folder(current_folder)
response = chooser.run()
if response == Gtk.ResponseType.OK:
paths = chooser.get_selections()
elif response == Gtk.ResponseType.CANCEL:
logging.info('Closed, no files selected')
chooser.destroy()
return paths
def file_chooser_dialog(title, current_folder=None):
    chooser = Gtk.FileChooserDialog(title, action=Gtk.FileChooserAction.OPEN, buttons=("folder-open", Gtk.ResponseType.OK))
chooser.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
chooser.set_default_response(Gtk.ResponseType.OK)
chooser.set_select_multiple(True)
paths = None
if current_folder:
chooser.set_current_folder(current_folder)
response = chooser.run()
if response == Gtk.ResponseType.OK:
paths = chooser.get_filenames()
elif response == Gtk.ResponseType.CANCEL:
logging.info('Closed, no files selected')
chooser.destroy()
return paths
def directory_chooser_dialog(title, current_folder=None):
chooser = Gtk.FileChooserDialog(title, action=Gtk.FileChooserAction.SELECT_FOLDER, buttons=("folder-open", Gtk.ResponseType.OK))
chooser.set_default_response(Gtk.ResponseType.OK)
chooser.set_select_multiple(True)
paths = None
if current_folder:
chooser.set_current_folder(current_folder)
response = chooser.run()
if response == Gtk.ResponseType.OK:
paths = chooser.get_filenames()
elif response == Gtk.ResponseType.CANCEL:
logging.info('Closed, no directory selected')
chooser.destroy()
return paths
def one_line_dialog(dialog_title, parent=None, entry_text=None, message_text1=None, message_text2=None):
dialog = Gtk.MessageDialog(
parent,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.INFO,
Gtk.ButtonsType.OK,
None)
dialog.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
dialog.set_title(dialog_title)
if message_text1:
dialog.set_markup(message_text1)
if message_text2:
dialog.format_secondary_markup(message_text2)
entry = Gtk.Entry()
'''set last widget in action area as default widget (button OK)'''
dialog.set_default_response(Gtk.ResponseType.OK)
'''activate default widget after Enter pressed in entry'''
entry.set_activates_default(True)
if entry_text:
entry.set_text(entry_text)
dialog.vbox.pack_start(entry, True, True, 0)
dialog.show_all()
dialog.run()
text = entry.get_text()
dialog.destroy()
return text if text else None
def two_line_dialog(dialog_title, parent=None, message_text1=None,
message_text2=None, entry_text1="", entry_text2=""):
dialog = Gtk.MessageDialog(
parent,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.QUESTION,
Gtk.ButtonsType.OK,
None)
dialog.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
dialog.set_title(dialog_title)
if message_text1:
dialog.set_markup(message_text1)
if message_text2:
dialog.format_secondary_markup(message_text2)
login_entry = Gtk.Entry()
if entry_text1:
login_entry.set_text(entry_text1)
login_entry.show()
password_entry = Gtk.Entry()
if entry_text2:
password_entry.set_text(entry_text2)
password_entry.show()
hbox = Gtk.VBox()
hbox.pack_start(login_entry, False, False, 0)
hbox.pack_start(password_entry, False, False, 0)
dialog.vbox.pack_start(hbox, True, True, 0)
dialog.show_all()
'''set last widget in action area as default widget (button OK)'''
dialog.set_default_response(Gtk.ResponseType.OK)
'''activate default widget after Enter pressed in entry'''
login_entry.set_activates_default(True)
password_entry.set_activates_default(True)
dialog.run()
login_text = login_entry.get_text()
password_text = password_entry.get_text()
dialog.destroy()
return [login_text, password_text] if (login_text and password_text) else [None,None]
def info_dialog(title, message, parent=None):
dialog = Gtk.MessageDialog(
parent,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.INFO,
Gtk.ButtonsType.OK,
None)
dialog.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
dialog.set_title(title)
dialog.set_markup(title)
dialog.format_secondary_markup(message)
dialog.show_all()
dialog.run()
dialog.destroy()
def info_dialog_with_link(title, version, link):
dialog = Gtk.MessageDialog(
None,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.INFO,
Gtk.ButtonsType.OK,
None)
dialog.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
dialog.set_title(title)
dialog.set_markup(title)
dialog.format_secondary_markup("<b>" + version + "</b>")
link = Gtk.LinkButton(link, link)
link.show()
dialog.vbox.pack_end(link, True, True, 0)
dialog.show_all()
dialog.run()
dialog.destroy()
def info_dialog_with_link_and_donate(version):
dialog = Gtk.MessageDialog(
None,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.INFO,
Gtk.ButtonsType.OK,
None)
dialog.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
dialog.set_title(_("New foobnix release avaliable"))
dialog.set_markup(_("New foobnix release avaliable"))
dialog.format_secondary_markup("<b>" + version + "</b>")
card = Gtk.LinkButton("http://foobnix.com/%s/download.html"%SITE_LOCALE, _("Download and Donate"))
#terminal = Gtk.LinkButton("http://www.foobnix.com/donate/eng#terminal", _("Download and Donate by Webmoney or Payment Terminal"))
    link = Gtk.LinkButton("http://www.foobnix.com/support?lang=%s" % SITE_LOCALE, _("Download"))  # packed into vbox below
    frame = Gtk.Frame(label="Please donate and download")
    vbox = Gtk.Box.new(Gtk.Orientation.VERTICAL, 0)
    vbox.set_homogeneous(True)
    vbox.pack_start(card, True, True, 0)
    #vbox.pack_start(terminal, True, True)
    vbox.pack_start(link, True, True, 0)
    frame.add(vbox)
    image = ImageBase("images/foobnix-slogan.jpg")
    dialog.vbox.pack_start(image, True, True, 0)
    dialog.vbox.pack_start(frame, True, True, 0)
    dialog.vbox.pack_start(Gtk.Label(_("We hope you like the player. We will make it even better.")), True, True, 0)
    version_check = Gtk.CheckButton(_("Check for new foobnix release on start"))
    version_check.set_active(FC().check_new_version)
    dialog.vbox.pack_start(version_check, True, True, 0)
dialog.show_all()
dialog.run()
FC().check_new_version = version_check.get_active()
FC().save()
dialog.destroy()
def show_entry_dialog(title, description):
dialog = Gtk.MessageDialog(
None,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.QUESTION,
Gtk.ButtonsType.OK,
None)
dialog.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
dialog.set_markup(title)
entry = Gtk.Entry()
entry.connect("activate", responseToDialog, dialog, Gtk.ResponseType.OK)
hbox = Gtk.HBox()
hbox.pack_start(Gtk.Label("Value:"), False, 5, 5)
    hbox.pack_end(entry, True, True, 0)
dialog.format_secondary_markup(description)
dialog.vbox.pack_end(hbox, True, True, 0)
dialog.show_all()
dialog.run()
text = entry.get_text()
dialog.destroy()
return text
def show_login_password_error_dialog(title, description, login, password):
dialog = Gtk.MessageDialog(
None,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.ERROR,
Gtk.ButtonsType.OK,
title)
dialog.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
dialog.set_markup(str(title))
dialog.format_secondary_markup(description)
login_entry = Gtk.Entry()
login_entry.set_text(login)
login_entry.show()
password_entry = Gtk.Entry()
password_entry.set_text(password)
password_entry.set_visibility(False)
password_entry.set_invisible_char("*")
password_entry.show()
hbox = Gtk.VBox()
hbox.pack_start(login_entry, False, False, 0)
hbox.pack_start(password_entry, False, False, 0)
dialog.vbox.pack_start(hbox, True, True, 0)
dialog.show_all()
dialog.run()
login_text = login_entry.get_text()
password_text = password_entry.get_text()
dialog.destroy()
return [login_text, password_text]
def file_saving_dialog(title, current_folder=None):
chooser = Gtk.FileChooserDialog(title, action=Gtk.FileChooserAction.SAVE, buttons=("document-save", Gtk.ResponseType.OK))
chooser.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
chooser.set_default_response(Gtk.ResponseType.OK)
chooser.set_select_multiple(False)
if current_folder:
chooser.set_current_folder(current_folder)
response = chooser.run()
if response == Gtk.ResponseType.OK:
paths = chooser.get_filenames()
elif response == Gtk.ResponseType.CANCEL:
logging.info('Closed, no files selected')
chooser.destroy()
class FileSavingDialog(Gtk.FileChooserDialog):
def __init__(self, title, func, args = None, current_folder=None, current_name=None):
Gtk.FileChooserDialog.__init__(self, title, action=Gtk.FileChooserAction.SAVE, buttons=("document-save", Gtk.ResponseType.OK))
self.set_default_response(Gtk.ResponseType.OK)
self.set_select_multiple(False)
self.set_do_overwrite_confirmation(True)
self.set_icon_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))
if current_folder:
self.set_current_folder(current_folder)
if current_name:
self.set_current_name(current_name)
response = self.run()
if response == Gtk.ResponseType.OK:
filename = self.get_filename()
folder = self.get_current_folder()
if func:
try:
if args: func(filename, folder, args)
else: func(filename, folder)
                except IOError as e:
logging.error(e)
elif response == Gtk.ResponseType.CANCEL:
logging.info('Closed, no files selected')
self.destroy()
if __name__ == '__main__':
info_dialog_with_link_and_donate("foobnix 0.2.1-8")
Gtk.main()
|
kagel/foobnix
|
foobnix/helpers/dialog_entry.py
|
Python
|
gpl-3.0
| 12,111 | 0.004625 |
"""
Description : This file implements the LogSig algorithm for log parsing
Author : LogPAI team
License : MIT
"""
from datetime import datetime
import random
import math
import time
import operator
import re
import os
import pandas as pd
import hashlib
class Para:
def __init__(self, path, rex, savePath, groupNum, logformat):
self.path = path
self.rex = rex
self.savePath = savePath
self.groupNum = groupNum # partition into k groups
self.logformat = logformat
class LogParser:
def __init__(self, indir, outdir, groupNum, log_format, rex=[], seed=0):
self.para = Para(path=indir, rex=rex, savePath=outdir, groupNum=groupNum, logformat=log_format)
self.wordLL = []
self.loglineNum = 0
self.termpairLLT = []
self.logNumPerGroup = []
self.groupIndex = dict() # each line corresponding to which group
self.termPairLogNumLD = []
self.logIndexPerGroup = []
self.seed = seed
    def loadLog(self):
        """ Load the dataset and use regular expressions to split it and remove some columns
        """
print('Loading logs...')
headers, regex = self.generate_logformat_regex(self.para.logformat)
self.df_log = self.log_to_dataframe(os.path.join(self.para.path, self.logname), regex, headers,
self.para.logformat)
for idx, line in self.df_log.iterrows():
line = line['Content']
if self.para.rex:
for currentRex in self.para.rex:
line = re.sub(currentRex, '', line)
wordSeq = line.strip().split()
self.wordLL.append(tuple(wordSeq))
def termpairGene(self):
print('Generating term pairs...')
i = 0
for wordL in self.wordLL:
wordLT = []
for j in range(len(wordL)):
for k in range(j + 1, len(wordL), 1):
if wordL[j] != '[$]' and wordL[k] != '[$]':
termpair = (wordL[j], wordL[k])
wordLT.append(termpair)
self.termpairLLT.append(wordLT)
i += 1
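        # For illustration: a log line split into ('open', 'file', 'ok') yields the ordered
        # term pairs ('open', 'file'), ('open', 'ok') and ('file', 'ok').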
        # termPairLogNumLD is used to count the occurrences of each term pair in each group
for i in range(self.para.groupNum):
newDict = dict()
self.termPairLogNumLD.append(newDict)
# initialize the item value to zero
self.logNumPerGroup.append(0)
# divide logs into initial groupNum groups randomly, the group number of each log is stored in the groupIndex
self.loglineNum = len(self.wordLL)
random.seed(self.seed)
for i in range(self.loglineNum):
ran = random.randint(0, self.para.groupNum - 1) # group number from 0 to k-1
self.groupIndex[i] = ran
self.logNumPerGroup[ran] += 1 # count the number of loglines per group
# count the frequency of each termpairs per group
i = 0
for termpairLT in self.termpairLLT:
j = 0
for key in termpairLT:
currGroupIndex = self.groupIndex[i]
if key not in self.termPairLogNumLD[currGroupIndex]:
self.termPairLogNumLD[currGroupIndex][key] = 1
else:
self.termPairLogNumLD[currGroupIndex][key] += 1
j += 1
i += 1
    def LogMessParti(self):
        """ Use local search: for each log, find the group it should be moved to.
            In the process, the term-pair occurrence counts and the log counts of the
            affected groups are updated accordingly.
        """
print('Log message partitioning...')
changed = True
while changed:
changed = False
i = 0
for termpairLT in self.termpairLLT:
curGroup = self.groupIndex[i]
alterGroup = potenFunc(curGroup, self.termPairLogNumLD, self.logNumPerGroup, i, termpairLT,
self.para.groupNum)
if curGroup != alterGroup:
changed = True
self.groupIndex[i] = alterGroup
# update the dictionary of each group
for key in termpairLT:
# minus 1 from the current group count on this key
self.termPairLogNumLD[curGroup][key] -= 1
if self.termPairLogNumLD[curGroup][key] == 0:
del self.termPairLogNumLD[curGroup][key]
# add 1 to the alter group
if key not in self.termPairLogNumLD[alterGroup]:
self.termPairLogNumLD[alterGroup][key] = 1
else:
self.termPairLogNumLD[alterGroup][key] += 1
self.logNumPerGroup[curGroup] -= 1
self.logNumPerGroup[alterGroup] += 1
i += 1
    def signatConstr(self):
        """ Calculate the occurrence of each word in each group and, for each group, keep the
            words that appear in at least half of the group's log lines as candidate terms
            (a list of dicts mapping word to frequency).
        """
print('Log message signature construction...')
# create the folder to save the resulted templates
if not os.path.exists(self.para.savePath):
os.makedirs(self.para.savePath)
wordFreqPerGroup = []
candidateTerm = []
candidateSeq = []
self.signature = []
# save the all the log indexs of each group: logIndexPerGroup
for t in range(self.para.groupNum):
dic = dict()
newlogIndex = []
newCandidate = dict()
wordFreqPerGroup.append(dic)
self.logIndexPerGroup.append(newlogIndex)
candidateSeq.append(newCandidate)
        # count the occurrence of each word of each log per group
        # and save it into wordFreqPerGroup, which is a list of dictionaries,
        # where each dictionary represents a group: key is the word, value is the occurrence count
lineNo = 0
for wordL in self.wordLL:
groupIndex = self.groupIndex[lineNo]
self.logIndexPerGroup[groupIndex].append(lineNo)
for key in wordL:
if key not in wordFreqPerGroup[groupIndex]:
wordFreqPerGroup[groupIndex][key] = 1
else:
wordFreqPerGroup[groupIndex][key] += 1
lineNo += 1
        # calculate halfLogNum and select those words whose occurrence is at least halfLogNum
        # as the constant part, and save them into candidateTerm
for i in range(self.para.groupNum):
halfLogNum = math.ceil(self.logNumPerGroup[i] / 2.0)
dic = dict((k, v) for k, v in wordFreqPerGroup[i].items() if v >= halfLogNum)
candidateTerm.append(dic)
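        # For illustration: a group holding 4 log lines has halfLogNum == 2, so only words
        # that occur in at least 2 of those lines survive as candidate terms for that group.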
        # scan each log line for words that are also in candidateTerm and put these words together
        # as a new candidate sequence, so each raw log gets a corresponding candidate sequence;
        # then count the occurrences of these candidate sequences per group and select the most
        # frequent candidate sequence as the signature, i.e. the template
lineNo = 0
for wordL in self.wordLL:
curGroup = self.groupIndex[lineNo]
newCandiSeq = []
for key in wordL:
if key in candidateTerm[curGroup]:
newCandiSeq.append(key)
keySeq = tuple(newCandiSeq)
if keySeq not in candidateSeq[curGroup]:
candidateSeq[curGroup][keySeq] = 1
else:
candidateSeq[curGroup][keySeq] += 1
lineNo += 1
for i in range(self.para.groupNum):
sig = max(candidateSeq[i].items(), key=operator.itemgetter(1))[0]
self.signature.append(sig)
def writeResultToFile(self):
idx_eventID = {}
for idx, item in enumerate(self.signature):
eventStr = ' '.join(item)
idx_eventID[idx] = hashlib.md5(eventStr.encode('utf-8')).hexdigest()[0:8]
EventId = []
EventTemplate = []
LineId_groupId = []
for idx, item in enumerate(self.logIndexPerGroup):
for LineId in item:
LineId_groupId.append([LineId, idx])
LineId_groupId.sort(key=lambda x:x[0])
for item in LineId_groupId:
GroupID = item[1]
EventId.append(idx_eventID[GroupID])
EventTemplate.append(' '.join(self.signature[GroupID]))
self.df_log['EventId'] = EventId
self.df_log['EventTemplate'] = EventTemplate
self.df_log.to_csv(os.path.join(self.para.savePath, self.logname + '_structured.csv'), index=False)
occ_dict = dict(self.df_log['EventTemplate'].value_counts())
df_event = pd.DataFrame()
df_event['EventTemplate'] = self.df_log['EventTemplate'].unique()
df_event['EventId'] = df_event['EventTemplate'].map(lambda x: hashlib.md5(x.encode('utf-8')).hexdigest()[0:8])
df_event['Occurrences'] = df_event['EventTemplate'].map(occ_dict)
df_event.to_csv(os.path.join(self.para.savePath, self.logname + '_templates.csv'), index=False, columns=["EventId", "EventTemplate","Occurrences"])
def log_to_dataframe(self, log_file, regex, headers, logformat):
""" Function to transform log file to dataframe """
log_messages = []
linecount = 0
with open(log_file, 'r') as fin:
for line in fin.readlines():
try:
match = regex.search(line.strip())
message = [match.group(header) for header in headers]
log_messages.append(message)
linecount += 1
except Exception as e:
pass
logdf = pd.DataFrame(log_messages, columns=headers)
logdf.insert(0, 'LineId', None)
logdf['LineId'] = [i + 1 for i in range(linecount)]
return logdf
def generate_logformat_regex(self, logformat):
"""
Function to generate regular expression to split log messages
"""
headers = []
splitters = re.split(r'(<[^<>]+>)', logformat)
regex = ''
for k in range(len(splitters)):
if k % 2 == 0:
                splitter = re.sub(r' +', r'\s+', splitters[k])
regex += splitter
else:
header = splitters[k].strip('<').strip('>')
regex += '(?P<%s>.*?)' % header
headers.append(header)
regex = re.compile('^' + regex + '$')
return headers, regex
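    # For illustration (hypothetical format string, not tied to any dataset):
    # generate_logformat_regex('<Date> <Time> <Content>') returns headers == ['Date', 'Time',
    # 'Content'] and a compiled regex equivalent to
    # '^(?P<Date>.*?)\s+(?P<Time>.*?)\s+(?P<Content>.*?)$', which loadLog uses to split each raw line.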
def parse(self, logname):
print('Parsing file: ' + os.path.join(self.para.path, logname))
start_time = datetime.now()
self.logname = logname
self.loadLog()
self.termpairGene()
self.LogMessParti()
self.signatConstr()
self.writeResultToFile()
print('Parsing done. [Time taken: {!s}]'.format(datetime.now() - start_time))
def potenFunc(curGroupIndex, termPairLogNumLD, logNumPerGroup, lineNum, termpairLT, k):
maxDeltaD = 0
maxJ = curGroupIndex
for i in range(k):
returnedDeltaD = getDeltaD(logNumPerGroup, termPairLogNumLD, curGroupIndex, i, lineNum, termpairLT)
if returnedDeltaD > maxDeltaD:
maxDeltaD = returnedDeltaD
maxJ = i
return maxJ
# part of the potential function
def getDeltaD(logNumPerGroup, termPairLogNumLD, groupI, groupJ, lineNum, termpairLT):
deltaD = 0
Ci = logNumPerGroup[groupI]
Cj = logNumPerGroup[groupJ]
for r in termpairLT:
if r in termPairLogNumLD[groupJ]:
deltaD += (pow(((termPairLogNumLD[groupJ][r] + 1) / (Cj + 1.0)), 2) \
- pow((termPairLogNumLD[groupI][r] / (Ci + 0.0)), 2))
else:
deltaD += (pow((1 / (Cj + 1.0)), 2) - pow((termPairLogNumLD[groupI][r] / (Ci + 0.0)), 2))
deltaD = deltaD * 3
return deltaD
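# A toy, self-contained check (hypothetical counts, not taken from any real log set):
# group 0 currently owns a line whose term pairs are ("a", "b") and ("b", "c"), while
# group 1 has only seen ("a", "b"). getDeltaD reports how the potential would change if
# the line moved from group 0 to group 1, and potenFunc returns the group with the
# largest gain for that line.
if __name__ == '__main__':
    toy_counts = [{("a", "b"): 2, ("b", "c"): 1}, {("a", "b"): 3}]
    toy_sizes = [2, 3]
    print(getDeltaD(toy_sizes, toy_counts, 0, 1, 0, [("a", "b"), ("b", "c")]))
    print(potenFunc(0, toy_counts, toy_sizes, 0, [("a", "b"), ("b", "c")], 2))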
|
logpai/logparser
|
logparser/LogSig/LogSig.py
|
Python
|
mit
| 12,489 | 0.003043 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Sharoon Thomas
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
class email_template_preview(osv.osv_memory):
_inherit = "email.template"
_name = "email_template.preview"
_description = "Email Template Preview"
def _get_records(self, cr, uid, context=None):
"""
Return Records of particular Email Template's Model
"""
if context is None:
context = {}
template_id = context.get('template_id', False)
if not template_id:
return []
email_template = self.pool.get('email.template')
template = email_template.browse(cr, uid, int(template_id), context=context)
template_object = template.model_id
model = self.pool[template_object.model]
record_ids = model.search(cr, uid, [], 0, 10, 'id', context=context)
default_id = context.get('default_res_id')
if default_id and default_id not in record_ids:
record_ids.insert(0, default_id)
return model.name_get(cr, uid, record_ids, context)
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
result = super(email_template_preview, self).default_get(cr, uid, fields, context=context)
email_template = self.pool.get('email.template')
template_id = context.get('template_id')
if 'res_id' in fields and not result.get('res_id'):
records = self._get_records(cr, uid, context=context)
result['res_id'] = records and records[0][0] or False # select first record as a Default
if template_id and 'model_id' in fields and not result.get('model_id'):
result['model_id'] = email_template.read(cr, uid, int(template_id), ['model_id'], context).get('model_id', False)
return result
_columns = {
'res_id': fields.selection(_get_records, 'Sample Document'),
'partner_ids': fields.many2many('res.partner', string='Recipients'),
}
def on_change_res_id(self, cr, uid, ids, res_id, context=None):
if context is None:
context = {'value': {}}
if not res_id or not context.get('template_id'):
return {'value': {}}
email_template = self.pool.get('email.template')
template_id = context.get('template_id')
template = email_template.browse(cr, uid, template_id, context=context)
# generate and get template values
mail_values = email_template.generate_email(cr, uid, template_id, res_id, context=context)
vals = dict((field, mail_values.get(field, False)) for field in ('email_from', 'email_to', 'email_cc', 'reply_to', 'subject', 'body_html', 'partner_to', 'partner_ids', 'attachment_ids'))
vals['name'] = template.name
return {'value': vals}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
diogocs1/comps
|
web/addons/email_template/wizard/email_template_preview.py
|
Python
|
apache-2.0
| 3,851 | 0.002597 |
"""
The Fibonacci Sequence is the series of numbers:
0, 1, 1, 2, 3, 5, 8, 13, 21, 34, ...
The next number is found by adding up the two numbers before it.
The 2 is found by adding the two numbers before it (1+1)
Similarly, the 3 is found by adding the two numbers before it (1+2),
And the 5 is (2+3),
and so on!
"""
"""
Using lists to return a list of fib numbers
"""
def fib1(limit=10):
"""
Returns a list of fib numbers
"""
nth_number = limit
if limit <= 1:
answer = [0]
elif limit == 2:
answer = [0,1]
else:
fib_num = [0,1]
while len(fib_num) < nth_number:
fib1 = fib_num[len(fib_num)-2]
fib2 = fib_num[len(fib_num)-1]
fib3 = fib2 + fib1
fib_num.append(fib3)
answer = fib_num
return answer
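# A short worked check of the rule in the module docstring: fib1(7) builds
# [0, 1, 1, 2, 3, 5, 8], since 0 + 1 = 1, 1 + 1 = 2, 1 + 2 = 3, 2 + 3 = 5 and 3 + 5 = 8.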
"""
How to return a specific fib number.
"""
def fib2(nth_num=10):
"""
Returns the nth fib number
"""
# Base cases
fib1 = 0
fib2 = 1
if nth_num <= 1:
answer = fib1
elif nth_num == 2:
answer = fib2
else:
current_fib = 2
while nth_num - current_fib > 0:
fib1, fib2 = fib2, fib1 + fib2
current_fib = current_fib + 1
answer = fib2
return answer
"""
Solve with generators
"""
def fib3(nth_num=10):
"""
A generator that yields fib numbers
"""
# Base case
fib1 = 0
fib2 = 1
if nth_num <= 1:
yield fib1
elif nth_num == 2:
yield fib1
yield fib2
else:
yield fib1
yield fib2
current_fib = 2
while nth_num - current_fib > 0:
fib1, fib2 = fib2, fib1 + fib2
yield fib2
current_fib = current_fib + 1
def fib_list(limit=10):
answer = []
for fib_num in fib3(limit):
answer.append(fib_num)
return answer
def nth_fib_num(nth_num=10):
answer = 0
for fib_num in fib3(nth_num):
answer = fib_num
return answer
if __name__ == "__main__":
print(fib1(10))
print(fib2(10))
print(fib_list(10))
print(nth_fib_num(10))
|
crazcalm/PyTN_talk_proposal
|
recipies/recipe1/fib.py
|
Python
|
mit
| 2,169 | 0.004149 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017,2020 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from textwrap import dedent
from testtools.matchers import Contains, FileContains
from snapcraft.plugins.v1 import _python
from ._basesuite import PythonBaseTestCase
def _create_site_py(base_dir):
site_py = os.path.join(base_dir, "usr", "lib", "pythontest", "site.py")
os.makedirs(os.path.dirname(site_py))
open(site_py, "w").close()
def _create_user_site_packages(base_dir):
user_site_dir = os.path.join(base_dir, "lib", "pythontest", "site-packages")
os.makedirs(user_site_dir)
class SiteCustomizeTestCase(PythonBaseTestCase):
def setUp(self):
super().setUp()
self.expected_sitecustomize = dedent(
"""\
import site
import os
snap_dir = os.getenv("SNAP")
snapcraft_stage_dir = os.getenv("SNAPCRAFT_STAGE")
snapcraft_part_install = os.getenv("SNAPCRAFT_PART_INSTALL")
# Do not include snap_dir during builds as this will include
# snapcraft's in-snap site directory.
if snapcraft_stage_dir is not None and snapcraft_part_install is not None:
site_directories = [snapcraft_stage_dir, snapcraft_part_install]
else:
site_directories = [snap_dir]
for d in site_directories:
if d:
site_dir = os.path.join(d, "lib/pythontest/site-packages")
site.addsitedir(site_dir)
if snap_dir:
site.ENABLE_USER_SITE = False"""
)
def test_generate_sitecustomize_staged(self):
stage_dir = "stage_dir"
install_dir = "install_dir"
# Create the python binary in the staging area
self._create_python_binary(stage_dir)
# Create a site.py in both staging and install areas
_create_site_py(stage_dir)
_create_site_py(install_dir)
# Create a user site dir in install area
_create_user_site_packages(install_dir)
_python.generate_sitecustomize(
"test", stage_dir=stage_dir, install_dir=install_dir
)
site_path = os.path.join(
install_dir, "usr", "lib", "pythontest", "sitecustomize.py"
)
self.assertThat(site_path, FileContains(self.expected_sitecustomize))
def test_generate_sitecustomize_installed(self):
stage_dir = "stage_dir"
install_dir = "install_dir"
# Create the python binary in the installed area
self._create_python_binary(install_dir)
# Create a site.py in both staging and install areas
_create_site_py(stage_dir)
_create_site_py(install_dir)
# Create a user site dir in install area
_create_user_site_packages(install_dir)
_python.generate_sitecustomize(
"test", stage_dir=stage_dir, install_dir=install_dir
)
site_path = os.path.join(
install_dir, "usr", "lib", "pythontest", "sitecustomize.py"
)
self.assertThat(site_path, FileContains(self.expected_sitecustomize))
def test_generate_sitecustomize_missing_user_site_raises(self):
stage_dir = "stage_dir"
install_dir = "install_dir"
# Create the python binary in the installed area
self._create_python_binary(install_dir)
# Create a site.py in both staging and install areas
_create_site_py(stage_dir)
_create_site_py(install_dir)
# Do NOT create a user site dir, and attempt to generate sitecustomize.
raised = self.assertRaises(
_python.errors.MissingUserSitePackagesError,
_python.generate_sitecustomize,
"test",
stage_dir=stage_dir,
install_dir=install_dir,
)
self.assertThat(str(raised), Contains("Unable to find user site packages"))
def test_generate_sitecustomize_missing_site_py_raises(self):
stage_dir = "stage_dir"
install_dir = "install_dir"
# Create the python binary in the staging area
self._create_python_binary(stage_dir)
# Create a site.py, but only in install area (not staging area)
_create_site_py(install_dir)
# Create a user site dir in install area
_create_user_site_packages(install_dir)
raised = self.assertRaises(
_python.errors.MissingSitePyError,
_python.generate_sitecustomize,
"test",
stage_dir=stage_dir,
install_dir=install_dir,
)
self.assertThat(str(raised), Contains("Unable to find site.py"))
|
chipaca/snapcraft
|
tests/unit/plugins/v1/python/test_sitecustomize.py
|
Python
|
gpl-3.0
| 5,261 | 0.00076 |
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
callback: syslog_json
callback_type: notification
requirements:
- whitelist in configuration
short_description: sends JSON events to syslog
version_added: "1.9"
description:
- This plugin logs ansible-playbook and ansible runs to a syslog server in JSON format
- Before 2.9 only environment variables were available for configuration
options:
server:
description: syslog server that will receive the event
env:
- name: SYSLOG_SERVER
default: localhost
ini:
- section: callback_syslog_json
key: syslog_server
port:
description: port on which the syslog server is listening
env:
- name: SYSLOG_PORT
default: 514
ini:
- section: callback_syslog_json
key: syslog_port
facility:
description: syslog facility to log as
env:
- name: SYSLOG_FACILITY
default: user
ini:
- section: callback_syslog_json
key: syslog_facility
'''
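# For illustration (hypothetical values): exporting SYSLOG_SERVER=syslog.example.com and
# SYSLOG_PORT=10514, or setting syslog_server/syslog_port under a [callback_syslog_json]
# section in ansible.cfg, overrides the defaults documented above once the plugin is whitelisted.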
import os
import json
import logging
import logging.handlers
import socket
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
"""
logs ansible-playbook and ansible runs to a syslog server in json format
"""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'aggregate'
CALLBACK_NAME = 'syslog_json'
CALLBACK_NEEDS_WHITELIST = True
def __init__(self):
super(CallbackModule, self).__init__()
self.set_options()
syslog_host = self.get_option("server")
syslog_port = int(self.get_option("port"))
syslog_facility = self.get_option("facility")
self.logger = logging.getLogger('ansible logger')
self.logger.setLevel(logging.DEBUG)
self.handler = logging.handlers.SysLogHandler(
address=(syslog_host, syslog_port),
facility=syslog_facility
)
self.logger.addHandler(self.handler)
self.hostname = socket.gethostname()
def runner_on_failed(self, host, res, ignore_errors=False):
self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s', self.hostname, host, self._dump_results(res))
def runner_on_ok(self, host, res):
self.logger.info('%s ansible-command: task execution OK; host: %s; message: %s', self.hostname, host, self._dump_results(res))
def runner_on_skipped(self, host, item=None):
self.logger.info('%s ansible-command: task execution SKIPPED; host: %s; message: %s', self.hostname, host, 'skipped')
def runner_on_unreachable(self, host, res):
self.logger.error('%s ansible-command: task execution UNREACHABLE; host: %s; message: %s', self.hostname, host, self._dump_results(res))
def runner_on_async_failed(self, host, res, jid):
self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s', self.hostname, host, self._dump_results(res))
def playbook_on_import_for_host(self, host, imported_file):
self.logger.info('%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s', self.hostname, host, imported_file)
def playbook_on_not_import_for_host(self, host, missing_file):
self.logger.info('%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s', self.hostname, host, missing_file)
|
thaim/ansible
|
lib/ansible/plugins/callback/syslog_json.py
|
Python
|
mit
| 3,681 | 0.004075 |
"""
Initialize models
"""
|
myadventure/myadventure-api
|
app/models/__init__.py
|
Python
|
apache-2.0
| 27 | 0 |
import re
from ztag.annotation import Annotation
from ztag.annotation import OperatingSystem
from ztag import protocols
import ztag.test
class FtpCesarFtpd(Annotation):
protocol = protocols.FTP
subprotocol = protocols.FTP.BANNER
port = None
impl_re = re.compile("^220[- ]CesarFTP 0\.\d+", re.IGNORECASE)
version_re = re.compile("CesarFTP (\d+\.\d+)([a-z])?", re.IGNORECASE)
tests = {
"FtpCesarFtpd_1": {
"global_metadata": {
"os": OperatingSystem.WINDOWS,
},
"local_metadata": {
"product": "Cesar FTP",
"version": "0.99",
"revision": "g"
}
}
}
def process(self, obj, meta):
banner = obj["banner"]
if self.impl_re.search(banner):
meta.global_metadata.os = OperatingSystem.WINDOWS
meta.local_metadata.product = "Cesar FTP"
version = self.version_re.search(banner).group(1)
meta.local_metadata.version = version
rev = self.version_re.search(banner).group(2)
meta.local_metadata.revision = rev
return meta
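# For illustration, using the first banner from the test block below:
# version_re.search("220 CesarFTP 0.99g Server Welcome !") gives group(1) == "0.99"
# (the version) and group(2) == "g" (the revision), which process() copies into the metadata.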
""" Tests
"220 CesarFTP 0.99g Server Welcome !\r\n"
"220 CesarFTP 0.99g Server Welcome !\r\n"
"220 CesarFTP 0.99g Server Welcome !\r\n"
"220 CesarFTP 0.99g Server Welcome !\r\n"
"220 CesarFTP 0.99g Server Welcome !\r\n"
"220 CesarFTP 0.99g Server Welcome !\r\n"
"220 CesarFTP 0.99g Server Welcome !\r\n"
"220 CesarFTP 0.99g Server Welcome !\r\n"
"220 CesarFTP 0.99g Server Welcome !\r\n"
"220 CesarFTP 0.99g Server Welcome !\r\n"
"""
|
zmap/ztag
|
ztag/annotations/FtpCesarFtpd.py
|
Python
|
apache-2.0
| 1,652 | 0.003632 |
#!/usr/bin/env python
"""
Purpose : Extract the next sequence number for an auto-scaled instance and set a new Name tag on the instance itself. The script runs from the new instance
   and takes its input from the command line instead of from a JSON file.
Future Plan :
   associate the instance with a role-based IAM profile
Usage :
python ec2-autoscale-instance-modify.py -a <your aws access_key> -s <aws secret key> -g <auto scale group that used in cloudformation file> -r <region> -n <min_server_number> -c <customer> -t <uat/plab/prod> -p <appname> -d <domainname ie example.net>
"""
__author__ = "kama maiti"
__copyright__ = "Copyright 2016, AWS autoscaled instance tag modification project"
__credits__ = ["kamal maiti"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "kamal maiti"
__email__ = "kamal.maiti@gmail.com"
__status__ = "production/Non-production"
import re
import argparse
import boto.ec2.autoscale
from boto.ec2 import EC2Connection
import shlex, subprocess
akey = ""
skey = ""
ag = ""
rg = ""
min_num = ""
def find_server_number(str):
    #Assuming first match only, with three consecutive digits
match = []
match = re.findall(r'\d\d\d', str)
if match:
        return match #will return a list containing the server number
    else:
        return match #will return an empty list
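# For illustration (hypothetical tag, not from a real account):
# find_server_number("acme-uat-web-101.example.net") returns ['101'], while a Name tag
# without three consecutive digits returns an empty list.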
def main():
arg_parser = argparse.ArgumentParser(description='Read autoscale instance')
arg_parser.add_argument('-a', dest='akey',help='Provide AWS_ACCESS_KEY')
arg_parser.add_argument('-s', dest='skey',help='Provide AWS_SECRET_ACCESS_KEY')
arg_parser.add_argument('-g', dest='ag',help='Provide User provided autoscale group name')
arg_parser.add_argument('-r', dest='rg',help='Provide region name')
    arg_parser.add_argument('-n', dest='min_num',help='Minimum server number')
arg_parser.add_argument('-c', dest='customer',help='Name of the customer in short')
arg_parser.add_argument('-t', dest='servertype',help='Type of the server ie prod or uat or plab')
arg_parser.add_argument('-p', dest='purpose',help='Purpose of the Server')
arg_parser.add_argument('-d', dest='domain',help='Domain name that will be appended to server name')
args = arg_parser.parse_args()
#print(args)
access_key = args.akey
secret_key = args.skey
region = args.rg
group_name = str(args.ag)
min_server_num = int(args.min_num)
customer = str(args.customer)
servertype = str(args.servertype)
purpose = str(args.purpose)
domain = str(args.domain)
    #create two objects below: one for the autoscale connection and another for the EC2 connection
as_conn = boto.ec2.autoscale.connect_to_region(region, aws_access_key_id=access_key, aws_secret_access_key=secret_key)
ec2_conn = boto.ec2.connect_to_region(region, aws_access_key_id=access_key, aws_secret_access_key=secret_key)
try:
groups = as_conn.get_all_groups()
all_groups = [group.name for group in groups]
for g in all_groups:
            if group_name in g: #searching for the autoscaling group we are concerned with. Note that all autoscaling group names should be unique
                FOUND_GROUP = g #FOUND_GROUP saves the exact AG name. Note that the exact AG name is not the same as the user-provided name; we only check whether group_name is a substring of g
FOUND_GROUP_WITH_DES = as_conn.get_all_groups(names=[FOUND_GROUP])[0]
instance_ids = [i.instance_id for i in FOUND_GROUP_WITH_DES.instances]
#reservations = ec2_conn.get_all_instances(instance_ids)
instances = ec2_conn.get_only_instances(instance_ids)
#instances = [i for r in reservations for i in r.instances]
lNameTag = []
#collect all tags of all instances and sort Name tags and save them in list.
for i,j in enumerate(instances):
a = instances[i].tags
lNameTag.append(a['Name'])
            #process each instance and collect its server number into one list
lServerNum = []
if not lNameTag: #checking if list is empty or not. If empty then this is first instance whose server num will be min_server_num
next_number = min_server_num
else:
for server in lNameTag: #iterating each value of "Name" tag
if not find_server_number(server): #if method find_server_number returns null list
next_number = min_server_num
else:
val = find_server_number(server) #got value like [u'101']. Below comand will remove [],' and u
actual_num=str(val).strip('[]').strip('u').strip('\'')
lServerNum.append(int(actual_num)) #actual_num is string, need to convert to int
if not lServerNum: #check if list of server number is blank or not
next_number = min_server_num
else:
maximum_number = max(lServerNum) #used max function to find out maximum number in the list
next_number = maximum_number + 1
#Now we need to save this next_number in a file so that we can collect it and send to other commands.
with open('/tmp/serverno','w') as fd: #created a file and save the number as string. Then read it and used later
fd.write(str(next_number))
with open('/tmp/serverno','r') as fd:
num=fd.read()
#Will modify tag of current instance. Let's build a new tag.
delm = "-" #Delimeter that will be used to join multiple string
seq = ( customer, servertype, purpose, num, domain) #created a tuple
new_tag = delm.join(seq) #joined tuple strings
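            # For illustration (hypothetical values): customer='acme', servertype='uat',
            # purpose='web', num='101', domain='example.net' yields
            # new_tag == 'acme-uat-web-101-example.net'.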
with open('/tmp/nodename','w') as fd:
fd.write(str(new_tag))
#will extract current instance ID using curl. ie curl http://169.254.169.254/latest/meta-data/instance-id
#
cmd = 'curl http://169.254.169.254/latest/meta-data/instance-id'
#shlex is simple lexical analyser for splitting a large string into tokens
args = shlex.split(cmd) #args will have value like : ['curl', 'http://169.254.169.254/latest/meta-data/instance-id']
output,error = subprocess.Popen(args,stdout = subprocess.PIPE, stderr= subprocess.PIPE).communicate() #out and error are saved in variable. communicate will execute comamnd
#o="i-fd96291f" #used for testing
cur_instance_reservation = ec2_conn.get_all_instances(instance_ids=output)
cur_instance = cur_instance_reservation[0].instances[0]
cur_instance.add_tag('Name', new_tag)
finally:
as_conn.close()
ec2_conn.close()
if __name__ == '__main__':
main()
|
kmaiti/AWSAutoScalingWithF5andCloudFormation
|
aws-autoscale-ec2-instance-modify.py
|
Python
|
gpl-3.0
| 6,954 | 0.012511 |
"""
categories: Types,bytes
description: Bytes subscr with step != 1 not implemented
cause: Unknown
workaround: Unknown
"""
print(b'123'[0:3:2])
|
cwyark/micropython
|
tests/cpydiff/types_bytes_subscrstep.py
|
Python
|
mit
| 145 | 0 |
import imp
import os.path
import pkgutil
import six
import unittest
import streamlink.plugins
from streamlink import Streamlink
class PluginTestMeta(type):
def __new__(mcs, name, bases, dict):
plugin_path = os.path.dirname(streamlink.plugins.__file__)
plugins = []
for loader, pname, ispkg in pkgutil.iter_modules([plugin_path]):
file, pathname, desc = imp.find_module(pname, [plugin_path])
module = imp.load_module(pname, file, pathname, desc)
if hasattr(module, "__plugin__"):
                plugins.append(pname)
session = Streamlink()
def gentest(pname):
def load_plugin_test(self):
# Reset file variable to ensure it is still open when doing
# load_plugin else python might open the plugin source .py
# using ascii encoding instead of utf-8.
# See also open() call here: imp._HackedGetData.get_data
file, pathname, desc = imp.find_module(pname, [plugin_path])
session.load_plugin(pname, file, pathname, desc)
# validate that can_handle_url does not fail
session.plugins[pname].can_handle_url("http://test.com")
return load_plugin_test
for pname in plugins:
dict['test_{0}_load'.format(pname)] = gentest(pname)
return type.__new__(mcs, name, bases, dict)
@six.add_metaclass(PluginTestMeta)
class TestPlugins(unittest.TestCase):
"""
Test that each plugin can be loaded and does not fail when calling can_handle_url.
"""
|
back-to/streamlink
|
tests/test_plugins.py
|
Python
|
bsd-2-clause
| 1,609 | 0.000622 |
#!/usr/bin/python3
"""
Cleans-up Sphinx-only constructs (ie from README.rst),
so that *PyPi* can format it properly.
To check for remaining errors, install ``sphinx`` and run::
python setup.py --long-description | sed -file 'this_file.sed' | rst2html.py --halt=warning
"""
import re
import sys, io
def yield_sphinx_only_markup(lines):
"""
:param file_inp: a `filename` or ``sys.stdin``?
:param file_out: a `filename` or ``sys.stdout`?`
"""
substs = [
## Selected Sphinx-only Roles.
#
(r':abbr:`([^`]+)`', r'\1'),
(r':ref:`([^`]+)`', r'`\1`_'),
(r':term:`([^`]+)`', r'**\1**'),
(r':dfn:`([^`]+)`', r'**\1**'),
(r':(samp|guilabel|menuselection):`([^`]+)`', r'``\2``'),
## Sphinx-only roles:
# :foo:`bar` --> foo(``bar``)
# :a:foo:`bar` XXX afoo(``bar``)
#
#(r'(:(\w+))?:(\w+):`([^`]*)`', r'\2\3(``\4``)'),
(r':(\w+):`([^`]*)`', r'\1(``\2``)'),
## Sphinx-only Directives.
#
(r'\.\. doctest', r'code-block'),
(r'\.\. plot::', r'.. '),
(r'\.\. seealso', r'info'),
(r'\.\. glossary', r'rubric'),
(r'\.\. figure::', r'.. '),
## Other
#
(r'\|version\|', r'x.x.x'),
]
regex_subs = [ (re.compile(regex, re.IGNORECASE), sub) for (regex, sub) in substs ]
def clean_line(line):
try:
for (regex, sub) in regex_subs:
line = regex.sub(sub, line)
except Exception as ex:
print("ERROR: %s, (line(%s)"%(regex, sub))
raise ex
return line
for line in lines:
yield clean_line(line)
|
iSTB/python-schemata
|
rst_cleaner.py
|
Python
|
mit
| 1,813 | 0.006067 |
#!/usr/bin/env python
#
# Copyright (C) 2013-2017(H)
# Max Planck Institute for Polymer Research
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# -*- coding: utf-8 -*-
import sys
import time
import espressopp
import mpi4py.MPI as MPI
import unittest
class TestTDforce(unittest.TestCase):
def setUp(self):
# set up system
system = espressopp.System()
box = (10, 10, 10)
system.bc = espressopp.bc.OrthorhombicBC(system.rng, box)
system.skin = 0.3
system.comm = MPI.COMM_WORLD
nodeGrid = espressopp.tools.decomp.nodeGrid(espressopp.MPI.COMM_WORLD.size,box,rc=1.5,skin=system.skin)
cellGrid = espressopp.tools.decomp.cellGrid(box, nodeGrid, rc=1.5, skin=system.skin)
system.storage = espressopp.storage.DomainDecompositionAdress(system, nodeGrid, cellGrid)
self.system = system
def test_slab(self):
# add some particles
particle_list = [
(1, 1, 0, espressopp.Real3D(5.5, 5.0, 5.0), 1.0, 0),
(2, 1, 0, espressopp.Real3D(6.5, 5.0, 5.0), 1.0, 0),
(3, 1, 0, espressopp.Real3D(7.5, 5.0, 5.0), 1.0, 0),
(4, 1, 0, espressopp.Real3D(8.5, 5.0, 5.0), 1.0, 0),
(5, 1, 0, espressopp.Real3D(9.5, 5.0, 5.0), 1.0, 0),
(6, 0, 0, espressopp.Real3D(5.5, 5.0, 5.0), 1.0, 1),
(7, 0, 0, espressopp.Real3D(6.5, 5.0, 5.0), 1.0, 1),
(8, 0, 0, espressopp.Real3D(7.5, 5.0, 5.0), 1.0, 1),
(9, 0, 0, espressopp.Real3D(8.5, 5.0, 5.0), 1.0, 1),
(10, 0, 0, espressopp.Real3D(9.5, 5.0, 5.0), 1.0, 1),
]
tuples = [(1,6),(2,7),(3,8),(4,9),(5,10)]
self.system.storage.addParticles(particle_list, 'id', 'type', 'q', 'pos', 'mass','adrat')
ftpl = espressopp.FixedTupleListAdress(self.system.storage)
ftpl.addTuples(tuples)
self.system.storage.setFixedTuplesAdress(ftpl)
self.system.storage.decompose()
# generate a verlet list
vl = espressopp.VerletListAdress(self.system, cutoff=1.5, adrcut=1.5,
dEx=1.0, dHy=1.0, adrCenter=[5.0, 5.0, 5.0], sphereAdr=False)
# initialize lambda values
integrator = espressopp.integrator.VelocityVerlet(self.system)
integrator.dt = 0.01
adress = espressopp.integrator.Adress(self.system,vl,ftpl)
integrator.addExtension(adress)
espressopp.tools.AdressDecomp(self.system, integrator)
# set up TD force
thdforce = espressopp.integrator.TDforce(self.system,vl)
thdforce.addForce(itype=3,filename="table_tf.tab",type=1)
integrator.addExtension(thdforce)
# x coordinates of particles before integration
before = [self.system.storage.getParticle(i).pos[0] for i in range(1,6)]
# run ten steps
integrator.run(10)
# x coordinates of particles after integration
after = [self.system.storage.getParticle(i).pos[0] for i in range(1,6)]
# run checks (only one particle is in hybrid region, should feel the thermodynamic force, and should hence move along the x direction)
self.assertEqual(before[0], after[0])
self.assertAlmostEqual(after[1], 6.596913, places=5)
self.assertEqual(before[2], after[2])
self.assertEqual(before[3], after[3])
self.assertEqual(before[4], after[4])
def test_fixed_sphere(self):
# add some particles
particle_list = [
(1, 1, 0, espressopp.Real3D(5.0, 5.5, 5.0), 1.0, 0),
(2, 1, 0, espressopp.Real3D(5.0, 6.5, 5.0), 1.0, 0),
(3, 1, 0, espressopp.Real3D(5.0, 7.5, 5.0), 1.0, 0),
(4, 1, 0, espressopp.Real3D(5.0, 8.5, 5.0), 1.0, 0),
(5, 1, 0, espressopp.Real3D(5.0, 9.5, 5.0), 1.0, 0),
(6, 0, 0, espressopp.Real3D(5.0, 5.5, 5.0), 1.0, 1),
(7, 0, 0, espressopp.Real3D(5.0, 6.5, 5.0), 1.0, 1),
(8, 0, 0, espressopp.Real3D(5.0, 7.5, 5.0), 1.0, 1),
(9, 0, 0, espressopp.Real3D(5.0, 8.5, 5.0), 1.0, 1),
(10, 0, 0, espressopp.Real3D(5.0, 9.5, 5.0), 1.0, 1),
]
tuples = [(1,6),(2,7),(3,8),(4,9),(5,10)]
self.system.storage.addParticles(particle_list, 'id', 'type', 'q', 'pos', 'mass','adrat')
ftpl = espressopp.FixedTupleListAdress(self.system.storage)
ftpl.addTuples(tuples)
self.system.storage.setFixedTuplesAdress(ftpl)
self.system.storage.decompose()
# generate a verlet list
vl = espressopp.VerletListAdress(self.system, cutoff=1.5, adrcut=1.5,
dEx=1.0, dHy=1.0, adrCenter=[5.0, 5.0, 5.0], sphereAdr=True)
# initialize lambda values
integrator = espressopp.integrator.VelocityVerlet(self.system)
integrator.dt = 0.01
adress = espressopp.integrator.Adress(self.system,vl,ftpl)
integrator.addExtension(adress)
espressopp.tools.AdressDecomp(self.system, integrator)
# set up TD force
thdforce = espressopp.integrator.TDforce(self.system,vl)
thdforce.addForce(itype=3,filename="table_tf.tab",type=1)
integrator.addExtension(thdforce)
# y coordinates of particles before integration
before = [self.system.storage.getParticle(i).pos[1] for i in range(1,6)]
# run ten steps
integrator.run(10)
# y coordinates of particles after integration
after = [self.system.storage.getParticle(i).pos[1] for i in range(1,6)]
# run checks (as test before, just that particles should move in y-direction given the new setup and the spherical adaptive resolution geometry)
self.assertEqual(before[0], after[0])
self.assertAlmostEqual(after[1], 6.596913, places=5)
self.assertEqual(before[2], after[2])
self.assertEqual(before[3], after[3])
self.assertEqual(before[4], after[4])
def test_single_moving_sphere(self):
# add some particles
particle_list = [
(1, 1, 0, espressopp.Real3D(5.0, 5.0, 5.0), espressopp.Real3D(0.0, 1.0, 0.0), 1.0, 0),
(2, 1, 0, espressopp.Real3D(5.0, 6.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 0),
(3, 1, 0, espressopp.Real3D(5.0, 7.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 0),
(4, 1, 0, espressopp.Real3D(5.0, 8.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 0),
(5, 1, 0, espressopp.Real3D(5.0, 9.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 0),
(6, 0, 0, espressopp.Real3D(5.0, 5.0, 5.0), espressopp.Real3D(0.0, 1.0, 0.0), 1.0, 1),
(7, 0, 0, espressopp.Real3D(5.0, 6.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 1),
(8, 0, 0, espressopp.Real3D(5.0, 7.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 1),
(9, 0, 0, espressopp.Real3D(5.0, 8.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 1),
(10, 0, 0, espressopp.Real3D(5.0, 9.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 1),
]
tuples = [(1,6),(2,7),(3,8),(4,9),(5,10)]
self.system.storage.addParticles(particle_list, 'id', 'type', 'q', 'pos', 'v', 'mass','adrat')
ftpl = espressopp.FixedTupleListAdress(self.system.storage)
ftpl.addTuples(tuples)
self.system.storage.setFixedTuplesAdress(ftpl)
self.system.storage.decompose()
# generate a verlet list
vl = espressopp.VerletListAdress(self.system, cutoff=1.5, adrcut=1.5,
dEx=1.0, dHy=1.0, pids=[1], sphereAdr=True)
# initialize lambda values
integrator = espressopp.integrator.VelocityVerlet(self.system)
integrator.dt = 0.01
adress = espressopp.integrator.Adress(self.system,vl,ftpl)
integrator.addExtension(adress)
espressopp.tools.AdressDecomp(self.system, integrator)
# set up TD force
thdforce = espressopp.integrator.TDforce(self.system,vl)
thdforce.addForce(itype=3,filename="table_tf.tab",type=1)
integrator.addExtension(thdforce)
# y coordinates of particles before integration
before = [self.system.storage.getParticle(i).pos[1] for i in range(1,6)]
# run ten steps
integrator.run(10)
# y coordinates of particles after integration
after = [self.system.storage.getParticle(i).pos[1] for i in range(1,6)]
# run checks (first particle moves and defines the AdResS region, second particle is in the corresponding hybrid region, others are in coarse-grained region)
self.assertAlmostEqual(after[0], 5.100000, places=5)
self.assertAlmostEqual(after[1], 6.618377, places=5)
self.assertEqual(before[2], after[2])
self.assertEqual(before[3], after[3])
self.assertEqual(before[4], after[4])
def test_many_moving_spheres(self):
# add some particles
particle_list = [
(1, 1, 0, espressopp.Real3D(4.0, 5.0, 5.0), espressopp.Real3D(-1.0, 1.0, 0.0), 1.0, 0),
(2, 1, 0, espressopp.Real3D(6.0, 5.0, 5.0), espressopp.Real3D(1.0, 1.0, 0.0), 1.0, 0),
(3, 1, 0, espressopp.Real3D(5.0, 6.2, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 0),
(4, 1, 0, espressopp.Real3D(3.0, 8.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 0),
(5, 1, 0, espressopp.Real3D(7.0, 9.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 0),
(6, 0, 0, espressopp.Real3D(4.0, 5.0, 5.0), espressopp.Real3D(-1.0, 1.0, 0.0), 1.0, 1),
(7, 0, 0, espressopp.Real3D(6.0, 5.0, 5.0), espressopp.Real3D(1.0, 1.0, 0.0), 1.0, 1),
(8, 0, 0, espressopp.Real3D(5.0, 6.2, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 1),
(9, 0, 0, espressopp.Real3D(3.0, 8.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 1),
(10, 0, 0, espressopp.Real3D(7.0, 9.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 1),
]
tuples = [(1,6),(2,7),(3,8),(4,9),(5,10)]
self.system.storage.addParticles(particle_list, 'id', 'type', 'q', 'pos', 'v', 'mass','adrat')
ftpl = espressopp.FixedTupleListAdress(self.system.storage)
ftpl.addTuples(tuples)
self.system.storage.setFixedTuplesAdress(ftpl)
self.system.storage.decompose()
# generate a verlet list
vl = espressopp.VerletListAdress(self.system, cutoff=1.5, adrcut=1.5,
dEx=1.0, dHy=1.0, pids=[1,2], sphereAdr=True)
# initialize lambda values
integrator = espressopp.integrator.VelocityVerlet(self.system)
integrator.dt = 0.01
adress = espressopp.integrator.Adress(self.system,vl,ftpl)
integrator.addExtension(adress)
espressopp.tools.AdressDecomp(self.system, integrator)
# set up TD force
thdforce = espressopp.integrator.TDforce(self.system, vl, startdist = 0.9, enddist = 2.1, edgeweightmultiplier = 20)
thdforce.addForce(itype=3,filename="table_tf.tab",type=1)
integrator.addExtension(thdforce)
# y coordinates of particles before integration
before = [self.system.storage.getParticle(i).pos[1] for i in range(1,6)]
# run ten steps
integrator.run(10)
# y coordinates of particles after integration
after = [self.system.storage.getParticle(i).pos[1] for i in range(1,6)]
# run checks (first two particles move and together define the AdResS region, third particle is in the corresponding hybrid region, others are in coarse-grained region)
self.assertAlmostEqual(after[0], 5.100000, places=5)
self.assertAlmostEqual(after[1], 5.100000, places=5)
self.assertAlmostEqual(after[2], 6.260323, places=5)
self.assertEqual(before[3], after[3])
self.assertEqual(before[4], after[4])
def test_many_moving_spheres_fewupdates(self):
# add some particles
particle_list = [
(1, 1, 0, espressopp.Real3D(4.0, 5.0, 5.0), espressopp.Real3D(-1.0, 1.0, 0.0), 1.0, 0),
(2, 1, 0, espressopp.Real3D(6.0, 5.0, 5.0), espressopp.Real3D(1.0, 1.0, 0.0), 1.0, 0),
(3, 1, 0, espressopp.Real3D(5.0, 6.2, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 0),
(4, 1, 0, espressopp.Real3D(3.0, 8.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 0),
(5, 1, 0, espressopp.Real3D(7.0, 9.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 0),
(6, 0, 0, espressopp.Real3D(4.0, 5.0, 5.0), espressopp.Real3D(-1.0, 1.0, 0.0), 1.0, 1),
(7, 0, 0, espressopp.Real3D(6.0, 5.0, 5.0), espressopp.Real3D(1.0, 1.0, 0.0), 1.0, 1),
(8, 0, 0, espressopp.Real3D(5.0, 6.2, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 1),
(9, 0, 0, espressopp.Real3D(3.0, 8.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 1),
(10, 0, 0, espressopp.Real3D(7.0, 9.5, 5.0), espressopp.Real3D(0.0, 0.0, 0.0), 1.0, 1),
]
tuples = [(1,6),(2,7),(3,8),(4,9),(5,10)]
self.system.storage.addParticles(particle_list, 'id', 'type', 'q', 'pos', 'v', 'mass','adrat')
ftpl = espressopp.FixedTupleListAdress(self.system.storage)
ftpl.addTuples(tuples)
self.system.storage.setFixedTuplesAdress(ftpl)
self.system.storage.decompose()
# generate a verlet list
vl = espressopp.VerletListAdress(self.system, cutoff=1.5, adrcut=1.5,
dEx=1.0, dHy=1.0, pids=[1,2], sphereAdr=True)
# initialize lambda values
integrator = espressopp.integrator.VelocityVerlet(self.system)
integrator.dt = 0.01
adress = espressopp.integrator.Adress(self.system,vl,ftpl,regionupdates = 5)
integrator.addExtension(adress)
espressopp.tools.AdressDecomp(self.system, integrator)
# set up TD force
thdforce = espressopp.integrator.TDforce(self.system, vl, startdist = 0.9, enddist = 2.1, edgeweightmultiplier = 20)
thdforce.addForce(itype=3,filename="table_tf.tab",type=1)
integrator.addExtension(thdforce)
# y coordinates of particles before integration
before = [self.system.storage.getParticle(i).pos[1] for i in range(1,6)]
# run ten steps
integrator.run(20)
# y coordinates of particles after integration (as test before, but more integration steps and the AdResS region is updated only every 5 steps)
after = [self.system.storage.getParticle(i).pos[1] for i in range(1,6)]
# run checks
self.assertAlmostEqual(after[0], 5.200000, places=5)
self.assertAlmostEqual(after[1], 5.200000, places=5)
self.assertAlmostEqual(after[2], 6.393199, places=5)
self.assertEqual(before[3], after[3])
self.assertEqual(before[4], after[4])
def test_force_value(self):
# set up TD force
vl = espressopp.VerletListAdress(self.system, cutoff=1.5, adrcut=1.5,
dEx=1.0, dHy=1.0, adrCenter=[5.0, 5.0, 5.0], sphereAdr=False)
thdforce = espressopp.integrator.TDforce(self.system,vl)
thdforce.addForce(itype=3,filename="table_tf.tab",type=1)
self.assertAlmostEqual(thdforce.getForce(1, 0.9), 1.52920, places=5)
def test_switch_table(self):
# set up TD force
vl = espressopp.VerletListAdress(self.system, cutoff=1.5, adrcut=1.5,
dEx=1.0, dHy=1.0, adrCenter=[5.0, 5.0, 5.0], sphereAdr=False)
thdforce = espressopp.integrator.TDforce(self.system,vl)
thdforce.addForce(itype=3,filename="table_tf.tab",type=1)
self.assertAlmostEqual(thdforce.getForce(1, 0.9), 1.52920, places=5)
thdforce.addForce(itype=3,filename="table_tf2.tab",type=1)
self.assertAlmostEqual(thdforce.getForce(1, 0.9), 9.89, places=3) # New table new value
if __name__ == '__main__':
unittest.main()
|
espressopp/espressopp
|
testsuite/AdResS/TDforce/test_TDforce.py
|
Python
|
gpl-3.0
| 16,457 | 0.010634 |
import contextlib
import logging
import os
import tempfile
from django.db.utils import InternalError
from django.test import TestCase
from django.test.client import Client
from django.urls import reverse
from botocore.exceptions import ConnectionClosedError
from unittest.mock import patch
from casexml.apps.case.exceptions import IllegalCaseId
from couchforms.models import UnfinishedSubmissionStub
from couchforms.openrosa_response import ResponseNature
from couchforms.signals import successful_form_received
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
from corehq.blobs import get_blob_db
from corehq.const import OPENROSA_VERSION_2, OPENROSA_VERSION_3
from corehq.form_processor.models import CommCareCase, XFormInstance
from corehq.form_processor.tests.utils import (
FormProcessorTestUtils,
sharded,
)
from corehq.middleware import OPENROSA_VERSION_HEADER
from corehq.util.test_utils import TestFileMixin, flag_enabled, capture_log_output
FORM_WITH_CASE_ID = 'ad38211be256653bceac8e2156475666'
def tmpfile(mode='w', *args, **kwargs):
fd, path = tempfile.mkstemp(*args, **kwargs)
return (os.fdopen(fd, mode), path)
@sharded
class SubmissionErrorTest(TestCase, TestFileMixin):
file_path = ('data',)
root = os.path.dirname(__file__)
@classmethod
def setUpClass(cls):
super(SubmissionErrorTest, cls).setUpClass()
cls.domain = create_domain("submit-errors")
cls.couch_user = WebUser.create(None, "test", "foobar", None, None)
cls.couch_user.add_domain_membership(cls.domain.name, is_admin=True)
cls.couch_user.save()
cls.client = Client()
cls.client.login(**{'username': 'test', 'password': 'foobar'})
cls.url = reverse("receiver_post", args=[cls.domain])
@classmethod
def tearDownClass(cls):
cls.couch_user.delete(cls.domain.name, deleted_by=None)
cls.domain.delete()
super(SubmissionErrorTest, cls).tearDownClass()
def setUp(self):
FormProcessorTestUtils.delete_all_xforms(self.domain.name)
def tearDown(self):
FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain.name)
UnfinishedSubmissionStub.objects.all().delete()
def _submit(self, formname, open_rosa_header=None):
open_rosa_header = open_rosa_header or OPENROSA_VERSION_2
file_path = self.get_path(formname, '')
with open(file_path, "rb") as f:
res = self.client.post(
self.url,
{"xml_submission_file": f},
** {OPENROSA_VERSION_HEADER: open_rosa_header}
)
return file_path, res
def testSubmitBadAttachmentType(self):
res = self.client.post(self.url, {
"xml_submission_file": "this isn't a file"
})
self.assertEqual(400, res.status_code)
#self.assertIn("xml_submission_file", res.content)
def testSubmitDuplicate(self):
file, res = self._submit('simple_form.xml')
self.assertEqual(201, res.status_code)
self.assertIn(" √ ".encode('utf-8'), res.content)
file, res = self._submit('simple_form.xml')
self.assertEqual(201, res.status_code)
_, res_openrosa3 = self._submit('simple_form.xml', open_rosa_header=OPENROSA_VERSION_3)
self.assertEqual(201, res_openrosa3.status_code)
self.assertIn("Form is a duplicate", res.content.decode('utf-8'))
# make sure we logged it
[log] = XFormInstance.objects.get_forms_by_type(self.domain.name, 'XFormDuplicate', limit=1)
self.assertIsNotNone(log)
self.assertIn("Form is a duplicate", log.problem)
with open(file, 'rb') as f:
self.assertEqual(f.read(), log.get_xml())
def _test_submission_error_post_save(self, openrosa_version):
evil_laugh = "mwa ha ha!"
with failing_signal_handler(evil_laugh):
file, res = self._submit("simple_form.xml", openrosa_version)
if openrosa_version == OPENROSA_VERSION_3:
self.assertEqual(422, res.status_code)
self.assertIn(ResponseNature.POST_PROCESSING_FAILURE.encode('utf-8'), res.content)
else:
self.assertEqual(201, res.status_code)
self.assertIn(ResponseNature.SUBMIT_SUCCESS.encode('utf-8'), res.content)
form_id = 'ad38211be256653bceac8e2156475664'
form = XFormInstance.objects.get_form(form_id, self.domain.name)
self.assertTrue(form.is_normal)
self.assertTrue(form.initial_processing_complete)
stubs = UnfinishedSubmissionStub.objects.filter(
domain=self.domain, xform_id=form_id, saved=True
).all()
self.assertEqual(1, len(stubs))
def test_submission_error_post_save_2_0(self):
self._test_submission_error_post_save(OPENROSA_VERSION_2)
def test_submission_error_post_save_3_0(self):
self._test_submission_error_post_save(OPENROSA_VERSION_3)
# make sure that a re-submission has the same response
self._test_submission_error_post_save(OPENROSA_VERSION_3)
def _test_submit_bad_data(self, bad_data):
f, path = tmpfile(mode='wb', suffix='.xml')
with f:
f.write(bad_data)
with open(path, 'rb') as f:
with capture_log_output('', logging.WARNING) as logs:
res = self.client.post(self.url, {
"xml_submission_file": f
})
self.assertEqual(422, res.status_code)
self.assertIn('Invalid XML', res.content.decode('utf-8'))
# make sure we logged it
[log] = XFormInstance.objects.get_forms_by_type(self.domain.name, 'SubmissionErrorLog', limit=1)
self.assertIsNotNone(log)
self.assertIn('Invalid XML', log.problem)
self.assertEqual(bad_data, log.get_xml())
self.assertEqual(log.form_data, {})
return logs.get_output()
def test_submit_bad_xml(self):
log_output = self._test_submit_bad_data(b'\xad\xac\xab\xd36\xe1\xab\xd6\x9dR\x9b')
self.assertRegexpMatches(log_output, r"Problem receiving submission.*")
def test_submit_bad_device_log(self):
log_output = self._test_submit_bad_data(
"malformed xml dvice log</log></log_subreport></device_report>".encode('utf8')
)
self.assertRegexpMatches(log_output, r"Badly formed device log.*")
def test_missing_xmlns(self):
file, res = self._submit('missing_xmlns.xml')
self.assertEqual(422, res.status_code)
message = "Form is missing a required field: XMLNS"
self.assertIn(message, res.content.decode('utf-8'))
# make sure we logged it
[log] = XFormInstance.objects.get_forms_by_type(self.domain.name, 'SubmissionErrorLog', limit=1)
self.assertIsNotNone(log)
self.assertIn(message, log.problem)
with open(file, 'rb') as f:
self.assertEqual(f.read(), log.get_xml())
@flag_enabled('DATA_MIGRATION')
def test_data_migration(self):
file, res = self._submit('simple_form.xml')
self.assertEqual(503, res.status_code)
message = "Service Temporarily Unavailable"
self.assertIn(message, res.content.decode('utf-8'))
def test_error_saving_normal_form(self):
sql_patch = patch(
'corehq.form_processor.backends.sql.processor.FormProcessorSQL.save_processed_models',
side_effect=InternalError
)
with sql_patch:
with self.assertRaises(InternalError):
_, res = self._submit('form_with_case.xml')
stubs = UnfinishedSubmissionStub.objects.filter(
domain=self.domain, saved=False, xform_id=FORM_WITH_CASE_ID
).all()
self.assertEqual(1, len(stubs))
form = XFormInstance.objects.get_form(FORM_WITH_CASE_ID, self.domain)
self.assertTrue(form.is_error)
self.assertTrue(form.initial_processing_complete)
def _test_case_processing_error(self, openrosa_version):
with patch('casexml.apps.case.xform._get_or_update_cases', side_effect=IllegalCaseId):
_, res = self._submit('form_with_case.xml', open_rosa_header=openrosa_version)
if openrosa_version == OPENROSA_VERSION_3:
self.assertEqual(422, res.status_code)
self.assertIn(ResponseNature.PROCESSING_FAILURE, res.content.decode('utf-8'))
else:
self.assertEqual(201, res.status_code)
self.assertIn(ResponseNature.SUBMIT_ERROR, res.content.decode('utf-8'))
form = XFormInstance.objects.get_form(FORM_WITH_CASE_ID, self.domain)
self.assertTrue(form.is_error)
self.assertFalse(form.initial_processing_complete)
def test_case_processing_error_2_0(self):
self._test_case_processing_error(OPENROSA_VERSION_2)
def test_case_processing_error_3_0(self):
self._test_case_processing_error(OPENROSA_VERSION_3)
# make sure that a re-submission has the same response
self._test_case_processing_error(OPENROSA_VERSION_3)
def test_no_form_lock_on_submit_device_log(self):
from corehq.form_processor.submission_post import FormProcessingResult as FPR
from corehq.form_processor.parsers.form import FormProcessingResult
class FakeResponse(dict):
content = "device-log-response"
status_code = 200
streaming = False
cookies = []
_closable_objects = []
def has_header(self, name):
return False
def set_cookie(self, *args, **kw):
pass
def close(self):
pass
def process_device_log(self_, xform):
result = FormProcessingResult(xform)
# verify form is not locked: acquire lock without XFormLockError
with result.get_locked_forms() as forms:
self.assertEqual(forms, [xform])
calls.append(1)
return FPR(FakeResponse(), xform, [], [], 'device-log')
calls = []
with patch(
'corehq.form_processor.submission_post.SubmissionPost.process_device_log',
new_callable=lambda: process_device_log,
):
_, res = self._submit('device_log.xml')
self.assertEqual(res.status_code, 200)
self.assertEqual(calls, [1])
def test_xform_locked(self):
from corehq.form_processor.interfaces.processor import FormProcessorInterface
form_id = 'ad38211be256653bceac8e2156475664'
proc = FormProcessorInterface(self.domain)
lock = proc.acquire_lock_for_xform(form_id)
try:
_, response = self._submit('simple_form.xml')
finally:
lock.release()
self.assertEqual(response.status_code, 423)
def test_error_publishing_to_kafka(self):
sql_patch = patch(
'corehq.form_processor.backends.sql.processor.FormProcessorSQL.publish_changes_to_kafka',
side_effect=ValueError
)
with sql_patch:
_, res = self._submit('form_with_case.xml')
self.assertEqual(res.status_code, 201)
stubs = UnfinishedSubmissionStub.objects.filter(
domain=self.domain, saved=True, xform_id=FORM_WITH_CASE_ID
).all()
self.assertEqual(1, len(stubs))
form = XFormInstance.objects.get_form(FORM_WITH_CASE_ID, self.domain)
self.assertFalse(form.is_error)
self.assertTrue(form.initial_processing_complete)
def test_error_writing_to_blobdb(self):
error = ConnectionClosedError(endpoint_url='url')
with self.assertRaises(ConnectionClosedError), patch.object(get_blob_db(), 'put', side_effect=error):
self._submit('form_with_case.xml')
self.client.exc_info = None # clear error to prevent it from being raised on next request
stubs = UnfinishedSubmissionStub.objects.filter(
domain=self.domain, saved=False, xform_id=FORM_WITH_CASE_ID
).all()
self.assertEqual(1, len(stubs))
old_form = XFormInstance.objects.get_form(FORM_WITH_CASE_ID, self.domain)
self.assertTrue(old_form.is_error)
self.assertTrue(old_form.initial_processing_complete)
expected_problem_message = f'{type(error).__name__}: {error}'
self.assertEqual(old_form.problem, expected_problem_message)
_, resp = self._submit('form_with_case.xml')
self.assertEqual(resp.status_code, 201)
new_form = XFormInstance.objects.get_form(FORM_WITH_CASE_ID, self.domain)
self.assertTrue(new_form.is_normal)
old_form.refresh_from_db() # can't fetch by form_id since form_id changed
self.assertEqual(old_form.orig_id, FORM_WITH_CASE_ID)
self.assertEqual(old_form.problem, expected_problem_message)
def test_submit_duplicate_blob_not_found(self):
# https://dimagi-dev.atlassian.net/browse/ICDS-376
file, res = self._submit('form_with_case.xml')
self.assertEqual(201, res.status_code)
self.assertIn(" √ ".encode('utf-8'), res.content)
form = XFormInstance.objects.get_form(FORM_WITH_CASE_ID, self.domain.name)
form_attachment_meta = form.get_attachment_meta('form.xml')
blobdb = get_blob_db()
with patch.object(blobdb.metadb, 'delete'):
blobdb.delete(form_attachment_meta.key)
file, res = self._submit('form_with_case.xml')
self.assertEqual(res.status_code, 201)
form = XFormInstance.objects.get_form(FORM_WITH_CASE_ID, self.domain.name)
deprecated_form = XFormInstance.objects.get_form(form.deprecated_form_id, self.domain.name)
self.assertTrue(deprecated_form.is_deprecated)
case = CommCareCase.objects.get_case('ad38211be256653bceac8e2156475667', self.domain.name)
transactions = case.transactions
self.assertEqual(2, len(transactions))
self.assertTrue(transactions[0].is_form_transaction)
self.assertTrue(transactions[1].is_case_rebuild)
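# Test helper: temporarily wires a handler into the successful_form_received
# signal that raises, so tests can exercise the post-save error path.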
@contextlib.contextmanager
def failing_signal_handler(error_message):
def fail(sender, xform, **kwargs):
raise Exception(error_message)
successful_form_received.connect(fail)
try:
yield
finally:
successful_form_received.disconnect(fail)
|
dimagi/commcare-hq
|
corehq/apps/receiverwrapper/tests/test_submit_errors.py
|
Python
|
bsd-3-clause
| 14,436 | 0.00194 |
#
# To test this, first have a running local zookeeper installation
# (See http://zookeeper.apache.org/)
# Next, open a couple of shells
# Run this in the first one, watch the output. It will create a lock and hold it for 20 seconds.
# Run it again in the second one, watch that it doesn't acquire the lock until the first instance exits,
# and then holds the lock itself for 20 seconds after acquiring it.
#
# You can speed things up by killing the first instance after you
# start the second. The second should immediately acquire the lock.
#
import zklock, time
# You can specify a host to connect(). The default is localhost and
# the default ZooKeeper port.
zklock.connect()
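# For illustration only (the host string below is hypothetical; any reachable
# ZooKeeper "host:port" would work):
# zklock.connect('zk.example.com:2181')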
# This creates a global lock named 'test'. Any other zklock connected
# to the same ZooKeeper instance and trying to create a lock of the
# same name will be blocked while this program holds the lock named 'test'
z = zklock.Lock('test')
try:
if z.acquire():
print "zklocktest: Lock acquired"
time.sleep(20)
z.release()
except:
z.release()
with zklock.ScopedLock("scoped_lock_test", block=False) as z:
if z.acquired:
print "Locked!"
time.sleep(20)
else:
print "Could not obtain lock!"
print "zklocktest: Exiting"
|
tinyogre/zklock
|
zklocktest.py
|
Python
|
lgpl-3.0
| 1,271 | 0.010228 |
#!/usr/bin/env python
"""Wordnik.com's Swagger generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the Swagger
templates."""
import sys
import os
import re
import urllib.request, urllib.parse, urllib.error
import http.client
import json
import datetime
import mimetypes
import base64
from .models import *
from groupdocs.FileStream import FileStream
from groupdocs import version
class RequestSigner(object):
def __init__(self):
if type(self) == RequestSigner:
raise Exception("RequestSigner is an abstract class and cannot be instantiated.")
def signUrl(self, url):
raise NotImplementedError
def signContent(self, requestBody, headers):
raise NotImplementedError
class DefaultRequestSigner(RequestSigner):
def signUrl(self, url):
return url
def signContent(self, requestBody, headers):
return requestBody
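# A minimal illustrative signer, not part of the original SDK: it sketches how
# a custom RequestSigner could attach an HMAC signature to the request body.
# The header name and the shared-secret scheme below are assumptions made for
# this example only.
class ExampleHmacRequestSigner(RequestSigner):
    def __init__(self, sharedSecret):
        self.sharedSecret = sharedSecret
    def signUrl(self, url):
        # This sketch leaves the URL untouched; a real signer might append a
        # signature query parameter here instead.
        return url
    def signContent(self, requestBody, headers):
        import hashlib
        import hmac
        body = requestBody.encode('utf-8') if isinstance(requestBody, str) else (requestBody or b'')
        digest = hmac.new(self.sharedSecret.encode('utf-8'), body, hashlib.sha256).hexdigest()
        headers['X-Example-Signature'] = digest  # hypothetical header name
        return requestBody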
class ApiClient(object):
"""Generic API client for Swagger client library builds"""
def __init__(self, requestSigner=None):
self.signer = requestSigner if requestSigner != None else DefaultRequestSigner()
self.cookie = None
self.headers = {'Groupdocs-Referer': '/'.join((version.__pkgname__, version.__version__))}
self.__debug = False
def setDebug(self, flag, logFilepath=None):
self.__debug = flag
self.__logFilepath = logFilepath
def addHeaders(self, **headers):
self.headers = headers
def callAPI(self, apiServer, resourcePath, method, queryParams, postData,
headerParams=None, returnType=str):
if self.__debug and self.__logFilepath:
stdOut = sys.stdout
logFile = open(self.__logFilepath, 'a')
sys.stdout = logFile
url = apiServer + resourcePath
headers = {}
if self.headers:
for param, value in self.headers.items():
headers[param] = value
if headerParams:
for param, value in headerParams.items():
headers[param] = value
isFileUpload = False
if not postData:
headers['Content-type'] = 'text/html'
elif isinstance(postData, FileStream):
isFileUpload = True
if postData.contentType:
headers['Content-type'] = 'application/octet-stream'
if postData.size:
headers['Content-Length'] = str(postData.size)
else:
headers['Content-type'] = 'application/json'
if self.cookie:
headers['Cookie'] = self.cookie
data = None
if queryParams:
# Need to remove None values, these should not be sent
sentQueryParams = {}
for param, value in queryParams.items():
if value != None:
sentQueryParams[param] = value
if sentQueryParams:
url = url + '?' + urllib.parse.urlencode(sentQueryParams)
if method in ['POST', 'PUT', 'DELETE']:
if isFileUpload:
data = postData.inputStream
elif not postData:
data = ""
elif type(postData) not in [str, int, float, bool]:
data = self.signer.signContent(json.dumps(self.sanitizeForSerialization(postData)), headers)
else:
data = self.signer.signContent(postData, headers)
if self.__debug:
http.client.HTTPConnection.debuglevel = 1
if data and not isFileUpload:
data = data.encode('utf-8')
request = MethodRequest(method=method, url=self.encodeURI(self.signer.signUrl(url)), headers=headers,
data=data)
try:
# Make the request
response = urllib.request.urlopen(request)
if 'Set-Cookie' in response.headers:
self.cookie = response.headers['Set-Cookie']
if response.code == 200 or response.code == 201 or response.code == 202:
if returnType == FileStream:
fs = FileStream.fromHttp(response)
if self.__debug: print("\n", "< Response Body:\n", ">>>stream info: fileName=%s contentType=%s size=%s" % (fs.fileName, fs.contentType, fs.size), "\n", sep="")
return fs if 'Transfer-Encoding' in response.headers or (fs.size != None and int(fs.size) > 0) else None
else:
encoding = response.headers.get_content_charset()
if not encoding: encoding = 'iso-8859-1'
string = response.read().decode(encoding)
if self.__debug: print("\n", "< Response Body:\n", string, "\n", sep="")
try:
data = json.loads(string)
except ValueError: # PUT requests don't return anything
data = None
return data
elif response.code == 404:
return None
else:
encoding = response.headers.get_content_charset()
if not encoding: encoding = 'iso-8859-1'
string = response.read().decode(encoding)
try:
msg = json.loads(string)['error_message']
except ValueError:
msg = string
raise ApiException(response.code, msg)
except urllib.error.HTTPError as e:
raise ApiException(e.code, e.msg)
finally:
if isFileUpload:
try:
postData.inputStream.close()
except Exception as e:
pass
if self.__debug:
http.client.HTTPConnection.debuglevel = 0
if self.__logFilepath:
sys.stdout = stdOut
logFile.close()
def toPathValue(self, obj):
"""Serialize a list to a CSV string, if necessary.
Args:
obj -- data object to be serialized
Returns:
            string -- comma-separated string when obj is a list, otherwise obj unchanged
"""
if type(obj) == list:
return ','.join(obj)
else:
return obj
def sanitizeForSerialization(self, obj):
"""Dump an object into JSON for POSTing."""
if not obj:
return None
elif type(obj) in [str, int, float, bool]:
return obj
elif type(obj) == list:
return [self.sanitizeForSerialization(subObj) for subObj in obj]
elif type(obj) == datetime.datetime:
return obj.isoformat()
else:
if type(obj) == dict:
objDict = obj
else:
objDict = obj.__dict__
return {key: self.sanitizeForSerialization(val)
for (key, val) in objDict.items()
if key != 'swaggerTypes' and val != None}
def deserialize(self, obj, objClass):
"""Derialize a JSON string into an object.
Args:
obj -- string or object to be deserialized
            objClass -- class literal for the deserialized object, or string
of class name
Returns:
object -- deserialized object"""
if not obj:
return None
# Have to accept objClass as string or actual type. Type could be a
# native Python type, or one of the model classes.
if type(objClass) == str:
if 'list[' in objClass:
match = re.match('list\[(.*)\]', objClass)
subClass = match.group(1)
return [self.deserialize(subObj, subClass) for subObj in obj]
if (objClass in ['int', 'float', 'dict', 'list', 'str']):
objClass = eval(objClass)
else: # not a native type, must be model class
objClass = eval(objClass + '.' + objClass)
if objClass in [str, int, float, bool]:
return objClass(obj)
        elif objClass == datetime.datetime:
# Server will always return a time stamp in UTC, but with
# trailing +0000 indicating no offset from UTC. So don't process
# last 5 characters.
return datetime.datetime.strptime(obj[:-5],
"%Y-%m-%dT%H:%M:%S.%f")
instance = objClass()
for attr, attrType in instance.swaggerTypes.items():
lc_attr = attr[0].lower() + attr[1:]
uc_attr = attr[0].upper() + attr[1:]
real_attr = None
if attr in obj:
real_attr = attr
elif lc_attr in obj:
real_attr = lc_attr
elif uc_attr in obj:
real_attr = uc_attr
if real_attr != None:
value = obj[real_attr]
if not value:
setattr(instance, real_attr, None)
elif attrType in ['str', 'int', 'long', 'float', 'bool']:
attrType = eval(attrType)
try:
value = attrType(value)
except UnicodeEncodeError:
value = str(value)
setattr(instance, real_attr, value)
elif 'list[' in attrType:
match = re.match('list\[(.*)\]', attrType)
subClass = match.group(1)
subValues = []
for subValue in value:
subValues.append(self.deserialize(subValue,
subClass))
setattr(instance, real_attr, subValues)
else:
setattr(instance, real_attr, self.deserialize(value,
attrType))
return instance
@staticmethod
def encodeURI(url):
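        # quote() also escapes '%' itself (as '%25'); undoing that keeps any
        # percent-escapes already present in the caller's URL intact.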
encoded = urllib.parse.quote(url, safe='~@#$&()*!=:;,.?/\'').replace("%25", "%")
return encoded
@staticmethod
def encodeURIComponent(url):
return urllib.parse.quote(url, safe='~()*!.\'')
@staticmethod
def readAsDataURL(filePath):
mimetype = mimetypes.guess_type(filePath, False)[0] or "application/octet-stream"
filecontents = open(filePath, 'rb').read()
return 'data:' + mimetype + ';base64,' + base64.b64encode(filecontents).decode()
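    # For illustration only (the path below is hypothetical):
    #     ApiClient.readAsDataURL('/tmp/report.pdf')
    #     -> 'data:application/pdf;base64,JVBERi0x...'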
class MethodRequest(urllib.request.Request):
def __init__(self, *args, **kwargs):
"""Construct a MethodRequest. Usage is the same as for
`urllib.Request` except it also takes an optional `method`
keyword argument. If supplied, `method` will be used instead of
the default."""
if 'method' in kwargs:
self.method = kwargs.pop('method')
return urllib.request.Request.__init__(self, *args, **kwargs)
def get_method(self):
return getattr(self, 'method', urllib.request.Request.get_method(self))
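# For illustration only (the URL is hypothetical): MethodRequest behaves like
# urllib.request.Request but honours the extra keyword, e.g.
#     MethodRequest(url='http://example.com/item/1', method='DELETE')
# issues a DELETE instead of the default GET/POST.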
class ApiException(Exception):
def __init__(self, code, *args):
super(Exception, self).__init__((code, ) + args)
self.code = code
|
liosha2007/temporary-groupdocs-python3-sdk
|
groupdocs/ApiClient.py
|
Python
|
apache-2.0
| 11,500 | 0.004609 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Pexego Sistemas Informáticos (http://www.pexego.es) All Rights Reserved
# $Jesús Ventosinos Mayor$
# $Javier Colmenero Fernández$
# Copyright (c) 2014 Didotech srl (info at didotech.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime
from openerp.tools.translate import _
from openerp.osv import orm, fields
class project_task_history(orm.Model):
_inherit = "project.task.history"
_columns = {
'state': fields.selection([
('draft', 'New'),
('open', 'In Progress'),
('pending', 'Pending'),
('done', 'Done'),
('working', 'Working'),
('cancelled', 'Cancelled')
], 'State', readonly=True, required=True)
}
class time_control_user_task(orm.Model):
_name = 'time.control.user.task'
_columns = {
'user': fields.many2one('res.users', 'user'),
'work_start': fields.datetime('Work start'),
'work_end': fields.datetime('Work end'),
'started_task': fields.many2one('project.task', 'Started task')
}
class project_task(orm.Model):
_inherit = "project.task"
def _get_users_working(self, cr, uid, ids, field_name, args, context=None):
if context is None:
context = {}
res = {}
user_task_obj = self.pool["time.control.user.task"]
for task in self.browse(cr, uid, ids, context):
stream = ''
user_ids = []
user_task_ids = user_task_obj.search(cr, uid, [('started_task', '=', task.id)])
if user_task_ids:
for user_task in user_task_obj.browse(cr, uid, user_task_ids, context):
if user_task.user.name:
stream += user_task.user.name + u","
user_ids.append(user_task.user.id)
res[task.id] = {'working_users': stream, 'user_is_working': uid in user_ids}
else:
res[task.id] = {'working_users': '', 'user_is_working': False}
return res
_columns = {
'other_users_ids': fields.many2many('res.users', 'project_task_user_rel', 'user_id', 'task_id', 'Other users'),
'state': fields.selection([
('draft', 'New'),
('open', 'In Progress'),
('pending', 'Pending'),
('done', 'Done'),
('working', 'Working'),
('cancelled', 'Cancelled')
], 'State', readonly=True, required=True),
'working_users': fields.function(_get_users_working, method=True, string='Working users', type='char', size=255, multi=True),
'user_is_working': fields.function(_get_users_working, method=True, string='I am working', type='boolean', multi=True)
}
def stop_task(self, cr, uid, task_id, final, user_task, context=None):
if context is None:
context = {}
self.pool['time.control.user.task'].write(cr, uid, user_task.id, {'work_end': final})
context['user_id'] = uid
context['user_task_id'] = user_task.id
#Call wizard:
wizard_id = self.pool["task.time.control.confirm.wizard"].create(cr, uid, {
'task_to_start': task_id,
'user_task': user_task.id,
'started_task': user_task.started_task.id
}, context=context)
return {
'name': _("Confirm Time"),
'view_mode': 'form',
'view_id': False,
'view_type': 'form',
'res_model': 'task.time.control.confirm.wizard',
'res_id': wizard_id,
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'new',
'domain': '[]',
'context': context
}
def work_start_btn(self, cr, uid, task_ids, context):
start = datetime.now()
user_task_obj = self.pool["time.control.user.task"]
project_task_obj = self.pool['project.task']
user_task_ids = user_task_obj.search(cr, uid, [('user', '=', uid), ('started_task', 'in', task_ids)])
if user_task_ids:
user_task = user_task_obj.browse(cr, uid, user_task_ids)[0]
if user_task.started_task:
if user_task.started_task.id == task_ids[0]:
raise orm.except_orm(_("Warning !"), _("Task is alredy started."))
return self.stop_task(cr, uid, task_ids[0], start, user_task, context)
else:
task = project_task_obj.browse(cr, uid, task_ids)[0]
if task.state == 'draft':
self.do_open(cr, uid, task_ids, context)
project_task_obj.write(cr, uid, task_ids, {'state': 'working'})
user_task_obj.write(cr, uid, user_task_ids, {'work_start': start, 'started_task': task_ids[0]})
else:
task = self.pool.get('project.task').browse(cr, uid, task_ids)[0]
if task.state == 'draft':
self.do_open(cr, uid, task_ids, context)
user_task_obj.create(cr, uid, {
'user': uid,
'work_start': start,
'started_task': task_ids[0]
})
project_task_obj.write(cr, uid, task_ids, {'state': 'working'})
return True
def work_end_btn(self, cr, uid, task_ids, context):
end_datetime = datetime.now()
user_task_obj = self.pool["time.control.user.task"]
user_task_ids = user_task_obj.search(cr, uid, [('user', '=', uid), ('started_task', 'in', task_ids)])
if user_task_ids:
user_task = user_task_obj.browse(cr, uid, user_task_ids[0])
if user_task.started_task.id == task_ids[0]:
finished = self.stop_task(cr, uid, None, end_datetime, user_task, context)
if finished:
return finished
else:
raise orm.except_orm(_("Warning!"), _('Task is not init.'))
else:
raise orm.except_orm(_("Warning!"), _('Task started by another user.'))
else:
raise orm.except_orm(_("Warning!"), _('User has no opened tasks.'))
return True
class project_task_work(orm.Model):
_inherit = "project.task.work"
_columns = {
'work_start': fields.datetime('Work start'),
'work_end': fields.datetime('Work end')
}
|
odoousers2014/LibrERP
|
task_time_control/project_task.py
|
Python
|
agpl-3.0
| 7,349 | 0.004084 |
import Base
import sys
import industrial_lib
time_of_day='_night'
(landing_platform,bar,weap) = industrial_lib.MakeCorisc (time_of_day,'bases/bartender_university.py')
|
vinni-au/vega-strike
|
data/bases/university_night.py
|
Python
|
gpl-2.0
| 169 | 0.035503 |
#!/usr/bin/env python
"""
Check if an FST has only zero weights.
"""
import argparse
import fst
import sys
def main(args):
L = fst.read(args.fst_file)
for state in L:
for arc in state:
if arc.weight != fst.TropicalWeight(0.0):
sys.stderr.write(
"Nonzero weight in the fst: node {} arc {}".format(state, arc))
exit(1)
if __name__=='__main__':
parser = argparse.ArgumentParser(description="Zero the weight on all transitions in the FST")
parser.add_argument("fst_file", default='-', nargs='?')
args = parser.parse_args()
main(args)
|
rizar/attention-lvcsr
|
bin/check_all_fst_weights_are_zero.py
|
Python
|
mit
| 633 | 0.004739 |
from unittest import TestCase
from aq.sqlite_util import connect, create_table, insert_all
class TestSqliteUtil(TestCase):
def test_dict_adapter(self):
with connect(':memory:') as conn:
conn.execute('CREATE TABLE foo (foo)')
conn.execute('INSERT INTO foo (foo) VALUES (?)', ({'bar': 'blah'},))
values = conn.execute('SELECT * FROM foo').fetchone()
self.assertEqual(len(values), 1)
self.assertEqual(values[0], '{"bar": "blah"}')
def test_create_table(self):
with connect(':memory:') as conn:
create_table(conn, None, 'foo', ('col1', 'col2'))
tables = conn.execute("PRAGMA table_info(\'foo\')").fetchall()
self.assertEqual(len(tables), 2)
self.assertEqual(tables[0][1], 'col1')
self.assertEqual(tables[1][1], 'col2')
def test_insert_all(self):
class Foo(object):
def __init__(self, c1, c2):
self.c1 = c1
self.c2 = c2
columns = ('c1', 'c2')
values = (Foo(1, 2), Foo(3, 4))
with connect(':memory:') as conn:
create_table(conn, None, 'foo', columns)
insert_all(conn, None, 'foo', columns, values)
rows = conn.execute('SELECT * FROM foo').fetchall()
self.assertTrue((1, 2) in rows, '(1, 2) in rows')
self.assertTrue((3, 4) in rows, '(3, 4) in rows')
def test_json_get_field(self):
with connect(':memory:') as conn:
json_obj = '{"foo": "bar"}'
query = "select json_get('{0}', 'foo')".format(json_obj)
self.assertEqual(conn.execute(query).fetchone()[0], 'bar')
def test_json_get_index(self):
with connect(':memory:') as conn:
json_obj = '[1, 2, 3]'
query = "select json_get('{0}', 1)".format(json_obj)
self.assertEqual(conn.execute(query).fetchone()[0], 2)
def test_json_get_field_nested(self):
with connect(':memory:') as conn:
json_obj = '{"foo": {"bar": "blah"}}'
query = "select json_get('{0}', 'foo')".format(json_obj)
self.assertEqual(conn.execute(query).fetchone()[0], '{"bar": "blah"}')
query = "select json_get(json_get('{0}', 'foo'), 'bar')".format(json_obj)
self.assertEqual(conn.execute(query).fetchone()[0], 'blah')
def test_json_get_field_of_null(self):
with connect(':memory:') as conn:
query = "select json_get(NULL, 'foo')"
self.assertEqual(conn.execute(query).fetchone()[0], None)
def test_json_get_field_of_serialized_null(self):
with connect(':memory:') as conn:
json_obj = 'null'
query = "select json_get('{0}', 'foo')".format(json_obj)
self.assertEqual(conn.execute(query).fetchone()[0], None)
|
lebinh/aq
|
tests/test_sqlite_util.py
|
Python
|
mit
| 2,855 | 0.001051 |
#!/usr/bin/env python2.7
import networkx as nx
import matplotlib.pyplot as plt
import sys, argparse, os, re
parser = argparse.ArgumentParser(description='''Visualizes connections in assembly
using networkx_viewer module.''',
formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=False)
#Required arguments
required = parser.add_argument_group('REQUIRED')
required.add_argument('-c', help= 'connections file', required=True, type=argparse.FileType('r'))
#Optional arguments
optional = parser.add_argument_group('OPTIONAL')
optional.add_argument('-h', action="help", help="show this help message and exit")
optional.add_argument('-o', metavar='<*.png>', type=argparse.FileType('w'))
optional.add_argument('-m', metavar='<int>', type=int, default=0)
args = parser.parse_args()
import networkx_viewer as nv
#Build the graph
G= nx.Graph()
nodes= []
edges= {}
for line in args.c:
line= line.strip().split('\t')
if 'accept' not in line or 'flanking' in line:
continue
attr= {}
#line[0]= re.search('(NODE_\d+)', line[0]).group()
#line[2]= re.search('(NODE_\d+)', line[2]).group()
if line[0]==line[2]:
nodes.append([line[0], {'self':'True', 'fill':'blue', 'direction':" ".join(line[:4]), 'count':line[4]}])
print line[0]+"\tSelf-edge"
continue
if line[0] not in nodes:
nodes.append(line[0])
if line[2] not in nodes:
nodes.append(line[2])
edge= sorted([line[0], line[2]])
lookup= "\t".join(edge)
if lookup in edges:
continue
if 'mid' in [line[1], line[3]]:
attr= {'fill':'red'}
attr['direction']= " ".join(line[:4])
attr['count']= line[4]
if int(attr['count'])<args.m:
continue
edge.append(attr)
edges[lookup]= edge
G.add_nodes_from(nodes)
G.add_edges_from(edges.values())
#Draw the graph
app= nv.Viewer(G)
app.mainloop()
|
alexherns/biotite-scripts
|
build_connection_graph.py
|
Python
|
mit
| 1,780 | 0.03427 |
import pandas as pd
import numpy as np
from sklearn import linear_model, cluster
from collections import defaultdict, Iterable
from itertools import chain, combinations
import operator
import psycopg2
conn = psycopg2.connect("dbname='postgres' user='pbailis' host='localhost'")
cur = conn.cursor()
cols = "hardware_manufacturer,hardware_model,hardware_carrier,android_fw_version,hardware_bootloader"
target = "data_count_minutes"
pred = " < 4"
limit = "LIMIT 10000"
to_select = target+","+cols
sql = """
SELECT %s FROM mapmatch_history H, sf_datasets D WHERE H.dataset_id = D.id AND %s %s %s;""" % (to_select, target, pred, limit)
print sql
cur.execute(sql)
colnames = [desc[0] for desc in cur.description]
cur_score = None
cur_rows = []
df = None
data = pd.DataFrame(cur.fetchall(), columns=colnames)
features = data.drop(target, 1)
pd.set_option('display.max_rows', len(features))
pd.set_option('expand_frame_repr', False)
dummies = pd.get_dummies(features, prefix_sep="//")
print "REGRESSION"
scores = [1./max(r[0], .0001) for r in data.itertuples()]
regr = linear_model.LinearRegression().fit(dummies, scores)
c_with_index = zip(range(0, len(regr.coef_)), regr.coef_)
c_with_index.sort(key = lambda x: x[1])
c_with_index.reverse()
MAX_PRINT = 50
for n in range(0, MAX_PRINT):
(dim, c) = c_with_index[n]
print ": ".join(dummies.columns[dim].split("//")), c
NCLUSTERS = 10#int(np.sqrt(len(data)/2))
print "K MEANS RESULTS (%d CLUSTERS):" % NCLUSTERS
km = cluster.KMeans(n_clusters=NCLUSTERS).fit(dummies)
THRESH = .5
for centerno in range(0, len(km.cluster_centers_)):
center = km.cluster_centers_[centerno]
nmatches = len([i for i in km.labels_ if i == centerno])
target_vals = [data.iloc[i, 0] for i in range(0, len(data)) if km.labels_[i] == centerno]
print "\n"
print "N: %d, Average: %f, Std.: %f" % (nmatches, float(sum(target_vals))/len(target_vals), np.std(target_vals))
for dim in range(0, len(center)):
val = center[dim]
if val > THRESH:
print ": ".join(dummies.columns[dim].split("//")), val
print "\nDBSCAN RESULTS:"
dbscan = cluster.DBSCAN(eps=pow(2, .5)).fit(dummies)
for centerno in range(0, len(dbscan.components_)):
center = dbscan.components_[centerno]
nmatches = len([i for i in dbscan.labels_ if i == centerno])
target_vals = [data.iloc[i, 0] for i in range(0, len(data)) if dbscan.labels_[i] == centerno]
if nmatches == 0:
continue
print "\n"
print "N: %d, Average: %f, Std.: %f" % (nmatches, float(sum(target_vals))/len(target_vals), np.std(target_vals))
for dim in range(0, len(center)):
val = center[dim]
if val > THRESH:
print ": ".join(dummies.columns[dim].split("//")), val
'''
birch = cluster.Birch(threshold=1).fit(dummies)
print len(birch.subcluster_centers_)
print
for center in birch.subcluster_centers_:
for dim in range(0, len(center)):
val = center[dim]
if val > THRESH:
print dim, val, dummies.columns[dim].split("//"),
'''
|
stanford-futuredata/macrobase
|
tools/py_analysis/analyze_cluster.py
|
Python
|
apache-2.0
| 3,142 | 0.005729 |
#!/usr/bin/python
import sys
import remote_core as core
import radio_lights
def main(argv):
config = core.load_config()
lights_config_names = {"1":"door_light", "2":"desk_light", "3": "shelf_light"}
if len(argv) == 1 and len(argv[0]) == 2:
if argv[0] == "an":
argv = ["1n", "2n", "3n"]
elif argv[0] == "af":
argv = ["1f", "2f", "3f"]
for item in argv:
if item[-1:] == 'n':
radio_lights.turn_on_single(config["lights"][lights_config_names[item[:1]]])
elif item[-1:] == 'f':
radio_lights.turn_off_single(config["lights"][lights_config_names[item[:1]]])
core.write_config(config)
if __name__ == "__main__":
main(sys.argv[1:])
|
sradevski/homeAutomate
|
scripts/lights_controller.py
|
Python
|
mit
| 665 | 0.040602 |
from django.test import TestCase
from .models import Profile
from django.contrib.auth.models import User
# models test
class ProfileTest(TestCase):
def setUp(self):
self.user = User.objects.create(username='user1', password='123456')
Profile.objects.create(city='noida', contact_no='1234567890')
def test_create_profile(self):
user = User.objects.get(username='user1')
profile_details = Profile.objects.get(user=user)
self.assertEqual(profile_details.city, 'noida')
|
ChillarAnand/junction
|
junction/profiles/tests.py
|
Python
|
mit
| 519 | 0 |
import os
import imp
from tinydb import TinyDB
from paths import DB_DIR
def scan_plugins_dir(plugins_dir='plugins'):
"""Scan the given dir for files matching the spec for plugin files"""
for plugin_file in os.listdir(plugins_dir):
plugin_path = os.path.join(plugins_dir, plugin_file)
if (not plugin_file.startswith('_') and
plugin_file.endswith('.py') and
os.path.isfile(plugin_path)):
yield plugin_file, plugin_path
def load_plugin(filename, path):
return imp.load_source(filename, path)
def list_plugins():
for plugin_file, plugin_path in scan_plugins_dir():
yield load_plugin(plugin_file, plugin_path)
def new_database(name):
full_path = os.path.join(DB_DIR, f'{name}.json')
return TinyDB(full_path)
|
sentriz/steely
|
steely/utils.py
|
Python
|
gpl-3.0
| 800 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.polling.base_polling import LROBasePolling
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_create_or_update_request_initial(
kql_script_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
content_type = kwargs.pop('content_type', None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/kqlScripts/{kqlScriptName}')
path_format_arguments = {
"kqlScriptName": _SERIALIZER.url("kql_script_name", kql_script_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_by_name_request(
kql_script_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/kqlScripts/{kqlScriptName}')
path_format_arguments = {
"kqlScriptName": _SERIALIZER.url("kql_script_name", kql_script_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_delete_by_name_request_initial(
kql_script_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/kqlScripts/{kqlScriptName}')
path_format_arguments = {
"kqlScriptName": _SERIALIZER.url("kql_script_name", kql_script_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_rename_request_initial(
kql_script_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
content_type = kwargs.pop('content_type', None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/kqlScripts/{kqlScriptName}/rename')
path_format_arguments = {
"kqlScriptName": _SERIALIZER.url("kql_script_name", kql_script_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
class KqlScriptOperations(object):
"""KqlScriptOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.synapse.artifacts.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
kql_script_name: str,
kql_script: "_models.KqlScriptResource",
**kwargs: Any
) -> Optional["_models.KqlScriptResource"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.KqlScriptResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(kql_script, 'KqlScriptResource')
request = build_create_or_update_request_initial(
kql_script_name=kql_script_name,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('KqlScriptResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/kqlScripts/{kqlScriptName}'} # type: ignore
@distributed_trace
def begin_create_or_update(
self,
kql_script_name: str,
kql_script: "_models.KqlScriptResource",
**kwargs: Any
) -> LROPoller["_models.KqlScriptResource"]:
"""Creates or updates a KQL Script.
:param kql_script_name: KQL script name.
:type kql_script_name: str
:param kql_script: KQL script.
:type kql_script: ~azure.synapse.artifacts.models.KqlScriptResource
:keyword api_version: Api Version. The default value is "2021-11-01-preview". Note that
overriding this default value may result in unsupported behavior.
:paramtype api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be LROBasePolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either KqlScriptResource or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.KqlScriptResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.KqlScriptResource"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
kql_script_name=kql_script_name,
kql_script=kql_script,
api_version=api_version,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('KqlScriptResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
if polling is True: polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/kqlScripts/{kqlScriptName}'} # type: ignore
@distributed_trace
def get_by_name(
self,
kql_script_name: str,
**kwargs: Any
) -> "_models.KqlScriptResource":
"""Get KQL script by name.
:param kql_script_name: KQL script name.
:type kql_script_name: str
:keyword api_version: Api Version. The default value is "2021-11-01-preview". Note that
overriding this default value may result in unsupported behavior.
:paramtype api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KqlScriptResource, or the result of cls(response)
:rtype: ~azure.synapse.artifacts.models.KqlScriptResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KqlScriptResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
request = build_get_by_name_request(
kql_script_name=kql_script_name,
api_version=api_version,
template_url=self.get_by_name.metadata['url'],
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorContract, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('KqlScriptResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_name.metadata = {'url': '/kqlScripts/{kqlScriptName}'} # type: ignore
def _delete_by_name_initial(
self,
kql_script_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
request = build_delete_by_name_request_initial(
kql_script_name=kql_script_name,
api_version=api_version,
template_url=self._delete_by_name_initial.metadata['url'],
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
_delete_by_name_initial.metadata = {'url': '/kqlScripts/{kqlScriptName}'} # type: ignore
@distributed_trace
def begin_delete_by_name(
self,
kql_script_name: str,
**kwargs: Any
) -> LROPoller[None]:
"""Delete KQL script by name.
:param kql_script_name: KQL script name.
:type kql_script_name: str
:keyword api_version: Api Version. The default value is "2021-11-01-preview". Note that
overriding this default value may result in unsupported behavior.
:paramtype api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be LROBasePolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_by_name_initial(
kql_script_name=kql_script_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
if polling is True: polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete_by_name.metadata = {'url': '/kqlScripts/{kqlScriptName}'} # type: ignore
def _rename_initial(
self,
kql_script_name: str,
new_name: Optional[str] = None,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_rename_request = _models.ArtifactRenameRequest(new_name=new_name)
_json = self._serialize.body(_rename_request, 'ArtifactRenameRequest')
request = build_rename_request_initial(
kql_script_name=kql_script_name,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._rename_initial.metadata['url'],
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
_rename_initial.metadata = {'url': '/kqlScripts/{kqlScriptName}/rename'} # type: ignore
@distributed_trace
def begin_rename(
self,
kql_script_name: str,
new_name: Optional[str] = None,
**kwargs: Any
) -> LROPoller[None]:
"""Rename KQL script.
:param kql_script_name: KQL script name.
:type kql_script_name: str
:param new_name: New name of the artifact.
:type new_name: str
:keyword api_version: Api Version. The default value is "2021-11-01-preview". Note that
overriding this default value may result in unsupported behavior.
:paramtype api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be LROBasePolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2021-11-01-preview") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._rename_initial(
kql_script_name=kql_script_name,
new_name=new_name,
api_version=api_version,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
if polling is True: polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_rename.metadata = {'url': '/kqlScripts/{kqlScriptName}/rename'} # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_kql_script_operations.py
|
Python
|
mit
| 24,579 | 0.005167 |
# portage.py -- core Portage functionality
# Copyright 1998-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
VERSION="HEAD"
# ===========================================================================
# START OF IMPORTS -- START OF IMPORTS -- START OF IMPORTS -- START OF IMPORT
# ===========================================================================
try:
import sys
import errno
if not hasattr(errno, 'ESTALE'):
# ESTALE may not be defined on some systems, such as interix.
errno.ESTALE = -1
import re
import types
import platform
# Temporarily delete these imports, to ensure that only the
# wrapped versions are imported by portage internals.
import os
del os
import shutil
del shutil
except ImportError as e:
sys.stderr.write("\n\n")
sys.stderr.write("!!! Failed to complete python imports. These are internal modules for\n")
sys.stderr.write("!!! python and failure here indicates that you have a problem with python\n")
sys.stderr.write("!!! itself and thus portage is not able to continue processing.\n\n")
sys.stderr.write("!!! You might consider starting python with verbose flags to see what has\n")
sys.stderr.write("!!! gone wrong. Here is the information we got for this exception:\n")
sys.stderr.write(" "+str(e)+"\n\n");
raise
try:
import portage.proxy.lazyimport
import portage.proxy as proxy
proxy.lazyimport.lazyimport(globals(),
'portage.cache.cache_errors:CacheError',
'portage.checksum',
'portage.checksum:perform_checksum,perform_md5,prelink_capable',
'portage.cvstree',
'portage.data',
'portage.data:lchown,ostype,portage_gid,portage_uid,secpass,' + \
'uid,userland,userpriv_groups,wheelgid',
'portage.dbapi',
'portage.dbapi.bintree:bindbapi,binarytree',
'portage.dbapi.cpv_expand:cpv_expand',
'portage.dbapi.dep_expand:dep_expand',
'portage.dbapi.porttree:close_portdbapi_caches,FetchlistDict,' + \
'portagetree,portdbapi',
'portage.dbapi.vartree:dblink,merge,unmerge,vardbapi,vartree',
'portage.dbapi.virtual:fakedbapi',
'portage.dep',
'portage.dep:best_match_to_list,dep_getcpv,dep_getkey,' + \
'flatten,get_operator,isjustname,isspecific,isvalidatom,' + \
'match_from_list,match_to_list',
'portage.dep.dep_check:dep_check,dep_eval,dep_wordreduce,dep_zapdeps',
'portage.eclass_cache',
'portage.exception',
'portage.getbinpkg',
'portage.locks',
'portage.locks:lockdir,lockfile,unlockdir,unlockfile',
'portage.mail',
'portage.manifest:Manifest',
'portage.output',
'portage.output:bold,colorize',
'portage.package.ebuild.doebuild:doebuild,' + \
'doebuild_environment,spawn,spawnebuild',
'portage.package.ebuild.config:autouse,best_from_dict,' + \
'check_config_instance,config',
'portage.package.ebuild.deprecated_profile_check:' + \
'deprecated_profile_check',
'portage.package.ebuild.digestcheck:digestcheck',
'portage.package.ebuild.digestgen:digestgen',
'portage.package.ebuild.fetch:fetch',
'portage.package.ebuild.getmaskingreason:getmaskingreason',
'portage.package.ebuild.getmaskingstatus:getmaskingstatus',
'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
'portage.process',
'portage.process:atexit_register,run_exitfuncs',
'portage.update:dep_transform,fixdbentries,grab_updates,' + \
'parse_updates,update_config_files,update_dbentries,' + \
'update_dbentry',
'portage.util',
'portage.util:atomic_ofstream,apply_secpass_permissions,' + \
'apply_recursive_permissions,dump_traceback,getconfig,' + \
'grabdict,grabdict_package,grabfile,grabfile_package,' + \
'map_dictlist_vals,new_protect_filename,normalize_path,' + \
'pickle_read,pickle_write,stack_dictlist,stack_dicts,' + \
'stack_lists,unique_array,varexpand,writedict,writemsg,' + \
'writemsg_stdout,write_atomic',
'portage.util.digraph:digraph',
'portage.util.env_update:env_update',
'portage.util.ExtractKernelVersion:ExtractKernelVersion',
'portage.util.listdir:cacheddir,listdir',
'portage.util.movefile:movefile',
'portage.util.mtimedb:MtimeDB',
'portage.versions',
'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,' + \
'cpv_getkey@getCPFromCPV,endversion_keys,' + \
'suffix_value@endversion,pkgcmp,pkgsplit,vercmp,ververify',
'portage.xpak',
'subprocess',
'time',
)
try:
from collections import OrderedDict
except ImportError:
proxy.lazyimport.lazyimport(globals(),
'portage.cache.mappings:OrderedDict')
import portage.const
from portage.const import VDB_PATH, PRIVATE_PATH, CACHE_PATH, DEPCACHE_PATH, \
USER_CONFIG_PATH, MODULES_FILE_PATH, CUSTOM_PROFILE_PATH, PORTAGE_BASE_PATH, \
PORTAGE_BIN_PATH, PORTAGE_PYM_PATH, PROFILE_PATH, LOCALE_DATA_PATH, \
EBUILD_SH_BINARY, SANDBOX_BINARY, BASH_BINARY, \
MOVE_BINARY, PRELINK_BINARY, WORLD_FILE, MAKE_CONF_FILE, MAKE_DEFAULTS_FILE, \
DEPRECATED_PROFILE_FILE, USER_VIRTUALS_FILE, EBUILD_SH_ENV_FILE, \
INVALID_ENV_FILE, CUSTOM_MIRRORS_FILE, CONFIG_MEMORY_FILE,\
INCREMENTALS, EAPI, MISC_SH_BINARY, REPO_NAME_LOC, REPO_NAME_FILE
except ImportError as e:
sys.stderr.write("\n\n")
sys.stderr.write("!!! Failed to complete portage imports. There are internal modules for\n")
sys.stderr.write("!!! portage and failure here indicates that you have a problem with your\n")
sys.stderr.write("!!! installation of portage. Please try a rescue portage located in the\n")
sys.stderr.write("!!! portage tree under '/usr/portage/sys-apps/portage/files/' (default).\n")
sys.stderr.write("!!! There is a README.RESCUE file that details the steps required to perform\n")
sys.stderr.write("!!! a recovery of portage.\n")
sys.stderr.write(" "+str(e)+"\n\n")
raise
if sys.hexversion >= 0x3000000:
basestring = str
long = int
# We use utf_8 encoding everywhere. Previously, we used
# sys.getfilesystemencoding() for the 'merge' encoding, but that had
# various problems:
#
# 1) If the locale is ever changed then it can cause orphan files due
# to changed character set translation.
#
# 2) Ebuilds typically install files with utf_8 encoded file names,
# and then portage would be forced to rename those files to match
# sys.getfilesystemencoding(), possibly breaking things.
#
# 3) Automatic translation between encodings can lead to nonsensical
# file names when the source encoding is unknown by portage.
#
# 4) It's inconvenient for ebuilds to convert the encodings of file
# names to match the current locale, and upstreams typically encode
# file names with utf_8 encoding.
#
# So, instead of relying on sys.getfilesystemencoding(), we avoid the above
# problems by using a constant utf_8 'merge' encoding for all locales, as
# discussed in bug #382199 and bug #381509.
_encodings = {
'content' : 'utf_8',
'fs' : 'utf_8',
'merge' : 'utf_8',
'repo.content' : 'utf_8',
'stdio' : 'utf_8',
}
if sys.hexversion >= 0x3000000:
def _unicode_encode(s, encoding=_encodings['content'], errors='backslashreplace'):
if isinstance(s, str):
s = s.encode(encoding, errors)
return s
def _unicode_decode(s, encoding=_encodings['content'], errors='replace'):
if isinstance(s, bytes):
s = str(s, encoding=encoding, errors=errors)
return s
else:
def _unicode_encode(s, encoding=_encodings['content'], errors='backslashreplace'):
if isinstance(s, unicode):
s = s.encode(encoding, errors)
return s
def _unicode_decode(s, encoding=_encodings['content'], errors='replace'):
if isinstance(s, bytes):
s = unicode(s, encoding=encoding, errors=errors)
return s
class _unicode_func_wrapper(object):
"""
Wraps a function, converts arguments from unicode to bytes,
and return values to unicode from bytes. Function calls
will raise UnicodeEncodeError if an argument fails to be
encoded with the required encoding. Return values that
are single strings are decoded with errors='replace'. Return
values that are lists of strings are decoded with errors='strict'
and elements that fail to be decoded are omitted from the returned
list.
"""
__slots__ = ('_func', '_encoding')
def __init__(self, func, encoding=_encodings['fs']):
self._func = func
self._encoding = encoding
def __call__(self, *args, **kwargs):
encoding = self._encoding
wrapped_args = [_unicode_encode(x, encoding=encoding, errors='strict')
for x in args]
if kwargs:
wrapped_kwargs = dict(
(k, _unicode_encode(v, encoding=encoding, errors='strict'))
for k, v in kwargs.items())
else:
wrapped_kwargs = {}
rval = self._func(*wrapped_args, **wrapped_kwargs)
# Don't use isinstance() since we don't want to convert subclasses
# of tuple such as posix.stat_result in Python >=3.2.
if rval.__class__ in (list, tuple):
decoded_rval = []
for x in rval:
try:
x = _unicode_decode(x, encoding=encoding, errors='strict')
except UnicodeDecodeError:
pass
else:
decoded_rval.append(x)
if isinstance(rval, tuple):
rval = tuple(decoded_rval)
else:
rval = decoded_rval
else:
rval = _unicode_decode(rval, encoding=encoding, errors='replace')
return rval
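# Illustrative note (not from the upstream source): wrapping a bytes-level
# function, e.g. readlink = _unicode_func_wrapper(_os.readlink), lets callers
# pass and receive str objects while the wrapped call still operates on names
# encoded with the configured filesystem encoding.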
class _unicode_module_wrapper(object):
"""
Wraps a module and wraps all functions with _unicode_func_wrapper.
"""
__slots__ = ('_mod', '_encoding', '_overrides', '_cache')
def __init__(self, mod, encoding=_encodings['fs'], overrides=None, cache=True):
object.__setattr__(self, '_mod', mod)
object.__setattr__(self, '_encoding', encoding)
object.__setattr__(self, '_overrides', overrides)
if cache:
cache = {}
else:
cache = None
object.__setattr__(self, '_cache', cache)
def __getattribute__(self, attr):
cache = object.__getattribute__(self, '_cache')
if cache is not None:
result = cache.get(attr)
if result is not None:
return result
result = getattr(object.__getattribute__(self, '_mod'), attr)
encoding = object.__getattribute__(self, '_encoding')
overrides = object.__getattribute__(self, '_overrides')
override = None
if overrides is not None:
override = overrides.get(id(result))
if override is not None:
result = override
elif isinstance(result, type):
pass
elif type(result) is types.ModuleType:
result = _unicode_module_wrapper(result,
encoding=encoding, overrides=overrides)
elif hasattr(result, '__call__'):
result = _unicode_func_wrapper(result, encoding=encoding)
if cache is not None:
cache[attr] = result
return result
import os as _os
_os_overrides = {
id(_os.fdopen) : _os.fdopen,
id(_os.mkfifo) : _os.mkfifo,
id(_os.popen) : _os.popen,
id(_os.read) : _os.read,
id(_os.system) : _os.system,
}
if hasattr(_os, 'statvfs'):
_os_overrides[id(_os.statvfs)] = _os.statvfs
os = _unicode_module_wrapper(_os, overrides=_os_overrides,
encoding=_encodings['fs'])
_os_merge = _unicode_module_wrapper(_os,
encoding=_encodings['merge'], overrides=_os_overrides)
import shutil as _shutil
shutil = _unicode_module_wrapper(_shutil, encoding=_encodings['fs'])
# Imports below this point rely on the above unicode wrapper definitions.
try:
__import__('selinux')
import portage._selinux
selinux = _unicode_module_wrapper(_selinux,
encoding=_encodings['fs'])
_selinux_merge = _unicode_module_wrapper(_selinux,
encoding=_encodings['merge'])
except (ImportError, OSError) as e:
if isinstance(e, OSError):
sys.stderr.write("!!! SELinux not loaded: %s\n" % str(e))
del e
_selinux = None
selinux = None
_selinux_merge = None
# ===========================================================================
# END OF IMPORTS -- END OF IMPORTS -- END OF IMPORTS -- END OF IMPORTS -- END
# ===========================================================================
_python_interpreter = os.path.realpath(sys.executable)
_bin_path = PORTAGE_BIN_PATH
_pym_path = PORTAGE_PYM_PATH
def _shell_quote(s):
"""
Quote a string in double-quotes and use backslashes to
escape any backslashes, double-quotes, dollar signs, or
backquotes in the string.
"""
for letter in "\\\"$`":
if letter in s:
s = s.replace(letter, "\\" + letter)
return "\"%s\"" % s
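# Illustrative example (not from the upstream source):
# _shell_quote('a "b" $c') returns the text "a \"b\" \$c", i.e. the input
# wrapped in double quotes with ", $, ` and \ each escaped by a backslash.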
bsd_chflags = None
if platform.system() in ('FreeBSD',):
class bsd_chflags(object):
@classmethod
def chflags(cls, path, flags, opts=""):
cmd = ['chflags']
if opts:
cmd.append(opts)
cmd.append('%o' % (flags,))
cmd.append(path)
encoding = _encodings['fs']
if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
# Python 3.1 does not support bytes in Popen args.
cmd = [_unicode_encode(x, encoding=encoding, errors='strict')
for x in cmd]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output = proc.communicate()[0]
status = proc.wait()
if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
return
# Try to generate an ENOENT error if appropriate.
if 'h' in opts:
_os_merge.lstat(path)
else:
_os_merge.stat(path)
# Make sure the binary exists.
if not portage.process.find_binary('chflags'):
raise portage.exception.CommandNotFound('chflags')
# Now we're not sure exactly why it failed or what
# the real errno was, so just report EPERM.
output = _unicode_decode(output, encoding=encoding)
e = OSError(errno.EPERM, output)
e.errno = errno.EPERM
e.filename = path
e.message = output
raise e
@classmethod
def lchflags(cls, path, flags):
return cls.chflags(path, flags, opts='-h')
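# Illustrative note (not from the upstream source): on FreeBSD this class
# stands in for os.chflags()/os.lchflags(); for example
# bsd_chflags.lchflags(path, 0) clears all file flags by running the
# chflags(1) binary and translating a failure into an OSError.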
def load_mod(name):
modname = ".".join(name.split(".")[:-1])
mod = __import__(modname)
components = name.split('.')
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
def getcwd():
"this fixes situations where the current directory doesn't exist"
try:
return os.getcwd()
except OSError: #dir doesn't exist
os.chdir("/")
return "/"
getcwd()
def abssymlink(symlink, target=None):
	"Read a symlink, resolve a relative target against the link's directory, and return the absolute path."
if target is not None:
mylink = target
else:
mylink = os.readlink(symlink)
if mylink[0] != '/':
mydir=os.path.dirname(symlink)
mylink=mydir+"/"+mylink
return os.path.normpath(mylink)
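# Illustrative example (not from the upstream source): if /usr/bin/foo is a
# symlink pointing at "../lib/foo", abssymlink('/usr/bin/foo') joins the
# target with the link's directory and returns '/usr/lib/foo'.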
_doebuild_manifest_exempt_depend = 0
_testing_eapis = frozenset(["4-python", "4-slot-abi", "5-progress"])
_deprecated_eapis = frozenset(["4_pre1", "3_pre2", "3_pre1", "5_pre1", "5_pre2"])
def _eapi_is_deprecated(eapi):
return eapi in _deprecated_eapis
def eapi_is_supported(eapi):
if not isinstance(eapi, basestring):
# Only call str() when necessary since with python2 it
# can trigger UnicodeEncodeError if EAPI is corrupt.
eapi = str(eapi)
eapi = eapi.strip()
if _eapi_is_deprecated(eapi):
return True
if eapi in _testing_eapis:
return True
try:
eapi = int(eapi)
except ValueError:
eapi = -1
if eapi < 0:
return False
return eapi <= portage.const.EAPI
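# Illustrative examples (not from the upstream source):
# eapi_is_supported('0') and eapi_is_supported('5-progress') are both True
# (numeric EAPIs up to portage.const.EAPI plus the testing/deprecated sets),
# while an unrecognized value such as 'not-an-eapi' is False.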
# This pattern is specified by PMS section 7.3.1.
_pms_eapi_re = re.compile(r"^[ \t]*EAPI=(['\"]?)([A-Za-z0-9+_.-]*)\1[ \t]*([ \t]#.*)?$")
_comment_or_blank_line = re.compile(r"^\s*(#.*)?$")
def _parse_eapi_ebuild_head(f):
eapi = None
eapi_lineno = None
lineno = 0
for line in f:
lineno += 1
m = _comment_or_blank_line.match(line)
if m is None:
eapi_lineno = lineno
m = _pms_eapi_re.match(line)
if m is not None:
eapi = m.group(2)
break
return (eapi, eapi_lineno)
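# Illustrative example (not from the upstream source): for an ebuild whose
# first non-blank, non-comment line is 'EAPI=5', _parse_eapi_ebuild_head()
# returns ('5', <that line's number>); if that line does not assign EAPI the
# result is (None, <line number>), because scanning stops at the first
# non-comment line as PMS requires.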
def _movefile(src, dest, **kwargs):
"""Calls movefile and raises a PortageException if an error occurs."""
if movefile(src, dest, **kwargs) is None:
raise portage.exception.PortageException(
"mv '%s' '%s'" % (src, dest))
auxdbkeys = (
'DEPEND', 'RDEPEND', 'SLOT', 'SRC_URI',
'RESTRICT', 'HOMEPAGE', 'LICENSE', 'DESCRIPTION',
'KEYWORDS', 'INHERITED', 'IUSE', 'REQUIRED_USE',
'PDEPEND', 'PROVIDE', 'EAPI',
'PROPERTIES', 'DEFINED_PHASES', 'UNUSED_05', 'UNUSED_04',
'UNUSED_03', 'UNUSED_02', 'UNUSED_01',
)
auxdbkeylen=len(auxdbkeys)
def portageexit():
close_portdbapi_caches()
class _trees_dict(dict):
__slots__ = ('_running_eroot', '_target_eroot',)
def __init__(self, *pargs, **kargs):
dict.__init__(self, *pargs, **kargs)
self._running_eroot = None
self._target_eroot = None
def create_trees(config_root=None, target_root=None, trees=None, env=None,
eprefix=None):
if trees is not None:
# clean up any existing portdbapi instances
for myroot in trees:
portdb = trees[myroot]["porttree"].dbapi
portdb.close_caches()
portdbapi.portdbapi_instances.remove(portdb)
del trees[myroot]["porttree"], myroot, portdb
if trees is None:
trees = _trees_dict()
elif not isinstance(trees, _trees_dict):
# caller passed a normal dict or something,
# but we need a _trees_dict instance
trees = _trees_dict(trees)
if env is None:
env = os.environ
settings = config(config_root=config_root, target_root=target_root,
env=env, eprefix=eprefix)
settings.lock()
trees._target_eroot = settings['EROOT']
myroots = [(settings['EROOT'], settings)]
if settings["ROOT"] == "/":
trees._running_eroot = trees._target_eroot
else:
# When ROOT != "/" we only want overrides from the calling
# environment to apply to the config that's associated
# with ROOT != "/", so pass a nearly empty dict for the env parameter.
clean_env = {}
for k in ('PATH', 'PORTAGE_GRPNAME', 'PORTAGE_USERNAME',
'SSH_AGENT_PID', 'SSH_AUTH_SOCK', 'TERM',
'ftp_proxy', 'http_proxy', 'no_proxy',
'__PORTAGE_TEST_HARDLINK_LOCKS'):
v = settings.get(k)
if v is not None:
clean_env[k] = v
settings = config(config_root=None, target_root="/",
env=clean_env, eprefix=eprefix)
settings.lock()
trees._running_eroot = settings['EROOT']
myroots.append((settings['EROOT'], settings))
for myroot, mysettings in myroots:
trees[myroot] = portage.util.LazyItemsDict(trees.get(myroot, {}))
trees[myroot].addLazySingleton("virtuals", mysettings.getvirtuals)
trees[myroot].addLazySingleton(
"vartree", vartree, categories=mysettings.categories,
settings=mysettings)
trees[myroot].addLazySingleton("porttree",
portagetree, settings=mysettings)
trees[myroot].addLazySingleton("bintree",
binarytree, pkgdir=mysettings["PKGDIR"], settings=mysettings)
return trees
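# Illustrative usage (not from the upstream source): callers typically build
# the trees once, e.g. trees = create_trees(), and then access
# trees[trees._target_eroot]['porttree'].dbapi; the 'vartree', 'porttree' and
# 'bintree' entries above are only instantiated on first access via
# LazyItemsDict.addLazySingleton().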
if VERSION == 'HEAD':
class _LazyVersion(proxy.objectproxy.ObjectProxy):
def _get_target(self):
global VERSION
if VERSION is not self:
return VERSION
if os.path.isdir(os.path.join(PORTAGE_BASE_PATH, '.git')):
encoding = _encodings['fs']
cmd = [BASH_BINARY, "-c", ("cd %s ; git describe --tags || exit $? ; " + \
"if [ -n \"`git diff-index --name-only --diff-filter=M HEAD`\" ] ; " + \
"then echo modified ; git rev-list --format=%%ct -n 1 HEAD ; fi ; " + \
"exit 0") % _shell_quote(PORTAGE_BASE_PATH)]
if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
# Python 3.1 does not support bytes in Popen args.
cmd = [_unicode_encode(x, encoding=encoding, errors='strict')
for x in cmd]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output = _unicode_decode(proc.communicate()[0], encoding=encoding)
status = proc.wait()
if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
output_lines = output.splitlines()
if output_lines:
version_split = output_lines[0].split('-')
if version_split:
VERSION = version_split[0].lstrip('v')
patchlevel = False
if len(version_split) > 1:
patchlevel = True
VERSION = "%s_p%s" %(VERSION, version_split[1])
if len(output_lines) > 1 and output_lines[1] == 'modified':
head_timestamp = None
if len(output_lines) > 3:
try:
head_timestamp = long(output_lines[3])
except ValueError:
pass
timestamp = long(time.time())
if head_timestamp is not None and timestamp > head_timestamp:
timestamp = timestamp - head_timestamp
if not patchlevel:
VERSION = "%s_p0" % (VERSION,)
VERSION = "%s_p%d" % (VERSION, timestamp)
return VERSION
VERSION = 'HEAD'
return VERSION
VERSION = _LazyVersion()
if "_legacy_globals_constructed" in globals():
# The module has been reloaded, so perform any relevant cleanup
# and prevent memory leaks.
if "db" in _legacy_globals_constructed:
try:
db
except NameError:
pass
else:
if isinstance(db, dict) and db:
for _x in db.values():
try:
if "porttree" in _x.lazy_items:
continue
except (AttributeError, TypeError):
continue
try:
_x = _x["porttree"].dbapi
except (AttributeError, KeyError):
continue
if not isinstance(_x, portdbapi):
continue
_x.close_caches()
try:
portdbapi.portdbapi_instances.remove(_x)
except ValueError:
pass
del _x
class _LegacyGlobalProxy(proxy.objectproxy.ObjectProxy):
__slots__ = ('_name',)
def __init__(self, name):
proxy.objectproxy.ObjectProxy.__init__(self)
object.__setattr__(self, '_name', name)
def _get_target(self):
name = object.__getattribute__(self, '_name')
from portage._legacy_globals import _get_legacy_global
return _get_legacy_global(name)
_legacy_global_var_names = ("archlist", "db", "features",
"groups", "mtimedb", "mtimedbfile", "pkglines",
"portdb", "profiledir", "root", "selinux_enabled",
"settings", "thirdpartymirrors")
for k in _legacy_global_var_names:
globals()[k] = _LegacyGlobalProxy(k)
del k
_legacy_globals_constructed = set()
def _disable_legacy_globals():
"""
This deletes the ObjectProxy instances that are used
for lazy initialization of legacy global variables.
The purpose of deleting them is to prevent new code
from referencing these deprecated variables.
"""
global _legacy_global_var_names
for k in _legacy_global_var_names:
globals().pop(k, None)
| devurandom/portage | pym/portage/__init__.py | Python | gpl-2.0 | 21,844 | 0.031038 |
"""
This module implements the FormRequest class which is a more convenient class
(than Request) to generate Requests based on form data.
See documentation in docs/topics/request-response.rst
"""
import six
from six.moves.urllib.parse import urljoin, urlencode
import lxml.html
from parsel.selector import create_root_node
from w3lib.html import strip_html5_whitespace
from scrapy.http.request import Request
from scrapy.utils.python import to_bytes, is_listlike
from scrapy.utils.response import get_base_url
class FormRequest(Request):
def __init__(self, *args, **kwargs):
formdata = kwargs.pop('formdata', None)
if formdata and kwargs.get('method') is None:
kwargs['method'] = 'POST'
super(FormRequest, self).__init__(*args, **kwargs)
if formdata:
items = formdata.items() if isinstance(formdata, dict) else formdata
querystr = _urlencode(items, self.encoding)
if self.method == 'POST':
self.headers.setdefault(b'Content-Type', b'application/x-www-form-urlencoded')
self._set_body(querystr)
else:
self._set_url(self.url + ('&' if '?' in self.url else '?') + querystr)
@classmethod
def from_response(cls, response, formname=None, formid=None, formnumber=0, formdata=None,
clickdata=None, dont_click=False, formxpath=None, formcss=None, **kwargs):
kwargs.setdefault('encoding', response.encoding)
if formcss is not None:
from parsel.csstranslator import HTMLTranslator
formxpath = HTMLTranslator().css_to_xpath(formcss)
form = _get_form(response, formname, formid, formnumber, formxpath)
formdata = _get_inputs(form, formdata, dont_click, clickdata, response)
url = _get_form_url(form, kwargs.pop('url', None))
method = kwargs.pop('method', form.method)
return cls(url=url, method=method, formdata=formdata, **kwargs)
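# Illustrative usage (not from the upstream source): in a spider callback a
# login form can typically be submitted with
#   FormRequest.from_response(response,
#                             formdata={'user': 'u', 'pass': 'p'},
#                             callback=self.after_login)
# where the 'user'/'pass' field names are hypothetical and depend on the page.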
def _get_form_url(form, url):
if url is None:
action = form.get('action')
if action is None:
return form.base_url
return urljoin(form.base_url, strip_html5_whitespace(action))
return urljoin(form.base_url, url)
def _urlencode(seq, enc):
values = [(to_bytes(k, enc), to_bytes(v, enc))
for k, vs in seq
for v in (vs if is_listlike(vs) else [vs])]
return urlencode(values, doseq=1)
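# Illustrative example (not from the upstream source):
# _urlencode([('a', '1'), ('b', ['2', '3'])], 'utf-8') == 'a=1&b=2&b=3';
# list-like values are expanded into repeated keys before urlencode() runs.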
def _get_form(response, formname, formid, formnumber, formxpath):
"""Find the form element """
root = create_root_node(response.text, lxml.html.HTMLParser,
base_url=get_base_url(response))
forms = root.xpath('//form')
if not forms:
raise ValueError("No <form> element found in %s" % response)
if formname is not None:
f = root.xpath('//form[@name="%s"]' % formname)
if f:
return f[0]
if formid is not None:
f = root.xpath('//form[@id="%s"]' % formid)
if f:
return f[0]
    # Get the form element via xpath; if the matched node is not a form, walk up to its ancestors
if formxpath is not None:
nodes = root.xpath(formxpath)
if nodes:
el = nodes[0]
while True:
if el.tag == 'form':
return el
el = el.getparent()
if el is None:
break
encoded = formxpath if six.PY3 else formxpath.encode('unicode_escape')
raise ValueError('No <form> element found with %s' % encoded)
    # If we get here, it means that either formname/formid was not given
    # or did not match any form, so fall back to selecting by formnumber
if formnumber is not None:
try:
form = forms[formnumber]
except IndexError:
raise IndexError("Form number %d not found in %s" %
(formnumber, response))
else:
return form
def _get_inputs(form, formdata, dont_click, clickdata, response):
try:
formdata_keys = dict(formdata or ()).keys()
except (ValueError, TypeError):
raise ValueError('formdata should be a dict or iterable of tuples')
if not formdata:
formdata = ()
inputs = form.xpath('descendant::textarea'
'|descendant::select'
'|descendant::input[not(@type) or @type['
' not(re:test(., "^(?:submit|image|reset)$", "i"))'
' and (../@checked or'
' not(re:test(., "^(?:checkbox|radio)$", "i")))]]',
namespaces={
"re": "http://exslt.org/regular-expressions"})
values = [(k, u'' if v is None else v)
for k, v in (_value(e) for e in inputs)
if k and k not in formdata_keys]
if not dont_click:
clickable = _get_clickable(clickdata, form)
        if clickable and clickable[0] not in formdata and clickable[0] is not None:
values.append(clickable)
if isinstance(formdata, dict):
formdata = formdata.items()
values.extend((k, v) for k, v in formdata if v is not None)
return values
def _value(ele):
n = ele.name
v = ele.value
if ele.tag == 'select':
return _select_value(ele, n, v)
return n, v
def _select_value(ele, n, v):
multiple = ele.multiple
if v is None and not multiple:
# Match browser behaviour on simple select tag without options selected
        # And for select tags without options
o = ele.value_options
return (n, o[0]) if o else (None, None)
elif v is not None and multiple:
        # This is a workaround for a bug in lxml, fixed in lxml 2.3.1
        # fix: https://github.com/lxml/lxml/commit/57f49eed82068a20da3db8f1b18ae00c1bab8b12#L1L1139
selected_options = ele.xpath('.//option[@selected]')
v = [(o.get('value') or o.text or u'').strip() for o in selected_options]
return n, v
def _get_clickable(clickdata, form):
"""
Returns the clickable element specified in clickdata,
if the latter is given. If not, it returns the first
clickable element found
"""
clickables = [
el for el in form.xpath(
'descendant::input[re:test(@type, "^(submit|image)$", "i")]'
'|descendant::button[not(@type) or re:test(@type, "^submit$", "i")]',
namespaces={"re": "http://exslt.org/regular-expressions"})
]
if not clickables:
return
# If we don't have clickdata, we just use the first clickable element
if clickdata is None:
el = clickables[0]
return (el.get('name'), el.get('value') or '')
# If clickdata is given, we compare it to the clickable elements to find a
# match. We first look to see if the number is specified in clickdata,
# because that uniquely identifies the element
nr = clickdata.get('nr', None)
if nr is not None:
try:
el = list(form.inputs)[nr]
except IndexError:
pass
else:
return (el.get('name'), el.get('value') or '')
# We didn't find it, so now we build an XPath expression out of the other
# arguments, because they can be used as such
xpath = u'.//*' + \
u''.join(u'[@%s="%s"]' % c for c in six.iteritems(clickdata))
el = form.xpath(xpath)
if len(el) == 1:
return (el[0].get('name'), el[0].get('value') or '')
elif len(el) > 1:
raise ValueError("Multiple elements found (%r) matching the criteria "
"in clickdata: %r" % (el, clickdata))
else:
raise ValueError('No clickable element matching clickdata: %r' % (clickdata,))
| Ryezhang/scrapy | scrapy/http/request/form.py | Python | bsd-3-clause | 7,658 | 0.001306 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'FormText'
db.create_table(u'landing_formtext', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=50)),
('text', self.gf('django.db.models.fields.TextField')(max_length=200)),
))
db.send_create_signal(u'landing', ['FormText'])
def backwards(self, orm):
# Deleting model 'FormText'
db.delete_table(u'landing_formtext')
models = {
u'landing.formtext': {
'Meta': {'object_name': 'FormText'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'max_length': '200'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'landing.function': {
'Meta': {'object_name': 'Function'},
'description': ('django.db.models.fields.TextField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'landing.landingregister': {
'Meta': {'object_name': 'LandingRegister'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '250'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
u'landing.mainimage': {
'Description': ('django.db.models.fields.TextField', [], {'max_length': '200'}),
'Meta': {'object_name': 'MainImage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'landing.product': {
'Meta': {'object_name': 'Product'},
'description': ('django.db.models.fields.TextField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'landing.slogan': {
'Meta': {'object_name': 'Slogan'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slogan': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'landing.testimonial': {
'Meta': {'object_name': 'Testimonial'},
'description': ('django.db.models.fields.TextField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['landing']
| andresfcardenas/marketing-platform | landing/migrations/0003_auto__add_formtext.py | Python | bsd-3-clause | 3,656 | 0.008206 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ads.googleads.v8.resources.types import (
product_bidding_category_constant,
)
from google.ads.googleads.v8.services.types import (
product_bidding_category_constant_service,
)
from .transports.base import (
ProductBiddingCategoryConstantServiceTransport,
DEFAULT_CLIENT_INFO,
)
from .transports.grpc import ProductBiddingCategoryConstantServiceGrpcTransport
class ProductBiddingCategoryConstantServiceClientMeta(type):
"""Metaclass for the ProductBiddingCategoryConstantService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[ProductBiddingCategoryConstantServiceTransport]]
_transport_registry[
"grpc"
] = ProductBiddingCategoryConstantServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[ProductBiddingCategoryConstantServiceTransport]:
"""Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class ProductBiddingCategoryConstantServiceClient(
metaclass=ProductBiddingCategoryConstantServiceClientMeta
):
"""Service to fetch Product Bidding Categories."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
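    # Illustrative example (not part of the generated source):
    # _get_default_mtls_endpoint("googleads.googleapis.com") returns
    # "googleads.mtls.googleapis.com"; endpoints that are already mTLS or do
    # not end in .googleapis.com are returned unchanged.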
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ProductBiddingCategoryConstantServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ProductBiddingCategoryConstantServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> ProductBiddingCategoryConstantServiceTransport:
"""Return the transport used by the client instance.
Returns:
ProductBiddingCategoryConstantServiceTransport: The transport used by the client instance.
"""
return self._transport
@staticmethod
def product_bidding_category_constant_path(
country_code: str, level: str, id: str,
) -> str:
"""Return a fully-qualified product_bidding_category_constant string."""
return "productBiddingCategoryConstants/{country_code}~{level}~{id}".format(
country_code=country_code, level=level, id=id,
)
@staticmethod
def parse_product_bidding_category_constant_path(
path: str,
) -> Dict[str, str]:
"""Parse a product_bidding_category_constant path into its component segments."""
m = re.match(
r"^productBiddingCategoryConstants/(?P<country_code>.+?)~(?P<level>.+?)~(?P<id>.+?)$",
path,
)
return m.groupdict() if m else {}
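    # Illustrative example (not part of the generated source):
    # product_bidding_category_constant_path("US", "LEVEL1", "123") builds
    # "productBiddingCategoryConstants/US~LEVEL1~123", and the parse method
    # above maps that string back to
    # {"country_code": "US", "level": "LEVEL1", "id": "123"}.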
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
    def parse_common_organization_path(path: str) -> Dict[str, str]:
        """Parse an organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Return a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Return a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[
str, ProductBiddingCategoryConstantServiceTransport, None
] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the product bidding category constant service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.ProductBiddingCategoryConstantServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(
util.strtobool(
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
)
)
ssl_credentials = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
import grpc # type: ignore
cert, key = client_options.client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
is_mtls = True
else:
creds = SslCredentials()
is_mtls = creds.is_mtls
ssl_credentials = creds.ssl_credentials if is_mtls else None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(
transport, ProductBiddingCategoryConstantServiceTransport
):
# transport is a ProductBiddingCategoryConstantServiceTransport instance.
if credentials:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
self._transport = transport
elif isinstance(transport, str):
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials, host=self.DEFAULT_ENDPOINT
)
else:
self._transport = ProductBiddingCategoryConstantServiceGrpcTransport(
credentials=credentials,
host=api_endpoint,
ssl_channel_credentials=ssl_credentials,
client_info=client_info,
)
def get_product_bidding_category_constant(
self,
request: product_bidding_category_constant_service.GetProductBiddingCategoryConstantRequest = None,
*,
resource_name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> product_bidding_category_constant.ProductBiddingCategoryConstant:
r"""Returns the requested Product Bidding Category in full detail.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Args:
request (:class:`google.ads.googleads.v8.services.types.GetProductBiddingCategoryConstantRequest`):
The request object. Request message for
[ProductBiddingCategoryConstantService.GetProductBiddingCategoryConstant][google.ads.googleads.v8.services.ProductBiddingCategoryConstantService.GetProductBiddingCategoryConstant].
resource_name (:class:`str`):
Required. Resource name of the
Product Bidding Category to fetch.
This corresponds to the ``resource_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v8.resources.types.ProductBiddingCategoryConstant:
A Product Bidding Category.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([resource_name]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a product_bidding_category_constant_service.GetProductBiddingCategoryConstantRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request,
product_bidding_category_constant_service.GetProductBiddingCategoryConstantRequest,
):
request = product_bidding_category_constant_service.GetProductBiddingCategoryConstantRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if resource_name is not None:
request.resource_name = resource_name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.get_product_bidding_category_constant
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("resource_name", request.resource_name),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
__all__ = ("ProductBiddingCategoryConstantServiceClient",)
| googleads/google-ads-python | google/ads/googleads/v8/services/services/product_bidding_category_constant_service/client.py | Python | apache-2.0 | 18,954 | 0.001636 |
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""URL to view mapping."""
import d1_common.utils.filesystem
import d1_common.utils.ulog
import django.urls
import django.views.static
import d1_gmn.app.views.external
import d1_gmn.app.views.get_package
import d1_gmn.app.views.gmn
import d1_gmn.app.views.internal
# from django.urls import path
# from django.views.generic import TemplateView
# Return 404 and 500 as UI page when DEBUG=False
handler404 = "d1_gmn.app.views.internal.error_404"
handler500 = "d1_gmn.app.views.internal.error_500"
urlpatterns = [
# Django's URL dispatcher does not take HTTP method into account, so in the
# cases where the DataONE REST API specifies different methods as different
# methods against the same URL, the methods are dispatched to the same view
# function, which checks the method and dispatches to the appropriate handler.
# Tier 1: Core API (MNCore)
# MNCore.ping() - GET /monitor/ping
django.urls.re_path(
r"^v[12]/monitor/ping/?$",
d1_gmn.app.views.external.get_monitor_ping,
kwargs={"allowed_method_list": ["GET"]},
name="get_monitor_ping",
),
# MNCore.getLogRecords() - GET /log
django.urls.re_path(
r"^v[12]/log/?$",
d1_gmn.app.views.external.get_log,
kwargs={"allowed_method_list": ["GET"]},
name="get_log",
),
# MNCore.getCapabilities() - GET /node
# Also available via Apache redirect from /
django.urls.re_path(
r"^v[12]/(?:node/?)?$",
d1_gmn.app.views.external.get_node,
kwargs={"allowed_method_list": ["GET"]},
name="get_node",
),
# Tier 1: Read API (MNRead)
# MNRead.get() - GET /object/{did}
django.urls.re_path(
r"^v[12]/object/(.+)$",
d1_gmn.app.views.external.dispatch_object,
kwargs={"allowed_method_list": ["GET", "HEAD", "PUT", "DELETE"]},
name="dispatch_object",
),
# MNRead.getSystemMetadata() - GET /meta/{did}
django.urls.re_path(
r"^v[12]/meta/(.+)$",
d1_gmn.app.views.external.get_meta,
kwargs={"allowed_method_list": ["GET"]},
name="get_meta",
),
# MNStorage.updateSystemMetadata() - PUT /meta
django.urls.re_path(
r"^v2/meta$",
d1_gmn.app.views.external.put_meta,
kwargs={"allowed_method_list": ["PUT"]},
name="put_meta",
),
# MNRead.describe() - HEAD /object/{did}
# (handled by object dispatcher)
# MNRead.getChecksum() - GET /checksum/{did}
django.urls.re_path(
r"^v[12]/checksum/(.+)$",
d1_gmn.app.views.external.get_checksum,
kwargs={"allowed_method_list": ["HEAD", "GET"]},
name="get_checksum",
),
# MNRead.listObjects() - GET /object
django.urls.re_path(
r"^v[12]/object/?$",
d1_gmn.app.views.external.dispatch_object_list,
kwargs={"allowed_method_list": ["GET", "POST"]},
name="dispatch_object_list",
),
# MNRead.synchronizationFailed() - POST /error
django.urls.re_path(
r"^v[12]/error/?$",
d1_gmn.app.views.external.post_error,
kwargs={"allowed_method_list": ["POST"]},
name="post_error",
),
# MNRead.getReplica() - GET /replica/{did}
django.urls.re_path(
r"^v[12]/replica/(.+)/?$",
d1_gmn.app.views.external.get_replica,
kwargs={"allowed_method_list": ["GET"]},
name="get_replica",
),
# Tier 2: Authorization API (MNAuthorization)
# MNAuthorization.isAuthorized() - GET /isAuthorized/{did}
django.urls.re_path(
r"^v[12]/isAuthorized/(.+)/?$",
d1_gmn.app.views.external.get_is_authorized,
kwargs={"allowed_method_list": ["GET"]},
name="get_is_authorized",
),
# MNStorage.systemMetadataChanged() - POST /refreshSystemMetadata/{did}
django.urls.re_path(
r"^v[12]/dirtySystemMetadata/?$",
d1_gmn.app.views.external.post_refresh_system_metadata,
kwargs={"allowed_method_list": ["POST"]},
name="post_refresh_system_metadata",
),
# Tier 3: Storage API (MNStorage)
# MNStorage.create() - POST /object
# (handled by object dispatcher)
# MNStorage.update() - PUT /object/{did}
# (handled by object dispatcher)
# MNStorage.generateIdentifier()
django.urls.re_path(
r"^v[12]/generate/?$",
d1_gmn.app.views.external.post_generate_identifier,
kwargs={"allowed_method_list": ["POST", "PUT"]},
name="post_generate_identifier",
),
# MNStorage.delete() - DELETE /object/{did}
# (handled by object dispatcher)
# MNStorage.archive() - PUT /archive/{did}
django.urls.re_path(
r"^v[12]/archive/(.+)/?$",
d1_gmn.app.views.external.put_archive,
        kwargs={"allowed_method_list": ["DELETE", "PUT"]},
name="put_archive",
),
# Tier 4: Replication API (MNReplication)
# MNReplication.replicate() - POST /replicate
django.urls.re_path(
r"^v[12]/replicate/?$",
d1_gmn.app.views.external.post_replicate,
kwargs={"allowed_method_list": ["POST"]},
name="post_replicate",
),
# Package API
# MNPackage.getPackage() - GET /package
django.urls.re_path(
r"^v2/packages/(?P<package_type>.+)/(?P<pid>.+)/?$",
d1_gmn.app.views.get_package.get_package,
kwargs={"allowed_method_list": ["GET"]},
name="get_package",
),
#
# Web UI
#
# Redirect / to /home
django.urls.re_path(
r"^$",
d1_gmn.app.views.internal.root,
kwargs={"allowed_method_list": ["GET"]},
name="root",
),
django.urls.re_path(
r"^home/?$",
d1_gmn.app.views.internal.home,
kwargs={"allowed_method_list": ["GET"]},
name="home",
),
django.urls.re_path(
r"^templates/home.xsl$",
d1_gmn.app.views.internal.home_xslt,
kwargs={"allowed_method_list": ["GET"]},
name="home_xslt",
),
django.urls.re_path(
r"^templates/clipboard/(.+)/?$",
d1_gmn.app.views.internal.clipboard,
kwargs={"allowed_method_list": ["GET"]},
name="clipboard",
),
#
# GMN vendor specific extensions
#
django.urls.re_path(
r"^gmn/object/?$",
d1_gmn.app.views.gmn.get_object_list_json,
kwargs={"allowed_method_list": ["GET"]},
name="get_object_list_json",
),
django.urls.re_path(
r"^gmn/echo/session/?$",
d1_gmn.app.views.gmn.echo_session,
kwargs={"allowed_method_list": ["GET"]},
name="echo_session",
),
django.urls.re_path(
r"^gmn/echo/request/?$",
d1_gmn.app.views.gmn.echo_request,
kwargs={"allowed_method_list": ["GET"]},
name="echo_request_object",
),
]
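# Illustrative note (not from the upstream source): because the URL dispatcher
# ignores the HTTP method, e.g. GET, PUT and DELETE requests to
# /v2/object/<did> all reach dispatch_object(), which inspects request.method
# against allowed_method_list and forwards to the matching handler.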
if django.conf.settings.STATIC_SERVER:
urlpatterns.append(
django.urls.re_path(
r"^static/(?P<path>.*)$",
django.views.static.serve,
kwargs={
# 'static': d1_common.util.abs_path('.'),
"document_root": d1_common.utils.filesystem.abs_path("./static"),
"show_indexes": True,
"allowed_method_list": ["GET"],
},
)
)
| DataONEorg/d1_python | gmn/src/d1_gmn/app/urls.py | Python | apache-2.0 | 8,003 | 0.00025 |
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0
'''Runner for debugging with JLink.'''
from os import path
import os
from .core import ZephyrBinaryRunner, get_env_or_bail
DEFAULT_JLINK_GDB_PORT = 2331
class JLinkBinaryRunner(ZephyrBinaryRunner):
'''Runner front-end for the J-Link GDB server.'''
def __init__(self, device,
gdbserver='JLinkGDBServer', iface='swd', elf_name=None,
gdb=None, gdb_port=DEFAULT_JLINK_GDB_PORT, tui=None,
debug=False):
super(JLinkBinaryRunner, self).__init__(debug=debug)
self.device = device
self.gdbserver_cmd = [gdbserver]
self.iface = iface
self.elf_name = elf_name
self.gdb_cmd = [gdb] if gdb is not None else None
self.gdb_port = gdb_port
self.tui_arg = [tui] if tui is not None else []
def replaces_shell_script(shell_script, command):
return (command in {'debug', 'debugserver'} and
shell_script == 'jlink.sh')
def create_from_env(command, debug):
'''Create runner from environment.
Required:
- JLINK_DEVICE: device name
Required for 'debug':
- GDB: gdb to use
- O: build output directory
- KERNEL_ELF_NAME: zephyr kernel binary in ELF format
Optional for 'debug':
- TUI: if present, passed to gdb server used to flash
Optional for 'debug', 'debugserver':
- JLINK_GDBSERVER: default is JLinkGDBServer
- GDB_PORT: default is 2331
- JLINK_IF: default is swd
'''
device = get_env_or_bail('JLINK_DEVICE')
gdb = os.environ.get('GDB', None)
o = os.environ.get('O', None)
elf = os.environ.get('KERNEL_ELF_NAME', None)
elf_name = None
if o is not None:
if elf is not None:
elf_name = path.join(o, elf)
tui = os.environ.get('TUI', None)
gdbserver = os.environ.get('JLINK_GDBSERVER', 'JLinkGDBServer')
gdb_port = int(os.environ.get('GDB_PORT',
str(DEFAULT_JLINK_GDB_PORT)))
iface = os.environ.get('JLINK_IF', 'swd')
return JLinkBinaryRunner(device, gdbserver=gdbserver,
iface=iface, elf_name=elf_name,
gdb=gdb, gdb_port=gdb_port, tui=tui,
debug=debug)
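    # Illustrative usage (not from the upstream source): with, for example,
    # JLINK_DEVICE=nRF52832_xxAA (a hypothetical device name), O=build,
    # KERNEL_ELF_NAME=zephyr.elf and GDB set, create_from_env('debug', False)
    # yields a runner that starts JLinkGDBServer on port 2331 and attaches gdb
    # to build/zephyr.elf.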
def print_gdbserver_message(self):
print('JLink GDB server running on port {}'.format(self.gdb_port))
def run(self, command, **kwargs):
if command not in {'debug', 'debugserver'}:
raise ValueError('{} is not supported'.format(command))
server_cmd = (self.gdbserver_cmd +
['-port', str(self.gdb_port),
'-if', self.iface,
'-device', self.device,
'-silent',
'-singlerun'])
if command == 'debugserver':
self.print_gdbserver_message()
self.check_call(server_cmd)
else:
if self.gdb_cmd is None:
raise ValueError('Cannot debug; gdb is missing')
if self.elf_name is None:
raise ValueError('Cannot debug; elf is missing')
client_cmd = (self.gdb_cmd +
self.tui_arg +
[self.elf_name] +
['-ex', 'target remote :{}'.format(self.gdb_port),
'-ex', 'monitor halt',
'-ex', 'load',
'-ex', 'monitor reset'])
self.print_gdbserver_message()
self.run_server_and_client(server_cmd, client_cmd)
|
fbsder/zephyr
|
scripts/support/runner/jlink.py
|
Python
|
apache-2.0
| 3,770 | 0 |
"""
Django settings for blog project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'sm@g)(fbwdh5wc*xe@j++m9rh^uza5se9a57c5ptwkg*b@ki0x'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'posts',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
#'/var/www/static/',
]
STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_cdn")
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "media_cdn")
|
Dingo5733/djangoblog19
|
src/trydjango19/settings.py
|
Python
|
mit
| 3,453 | 0.001448 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Batching dataset transformations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import convert
from tensorflow.python.data.util import nest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import gen_experimental_dataset_ops as ged_ops
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
@tf_export("data.experimental.dense_to_sparse_batch")
def dense_to_sparse_batch(batch_size, row_shape):
"""A transformation that batches ragged elements into `tf.SparseTensor`s.
Like `Dataset.padded_batch()`, this transformation combines multiple
consecutive elements of the dataset, which might have different
shapes, into a single element. The resulting element has three
components (`indices`, `values`, and `dense_shape`), which
comprise a `tf.SparseTensor` that represents the same data. The
`row_shape` represents the dense shape of each row in the
resulting `tf.SparseTensor`, to which the effective batch size is
prepended. For example:
```python
# NOTE: The following examples use `{ ... }` to represent the
# contents of a dataset.
a = { ['a', 'b', 'c'], ['a', 'b'], ['a', 'b', 'c', 'd'] }
a.apply(tf.data.experimental.dense_to_sparse_batch(
batch_size=2, row_shape=[6])) ==
{
([[0, 0], [0, 1], [0, 2], [1, 0], [1, 1]], # indices
['a', 'b', 'c', 'a', 'b'], # values
[2, 6]), # dense_shape
([[0, 0], [0, 1], [0, 2], [0, 3]],
['a', 'b', 'c', 'd'],
[1, 6])
}
```
Args:
batch_size: A `tf.int64` scalar `tf.Tensor`, representing the number of
consecutive elements of this dataset to combine in a single batch.
row_shape: A `tf.TensorShape` or `tf.int64` vector tensor-like object
representing the equivalent dense shape of a row in the resulting
`tf.SparseTensor`. Each element of this dataset must have the same rank as
`row_shape`, and must have size less than or equal to `row_shape` in each
dimension.
Returns:
A `Dataset` transformation function, which can be passed to
`tf.data.Dataset.apply`.
"""
def _apply_fn(dataset):
return _DenseToSparseBatchDataset(dataset, batch_size, row_shape)
return _apply_fn
@deprecation.deprecated(None, "Use `tf.data.experimental.map_and_batch()`")
@tf_export(v1=["data.experimental.map_and_batch_with_legacy_function"])
def map_and_batch_with_legacy_function(map_func,
batch_size,
num_parallel_batches=None,
drop_remainder=False,
num_parallel_calls=None):
"""Fused implementation of `map` and `batch`.
NOTE: This is an escape hatch for existing uses of `map_and_batch` that do not
work with V2 functions. New uses are strongly discouraged and existing uses
should migrate to `map_and_batch` as this method will not be removed in V2.
Args:
map_func: A function mapping a nested structure of tensors to another
nested structure of tensors.
batch_size: A `tf.int64` scalar `tf.Tensor`, representing the number of
consecutive elements of this dataset to combine in a single batch.
num_parallel_batches: (Optional.) A `tf.int64` scalar `tf.Tensor`,
representing the number of batches to create in parallel. On one hand,
higher values can help mitigate the effect of stragglers. On the other
hand, higher values can increase contention if CPU is scarce.
drop_remainder: (Optional.) A `tf.bool` scalar `tf.Tensor`, representing
whether the last batch should be dropped in case its size is smaller than
desired; the default behavior is not to drop the smaller batch.
num_parallel_calls: (Optional.) A `tf.int32` scalar `tf.Tensor`,
representing the number of elements to process in parallel. If not
specified, `batch_size * num_parallel_batches` elements will be processed
in parallel. If the value `tf.data.experimental.AUTOTUNE` is used, then
the number of parallel calls is set dynamically based on available CPU.
Returns:
A `Dataset` transformation function, which can be passed to
`tf.data.Dataset.apply`.
Raises:
ValueError: If both `num_parallel_batches` and `num_parallel_calls` are
specified.
"""
if num_parallel_batches is None and num_parallel_calls is None:
num_parallel_calls = batch_size
elif num_parallel_batches is not None and num_parallel_calls is None:
num_parallel_calls = batch_size * num_parallel_batches
elif num_parallel_batches is not None and num_parallel_calls is not None:
raise ValueError("The `num_parallel_batches` and `num_parallel_calls` "
"arguments are mutually exclusive.")
def _apply_fn(dataset):
return _MapAndBatchDataset(dataset, map_func, batch_size,
num_parallel_calls, drop_remainder,
use_legacy_function=True)
return _apply_fn
@deprecation.deprecated(
None,
"Use `tf.data.Dataset.map(map_func, num_parallel_calls)` followed by "
"`tf.data.Dataset.batch(batch_size, drop_remainder)`. Static tf.data "
"optimizations will take care of using the fused implementation.")
@tf_export("data.experimental.map_and_batch")
def map_and_batch(map_func,
batch_size,
num_parallel_batches=None,
drop_remainder=False,
num_parallel_calls=None):
"""Fused implementation of `map` and `batch`.
Maps `map_func` across `batch_size` consecutive elements of this dataset
and then combines them into a batch. Functionally, it is equivalent to `map`
followed by `batch`. However, by fusing the two transformations together, the
implementation can be more efficient. Surfacing this transformation in the API
is temporary. Once automatic input pipeline optimization is implemented,
the fusing of `map` and `batch` will happen automatically and this API will be
deprecated.
Args:
map_func: A function mapping a nested structure of tensors to another
nested structure of tensors.
batch_size: A `tf.int64` scalar `tf.Tensor`, representing the number of
consecutive elements of this dataset to combine in a single batch.
num_parallel_batches: (Optional.) A `tf.int64` scalar `tf.Tensor`,
representing the number of batches to create in parallel. On one hand,
higher values can help mitigate the effect of stragglers. On the other
hand, higher values can increase contention if CPU is scarce.
drop_remainder: (Optional.) A `tf.bool` scalar `tf.Tensor`, representing
whether the last batch should be dropped in case its size is smaller than
desired; the default behavior is not to drop the smaller batch.
num_parallel_calls: (Optional.) A `tf.int32` scalar `tf.Tensor`,
representing the number of elements to process in parallel. If not
specified, `batch_size * num_parallel_batches` elements will be processed
in parallel. If the value `tf.data.experimental.AUTOTUNE` is used, then
the number of parallel calls is set dynamically based on available CPU.
Returns:
A `Dataset` transformation function, which can be passed to
`tf.data.Dataset.apply`.
Raises:
ValueError: If both `num_parallel_batches` and `num_parallel_calls` are
specified.
"""
if num_parallel_batches is None and num_parallel_calls is None:
num_parallel_calls = batch_size
elif num_parallel_batches is not None and num_parallel_calls is None:
num_parallel_calls = batch_size * num_parallel_batches
elif num_parallel_batches is not None and num_parallel_calls is not None:
raise ValueError("The `num_parallel_batches` and `num_parallel_calls` "
"arguments are mutually exclusive.")
def _apply_fn(dataset):
return _MapAndBatchDataset(dataset, map_func, batch_size,
num_parallel_calls, drop_remainder)
return _apply_fn
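# --- Illustrative sketch (not part of the original module) -------------------
# A hedged example contrasting the fused `map_and_batch` transformation with
# the plain `map` + `batch` pipeline that the deprecation notice above
# recommends; the toy dataset and map function are assumptions.
def _map_and_batch_example():  # pragma: no cover - example only
  import tensorflow as tf  # assumed importable when the sketch is run

  ds = tf.data.Dataset.range(10)
  fused = ds.apply(
      tf.data.experimental.map_and_batch(
          lambda x: x * 2, batch_size=4, drop_remainder=True))
  # Equivalent unfused form; static tf.data optimizations may fuse it anyway.
  unfused = ds.map(
      lambda x: x * 2,
      num_parallel_calls=tf.data.experimental.AUTOTUNE).batch(
          4, drop_remainder=True)
  return fused, unfused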
@deprecation.deprecated(None, "Use `tf.data.Dataset.unbatch()`.")
@tf_export("data.experimental.unbatch")
def unbatch():
"""Splits elements of a dataset into multiple elements on the batch dimension.
For example, if elements of the dataset are shaped `[B, a0, a1, ...]`,
where `B` may vary for each input element, then for each element in the
dataset, the unbatched dataset will contain `B` consecutive elements
of shape `[a0, a1, ...]`.
```python
# NOTE: The following example uses `{ ... }` to represent the contents
# of a dataset.
a = { ['a', 'b', 'c'], ['a', 'b'], ['a', 'b', 'c', 'd'] }
a.apply(tf.data.experimental.unbatch()) == {
'a', 'b', 'c', 'a', 'b', 'a', 'b', 'c', 'd'}
```
Returns:
A `Dataset` transformation function, which can be passed to
`tf.data.Dataset.apply`.
"""
def _apply_fn(dataset):
return dataset.unbatch()
return _apply_fn
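# --- Illustrative sketch (not part of the original module) -------------------
# A hedged example of `unbatch`, shown next to the `Dataset.unbatch()` method
# that the deprecation notice above points to; the input pipeline is assumed.
def _unbatch_example():  # pragma: no cover - example only
  import tensorflow as tf  # assumed importable when the sketch is run

  batched = tf.data.Dataset.range(12).batch(4)     # elements shaped [4]
  via_apply = batched.apply(tf.data.experimental.unbatch())
  via_method = batched.unbatch()                   # preferred, equivalent
  return via_apply, via_method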
class _DenseToSparseBatchDataset(dataset_ops.UnaryDataset):
"""A `Dataset` that batches ragged dense elements into `tf.SparseTensor`s."""
def __init__(self, input_dataset, batch_size, row_shape):
"""See `Dataset.dense_to_sparse_batch()` for more details."""
if not isinstance(
dataset_ops.get_legacy_output_types(input_dataset), dtypes.DType):
raise TypeError("DenseToSparseDataset requires an input whose elements "
"have a single component, whereas the input has %r." %
dataset_ops.get_legacy_output_types(input_dataset))
self._input_dataset = input_dataset
self._batch_size = batch_size
self._row_shape = row_shape
self._element_spec = sparse_tensor.SparseTensorSpec(
tensor_shape.TensorShape([None]).concatenate(self._row_shape),
dataset_ops.get_legacy_output_types(input_dataset))
variant_tensor = ged_ops.dense_to_sparse_batch_dataset(
self._input_dataset._variant_tensor, # pylint: disable=protected-access
self._batch_size,
row_shape=convert.partial_shape_to_tensor(self._row_shape),
**self._flat_structure)
super(_DenseToSparseBatchDataset, self).__init__(input_dataset,
variant_tensor)
@property
def element_spec(self):
return self._element_spec
class _MapAndBatchDataset(dataset_ops.UnaryDataset):
"""A `Dataset` that maps a function over a batch of elements."""
def __init__(self, input_dataset, map_func, batch_size, num_parallel_calls,
drop_remainder, use_legacy_function=False):
"""See `Dataset.map()` for details."""
self._input_dataset = input_dataset
self._map_func = dataset_ops.StructuredFunctionWrapper(
map_func,
"tf.data.experimental.map_and_batch()",
dataset=input_dataset,
use_legacy_function=use_legacy_function)
self._batch_size_t = ops.convert_to_tensor(
batch_size, dtype=dtypes.int64, name="batch_size")
self._num_parallel_calls_t = ops.convert_to_tensor(
num_parallel_calls, dtype=dtypes.int64, name="num_parallel_calls")
self._drop_remainder_t = ops.convert_to_tensor(
drop_remainder, dtype=dtypes.bool, name="drop_remainder")
constant_drop_remainder = tensor_util.constant_value(self._drop_remainder_t)
# pylint: disable=protected-access
if constant_drop_remainder:
# NOTE(mrry): `constant_drop_remainder` may be `None` (unknown statically)
# or `False` (explicitly retaining the remainder).
# pylint: disable=g-long-lambda
self._element_spec = nest.map_structure(
lambda component_spec: component_spec._batch(
tensor_util.constant_value(self._batch_size_t)),
self._map_func.output_structure)
else:
self._element_spec = nest.map_structure(
lambda component_spec: component_spec._batch(None),
self._map_func.output_structure)
# pylint: enable=protected-access
variant_tensor = ged_ops.map_and_batch_dataset(
self._input_dataset._variant_tensor, # pylint: disable=protected-access
self._map_func.function.captured_inputs,
f=self._map_func.function,
batch_size=self._batch_size_t,
num_parallel_calls=self._num_parallel_calls_t,
drop_remainder=self._drop_remainder_t,
preserve_cardinality=True,
**self._flat_structure)
super(_MapAndBatchDataset, self).__init__(input_dataset, variant_tensor)
def _functions(self):
return [self._map_func]
@property
def element_spec(self):
return self._element_spec
|
chemelnucfin/tensorflow
|
tensorflow/python/data/experimental/ops/batching.py
|
Python
|
apache-2.0
| 13,674 | 0.003145 |
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from integration.orquesta import base
from st2common.constants import action as action_constants
class FunctionsWiringTest(base.TestWorkflowExecution):
def test_task_functions_in_yaql(self):
wf_name = "examples.orquesta-test-yaql-task-functions"
expected_output = {
"last_task4_result": "False",
"task9__1__parent": "task8__1",
"task9__2__parent": "task8__2",
"that_task_by_name": "task1",
"this_task_by_name": "task1",
"this_task_no_arg": "task1",
}
expected_result = {"output": expected_output}
self._execute_workflow(
wf_name, execute_async=False, expected_result=expected_result
)
def test_task_functions_in_jinja(self):
wf_name = "examples.orquesta-test-jinja-task-functions"
expected_output = {
"last_task4_result": "False",
"task9__1__parent": "task8__1",
"task9__2__parent": "task8__2",
"that_task_by_name": "task1",
"this_task_by_name": "task1",
"this_task_no_arg": "task1",
}
expected_result = {"output": expected_output}
self._execute_workflow(
wf_name, execute_async=False, expected_result=expected_result
)
def test_task_nonexistent_in_yaql(self):
wf_name = "examples.orquesta-test-yaql-task-nonexistent"
expected_output = None
expected_errors = [
{
"type": "error",
"message": (
"YaqlEvaluationException: Unable to evaluate expression "
"'<% task(\"task0\") %>'. ExpressionEvaluationException: "
'Unable to find task execution for "task0".'
),
"task_transition_id": "continue__t0",
"task_id": "task1",
"route": 0,
}
]
expected_result = {"output": expected_output, "errors": expected_errors}
self._execute_workflow(
wf_name,
execute_async=False,
expected_status=action_constants.LIVEACTION_STATUS_FAILED,
expected_result=expected_result,
)
def test_task_nonexistent_in_jinja(self):
wf_name = "examples.orquesta-test-jinja-task-nonexistent"
expected_output = None
expected_errors = [
{
"type": "error",
"message": (
"JinjaEvaluationException: Unable to evaluate expression "
"'{{ task(\"task0\") }}'. ExpressionEvaluationException: "
'Unable to find task execution for "task0".'
),
"task_transition_id": "continue__t0",
"task_id": "task1",
"route": 0,
}
]
expected_result = {"output": expected_output, "errors": expected_errors}
self._execute_workflow(
wf_name,
execute_async=False,
expected_status=action_constants.LIVEACTION_STATUS_FAILED,
expected_result=expected_result,
)
|
StackStorm/st2
|
st2tests/integration/orquesta/test_wiring_functions_task.py
|
Python
|
apache-2.0
| 3,825 | 0.000523 |
#!/usr/bin/env python
# pmx Copyright Notice
# ============================
#
# The pmx source code is copyrighted, but you can freely use and
# copy it as long as you don't change or remove any of the copyright
# notices.
#
# ----------------------------------------------------------------------
# pmx is Copyright (C) 2006-2013 by Daniel Seeliger
#
# All Rights Reserved
#
# Permission to use, copy, modify, distribute, and distribute modified
# versions of this software and its documentation for any purpose and
# without fee is hereby granted, provided that the above copyright
# notice appear in all copies and that both the copyright notice and
# this permission notice appear in supporting documentation, and that
# the name of Daniel Seeliger not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# DANIEL SEELIGER DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL DANIEL SEELIGER BE LIABLE FOR ANY
# SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# ----------------------------------------------------------------------
import sys,os,shutil
import commands
from glob import glob
from pmx import *
from pmx.ndx import *
from pmx.library import _one_letter
from pmx.odict import *
from pmx.forcefield import MDP
from pmx.forcefield2 import ITPFile, Topology
from pmx.parser import *
def run_command( func, string ):
s = func.__name__+'(): '+ string
err_file = func.__name__+'_ERROR.log'
status, out = commands.getstatusoutput( string )
if status != 0:
print >>sys.stderr, 'ERROR in %s ' % func.__name__
print >>sys.stderr, 'Output written to %s' % err_file
fp = open(err_file,'w')
print >>fp, s
print >>fp, out
fp.close()
sys.exit(1)
else:
print "%-90s" % s, ': ok'
chain_ids = 'ABCDEFGHIJKLMNOPQRSTVWXYZ'
class MD:
## def __init__(self, pdb_in = None, ff = 'amber99sbmut', water = 'tip3p', conc = .15,
## box_type = 'dodecahedron', box_size = 1.2, vsite = False, princ = False, **kwargs):
def __init__(self, **kwargs):
self.ff = 'amber99sb'
self.water = 'tip3p'
self.conc = .15
self.box_type = 'triclinic'
self.box_size = 1.2
self.vsite = False
self.princ = False
self.md_in_conf = 'md_in.gro'
self.amberize_input = True
for key, val in kwargs.items():
setattr(self,key,val)
def setup(self):
if self.amberize_input:
self.amberize_pdb(self.pdb_in)
# self.generate_topology( self.pdb_in)
# self.renumber_pdb( )
self.generate_topology( self.pdb_in)
self.generate_sim_box( )
self.fill_sim_box( )
self.tpr_from_box()
self.add_ions( )
self.make_em_tpr()
self.run_em()
self.pdb_from_em() # -> min.gro
#self.generate_topology('min.gro') # -> gmx.gro
#self.make_em_tpr('gmx.gro')
#shutil.copy('gmx.gro',self.md_in_conf)
#self.md_in_conf = 'gmx.gro'
self.compact_repr( 'min.gro', self.md_in_conf)
self.clean_backups()
def write_residue_map( self, model ):
fp = open('residue_map.txt','w')
for r in model.residues:
if r.chain_id == ' ':
chain = '_'
r.old_chain_id = '_'
else:
chain = r.chain_id
print >>fp, '%d|%s|%s -> %d|%s|%s' %( r.orig_id, r.resname, r.old_chain_id, r.id, r.resname, chain)
fp.close()
def renumber_pdb(self ):
m = Model('gmx.gro')
for i, chain in enumerate(m.chains):
if chain.id != ' ':
for r in chain.residues:
r.old_chain_id = chain.id
chain.set_chain_id( chain_ids[i] )
self.write_residue_map( m )
m.write('start.gro')
self.pdb_in = 'start.gro'
def __str__(self):
s = '< MD (%s) > ' % self.pdb_in
return s
def clean_backups(self):
files = glob('#*#')
for f in files:
os.unlink(f)
def amberize_pdb(self, pdb_in):
amb = 'make_amber.py -f %s -o amber.pdb ' % ( pdb_in )
run_command( self.amberize_pdb, amb )
self.pdb_in = 'amber.pdb'
def generate_topology(self, pdb_in):
run = 'pdb2gmx -f %s -water %s -ff %s -o gmx.gro ' % ( pdb_in, self.water, self.ff )
if self.vsite:
run+=' -vsite hydrogens'
run_command( self.generate_topology, run )
def generate_sim_box(self ):
if self.princ:
run = 'echo 4 | editconf -f gmx.gro -o tmp.gro -c -princ '
            run_command( self.generate_sim_box, run)
            run = 'editconf -f tmp.gro -o ed.gro -d %g -bt %s ' % (self.box_size, self.box_type)
            run_command( self.generate_sim_box, run)
else:
run = 'editconf -f gmx.gro -o ed.gro -d %g -bt %s ' % (self.box_size, self.box_type)
run_command( self.generate_sim_box, run)
def fill_sim_box(self ):
if self.water == 'spce': water = 'spc216'
else: water = self.water
run = 'genbox -cp ed.gro -cs %s -p -o box.gro' % water
run_command( self.fill_sim_box, run)
self.check_for_multiple_molecule_entries()
def check_for_multiple_molecule_entries(self):
tp = Topology('topol.top', assign_types = False)
mol_dic = OrderedDict()
for m in tp.molecules:
if mol_dic.has_key(m[0]):
mol_dic[m[0]]+=m[1]
else:
mol_dic[m[0]] = m[1]
tp.molecules = []
for key, val in mol_dic.items():
tp.molecules.append( [key, val] )
tp.write('topol.top')
def tpr_from_box(self):
run = 'grompp -f ~/mdp/em.mdp -c box.gro'
run_command( self.tpr_from_box, run)
def get_solvent_index(self ):
run = 'echo q | make_ndx -f topol.tpr -o tmp.ndx'
run_command( self.get_solvent_index, run)
ndx = IndexFile("tmp.ndx")
return ndx.names.index('SOL')
def add_ions(self ):
idx = self.get_solvent_index()
run = 'echo %d | genion -conc %g -neutral -p -o ion.gro -nname ClJ -pname NaJ' % (idx, self.conc)
run_command( self.add_ions, run)
def make_em_tpr(self, f = 'ion.gro'):
run = 'grompp -f ~/mdp/em.mdp -c %s' % f
run_command( self.make_em_tpr, run)
def run_em(self):
run = 'mdrun -v -c em.gro'
run_command( self.run_em, run)
def pdb_from_em(self):
run = 'echo 0| trjconv -f em.gro -s topol.tpr -o min.gro'
run_command( self.pdb_from_em, run)
def compact_repr(self, pdb_in, pdb_out ):
run = 'echo 0 | trjconv -f %s -s topol.tpr -ur compact -pbc mol -o %s' % (pdb_in, pdb_out)
run_command( self.compact_repr, run)
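# --- Illustrative sketch (not part of the original script) ------------------
# Hedged example of driving the MD class directly, mirroring what main() does
# further below; the input file name is a placeholder and the GROMACS tools
# plus the pmx helper scripts are assumed to be on the PATH.
def _md_setup_example():
    md = MD(pdb_in='protein.pdb', water='tip3p', conc=0.15,
            box_type='triclinic', box_size=1.2, amberize_input=True)
    md.setup()             # pdb2gmx -> editconf -> genbox -> genion -> mdrun EM
    return md.md_in_conf   # 'md_in.gro', the minimized compact configuration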
class FreeEnergyMD(MD):
def __init__(self, mutation_file, **kwargs):
MD.__init__( self )
for key, val in kwargs.items():
setattr(self,key,val)
self.mutation_tags = []
self.read_mutation_file( mutation_file )
self.mutations = []
if self.mutation_tags:
self.mutations_from_tags()
self.runs = []
self.is_single_chain = False
def read_mutation_file(self, mut_file ):
print '\n\t\t\tReading mutation file: %s\n' % mut_file
l = open(mut_file).readlines()
l = kickOutComments(l)
count = 1
for line in l:
entr = line.strip()
if entr:
print '\t\t\t (%d) -> %s' % (count, entr)
self.mutation_tags.append( entr )
count+=1
def setup(self):
self.read_pdb()
print '\n\n'
for m in self.mutations:
self.setup_mutation_run(m)
def read_pdb( self ):
self.model = Model(self.md_in_conf)
if len(self.model.chains) == 1:
self.is_single_chain = True
def mutations_from_tags(self):
for t in self.mutation_tags:
mut = t.split()
new_mut = []
for m in mut:
resid, resn, chain = m.split('|')
resid = int(resid)
new_mut.append( ( resid, resn, chain ) )
self.mutations.append( new_mut )
def setup_mutation_run(self, mutation ):
muts = []
affected_chains = []
for m in mutation:
resid, resn, chain = m
residue = self.model.residues[resid-1]
resname = _one_letter[residue.resname]
muts.append( chain+'.'+resname+str(resid)+resn )
if chain not in affected_chains:
affected_chains.append( chain )
name = '-'.join(muts)
if str(self.__class__).split('.')[1] == 'DiscreteTI':
name+='.dti'
elif str(self.__class__).split('.')[1] == 'CrooksMD':
name+='.crooks'
print '\n\t\t\tPreparing mutation run -> %s \n' % name
if os.path.isdir( name ):
shutil.rmtree(name)
os.mkdir(name)
self.runs.append( name )
shutil.copy(self.md_in_conf, name)
script_file = os.path.join(name,'mutations.txt')
fp = open(script_file,'w')
for m in mutation:
resid, resn, chain = m
print >>fp, resid, resn
fp.close()
self.make_mutation(name, affected_chains)
def make_mutation(self, path, affected_chains ):
os.chdir(path)
ff = "../ffamber99sb_mut.tgz"
shutil.copy(ff,'.')
run_command( self.make_mutation, 'tar -zxvf %s' % ff)
run = '~/code/pmx/scripts/mutate_beta.py -f %s -script mutations.txt -o mut.gro ' % (self.md_in_conf)
run_command( self.make_mutation, run )
self.generate_topology( 'mut.gro')
if self.is_single_chain:
itp_file = 'topol_A.itp'
run = '~/code/pmx/scripts/make_bstate_beta.py -itp %s ' % (itp_file)
run_command( self.make_mutation, run )
shutil.move(itp_file, itp_file+'.save')
shutil.move('newtop.itp', itp_file)
self.apply_ion_modifications(5)
else:
for chain in affected_chains:
print 'Applying changes to topology of chain %s' % chain
itp_file = 'topol_%s.itp' % chain
run = '~/code/pmx/scripts/make_bstate_beta.py -itp %s ' % (itp_file)
run_command( self.make_mutation, run )
shutil.move(itp_file, itp_file+'.save')
shutil.move('newtop.itp', itp_file)
os.chdir('..')
def apply_ion_modifications( self, nions ):
itp = ITPFile( 'topol_A.itp')
qA = itp.get_qA()
qB = itp.get_qB()
if qA == qB: return
else:
diff = qA - qB
print '\t Charges of state A and state B differ: ', diff
itpI = ITPFile( 'topol_B.itp')
nNa = 0
for atom in itpI.atoms:
if atom.name.startswith('Na'):
nNa+=1
if nNa < nions:
nions = nNa
print '\t Modifying %d ions' % nions
qIa = itpI.get_qA()
# we modify NaJ ions only
deltaQ = diff/ float( nions )
for atom in itpI.atoms[:nions]:
atom.atomtypeB = atom.atomtype
atom.qB = atom.q+deltaQ
atom.mB = atom.m
shutil.move('topol_B.itp','topol_B.itp.save')
target_qB = []
for i in range(nions):
target_qB.append( 1+deltaQ )
itpI.write('topol_B.itp', target_qB = target_qB)
class CrooksMD( FreeEnergyMD ):
def __init__(self, mutation_file, **kwargs):
FreeEnergyMD.__init__(self, mutation_file)
for key, val in kwargs.items():
setattr(self,key,val)
def do_crooks( self, mdp_file, min_mdp_file, time, nruns ):
mdp = MDP().read( mdp_file)
min_mdp = MDP().read( min_mdp_file)
self.prepare_min_mdp_files(min_mdp, time )
self.prepare_eq_mdp_files(mdp, time )
for r in self.runs:
print '\n\t\t\tSetting up Crooks run -> %s\n' % r
self.minimize_states( r )
self.setup_equilibration_runs( r, nruns )
def minimize_states( self, path ):
os.chdir(path)
run = 'grompp -f ../crooks_minA.mdp -c gmx.gro -o minA.tpr'
run_command( self.minimize_states, run )
run = 'grompp -f ../crooks_minB.mdp -c gmx.gro -o minB.tpr'
run_command( self.minimize_states, run )
print '\n\t\t\tRunning energy minimization on %s ( state A ) \n' % ( path )
run = 'mdrun -v -c emA.pdb -s minA.tpr'
run_command( self.minimize_states, run )
print '\n\t\t\tRunning energy minimization on %s ( state B ) \n' % ( path )
run = 'mdrun -v -c emB.pdb -s minB.tpr'
run_command( self.minimize_states, run )
os.chdir('..')
def prepare_eq_mdp_files( self, mdp, time ):
nsteps = 1000*time/.002
mdp['nsteps'] = nsteps
mdp['free-energy'] = 'yes'
mdp['init-lambda'] = 0
fp = open('crooks_eqA.mdp','w')
print >>fp, mdp
fp.close()
mdp['init-lambda'] = 1
fp = open('crooks_eqB.mdp','w')
print >>fp, mdp
fp.close()
def prepare_min_mdp_files( self, mdp, time ):
mdp['init-lambda'] = 0
mdp['free-energy'] = 'yes'
fp = open('crooks_minA.mdp','w')
print >>fp, mdp
fp.close()
mdp['init-lambda'] = 1
fp = open('crooks_minB.mdp','w')
print >>fp, mdp
fp.close()
def setup_equilibration_runs( self, path, nruns ):
os.chdir(path)
for i in range(nruns):
print '\n\t\t\tPreparing run input file for %s ( run %d ) \n' % ( path, i )
os.mkdir('runA.%d' % i)
os.mkdir('runB.%d' % i)
run = 'grompp -f ../crooks_eqA.mdp -c emA.pdb -o runA.%d/topol.tpr' % i
run_command( self.setup_equilibration_runs, run )
run = 'grompp -f ../crooks_eqB.mdp -c emB.pdb -o runB.%d/topol.tpr' % i
run_command( self.setup_equilibration_runs, run )
self.clean_backups()
os.chdir('..')
class DiscreteTI( FreeEnergyMD ):
def __init__(self, mutation_file, **kwargs):
FreeEnergyMD.__init__(self, mutation_file)
for key, val in kwargs.items():
setattr(self,key,val)
def read_lambda_steps( self, filename ):
l = open(filename).readlines()
self.lambda_steps = []
for line in l:
entr = line.strip()
if entr:
self.lambda_steps.append( float(entr) )
def prepare_dti_mdp_files( self, mdp_file, time ):
mdp = MDP().read(mdp_file)
nsteps = 1000*time/.002
mdp['nsteps'] = nsteps
for lda in self.lambda_steps:
fp = open('dti_%4.3f.mdp' % round(lda,3),'w' )
mdp['init-lambda'] = lda
mdp['delta-lambda'] = 0
print >>fp, mdp
fp.close()
def prepare_dti_min_mdp_files( self, mdp_file):
mdp = MDP().read(mdp_file)
for lda in self.lambda_steps:
fp = open('dti_min_%4.3f.mdp' % round(lda,3),'w' )
mdp['init-lambda'] = lda
print >>fp, mdp
fp.close()
def setup_runs( self, path ):
os.chdir( path )
for lda in self.lambda_steps:
min_mdp = 'dti_min_%4.3f.mdp' % round(lda,3)
run_mdp = 'dti_%4.3f.mdp' % round(lda,3)
run_dir = 'run_%4.3f' % round(lda,3)
os.mkdir( run_dir )
print '\n\t\t\tRunning energy minimization on %s/%s \n' % ( path, run_dir )
run = 'grompp -f ../%s -c gmx.gro -o %s/em.tpr' % (min_mdp, run_dir )
run_command( self.setup_runs, run )
os.chdir( run_dir )
run = 'mdrun -v -c em.pdb -s em.tpr'
run_command( self.setup_runs, run )
os.chdir('..')
print '\n\t\t\tPreparing run input file for %s/%s \n' % ( path, run_dir )
run = 'grompp -f ../%s -c %s/em.pdb -o %s/topol.tpr' % (run_mdp, run_dir, run_dir )
run_command( self.setup_runs, run )
self.clean_backups()
os.chdir( '..')
def do_dti( self, mdp, min_mdp, time ):
self.prepare_dti_min_mdp_files( min_mdp)
self.prepare_dti_mdp_files( mdp, time)
for r in self.runs:
print '\n\t\t\tSetting up Discrete TI run -> %s\n' % r
self.setup_runs( r )
def main(argv):
version = "1.0"
options = [
Option( "-water", "string", "tip3p", "water model"),
Option( "-box_type", "string", "triclinic", "box geometry (triclinic, octahedron, dodecahedron)"),
Option( "-box_size", "float", 1.2, "distance from solute to box"),
Option( "-conc", "float", 0.15, "ion concentration"),
Option( "-vsite", "bool", False, "use virtual sites"),
Option( "-fe.dti", "bool", False, "do discrete TI setup"),
Option( "-dti_run_time", "float", 10., "Simulation time [ns] at each lambda point"),
Option( "-fe.crooks", "bool", False, "do Crooks setup"),
Option( "-n_crooks_runs", "int", 1, "setup # crooks runs for each mutation"),
Option( "-crooks_run_time", "float", 50., "Simulation time [ns] for crooks equilibrium runs"),
Option( "-skip_md_setup", "bool", False, "skip md setup and use -f as starting configuration for free energy runs"),
Option( "-amberize", "bool", True, "Make amber names for input pdb file"),
]
files = [
FileOption("-f", "r",["pdb","gro"],"protein", "input pdb file"),
FileOption("-m", "r/o",["txt"],"mutations", "mutations to make"),
FileOption("-crooks_mdp", "r/o",["mdp"],"template", "template run mdp file for crooks equilibrium runs"),
FileOption("-dti_mdp", "r/o",["mdp"],"template", "template run mdp file for discrete TI calculations"),
FileOption("-min_mdp", "r/o",["mdp"],"em", "template minimization mdp file ( for TI or Crooks )"),
FileOption("-lambda_steps", "r/o",["txt"],"lambda_steps", "text file with lambda steps for DTI runs"),
]
help_text = ("Script for setting up plain MD runs",
)
cmdl = Commandline( argv, options = options,
fileoptions = files,
program_desc = help_text,
check_for_existing_files = False, version = version)
# make some basic checks
bFreeEnergy = False
if cmdl['-fe.dti'] or cmdl['-fe.crooks']:
bFreeEnergy = True
mutation_file = cmdl['-m']
try:
open(mutation_file).readlines()
except:
print >>sys.stderr, 'Error: Cannot open %s' % mutation_file
sys.exit(1)
if cmdl['-fe.dti']: # require lambda steps
try:
open(cmdl['-lambda_steps']).readlines()
except:
print >>sys.stderr, 'Error: Cannot open %s' % cmdl['-lambda_steps']
sys.exit(1)
pdb_in = cmdl['-f']
amberize_input = cmdl['-amberize']
water = cmdl['-water']
box_type = cmdl['-box_type']
box_size = cmdl['-box_size']
conc = cmdl['-conc']
free_energy_start_pdb = None
bVsite = cmdl['-vsite']
if bVsite and bFreeEnergy:
print >>sys.stderr, 'Error: Cannot use virtual sites in free energy calculations !'
sys.exit(1)
if not cmdl['-skip_md_setup']:
md = MD( pdb_in = pdb_in, water = water, conc = conc, box_type = box_type, box_size = box_size, amberize_input = amberize_input )
md.setup()
free_energy_start_pdb = md.md_in_conf
else:
if not bFreeEnergy:
print 'Nothing to do........... (no MD, no Free Energy)'
sys.exit()
if bFreeEnergy:
if not free_energy_start_pdb:
free_energy_start_pdb = pdb_in
if cmdl['-fe.crooks']:
crooksMD = CrooksMD( mutation_file, md_in_conf = free_energy_start_pdb)
crooksMD.setup()
crooksMD.do_crooks( cmdl['-crooks_mdp'], cmdl['-min_mdp'], cmdl['-crooks_run_time'], cmdl['-n_crooks_runs'] )
if cmdl['-fe.dti']:
dti = DiscreteTI( mutation_file, md_in_conf = free_energy_start_pdb )
dti.setup()
dti.read_lambda_steps( cmdl['-lambda_steps'] )
dti.do_dti(cmdl['-dti_mdp'], cmdl['-min_mdp'], cmdl['-dti_run_time'] )
print '\n\t\t\t ....... DONE .......... \n'
## n_crooks_runs = cmdl['-n_crooks_runs']
## if cmdl.opt['-m'].is_set:
## mut_file = cmdl['-m']
## else:
## mutations = []
## if cmdl.opt['-mdp'].is_set:
## mdp = MDP().read( cmdl['-mdp'] )
## if cmdl.opt['-min_mdp'].is_set:
## min_mdp = MDP().read( cmdl['-min_mdp'] )
## if cmdl.opt['-lambda_steps'].is_set:
## lambda_file = cmdl['-lambda_steps']
## md = MD( pdb_in = pdb_in, water = water, conc = conc, box_type = box_type, box_size = box_size )
## # md.setup()
## dti = DiscreteTI( mut_file, md_in_conf = md.md_in_conf )
## dti.setup()
## dti.read_lambda_steps( lambda_file )
## dti.do_dti(mdp, min_mdp, 10. )
## crooksMD = CrooksMD( mut_file, md_in_conf = md.md_in_conf)
## crooksMD.setup()
## crooksMD.do_crooks(mdp, 20, cmdl['-n_crooks_runs'])
# fe_md = FreeEnergyMD( mutations, md_in_conf = md.md_in_conf )
# fe_md.setup()
## generate_topology(pdb_in, water, 'amber99sbmut')
## generate_sim_box( box_type, box_size )
## fill_sim_box( water )
## tpr_from_box()
## add_ions( conc )
## make_em_tpr()
## run_em()
## pdb_from_em()
## generate_topology('min.pdb', water, 'amber99sbmut')
## compact_repr( 'min.pdb')
## clean_backups()
if __name__=='__main__':
main( sys.argv )
|
Alwnikrotikz/pmx
|
scripts/mdsetup_407.py
|
Python
|
lgpl-3.0
| 22,643 | 0.019035 |
# Copyright 2017-2018 IBM Corp. All Rights Reserved.
# See LICENSE for details.
#
# Author: Henrik Loeser
#
# Manage workspaces for IBM Watson Assistant service on IBM Cloud.
# See the README for documentation.
#
import json, argparse, importlib
from os.path import join, dirname
from ibm_watson import AssistantV1
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
privcontext=None
assistant=None
def loadAndInit(confFile=None):
# Credentials are read from a file
with open(confFile) as confFile:
config=json.load(confFile)
configWA=config['credentials']
if 'ICF_KEY' in config:
global privcontext
icf_key=config['ICF_KEY'].split(':')
privcontext={"private": {"icfcreds": {"user": icf_key[0], "password": icf_key[1]}}}
# Initialize the Watson Assistant client
global assistant
if 'apikey' in configWA:
# Authentication via IAM
authenticator = IAMAuthenticator(configWA['apikey'])
assistant = AssistantV1(
authenticator=authenticator,
version=configWA['version'])
assistant.set_service_url(configWA['url'])
else:
        print('No apikey for Watson Assistant found in credentials.')
        exit()
# Define parameters that we want to catch and some basic command help
def initParser(args=None):
parser = argparse.ArgumentParser(description='Manage Watson Assistant workspaces (skills)',
prog='wctool.py',
usage='%(prog)s [-h | -l | -g | -c | -u | -delete] [options]')
parser.add_argument("-l", "-list" ,dest='listWorkspaces', action='store_true', help='list workspaces')
parser.add_argument("-c", "-create", dest='createWorkspace', action='store_true', help='create workspace')
parser.add_argument("-u", "-update", dest='updateWorkspace', action='store_true', help='update workspace')
parser.add_argument("-delete",dest='deleteWorkspace', action='store_true', help='delete workspace')
parser.add_argument("-g", "-get", dest='getWorkspace', action='store_true', help='get details for single workspace')
parser.add_argument("-logs",dest='listLogs', action='store_true', help='list logs')
parser.add_argument("-dialog",dest='dialog', action='store_true', help='have dialog')
parser.add_argument("-outputonly",dest='outputOnly', action='store_true', help='print dialog output only')
parser.add_argument("-full",dest='fullWorkspace', action='store_true', help='get the full workspace')
parser.add_argument("-id",dest='workspaceID', help='Workspace ID')
parser.add_argument("-o", "-outfile", dest='outFile', help='Workspace Output File')
parser.add_argument("-i", "-infile", dest='inFile', help='Workspace Input File')
parser.add_argument("-name",dest='wsName', help='Workspace Name')
parser.add_argument("-desc",dest='wsDescription', help='Workspace Description')
parser.add_argument("-lang",dest='wsLang', help='Workspace Language')
parser.add_argument("-actionmodule",dest='actionModule', help='Module for client action handling')
parser.add_argument("-filter",dest='filter', help='filter query')
parser.add_argument("-context",dest='context', help='context file')
parser.add_argument("-intents",dest='wsIntents', action='store_true', help='Update Intents')
parser.add_argument("-entities",dest='wsEntities', action='store_true', help='Update Entities')
parser.add_argument("-dialog_nodes",dest='wsDialogNodes', action='store_true', help='Update Dialog Nodes')
parser.add_argument("-counterexamples",dest='wsCounterexamples', action='store_true', help='Update Counterexamples')
parser.add_argument("-metadata",dest='wsMetadata', action='store_true', help='Update Metadata')
parser.add_argument("-config",dest='confFile', default='config.json', help='configuration file')
parser.add_argument("-append",dest='append', action='store_true', help='append to or replace workspace')
return parser
# List available dialogs
def listWorkspaces():
print(json.dumps(assistant.list_workspaces().get_result(), indent=2))
# Get and print a specific workspace by ID
def getPrintWorkspace(workspaceID,exportWS):
print(json.dumps(assistant.get_workspace(workspace_id=workspaceID,export=exportWS).get_result(), indent=2))
# Get a specific workspace by ID and export to file
def getSaveWorkspace(workspaceID,outFile):
ws=assistant.get_workspace(workspace_id=workspaceID,export=True).get_result()
with open(outFile,'w') as jsonFile:
json.dump(ws, jsonFile, indent=2)
print ("Workspace saved to " + outFile)
# Update a workspace
# The workspace parts to be updated were specified as command line options
def updateWorkspace(workspaceID,
intents,
entities,
dialog_nodes,
counterexamples,
metadata,
newName=None,
newDescription=None,
newLang=None,
inFile=None,
append=False):
payload = {'intents': None,
'entities': None,
'dialog_nodes': None,
'counterexamples': None,
'metadata': None,
'append': False}
# Only read from file if specified
if (inFile is not None):
with open(inFile) as jsonFile:
ws=json.load(jsonFile)
# Read the sections to be updated
if intents:
payload['intents'] = ws['intents']
if entities:
payload['entities'] = ws['entities']
if dialog_nodes:
payload['dialog_nodes'] = ws['dialog_nodes']
if counterexamples:
payload['counterexamples'] = ws['counterexamples']
if metadata:
payload['metadata'] = ws['metadata']
# Now update the workspace
ws=assistant.update_workspace(workspace_id=workspaceID,
name=newName,
description=newDescription,
language=newLang,
intents=payload['intents'],
entities=payload['entities'],
dialog_nodes=payload['dialog_nodes'],
counterexamples=payload['counterexamples'],
metadata=payload['metadata'],
append=append).get_result()
print ("Workspace updated - new workspace")
print(json.dumps(ws, indent=2))
# Create a new workspace
def createWorkspace(newName, newDescription, newLang, inFile):
with open(inFile) as jsonFile:
ws=json.load(jsonFile)
newWorkspace=assistant.create_workspace(name=newName,
description=newDescription,
language=newLang,
intents=ws["intents"],
entities=ws["entities"],
dialog_nodes=ws["dialog_nodes"],
counterexamples=ws["counterexamples"],
metadata=ws['metadata']).get_result()
print(json.dumps(newWorkspace, indent=2))
# Delete a workspaceID
def deleteWorkspace(workspaceID):
assistant.delete_workspace(workspaceID)
print ("Workspace deleted")
# List logs for a specific workspace by ID
# For now just dump them, do not filter, do not store
def listLogs(workspaceID, filter):
print(json.dumps(assistant.list_logs(workspace_id=workspaceID,filter=filter).get_result(), indent=2))
# Start a dialog and converse with Watson
def converse(workspaceID, outputOnly=None, contextFile=None):
contextFile="session_context.json"
print ("Starting a conversation, stop by Ctrl+C or saying 'bye'")
print ("======================================================")
# Start with an empty context object
context={}
first=True
## Load conversation context on start or not?
contextStart = input("Start with empty context? (Y/n)\n")
if (contextStart == "n" or contextStart == "N"):
print ("loading old session context...")
with open(contextFile) as jsonFile:
context=json.load(jsonFile)
jsonFile.close()
# Now loop to chat
while True:
# get some input
minput = input("\nPlease enter your input message:\n")
# if we catch a "bye" then exit
if (minput == "bye"):
break
# Read the session context from file if we are not entering the loop
# for the first time
if not first:
try:
with open(contextFile) as jsonFile:
context=json.load(jsonFile)
except IOError:
# do nothing
print ("ignoring")
else:
jsonFile.close()
else:
first=False
# Process IBM Cloud Function credentials if present
if privcontext is not None:
context.update(privcontext)
# send the input to Watson Assistant
# Set alternate_intents to False for less output
resp=assistant.message(workspace_id=workspaceID,
input={'text': minput},
alternate_intents=True,
context=context,
entities=None,
intents=None,
output=None).get_result()
# Save returned context for next round of conversation
context=resp['context']
if ('actions' in resp and len(resp['actions']) and resp['actions'][0]['type']=='client'):
# Dump the returned answer
if not outputOnly:
print ("")
print ("Full response object of intermediate step:")
print ("------------------------------------------")
print(json.dumps(resp, indent=2))
if (hca is not None):
contextNew=hca.handleClientActions(context,resp['actions'], resp)
# call Watson Assistant with result from client action(s)
resp=assistant.message(workspace_id=workspaceID,
input=resp['input'],
alternate_intents=True,
context=contextNew,
entities=resp['entities'],
intents=resp['intents'],
output=resp['output']).get_result()
context=resp['context']
else:
print("\n\nplease use -actionmodule to define module to handle client actions")
break
# Dump the returned answer
if (outputOnly):
print ("Response:")
print(json.dumps(resp["output"]["text"], indent=2))
else:
print ("")
print ("Full response object:")
print ("---------------------")
print(json.dumps(resp, indent=2))
# Persist the current context object to file.
with open(contextFile,'w') as jsonFile:
json.dump(context, jsonFile, indent=2)
jsonFile.close()
#
# Main program, for now just detect what function to call and invoke it
#
if __name__ == '__main__':
# Assume no module for client actions
hca=None
# initialize parser
parser = initParser()
parms = parser.parse_args()
# enable next line to print parameters
# print parms
# load configuration and initialize Watson
loadAndInit(confFile=parms.confFile)
if (parms.listWorkspaces):
listWorkspaces()
elif (parms.getWorkspace and parms.workspaceID):
if (parms.outFile):
getSaveWorkspace(parms.workspaceID,parms.outFile)
else:
getPrintWorkspace(parms.workspaceID,exportWS=parms.fullWorkspace)
elif (parms.updateWorkspace and parms.workspaceID):
updateWorkspace(parms.workspaceID,
parms.wsIntents,
parms.wsEntities,
parms.wsDialogNodes,
parms.wsCounterexamples,
parms.wsMetadata,
parms.wsName,
parms.wsDescription,
parms.wsLang,
parms.inFile,
parms.append)
elif (parms.createWorkspace and parms.wsName and parms.wsDescription and parms.wsLang and parms.inFile):
createWorkspace(newName=parms.wsName,
newDescription=parms.wsDescription,
newLang=parms.wsLang,
inFile=parms.inFile)
elif (parms.deleteWorkspace and parms.workspaceID):
deleteWorkspace(parms.workspaceID)
elif (parms.listLogs and parms.workspaceID):
listLogs(parms.workspaceID,filter=parms.filter)
elif (parms.dialog and parms.workspaceID):
if parms.actionModule:
hca=importlib.import_module(parms.actionModule)
converse(parms.workspaceID, parms.outputOnly)
else:
parser.print_usage()
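# --- Illustrative usage (not part of the original tool) ----------------------
# Hedged command-line examples assembled from the argparse options defined in
# initParser(); the workspace ID and file names are placeholders.
#
#   python wctool.py -l -config config.json
#   python wctool.py -g -id <workspace-id> -o my_skill.json
#   python wctool.py -u -id <workspace-id> -i my_skill.json -intents -entities
#   python wctool.py -dialog -id <workspace-id> -outputonly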
|
data-henrik/watson-conversation-tool
|
wctool.py
|
Python
|
apache-2.0
| 13,189 | 0.012207 |
#!/usr/bin/env python -i
# preceding line should have path for Python on your machine
# vizplotgui_gl.py
# Purpose: viz running LAMMPS simulation via GL tool with plot and GUI
# Syntax: vizplotgui_gl.py in.lammps Nfreq compute-ID
# in.lammps = LAMMPS input script
#   Nfreq = plot data point and viz snapshot every this many steps
# compute-ID = ID of compute that calculates temperature
# (or any other scalar quantity)
# IMPORTANT: this script cannot yet be run in parallel via Pypar,
# because I can't seem to do a MPI-style broadcast in Pypar
import sys,time
sys.path.append("./pizza")
# methods called by GUI
def run():
global runflag
runflag = 1
def stop():
global runflag
runflag = 0
def settemp(value):
global temptarget
temptarget = slider.get()
def quit():
global breakflag
breakflag = 1
# method called by timestep loop every Nfreq steps
# read dump snapshot and viz it, update plot with compute value
def update(ntimestep):
d.next()
d.unscale()
g.show(ntimestep)
value = lmp.extract_compute(compute,0,0)
xaxis.append(ntimestep)
yaxis.append(value)
gn.plot(xaxis,yaxis)
# parse command line
argv = sys.argv
if len(argv) != 4:
print "Syntax: vizplotgui_gl.py in.lammps Nfreq compute-ID"
sys.exit()
infile = sys.argv[1]
nfreq = int(sys.argv[2])
compute = sys.argv[3]
me = 0
# uncomment if running in parallel via Pypar
#import pypar
#me = pypar.rank()
#nprocs = pypar.size()
from lammps import lammps
lmp = lammps()
# run infile all at once
# assumed to have no run command in it
# dump a file in native LAMMPS dump format for Pizza.py dump tool
lmp.file(infile)
lmp.command("thermo %d" % nfreq)
lmp.command("dump python all atom %d tmp.dump" % nfreq)
# initial 0-step run to generate initial 1-point plot, dump file, and image
lmp.command("run 0 pre yes post no")
value = lmp.extract_compute(compute,0,0)
ntimestep = 0
xaxis = [ntimestep]
yaxis = [value]
breakflag = 0
runflag = 0
temptarget = 1.0
# wrapper on GL window via Pizza.py gl tool
# just proc 0 handles reading of dump file and viz
if me == 0:
from Tkinter import *
tkroot = Tk()
tkroot.withdraw()
from dump import dump
from gl import gl
d = dump("tmp.dump",0)
g = gl(d)
d.next()
d.unscale()
g.zoom(1)
g.shift(0,0)
g.rotate(0,270)
g.q(10)
g.box(1)
g.show(ntimestep)
# display GUI with run/stop buttons and slider for temperature
if me == 0:
from Tkinter import *
tkroot = Tk()
tkroot.withdraw()
root = Toplevel(tkroot)
root.title("LAMMPS GUI")
frame = Frame(root)
Button(frame,text="Run",command=run).pack(side=LEFT)
Button(frame,text="Stop",command=stop).pack(side=LEFT)
slider = Scale(frame,from_=0.0,to=5.0,resolution=0.1,
orient=HORIZONTAL,label="Temperature")
slider.bind('<ButtonRelease-1>',settemp)
slider.set(temptarget)
slider.pack(side=LEFT)
Button(frame,text="Quit",command=quit).pack(side=RIGHT)
frame.pack()
tkroot.update()
# wrapper on GnuPlot via Pizza.py gnu tool
if me == 0:
from gnu import gnu
gn = gnu()
gn.plot(xaxis,yaxis)
gn.title(compute,"Timestep","Temperature")
# endless loop, checking status of GUI settings every Nfreq steps
# run with pre yes/no and post yes/no depending on go/stop status
# re-invoke fix langevin with new seed when temperature slider changes
# after re-invoke of fix langevin, run with pre yes
running = 0
temp = temptarget
seed = 12345
lmp.command("fix 2 all langevin %g %g 0.1 %d" % (temp,temp,seed))
while 1:
if me == 0: tkroot.update()
if temp != temptarget:
temp = temptarget
seed += me+1
lmp.command("fix 2 all langevin %g %g 0.1 12345" % (temp,temp))
running = 0
if runflag and running:
lmp.command("run %d pre no post no" % nfreq)
ntimestep += nfreq
if me == 0: update(ntimestep)
elif runflag and not running:
lmp.command("run %d pre yes post no" % nfreq)
ntimestep += nfreq
if me == 0: update(ntimestep)
elif not runflag and running:
lmp.command("run %d pre no post yes" % nfreq)
ntimestep += nfreq
if me == 0: update(ntimestep)
if breakflag: break
if runflag: running = 1
else: running = 0
time.sleep(0.01)
lmp.command("run 0 pre no post yes")
# uncomment if running in parallel via Pypar
#print "Proc %d out of %d procs has" % (me,nprocs), lmp
#pypar.finalize()
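# --- Illustrative usage (not part of the original example) -------------------
# Hedged invocation mirroring the syntax comment at the top of this script;
# the input script name and compute ID are placeholders and must match a
# compute defined in that input script.
#
#   python -i vizplotgui_gl.py in.lammps 100 mytemp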
|
slitvinov/lammps-sph-multiphase
|
python/examples/vizplotgui_gl.py
|
Python
|
gpl-2.0
| 4,373 | 0.02424 |
import struct
import time
from .types import t_string, t_list, t_set, t_zset, t_hash
from .errors import *
class Coloradoes(object):
STRUCT_KEY = '!ic'
STRUCT_KEY_VALUE = '!icd'
STRUCT_ID = '!i'
def __init__(self, storage=None):
if storage is None:
raise ValueError('A storage is required')
super(Coloradoes, self).__init__()
self.storage = storage
self.database = 0
def set_database(self, database):
self.database = database
def rename(self, source, target):
value = self.storage.get(source)
if value:
self.storage.set(target, value)
self.storage.delete(source)
def increment_by(self, key, increment):
id = int(self.storage.get(key) or 0) + increment
self.storage.set(key, str(id))
return id
def get(self, key):
return self.storage.get(key)
def set(self, key, value):
return self.storage.set(key, value)
def exists(self, key):
return self.storage.exists(key)
def delete(self, key):
return self.storage.delete(key)
def get_id(self):
return self.increment_by(struct.pack(self.STRUCT_ID, self.database)
+ 'id', 1)
def set_key(self, key, type, expire=None):
self.command_del(key)
id = self.get_id()
k = struct.pack(self.STRUCT_KEY, self.database, 'K') + key
self.storage.set(k, struct.pack(self.STRUCT_KEY_VALUE, id, type,
expire or 0))
return id
def delete_key(self, key, id=None, type=None):
if id is None or type is None:
id, type = self.get_key(key, delete_expire=False)[:2]
self.storage.delete(struct.pack(self.STRUCT_KEY, self.database, 'K') +
key)
# Do any type cleanup here
# TODO: call type-specific clean up method
self.storage.delete(struct.pack('!ici', self.database, type, id))
def get_key(self, key, delete_expire=True):
data = self.storage.get(struct.pack(self.STRUCT_KEY, self.database,
'K') + key)
id, type, expire = None, None, None
if data is not None:
id, type, expire = struct.unpack(self.STRUCT_KEY_VALUE, data)
if expire == 0:
expire = None
if delete_expire is True and (expire is not None and
expire < time.time()):
id, type, expire = None, None, None
self.delete_key(key=key, id=id, type=type)
return id, type, expire
def command_select(self, database):
d = int(database)
if d >= 0 and d < 17:
self.set_database(d)
return True
else:
raise ValueError(INVALID_DB_INDEX)
def command_type(self, key):
type = self.get_key(key)[1]
if type is None:
return 'none'
if type == 'S':
return 'string'
if type == 'L':
return 'list'
raise Exception('Unknown type "%s"', type)
def __getattr__(self, attrName):
if attrName not in self.__dict__:
if attrName.startswith('command_'):
for t in (t_string, t_list, t_set, t_zset, t_hash):
if hasattr(t, attrName):
def func(*args, **kwargs):
return getattr(t, attrName)(self, *args, **kwargs)
return func
raise AttributeError()
return self.__dict__[attrName]
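# --- Illustrative sketch (not part of the original module) -------------------
# Hedged worked example of the key layout used by set_key()/get_key() above,
# written in the module's own (Python 2 style) struct usage: the lookup key is
# the database number packed with the tag 'K' plus the raw key name, and the
# stored value packs (id, type, expire) with STRUCT_KEY_VALUE ('!icd').
def _key_layout_example():
    db, key = 0, 'greeting'
    lookup_key = struct.pack(Coloradoes.STRUCT_KEY, db, 'K') + key
    packed_value = struct.pack(Coloradoes.STRUCT_KEY_VALUE, 7, 'S', 0.0)
    id, type, expire = struct.unpack(Coloradoes.STRUCT_KEY_VALUE, packed_value)
    return lookup_key, (id, type, expire)  # id 7, string type 'S', no expiry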
|
pombredanne/coloradoes.py
|
src/coloradoes/coloradoes.py
|
Python
|
bsd-2-clause
| 3,539 | 0.001413 |
from sympy.logic.boolalg import *
import equibel as eb
def test_global_completion_cardinality():
G = eb.star_graph(3)
G.add_formula(1, 'p')
G.add_formula(2, 'p')
G.add_formula(3, '~p')
R_semantic = eb.global_completion(G, method=eb.SEMANTIC, opt_type=eb.CARDINALITY, simplify=True)
assert(R_semantic.formula_conj(0) == eb.parse_formula('p'))
R_syntactic = eb.global_completion(G, method=eb.SYNTACTIC, opt_type=eb.CARDINALITY, simplify=True)
assert(R_syntactic == R_semantic)
def test_global_completion_two_nodes():
G = eb.path_graph(2)
G.add_formula(0, 'p')
atoms = G.atoms()
R_semantic = eb.global_completion(G, method=eb.SEMANTIC, simplify=True)
print(R_semantic.formulas())
assert(R_semantic.formulas() == {0: atoms, 1: atoms})
R_syntactic = eb.global_completion(G, method=eb.SYNTACTIC, simplify=True)
assert(R_semantic == R_syntactic)
def test_global_completion_chain_1():
p = eb.parse_formula('p')
G = eb.path_graph(5)
G.add_formula(0, p)
G.add_formula(4, ~p)
R_semantic = eb.global_completion(G, method=eb.SEMANTIC, simplify=True)
print(R_semantic.formulas())
assert(R_semantic.formulas() == {0: set([p]), 1: set([]), 2: set([]), 3: set([]), 4: set([~p])})
R_syntactic = eb.global_completion(G, method=eb.SYNTACTIC, simplify=True)
assert(R_semantic == R_syntactic)
def test_global_completion_chain_2():
p = eb.parse_formula('p')
q = eb.parse_formula('q')
G = eb.path_graph(4)
G.add_formula(0, p & q)
G.add_formula(3, ~p)
R_semantic = eb.global_completion(G, simplify=True)
assert(R_semantic.formulas() == {0: set([p & q]), 1: set([q]), 2: set([q]), 3: set([~p & q])})
R_syntactic = eb.global_completion(G, method=eb.SYNTACTIC, simplify=True)
assert(R_semantic == R_syntactic)
def test_global_completion_big_chain():
p = eb.parse_formula('p')
q = eb.parse_formula('q')
G = eb.path_graph(10)
G.add_formula(0, 'p & q')
G.add_formula(9, '~p & ~q')
R_semantic = eb.global_completion(G, simplify=True)
for node in range(1,9):
assert(R_semantic.formulas(node) == set([]))
assert(R_semantic.formulas(0) == set([p & q]))
assert(R_semantic.formulas(9) == set([~p & ~q]))
R_syntactic = eb.global_completion(G, method=eb.SYNTACTIC, simplify=True)
assert(R_semantic == R_syntactic)
if __name__ == '__main__':
test_global_completion_cardinality()
test_global_completion_two_nodes()
test_global_completion_chain_1()
test_global_completion_chain_2()
test_global_completion_big_chain()
|
asteroidhouse/equibel
|
tests/completion_tests.py
|
Python
|
mit
| 2,605 | 0.002303 |
# Copyright (c) 2020 Aldo Hoeben / fieldOfView
# OctoPrintPlugin is released under the terms of the AGPLv3 or higher.
from cura.PrinterOutput.GenericOutputController import GenericOutputController
try:
# Cura 4.1 and newer
from cura.PrinterOutput.PrinterOutputDevice import PrinterOutputDevice, ConnectionState
from cura.PrinterOutput.Models.PrinterOutputModel import PrinterOutputModel
except ImportError:
# Cura 3.5 - Cura 4.0
from cura.PrinterOutputDevice import PrinterOutputDevice, ConnectionState
from cura.PrinterOutput.PrinterOutputModel import PrinterOutputModel
class OctoPrintOutputController(GenericOutputController):
def __init__(self, output_device: "PrinterOutputDevice") -> None:
super().__init__(output_device)
def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None:
axis_information = self._output_device.getAxisInformation()
if axis_information["x"].inverted:
x = -x
if axis_information["y"].inverted:
y = -y
if axis_information["z"].inverted:
z = -z
self._output_device.sendCommand("G91")
self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed))
self._output_device.sendCommand("G90")
|
fieldOfView/OctoPrintPlugin
|
OctoPrintOutputController.py
|
Python
|
agpl-3.0
| 1,276 | 0.003135 |
from django.conf.urls import url, patterns, include
from accounts import views
user_tool_patterns = patterns(
"",
url(r"^lending/$", views.LendingManager.as_view(), name="lending"),
url(r"^manager/$", views.ToolManager.as_view(), name="manager"),
)
# namespaced under account:
urlpatterns = patterns(
"",
url(r"^$", views.SettingsView.as_view(), name="settings"),
url(r"^login/$", views.LoginView.as_view(), name="login"),
url(r"^logout/$", views.LogoutView.as_view(), name="logout"),
url(r"^register/$", views.SignupView.as_view(), name="signup"),
url(r"^user/(?P<username>[-_\w]+)/$",
views.UserDetailView.as_view(), name="user_detail"),
url(r"^confirm_email/(?P<key>\w+)/$", views.ConfirmEmailView.as_view(),
name="confirm_email"),
url(r"^password/$", views.ChangePasswordView.as_view(),
name="password"),
url(r"^password/reset/$", views.PasswordResetView.as_view(),
name="password_reset"),
url(r"^password/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$",
views.PasswordResetTokenView.as_view(),
name="password_reset_token"),
url(r"^delete/$", views.DeleteView.as_view(), name="delete"),
url(r"^tool/", include(user_tool_patterns, namespace="tool")),
)
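# --- Illustrative sketch (not part of the original module) -------------------
# Hedged example of reversing these namespaced patterns, assuming the project
# URLconf includes this module under the "account" namespace as the comment
# above suggests; the resulting paths depend on the project-level prefix.
#
#   from django.core.urlresolvers import reverse
#   reverse("account:login")
#   reverse("account:tool:lending")
#   reverse("account:user_detail", kwargs={"username": "alice"})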
|
toolhub/toolhub.co
|
accounts/urls.py
|
Python
|
bsd-3-clause
| 1,267 | 0 |