signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
---|---|
def _wrap_users(users, request):
    """Return a set of the given usernames, expanding the magic SELF item.

    Each occurrence of the sentinel ``SELF`` is replaced by the currently
    logged-in user's username (when the request is authenticated); every
    other entry is passed through unchanged.
    """
    # Keep is_authenticated() inside the comprehension so it is only
    # consulted when a SELF entry is actually encountered.
    return {
        request.user.get_username() if (entry is SELF and is_authenticated(request)) else entry
        for entry in users
    }
|
def new(self, fn_input, fn_name, name=None, tags=None, properties=None, details=None, instance_type=None, depends_on=None, **kwargs):
    '''
    :param fn_input: Function input
    :type fn_input: dict
    :param fn_name: Name of the function to be called
    :type fn_name: string
    :param name: Name for the new job (default is "<parent job name>:<fn_name>")
    :type name: string
    :param tags: Tags to associate with the job
    :type tags: list of strings
    :param properties: Properties to associate with the job
    :type properties: dict with string values
    :param details: Details to set for the job
    :type details: dict or list
    :param instance_type: Instance type on which the job will be run, or a
        dict mapping function names to instance type requests
    :type instance_type: string or dict
    :param depends_on: List of data objects or jobs that need to enter the
        "closed" or "done" states, respectively, before the new job will be
        run; each element in the list can either be a dxpy handler or a
        string ID
    :type depends_on: list

    Creates and enqueues a new job that will execute a particular
    function (from the same app or applet as the one the current job
    is running).

    .. note:: This method is intended for calls made from within
       already-executing jobs or apps. If it is called from outside of
       an Execution Environment, an exception will be thrown. To create
       new jobs from outside the Execution Environment, use
       :func:`dxpy.bindings.dxapplet.DXApplet.run` or
       :func:`dxpy.bindings.dxapp.DXApp.run`.
    '''
    # Normalize depends_on into a flat list of string IDs, validating
    # each element along the way.
    final_depends_on = []
    if depends_on is not None:
        if isinstance(depends_on, list):
            for item in depends_on:
                if isinstance(item, DXJob) or isinstance(item, DXDataObject):
                    if item.get_id() is None:
                        raise DXError('A dxpy handler given in depends_on does not have an ID set')
                    final_depends_on.append(item.get_id())
                elif isinstance(item, basestring):  # NOTE: Python-2-style string check, kept as-is
                    final_depends_on.append(item)
                else:
                    raise DXError('Expected elements of depends_on to only be either instances of DXJob or DXDataObject, or strings')
        else:
            raise DXError('Expected depends_on field to be a list')

    if 'DX_JOB_ID' in os.environ:
        # Running inside the Execution Environment: create the subjob
        # through the platform API. Optional fields are only included
        # when the caller supplied them.
        req_input = {}
        req_input["input"] = fn_input
        req_input["function"] = fn_name
        if name is not None:
            req_input["name"] = name
        if tags is not None:
            req_input["tags"] = tags
        if properties is not None:
            req_input["properties"] = properties
        if instance_type is not None:
            req_input["systemRequirements"] = SystemRequirementsDict.from_instance_type(instance_type, fn_name).as_dict()
        if depends_on is not None:
            req_input["dependsOn"] = final_depends_on
        if details is not None:
            req_input["details"] = details
        resp = dxpy.api.job_new(req_input, **kwargs)
        self.set_id(resp["id"])
    else:
        # Outside the Execution Environment (e.g. local test harness):
        # queue the entry point locally instead of calling the API.
        self.set_id(queue_entry_point(function=fn_name, input_hash=fn_input, depends_on=final_depends_on, name=name))
|
def iso_string_to_python_datetime(isostring: str) -> Optional[datetime.datetime]:
    """Parse an ISO-8601 string into a ``datetime``, or None for empty input."""
    # Guard against empty input: dateutil's parse('') would silently
    # return today's date instead of failing.
    if isostring:
        return dateutil.parser.parse(isostring)
    return None
|
def _save_table(self, raw=False, cls=None, force_insert=False, force_update=False, using=None, update_fields=None,):
    """Overwrites the model's ``_save_table`` method to save translations after
    the instance has been saved (required to retrieve the object ID for the
    ``Translation`` model).

    Preferred over overriding the object's ``save`` method to ensure that
    ``pre_save`` and ``post_save`` signals happen respectively before and
    after the translations have been saved to the database. Thus ``pre_save``
    signals have access to the ``has_changed`` attribute on translated fields
    before the translations are saved and the attribute is reset, and
    ``post_save`` signals always have access to the updated translations.
    """
    # Let Django persist the row first so the instance has a primary key...
    updated = super(ModelMixin, self)._save_table(raw=raw, cls=cls, force_insert=force_insert, force_update=force_update, using=using, update_fields=update_fields,)
    # ...then persist the translations that reference that key.
    self._linguist.decider.objects.save_translations([self])
    return updated
|
def list_reference_bases(self, id_, start=0, end=None):
    """Fetch the bases of a reference from the server as a single string.

    This request does not follow the paging pattern of the other
    search/get calls, so the pages are accumulated manually here.
    """
    request = protocol.ListReferenceBasesRequest()
    request.start = pb.int(start)
    request.end = pb.int(end)
    request.reference_id = id_
    # TODO a StringIO could make the buffering slightly more efficient.
    chunks = []
    while True:
        response = self._run_list_reference_bases_page_request(request)
        chunks.append(response.sequence)
        if not response.next_page_token:
            break
        request.page_token = response.next_page_token
    return "".join(chunks)
|
def _set_trusted_key(self, v, load=False):
    """Setter method for trusted_key, mapped from YANG variable /ntp/trusted_key (trust-key).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_trusted_key is considered as a private method. Backends looking
    to populate this variable should do so via calling
    thisObj._set_trusted_key() directly.
    """
    # Unwrap values that carry their own YANG type constructor.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Auto-generated pyangbind construction: a leaf-list of 32-bit
        # unsigned ints restricted to the range 1..65535.
        t = YANGDynClass(v, base=TypedListType(allowed_type=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 65535']})), is_leaf=False, yang_name="trusted-key", rest_name="trusted-key", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'NTP trusted key', u'cli-full-command': None, u'callpoint': u'ntp_trust_cp', u'sort-priority': u'31', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='trust-key', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the generated help payload describing the expected type.
        raise ValueError({'error-string': """trusted_key must be of a type compatible with trust-key""", 'defined-type': "brocade-ntp:trust-key", 'generated-type': """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 65535']})), is_leaf=False, yang_name="trusted-key", rest_name="trusted-key", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'NTP trusted key', u'cli-full-command': None, u'callpoint': u'ntp_trust_cp', u'sort-priority': u'31', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='trust-key', is_config=True)""", })
    self.__trusted_key = t
    # Notify the containing object, if it supports change notification.
    if hasattr(self, '_set'):
        self._set()
|
def get_default_config(self):
    """Override SNMPCollector.get_default_config to provide the defaults
    for the SNMPInterfaceCollector."""
    config = super(SNMPInterfaceCollector, self).get_default_config()
    config.update({
        'path': 'interface',
        'byte_unit': ['bit', 'byte'],
    })
    return config
|
def update_transients(self, add_names, remove_names, *class_build_args, **class_build_kwargs):
    """Add and remove transients (service, topic, etc.) by name.

    Adds transients named in ``add_names`` if they are not already exposed,
    and removes transients named in ``remove_names`` if they are exposed.
    This bypasses any regex or system-status checks -- deciding whether a
    name *should* be added/removed is left to the caller.

    :param add_names: the names of the transients to add
    :param remove_names: the names of the transients to remove
    :param class_build_args: args passed to the resolved transient constructor
    :param class_build_kwargs: kwargs passed to the resolved transient constructor
    :return: DiffTuple of (added, removed) transient names
    """
    # Important: no effect if the name lists are empty -- functional style.
    added = []
    removed = []
    for tst_name in [tst for tst in add_names if tst not in self.transients.keys()]:
        try:
            ttype = self.transient_type_resolver(tst_name)
            # should return None on error - TODO: handle (and reraise) exception!!
            if ttype is not None:  # transient can be resolved
                self.transients[tst_name] = self.TransientMaker(tst_name, ttype, *class_build_args, **class_build_kwargs)
                added += [tst_name]
                logging.info("[{name}] Interfacing with {desc} {transient}".format(name=__name__, desc=self.transients_desc, transient=tst_name))
            else:
                logging.warning("[{name}] Type of {desc} {transient} unknown. Giving up trying to interface.".format(name=__name__, desc=self.transients_desc, transient=tst_name))
        except Exception as e:
            # BUG FIX: logging.warn() is a deprecated alias of warning();
            # use warning() for consistency with the branch above.
            logging.warning("[{name}] Cannot interface with {desc} {transient} : {exc}".format(name=__name__, desc=self.transients_desc, transient=tst_name, exc=e))
            exc_info = sys.exc_info()
            six.reraise(exc_info[0], exc_info[1], exc_info[2])
    for tst_name in [tst for tst in remove_names if tst in self.transients.keys()]:
        if tst_name in self.transients:  # double-check: the transient may have
            # been withdrawn after the loop list was built
            logging.info("[{name}] Removing {desc} {transient}".format(name=__name__, desc=self.transients_desc, transient=tst_name))
            # Cleanup hook in case teardown work is needed before dropping it.
            self.TransientCleaner(self.transients[tst_name])
            self.transients.pop(tst_name, None)
            removed += [tst_name]
    return DiffTuple(added, removed)
|
def reset_user_db(self, comment=None):
    """Execute a Send Reset LDAP User DB Request operation on this node.

    :param str comment: comment to audit
    :raises NodeCommandFailed: failure resetting db
    :return: None
    """
    params = {'comment': comment}
    self.make_request(
        NodeCommandFailed,
        method='update',
        resource='reset_user_db',
        params=params)
|
def page_from_image(input_file):
    """Create a PAGE-XML document (`PcGtsType`) from an `OcrdFile`
    representing an image (i.e. one whose ``mimetype`` starts with ``image/``).

    Arguments:
        * input_file (OcrdFile): file with a ``local_filename`` set
    """
    if input_file.local_filename is None:
        raise Exception("input_file must have 'local_filename' property")
    exif = exif_from_filename(input_file.local_filename)
    now = datetime.now()
    # Prefer the URL as the image reference; fall back to a file:// URI.
    if input_file.url is not None:
        image_filename = input_file.url
    else:
        image_filename = 'file://' + input_file.local_filename
    metadata = MetadataType(
        Creator="OCR-D/core %s" % VERSION,
        Created=now,
        LastChange=now)
    page = PageType(
        imageWidth=exif.width,   # XXX brittle
        imageHeight=exif.height,
        imageFilename=image_filename)
    return PcGtsType(Metadata=metadata, Page=page)
|
def undo(config='root', files=None, num_pre=None, num_post=None):
    '''Undo all file changes that happened between num_pre and num_post,
    leaving the files in the state of num_pre.

    .. warning::
        If one of the files has changes after num_post, they will be
        overwritten. The snapshots are used to determine the file list, but
        the current version of the files will be overwritten by the versions
        in num_pre.

        To undo changes between num_pre and the current version of the
        files use num_post=0.

    CLI Example:

    .. code-block:: bash

        salt '*' snapper.undo
    '''
    pre, post = _get_num_interval(config, num_pre, num_post)

    changes = status(config, pre, post)
    changed = set(changes.keys())
    requested = set(files or changed)

    if not requested.issubset(changed):
        # BUG FIX: the two message literals were joined without a space
        # ("...presentin the changed..."), and the reported set was
        # inverted -- the offending entries are the *requested* files
        # that are absent from the changed list.
        raise CommandExecutionError(
            'Given file list contains files that are not present '
            'in the changed filelist: {0}'.format(requested - changed))

    cmdret = __salt__['cmd.run']('snapper -c {0} undochange {1}..{2} {3}'.format(
        config, pre, post, ' '.join(requested)))

    try:
        # Expected output shape: space-separated "key:value" tokens.
        components = cmdret.split(' ')
        ret = {}
        for comp in components:
            key, val = comp.split(':')
            ret[key] = val
        return ret
    except ValueError as exc:
        raise CommandExecutionError(
            'Error while processing Snapper response: {0}'.format(cmdret))
|
def _get_version_mode(self, mode=None):
    """Return the VersionMode registered for a mode name, creating and
    caching one on first use. ``mode=None`` denotes the 'base' mode."""
    existing = self._version_modes.get(mode)
    if existing:
        return existing
    created = VersionMode(name=mode)
    self._version_modes[mode] = created
    return created
|
def _do_search(self):
    """Perform the mlt call, convert the raw response into a
    DictSearchResults instance, and cache/return it."""
    if self._results_cache is None:
        raw_response = self.raw()
        hits = raw_response.get('hits', {}).get('hits', [])
        converted = self.to_python(hits)
        self._results_cache = DictSearchResults(
            self.type, raw_response, converted, None)
    return self._results_cache
|
def import_modname(modname):
    r"""Import and return the module named by a dotted path.

    Args:
        modname (str): module name, possibly dotted (e.g. 'utool.util_path')

    Returns:
        module: the imported (leaf) module
    """
    if util_inject.PRINT_INJECT_ORDER:
        if modname not in sys.modules:
            util_inject.noinject(modname, N=2, via='ut.import_modname')
    if '.' in modname:
        # A non-empty fromlist makes __import__ return the leaf module
        # (e.g. utool.util_path) instead of the top-level package.
        # BUG FIX: the old code did list(map(str, leaf_name)), which split
        # the leaf name into single characters and only worked by accident
        # of __import__'s fromlist handling. Pass the real name instead
        # (str() keeps it ascii for python2.7).
        leaf_name = modname.split('.')[-1]
        module = __import__(modname, {}, {}, [str(leaf_name)], 0)
    else:
        module = __import__(modname, {}, {}, [], 0)
    return module
|
def get_unread_forums(self, user):
    """Return the list of unread forums for the given user."""
    readable = self.perm_handler.get_readable_forums(Forum.objects.all(), user)
    return self.get_unread_forums_from_list(user, readable)
|
def sort_func(self, key):
    """Map a key to its sort rank when writing an Entry to JSON.

    Known keys get fixed rank strings ('aaa'..'zzz'); anything else sorts
    by its own name. Should be supplemented/overridden by inheriting classes.
    """
    ranking = (
        (self._KEYS.SCHEMA, 'aaa'),
        (self._KEYS.NAME, 'aab'),
        (self._KEYS.SOURCES, 'aac'),
        (self._KEYS.ALIAS, 'aad'),
        (self._KEYS.MODELS, 'aae'),
        (self._KEYS.PHOTOMETRY, 'zzy'),
        (self._KEYS.SPECTRA, 'zzz'),
    )
    for known_key, rank in ranking:
        if key == known_key:
            return rank
    return key
|
def uri_to_kwargs(uri):
    """Return a URI as kwargs for connecting to PostgreSQL with psycopg2,
    applying default values for non-specified areas of the URI.

    :param str uri: The connection URI
    :rtype: dict
    """
    parsed = urlparse(uri)
    default_user = get_current_user()
    # The password may be percent-encoded in the URI.
    password = unquote(parsed.password) if parsed.password else None
    kwargs = {'host': parsed.hostname,
              'port': parsed.port,
              'dbname': parsed.path[1:] or default_user,
              'user': parsed.username or default_user,
              'password': password}
    values = parse_qs(parsed.query)
    # A host given in the query string overrides the URI authority host
    # (used e.g. for Unix socket directories).
    if 'host' in values:
        kwargs['host'] = values['host'][0]
    for k in [k for k in values if k in KEYWORDS]:
        # Single-valued params collapse to a scalar; repeated ones stay a list.
        kwargs[k] = values[k][0] if len(values[k]) == 1 else values[k]
        try:
            # Coerce numeric-looking values (e.g. port, timeouts) to int;
            # lists have no .isdigit(), hence the AttributeError guard.
            if kwargs[k].isdigit():
                kwargs[k] = int(kwargs[k])
        except AttributeError:
            pass
    return kwargs
|
def mrc_header_from_params(shape, dtype, kind, **kwargs):
    """Create a minimal MRC2014 header from the given parameters.

    Parameters
    ----------
    shape : 3-sequence of ints
        3D shape of the stored data, used as the ``'nx', 'ny', 'nz'``
        header entries (in this order). Note that this is different from
        the actual data storage shape for non-trivial ``axis_order``.
    dtype : {'int8', 'int16', 'float32', 'uint16'}
        Data type specifier as understood by `numpy.dtype`, translated
        to a ``'mode'`` header entry.
    kind : {'volume', 'projections'}
        Interpretation of the 3D data, used for the ``'ispg'`` entry.
    extent : 3-sequence of floats, optional
        Size of the 3D volume in meters (``'cella'`` entry).
        Default: ``shape``, resulting in ``(1, 1, 1)`` unit cells.
    axis_order : permutation of ``(0, 1, 2)``, optional
        Order of the data axes in the stored file, used for the
        ``'mapc', 'mapr', 'maps'`` entries. Default: ``(0, 1, 2)``
    dmin, dmax : float, optional
        Minimum and maximum of the data (``'dmin'``, ``'dmax'``).
        Default: 1.0, 0.0 -- indicating "undetermined" per [Che+2015].
    dmean, rms : float, optional
        Mean and variance of the data (``'dmean'``, ``'rms'``).
        Default: ``min(dmin, dmax) - 1, -1.0`` ("undetermined").
    mrc_version : 2-tuple of int, optional
        MRC version identifier for the ``'nversion'`` entry.
        Default: ``(2014, 0)``
    text_labels : sequence of strings, optional
        Up to 10 strings of at most 80 characters each, used for the
        ``'nlabl'`` and ``'label'`` entries. Default: ``[]``

    Returns
    -------
    header : `OrderedDict`
        Mapping ``name -> {'value': value_as_array, 'offset': offset_in_bytes,
        'description': ...}``; all values are `numpy.ndarray`\\ s with at
        least one dimension.

    References
    ----------
    [Che+2015] Cheng, A et al. *MRC2014: Extensions to the MRC format
    header for electron cryo-microscopy and tomography*. Journal of
    Structural Biology, 129 (2015), pp 146--150.
    """
    # Positional args
    shape = [int(n) for n in shape]
    kind, kind_in = str(kind).lower(), kind
    if kind not in ('volume', 'projections'):
        raise ValueError("`kind` '{}' not understood".format(kind_in))

    # Keyword args
    extent = kwargs.pop('extent', shape)
    axis_order = kwargs.pop('axis_order', (0, 1, 2))
    if tuple(axis_order) not in permutations((0, 1, 2)):
        raise ValueError('`axis_order` must be a permutation of (0, 1, 2), '
                         'got {}'.format(axis_order))

    dmin = kwargs.pop('dmin', 1.0)
    dmax = kwargs.pop('dmax', 0.0)
    dmean = kwargs.pop('dmean', min(dmin, dmax) - 1.0)
    rms = kwargs.pop('rms', -1.0)

    mrc_version = kwargs.pop('mrc_version', (2014, 0))
    if len(mrc_version) != 2:
        raise ValueError('`mrc_version` must be a sequence of length 2, got '
                         '{}'.format(mrc_version))

    # Text labels: fill each label up with whitespace to 80 characters.
    text_labels_in = kwargs.pop('text_labels', [])
    nlabl = len(text_labels_in)
    if nlabl > 10:
        # BUG FIX: corrected 'expexted' typo in the error message.
        raise ValueError('expected maximum of 10 labels, got {} labels'
                         ''.format(nlabl))
    text_labels = [str(label).ljust(80) for label in text_labels_in]
    if any(len(label) > 80 for label in text_labels):
        raise ValueError('labels cannot have more than 80 characters each')

    # Convert to header-friendly form. Names are required to match the
    # header field names exactly, and all of them must exist, so that the
    # locals() lookup below succeeds for all fields.
    nx, ny, nz = [np.array(n, dtype='int32').reshape([1]) for n in shape]
    mode = np.array(NPY_DTYPE_TO_MRC_MODE[np.dtype(dtype)],
                    dtype='int32').reshape([1])
    mx, my, mz = nx, ny, nz
    cella = np.array(extent).reshape([3]).astype('float32')
    # MRC axis indices are 1-based, hence the +1.
    mapc, mapr, maps = [np.array(m, dtype='int32').reshape([1]) + 1
                        for m in axis_order]
    dmin, dmax, dmean, rms = [np.array(x, dtype='float32').reshape([1])
                              for x in (dmin, dmax, dmean, rms)]
    ispg = 1 if kind == 'volume' else 0
    ispg = np.array(ispg, dtype='int32', ndmin=1)
    nsymbt = np.array([0], dtype='int32')
    # BUG FIX: np.fromstring is deprecated (removed in recent NumPy);
    # use frombuffer on explicit bytes. EXTTYP is a 4-byte field.
    exttype = np.frombuffer(b'    ', dtype='S1')
    nversion = np.array(10 * mrc_version[0] + mrc_version[1],
                        dtype='int32').reshape([1])
    origin = np.zeros(3, dtype='int32')
    # Shadowing the builtin `map` on purpose: the name must match the
    # 'map' header field for the locals() lookup below.
    map = np.frombuffer(b'MAP ', dtype='S1')
    # TODO: no idea how to properly choose the machine stamp
    # BUG FIX: np.fromiter over a bytes object yields ints on Python 3,
    # producing garbage 'S1' entries; frombuffer keeps the raw bytes.
    machst = np.frombuffer(b'DD  ', dtype='S1')
    nlabl = np.array(nlabl, dtype='int32').reshape([1])
    label = np.zeros((10, 80), dtype='S1')  # ensure correct size
    for i, label_i in enumerate(text_labels):
        label[i] = np.frombuffer(label_i.encode('ascii'), dtype='S1')

    # Make the header. We use the specification again to set the values;
    # they are looked up from the local variables defined above
    # (replaces the previous `eval`, which is both slower and unsafe).
    header_fields = header_fields_from_table(
        MRC_2014_SPEC_TABLE, MRC_SPEC_KEYS, MRC_DTYPE_TO_NPY_DTYPE)
    local_values = locals()
    header = OrderedDict()
    for field in header_fields:
        header[field['name']] = {'offset': field['offset'],
                                 'value': local_values[field['name']]}
    return header
|
def hook_symbol(self, symbol_name, simproc, kwargs=None, replace=None):
    """Resolve a dependency in a binary. Looks up the address of the given
    symbol, and then hooks that address. If the symbol was not available in
    the loaded libraries, this address may be provided by the CLE externs
    object.

    Additionally, if instead of a symbol name you provide an address, some
    secret functionality will kick in and you will probably just hook that
    address, UNLESS you're on powerpc64 ABIv1 or some yet-unknown scary ABI
    that has its function pointers point to something other than the actual
    functions, in which case it'll do the right thing.

    :param symbol_name: The name of the dependency to resolve.
    :param simproc:     The SimProcedure instance (or function) with which to
                        hook the symbol.
    :param kwargs:      If you provide a SimProcedure for the hook, these are
                        the keyword arguments that will be passed to the
                        procedure's `run` method eventually.
    :param replace:     Control the behavior on finding that the address is
                        already hooked. If true, silently replace the hook.
                        If false, warn and do not replace the hook. If none
                        (default), warn and replace the hook.
    :returns:           The address of the new symbol.
    :rtype:             int
    """
    if type(symbol_name) is not int:
        sym = self.loader.find_symbol(symbol_name)
        if sym is None:
            # it could be a previously unresolved weak symbol..?
            # Walk the relocations referencing this name and resolve them
            # against a freshly created extern symbol.
            new_sym = None
            for reloc in self.loader.find_relevant_relocations(symbol_name):
                if not reloc.symbol.is_weak:
                    raise Exception("Symbol is strong but we couldn't find its resolution? Report to @rhelmot.")
                if new_sym is None:
                    new_sym = self.loader.extern_object.make_extern(symbol_name)
                reloc.resolve(new_sym)
                reloc.relocate([])
            if new_sym is None:
                # No relocations referenced the name either -- give up.
                l.error("Could not find symbol %s", symbol_name)
                return None
            sym = new_sym
        basic_addr = sym.rebased_addr
    else:
        # Caller passed a raw address instead of a name.
        basic_addr = symbol_name
        symbol_name = None
    # Let the OS layer translate the symbol address (e.g. function
    # descriptors on powerpc64 ABIv1) into the actual hookable address.
    hook_addr, _ = self.simos.prepare_function_symbol(symbol_name, basic_addr=basic_addr)
    self.hook(hook_addr, simproc, kwargs=kwargs, replace=replace)
    return hook_addr
|
def docker_list(registry_pass):  # type: (str) -> None
    """List docker images stored in the remote registry.

    Args:
        registry_pass (str):
            Remote docker registry password.
    """
    registry = conf.get('docker.registry', None)
    if registry is None:
        log.err("You must define docker.registry conf variable to list images")
        sys.exit(-1)

    registry_user = conf.get('docker.registry_user', None)
    if registry_user is None:
        registry_user = click.prompt("Username")

    registry_client = client.RegistryClient(registry, registry_user, registry_pass)
    # Collect all tags first, then print the report.
    images = {img: registry_client.list_tags(img)
              for img in registry_client.list_images()}

    shell.cprint("<32>Images in <34>{} <32>registry:", registry)
    for image, tags in images.items():
        shell.cprint('  <92>{}', image)
        for tag in tags:
            shell.cprint('    <90>{}:<35>{}', image, tag)
|
def initialize_notebook():
    """Initialize the IPython notebook display elements.

    Injects the D3/Vega JavaScript libraries Vincent needs, handling both
    require.js and plain-script notebook environments.

    Returns:
        The IPython display handle, or None when IPython is unavailable.
    """
    try:
        from IPython.core.display import display, HTML
    except ImportError:
        # BUG FIX: previously execution fell through after the print and
        # crashed with a NameError on `display`; bail out instead.
        print("IPython Notebook could not be loaded.")
        return None

    # Thanks to @jakevdp:
    # https://github.com/jakevdp/mpld3/blob/master/mpld3/_display.py#L85
    load_lib = """
    function vct_load_lib(url, callback){
      if(
        typeof d3 !== 'undefined' &&
        url === '//cdnjs.cloudflare.com/ajax/libs/d3/3.5.3/d3.min.js'){
        callback()
      }
      var s = document.createElement('script');
      s.src = url;
      s.async = true;
      s.onreadystatechange = s.onload = callback;
      s.onerror = function(){
        console.warn("failed to load library " + url);
      };
      document.getElementsByTagName("head")[0].appendChild(s);
    };
    var vincent_event = new CustomEvent(
      "vincent_libs_loaded",
      {bubbles: true, cancelable: true}
    );
    """
    lib_urls = [
        "'//cdnjs.cloudflare.com/ajax/libs/d3/3.5.3/d3.min.js'",
        ("'//cdnjs.cloudflare.com/ajax/libs/d3-geo-projection/0.2.9/"
         "d3.geo.projection.min.js'"),
        "'//wrobstory.github.io/d3-cloud/d3.layout.cloud.js'",
        "'//wrobstory.github.io/vega/vega.v1.3.3.js'"]
    get_lib = """vct_load_lib(%s, function(){
        %s
    });"""
    load_js = get_lib
    ipy_trigger = "window.dispatchEvent(vincent_event);"
    # Nest the loader calls so the libraries load sequentially, firing the
    # vincent_libs_loaded event after the last one.
    for elem in lib_urls[:-1]:
        load_js = load_js % (elem, get_lib)
    load_js = load_js % (lib_urls[-1], ipy_trigger)
    html = """
    <script>
        %s
        function load_all_libs(){
            console.log('Loading Vincent libs...')
            %s
        };
        if(typeof define === "function" && define.amd){
            if (window['d3'] === undefined ||
                window['topojson'] === undefined){
                require.config(
                    {paths: {
                        d3: '//cdnjs.cloudflare.com/ajax/libs/d3/3.5.3/d3.min',
                        topojson: '//cdnjs.cloudflare.com/ajax/libs/topojson/1.6.9/topojson.min'
                        }
                    }
                );
                require(["d3"], function(d3){
                    console.log('Loading Vincent from require.js...')
                    window.d3 = d3;
                    require(["topojson"], function(topojson){
                        window.topojson = topojson;
                        load_all_libs();
                    });
                });
            } else {
                load_all_libs();
            };
        }else{
            console.log('Require.js not found, loading manually...')
            load_all_libs();
        };
    </script>""" % (load_lib, load_js,)
    return display(HTML(html))
|
def readBimFile(basefilename):
    """Helper function that reads a .bim file.

    Loads columns 0 and 3 of ``<basefilename>.bim`` as an int array.
    """
    bim_fn = basefilename + '.bim'
    return SP.loadtxt(bim_fn, delimiter='\t', usecols=(0, 3), dtype=int)
|
def create(self, **kwargs):
    """Custom creation logic to handle edge cases.

    This shouldn't be needed, but ASM has a tendency to raise various
    errors that are painful to handle from a customer point-of-view.
    The errors themselves are described in their exception handler.
    To address these failures, we retry up to 30 times, using the
    exception checker to decide whether a failure is retryable.

    :param kwargs: passed through to the underlying ``_create``
    :return: the created resource (or None if all retries are consumed)
    """
    attempts = 30
    for _ in range(attempts):
        try:
            return self._create(**kwargs)
        except iControlUnexpectedHTTPError as error:
            if not self._check_exception(error):
                raise
            # Retryable ASM hiccup -- try again.
|
def _write(self, cmd, *datas):
    """Helper that wraps ``cmd`` in a write Command and sends it with the
    given payload over this object's transport/protocol."""
    command = Command(write=cmd)
    command.write(self._transport, self._protocol, *datas)
|
def _get_datasets(dataset_ids):
    """Get all the datasets in a list of dataset IDs.

    Queries are issued in chunks of ``qry_in_threshold`` IDs, as sqlite can
    only handle an ``IN`` clause with < 1000 elements.

    :param dataset_ids: sequence of dataset IDs to fetch
    :return: dict mapping dataset id -> Dataset instance
    """
    dataset_dict = {}
    datasets = []
    if len(dataset_ids) > qry_in_threshold:
        # Walk the ID list window by window: indices [idx, extent).
        idx = 0
        extent = qry_in_threshold
        while idx < len(dataset_ids):
            log.info("Querying %s datasets", len(dataset_ids[idx:extent]))
            rs = db.DBSession.query(Dataset).filter(Dataset.id.in_(dataset_ids[idx:extent])).all()
            datasets.extend(rs)
            idx = idx + qry_in_threshold
            # Clamp the next window's upper bound to the end of the list.
            if idx + qry_in_threshold > len(dataset_ids):
                extent = len(dataset_ids)
            else:
                extent = extent + qry_in_threshold
    else:
        # NOTE(review): this branch leaves `datasets` as a lazy query rather
        # than a materialized list; it is only iterated below, so that's OK.
        datasets = db.DBSession.query(Dataset).filter(Dataset.id.in_(dataset_ids))
    for r in datasets:
        dataset_dict[r.id] = r
    log.info("Retrieved %s datasets", len(dataset_dict))
    return dataset_dict
|
def GetDataStream(self, name, case_sensitive=True):
    """Retrieves a data stream by name.

    Args:
        name (str): name of the data stream.
        case_sensitive (Optional[bool]): True if the name is case sensitive.

    Returns:
        DataStream: a data stream or None if not available.

    Raises:
        ValueError: if the name is not a string.
    """
    if not isinstance(name, py2to3.STRING_TYPES):
        raise ValueError('Name is not a string.')

    wanted_lower = name.lower()
    fallback = None
    for stream in self._GetDataStreams():
        # An exact match always wins immediately.
        if stream.name == name:
            return stream
        # Remember the first case-insensitive match as a fallback.
        if not case_sensitive and stream.name.lower() == wanted_lower:
            if not fallback:
                fallback = stream
    return fallback
|
def label_count(self, label_list_ids=None):
    """Count how often every label-value occurs in this corpus.

    Args:
        label_list_ids (list): If not None, only labels from label-lists
            with an id contained in this list are considered.

    Returns:
        dict: number of occurrences keyed by label-value.
    """
    totals = collections.defaultdict(int)
    for utt in self.utterances.values():
        per_utterance = utt.label_count(label_list_ids=label_list_ids)
        for value, occurrences in per_utterance.items():
            totals[value] += occurrences
    return totals
|
def converge(f, step, tol, max_h):
    """Step-based convergence helper.

    Evaluates ``f`` at 0, step, 2*step, ... until two consecutive values
    differ by no more than ``tol``, returning the last value. Raises if
    the step position exceeds ``max_h`` before convergence.
    """
    current = f(0)
    delta = 10000
    h = step
    while delta > tol:
        candidate = f(h)
        delta = abs(current - candidate)
        current = candidate
        h += step
        if h > max_h:
            raise Exception("Did not converge before {}".format(h))
    return current
|
def __raise(self, *args):
    """Ensures that the Widget stays on top of the parent stack, forcing
    the redraw.

    :param \\*args: Arguments.
    :type \\*args: \\*
    """
    # BUG FIX: list.remove() returns None, so the previous
    # `children().remove(self)` assignment always yielded None and the
    # stackUnder branch was dead code. Take the sibling list first, then
    # drop ourselves from it.
    siblings = self.parent().children()
    siblings.remove(self)
    if siblings:
        self.stackUnder(siblings[-1])
    else:
        self.lower()
    self.raise_()
|
def zip_source_model ( ssmLT , archive_zip = '' , log = logging . info ) :
    """Zip the source model files starting from the ssmLT.xml file.

    :param ssmLT: path to the source model logic tree file
    :param archive_zip: output zip path (default ``ssmLT.zip`` next to ssmLT)
    :param log: logging function passed through to the zipping helper
    :returns: the path of the generated zip archive
    """
    basedir = os.path.dirname(ssmLT)
    if os.path.basename(ssmLT) != 'ssmLT.xml':
        # Normalize the file name to ssmLT.xml by copying the original.
        # Use context managers so both handles are always closed (the
        # previous code left the source file handle open).
        orig = ssmLT
        ssmLT = os.path.join(basedir, 'ssmLT.xml')
        with open(orig, 'rb') as src, open(ssmLT, 'wb') as dst:
            dst.write(src.read())
    archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip')
    if os.path.exists(archive_zip):
        sys.exit('%s exists already' % archive_zip)
    oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT})
    checksum = readinput.get_checksum32(oq)
    checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt')
    with open(checkfile, 'w') as f:
        f.write(str(checksum))
    files = logictree.collect_info(ssmLT).smpaths + [
        os.path.abspath(ssmLT), os.path.abspath(checkfile)]
    general.zipfiles(files, archive_zip, log=log, cleanup=True)
    return archive_zip
|
def get_assessments_offered_for_assessment ( self , assessment_id ) :
    """Gets an ``AssessmentOfferedList`` by the given assessment.

    In plenary mode, the returned list contains all known assessments
    offered or an error results. Otherwise, the returned list may contain
    only those assessments offered that are accessible through this
    session.

    arg:    assessment_id (osid.id.Id): ``Id`` of an ``Assessment``
    return: (osid.assessment.AssessmentOfferedList) - the returned
            ``AssessmentOffered`` list
    raise:  NullArgument - ``assessment_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure occurred
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid . learning . ActivityLookupSession . get _ activities _ for _ objective _ template
    # NOTE : This implementation currently ignores plenary view
    collection = JSONClientValidated ( 'assessment' , collection = 'AssessmentOffered' , runtime = self . _runtime )
    # Find documents whose assessmentId equals the given Id, merged with the
    # session's view filter (e.g. bank/federation constraints).
    result = collection . find ( dict ( { 'assessmentId' : str ( assessment_id ) } , ** self . _view_filter ( ) ) )
    return objects . AssessmentOfferedList ( result , runtime = self . _runtime )
|
def list_firewall_policies ( self , retrieve_all = True , ** _params ) :
    """Fetches a list of all firewall policies for a project.

    :param retrieve_all: when True, fetch all pages of results.
    :param _params: filter parameters forwarded to the request.
    :returns: the listing returned by ``self.list``.
    """
    # Pass filters in " params " argument to do _ request
    return self . list ( 'firewall_policies' , self . firewall_policies_path , retrieve_all , ** _params )
|
def store ( self , value , l , dir_only ) :
    """Group patterns by literals and potential magic patterns."""
    # Skip empty values once the pattern list has already been started.
    if l and value in (b'', ''):
        return

    is_globstar = self.globstar and value in (b'**', '**')
    is_magic = self.is_magic(value)
    if is_magic:
        value = compile(value, self.flags)
    l.append(WcGlob(value, is_magic, is_globstar, dir_only, False))
|
def get_current_url ( ) :
    """Return the current URL including the query string as a relative path.

    If the app uses subdomains, return an absolute path.
    """
    server_name = current_app.config.get('SERVER_NAME')
    if server_name:
        # Compare hostnames while ignoring any ':port' suffix.
        request_host = request.environ['HTTP_HOST'].split(':', 1)[0]
        if request_host != server_name.split(':', 1)[0]:
            return request.url
    url = url_for(request.endpoint, **request.view_args)
    query = request.query_string
    if not query:
        return url
    return url + '?' + query.decode()
|
def start ( name , call = None ) :
    '''Start a VM in Linode.

    name
        The name of the VM to start.

    CLI Example:

    .. code-block:: bash

        salt-cloud -a start vm_name
    '''
    if call != 'action':
        raise SaltCloudException(
            'The start action must be called with -a or --action.'
        )

    node_id = get_linode_id_from_name(name)
    node = get_linode(kwargs={'linode_id': node_id})

    # STATUS == 1 means the machine is already booted.
    if node['STATUS'] == 1:
        return {'success': True,
                'action': 'start',
                'state': 'Running',
                'msg': 'Machine already running'}

    response = _query('linode', 'boot', args={'LinodeID': node_id})['DATA']
    booted = _wait_for_job(node_id, response['JobID'])
    if booted:
        return {'state': 'Running', 'action': 'start', 'success': True}
    return {'action': 'start', 'success': False}
|
def myRank ( grade , badFormat , year , length ) :
    '''Rank of a grade within its year.

    Arguments:
        grade {int} -- a weighted average for a specific candidate number and year
        badFormat {dict} -- candNumber: [results for candidate]
        year {int} -- year you are in
        length {int} -- length of each row in badFormat divided by 2

    Returns:
        int -- 1-based rank of the grade within the year
    '''
    averages = everyonesAverage(year, badFormat, length)
    descending = sorted(averages, reverse=True)
    return descending.index(grade) + 1
|
def send_to_room ( self , message , room_name ) :
    """Send ``message`` to the room named ``room_name``.

    Silently does nothing when the room cannot be found.
    """
    target = self.get_room(room_name)
    if target is None:
        return
    target.send_message(message)
|
def init_app ( self , app ) :
    """Initialise the formatter with app-specific values from ``app``'s configuration."""
    if self.__inited:
        return

    config = app.config.get('FLASK_LOGGING_EXTRAS', {})
    bp_config = config.get('BLUEPRINT', {})
    self.bp_var = bp_config.get('FORMAT_NAME', 'blueprint')
    self.bp_app = bp_config.get('APP_BLUEPRINT', '<app>')
    self.bp_noreq = bp_config.get('NO_REQUEST_BLUEPRINT', '<not a request>')

    for var_name, resolver_fqn in config.get('RESOLVERS', {}).items():
        if resolver_fqn is None:
            resolver = None
        else:
            try:
                resolver = _import_by_string(resolver_fqn)
            except ImportError:
                # Keep the raw value when it cannot be imported.
                resolver = resolver_fqn
        self.resolvers[var_name] = resolver

    self.__inited = True
|
def diff_missed_lines ( self , filename ) :
    """Return ``(lineno, is_new)`` tuples for missed lines of ``filename``.

    ``is_new`` is True when the missed line was introduced and False when
    it was removed. Lines with no status are skipped.
    """
    missed = []
    for source_line in self.file_source(filename):
        status = source_line.status
        if status is None:
            continue
        missed.append((source_line.number, not status))
    return missed
|
def _fromJSON ( cls , jsonobject ) :
    """Generate a new :class:`maspy.core.MzmlProduct` from a decoded JSON
    object (as generated by :func:`maspy.core.MzmlProduct._reprJSON()`).

    :param jsonobject: decoded JSON object
    :returns: a new instance of :class:`MzmlProduct`
    """
    isolation_window = [tuple(params) for params in jsonobject]
    return cls(isolation_window)
|
def _render_parts ( self , header_parts ) :
    """Format and quote a single composite header.

    Useful for single headers that are composed of multiple items, e.g.
    'Content-Disposition' fields.

    :param header_parts:
        A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
        as ``k1="v1"; k2="v2"; ...``.
    """
    if isinstance(header_parts, dict):
        items = header_parts.items()
    else:
        items = header_parts
    rendered = [
        self._render_part(name, value)
        for name, value in items
        if value is not None
    ]
    return '; '.join(rendered)
|
def get_management_commands ( settings ) :
    """Find registered management commands and return their classes.

    Returns a :class:`dict` whose keys are names of the management commands
    and whose values are the command classes.
    """
    app_commands = getattr(settings, 'MANAGEMENT_COMMANDS', ())
    commands = {}
    for fqn in itertools.chain(SHELTER_MANAGEMENT_COMMANDS, app_commands):
        command_cls = import_object(fqn)
        # Only BaseCommand subclasses are valid management commands.
        if not issubclass(command_cls, BaseCommand):
            raise ValueError("'%s' is not subclass of the BaseCommand" % fqn)
        commands[command_cls.name] = command_cls
    return commands
|
def execute_pubsub ( self , command , * channels ) :
    """Execute Redis (p)subscribe/(p)unsubscribe commands.

    ConnectionsPool picks a separate connection for pub/sub and uses it
    until explicitly closed or disconnected (unsubscribing from all
    channels/patterns will leave the connection locked for pub/sub use).

    There is no auto-reconnect for this PUB/SUB connection.

    Returns an asyncio.gather coroutine waiting for all channels/patterns
    to receive answers.
    """
    conn, address = self.get_connection(command)
    if conn is None:
        # No free connection yet -- queue the command for later execution.
        return self._wait_execute_pubsub(address, command, channels, {})
    return conn.execute_pubsub(command, *channels)
|
def iou_binary ( preds , labels , EMPTY = 1. , ignore = None , per_image = True ) :
    """IoU for the foreground class (binary: 1 foreground, 0 background)."""
    if not per_image:
        # Treat the whole batch as a single image.
        preds, labels = (preds,), (labels,)
    ious = []
    for pred, label in zip(preds, labels):
        fg_pred = pred == 1
        fg_label = label == 1
        intersection = (fg_label & fg_pred).sum()
        union = (fg_label | (fg_pred & (label != ignore))).sum()
        ious.append(EMPTY if not union else float(intersection) / union)
    # mean across images if per_image
    return 100 * mean(ious)
|
def _get_size ( self ) -> Tuple [ int , int ] :
    """Return the (width, height) of this Image.

    Returns:
        Tuple[int, int]: The (width, height) of this Image.
    """
    # Allocate C int out-parameters for the FFI call.
    width = ffi.new("int *")
    height = ffi.new("int *")
    lib.TCOD_image_get_size(self.image_c, width, height)
    return width[0], height[0]
|
def handle ( self , connection_id , message_content ) :
    """Parse an incoming request and wrap the final response.

    Args:
        connection_id (str): ZMQ identity sent over the ZMQ socket.
        message_content (bytes): byte-encoded request protobuf to parse.

    Returns:
        HandlerResult: result to be sent in response back to the client.
    """
    try:
        request = self._request_proto()
        request.ParseFromString(message_content)
    except DecodeError:
        LOGGER.info('Protobuf %s failed to deserialize', request)
        return self._wrap_result(self._status.INTERNAL_ERROR)

    try:
        response = self._respond(request)
    except _ResponseFailed as failure:
        # A handler signalled failure; its status becomes the response.
        response = failure.status

    return self._wrap_result(response)
|
def __load_settings_from_file ( self ) :
    """Load settings info from the settings JSON file.

    :returns: True if the settings info is valid
    :rtype: boolean
    :raises OneLogin_Saml2_Error: if the settings file cannot be found
    """
    filename = self.get_base_path() + 'settings.json'

    if not exists(filename):
        raise OneLogin_Saml2_Error(
            'Settings file not found: %s',
            OneLogin_Saml2_Error.SETTINGS_FILE_NOT_FOUND,
            filename
        )

    # In the php toolkit instead of being a json file it is a php file and
    # it is directly included
    # Use context managers so the handles are closed even when json.load
    # raises (the previous code leaked them on parse errors).
    with open(filename, 'r') as json_data:
        settings = json.load(json_data)

    advanced_filename = self.get_base_path() + 'advanced_settings.json'
    if exists(advanced_filename):
        # Merge advanced settings on top of the base settings
        with open(advanced_filename, 'r') as json_data:
            settings.update(json.load(json_data))

    return self.__load_settings_from_dict(settings)
|
def random_host_extinction ( log , sampleNumber , extinctionType , extinctionConstant , hostExtinctionDistributions , plot = False ) :
    """*Generate a Numpy array of random host extinctions*

    **Key Arguments:**
        - ``log`` -- logger
        - ``sampleNumber`` -- the sample number, i.e. array size
        - ``extinctionType`` -- constant or random?
        - ``extinctionConstant`` -- the constant value (when extinctionType is constant)
        - ``hostExtinctionDistributions`` -- the host extinction distribution (when extinctionType is random)
        - ``plot`` -- generate plot?

    **Return:**
        - ``hostExtinctionArray`` -- array of host extinction values, or
          None when the requested extinction type is not implemented yet
    """
    ## THIRD PARTY ##
    import numpy as np

    # xxx come back here and add in the random extinctions generation -- will
    # need to account for what type of SN we have ##
    if extinctionType == "constant":
        # BUG FIX: np.zeros(n) * constant was always an array of zeros;
        # fill the array with the constant value as documented.
        hostExtinctionArray = np.full(sampleNumber, extinctionConstant)
    else:
        log.error('host extiction distributions not included yet')
        # BUG FIX: previously this path returned an undefined name
        # (NameError); return None explicitly instead.
        hostExtinctionArray = None

    return hostExtinctionArray
|
def _load_lsm_data ( self , data_var , conversion_factor = 1 , calc_4d_method = None , calc_4d_dim = None , time_step = None ) :
    """Extract LSM data from a folder of netCDF files.

    :param data_var: name of the variable to extract.
    :param conversion_factor: multiplier applied to the extracted values.
    :param calc_4d_method: passed through to the 4D reduction (semantics
        defined by ``self.xd.lsm.getvar``).
    :param calc_4d_dim: dimension used for the 4D reduction (passed through).
    :param time_step: a ``datetime`` to select by timestamp label, an
        integer index to select positionally, or None to keep all steps.
    :returns: the selected data with NaNs replaced by 0 and values scaled.
    """
    data = self . xd . lsm . getvar ( data_var , yslice = self . yslice , xslice = self . xslice , calc_4d_method = calc_4d_method , calc_4d_dim = calc_4d_dim )
    if isinstance ( time_step , datetime ) :
        # label-based selection on the time dimension
        data = data . loc [ { self . lsm_time_dim : [ pd . to_datetime ( time_step ) ] } ]
    elif time_step is not None :
        # positional (index-based) selection on the time dimension
        data = data [ { self . lsm_time_dim : [ time_step ] } ]
    data = data . fillna ( 0 )
    data . values *= conversion_factor
    return data
|
def bitop ( self , operation , dest , * keys ) :
    """Perform the bitwise ``operation`` between ``keys`` and store the
    result in ``dest``.
    """
    args = ('BITOP', operation, dest) + keys
    return self.execute_command(*args)
|
def draw_annulus ( self , center , inner_radius , outer_radius , array , value , mode = "set" ) :
    """Draw an annulus of specified radius on ``array`` filled with ``value``.

    :param center: (x, y) center of the annulus
    :param inner_radius: radius in pixels of the interior empty circle,
        where the annulus begins
    :param outer_radius: radius in pixels of the larger outer circle,
        where the annulus ends
    :param array: image (size (m, n) numpy array) to draw the annulus on
    :param value: value to fill the annulus with
    :param mode: "set" assigns the value inside the ring, "add" adds it
    :raises ValueError: if ``mode`` is neither "set" nor "add"
    :return: updates ``array`` in place
    """
    if mode == "add":
        self.draw_circle(center, outer_radius, array, value)
        self.draw_circle(center, inner_radius, array, -value)
    elif mode == "set":
        inner_rows, inner_cols, previous = self.draw_circle(center, inner_radius, array, -value)
        self.draw_circle(center, outer_radius, array, value)
        # restore the interior so only the ring keeps the new value
        array[inner_rows, inner_cols] = previous
    else:
        raise ValueError("draw_annulus mode must be 'set' or 'add' but {} used".format(mode))
|
def to_title_caps ( underscore_case ) :
    r"""Convert an underscore_case string to Title Caps.

    Args:
        underscore_case (str): e.g. 'the_foo_bar_func'

    Returns:
        str: title_str

    CommandLine:
        python -m utool.util_str --exec-to_title_caps

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_str import *  # NOQA
        >>> underscore_case = 'the_foo_bar_func'
        >>> title_str = to_title_caps(underscore_case)
        >>> result = ('title_str = %s' % (str(title_str),))
        >>> print(result)
        title_str = The Foo Bar Func
    """
    words = underscore_case.split('_')
    # BUG FIX: word[:1] instead of word[0] so empty words (empty input,
    # leading or doubled underscores) no longer raise IndexError; also
    # dropped the unused enumerate().
    capitalized = [word[:1].upper() + word[1:] for word in words]
    return ' '.join(capitalized)
|
def transport_param ( image ) :
    """Parse DockerImage info into a skopeo transport parameter.

    :param image: DockerImage
    :return: string, skopeo parameter specifying the image
    :raises ValueError: when the transport requires a path and none is set
    :raises ConuException: when the transport is not supported
    """
    # Map each transport type to its skopeo URI prefix.
    transports = { SkopeoTransport . CONTAINERS_STORAGE : "containers-storage:" , SkopeoTransport . DIRECTORY : "dir:" , SkopeoTransport . DOCKER : "docker://" , SkopeoTransport . DOCKER_ARCHIVE : "docker-archive" , SkopeoTransport . DOCKER_DAEMON : "docker-daemon:" , SkopeoTransport . OCI : "oci:" , SkopeoTransport . OSTREE : "ostree:" }
    transport = image . transport
    tag = image . tag
    repository = image . name
    path = image . path
    if not transport :
        # Default to the docker:// transport when none was given.
        transport = SkopeoTransport . DOCKER
    command = transports [ transport ]
    # These transports address images on the filesystem and require a path.
    path_required = [ SkopeoTransport . DIRECTORY , SkopeoTransport . DOCKER_ARCHIVE , SkopeoTransport . OCI ]
    if transport in path_required and path is None :
        raise ValueError ( transports [ transport ] + " path is required to be specified" )
    if transport == SkopeoTransport . DIRECTORY :
        return command + path
    if transport == SkopeoTransport . DOCKER_ARCHIVE :
        # docker-archive has no trailing ':' in its prefix; append the path
        # and stop here when no repository is given.
        command += path
        if repository is None :
            return command
        command += ":"
    # NOTE(review): ``transport . DOCKER_DAEMON`` accesses the member via the
    # enum instance; presumably equivalent to
    # ``SkopeoTransport . DOCKER_DAEMON`` -- confirm.
    if transport in [ SkopeoTransport . CONTAINERS_STORAGE , SkopeoTransport . DOCKER , SkopeoTransport . DOCKER_ARCHIVE , transport . DOCKER_DAEMON ] :
        return command + repository + ":" + tag
    if transport == SkopeoTransport . OCI :
        return command + path + ":" + tag
    if transport == SkopeoTransport . OSTREE :
        return command + repository + ( "@" + path if path else "" )
    raise ConuException ( "This transport is not supported" )
|
def hr_avg ( self ) :
    """Average heart rate of the workout."""
    samples = self.hr_values()
    return int(sum(samples) / len(samples))
|
def _to_dict ( self ) :
    """Return a json dictionary representing this model."""
    result = {}
    batches = getattr(self, 'batches', None)
    if batches is not None:
        result['batches'] = [batch._to_dict() for batch in batches]
    return result
|
def kms_key_policy ( ) :
    """Creates a key policy for use of a KMS Key."""
    statements = list(kms_key_root_statements())
    return Policy(
        Version="2012-10-17",
        Id="root-account-access",
        Statement=statements,
    )
|
def _yield_children ( rec ) : # type : ( dr . DirectoryRecord ) - > Generator
    '''An internal function to gather and yield all of the children of a Directory
    Record.

    Parameters:
     rec - The Directory Record to get all of the children from (must be a
           directory)
    Yields:
     Children of this Directory Record.
    Returns:
     Nothing.
    '''
    if not rec . is_dir ( ) :
        raise pycdlibexception . PyCdlibInvalidInput ( 'Record is not a directory!' )
    last = b''
    for child in rec . children : # Check to see if the filename of this child is the same as the
        # last one , and if so , skip the child . This can happen if we
        # have very large files with more than one directory entry .
        fi = child . file_identifier ( )
        if fi == last :
            continue
        last = fi
        # A Rock Ridge child-link record marks a relocated entry: follow the
        # child link, go up to its parent, and find the sibling that links to
        # the same target, yielding that entry instead of the placeholder.
        if child . rock_ridge is not None and child . rock_ridge . child_link_record_exists ( ) and child . rock_ridge . cl_to_moved_dr is not None and child . rock_ridge . cl_to_moved_dr . parent is not None : # If this is the case , this is a relocated entry . We actually
            # want to go find the entry this was relocated to ; we do that
            # by following the child _ link , then going up to the parent and
            # finding the entry that links to the same one as this one .
            cl_parent = child . rock_ridge . cl_to_moved_dr . parent
            for cl_child in cl_parent . children :
                if cl_child . rock_ridge is not None and cl_child . rock_ridge . name ( ) == child . rock_ridge . name ( ) :
                    child = cl_child
                    break
            # If we ended up not finding the right one in the parent of the
            # moved entry , weird , but just return the one we would have
            # anyway .
        yield child
|
def clear ( self ) :
    """Empties DEPQ. Performance: O(1)"""
    with self.lock:
        self.items.clear()
        self.data.clear()
|
def context ( self ) :
    """Get the context."""
    stats = status_codes_by_date_stats()

    attacks_data = [{
        'type': 'line',
        'zIndex': 9,
        'name': _('Attacks'),
        'data': [(v[0], v[1]['attacks']) for v in stats],
    }]

    # One series per status-code class, keeping the original z-ordering.
    codes_data = []
    for z_index, label, code in ((4, '2xx', 200), (5, '3xx', 300),
                                 (6, '4xx', 400), (8, '5xx', 500)):
        codes_data.append({
            'zIndex': z_index,
            'name': label,
            'data': [(v[0], v[1][code]) for v in stats],
        })

    return {
        'generic_chart': json.dumps(status_codes_by_date_chart()),
        'attacks_data': json.dumps(attacks_data),
        'codes_data': json.dumps(codes_data),
    }
|
def secp256r1 ( ) :
    """create the secp256r1 curve"""
    prime = int("FFFFFFFF00000001000000000000000000000000"
                "FFFFFFFFFFFFFFFFFFFFFFFF", 16)
    GFp = FiniteField(prime)
    curve = EllipticCurve(
        GFp,
        115792089210356248762697446949407573530086143415290314195533631308867097853948,
        41058363725152142129326129780047268409114441015993725554835256314039467401291)
    generator = curve.point(
        0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296,
        0x4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5)
    return ECDSA(curve, generator, GFp)
|
def init_app ( self , app ) :
    """Initialize the APScheduler with a Flask application instance."""
    self.app = app
    app.apscheduler = self

    self._load_config()
    self._load_jobs()
    if self.api_enabled:
        self._load_api()
|
def run ( self , packets ) :
    """Run automatically.

    Positional arguments:
        * packets -- list<dict>, list of packet dicts to be reassembled
    """
    for packet_dict in packets:
        # Validate the fragment before handing it to the reassembler.
        frag_check(packet_dict, protocol=self.protocol)
        self.reassembly(Info(packet_dict))
    self._newflg = True
|
def QueryPermissions ( self , user_link , query , options = None ) :
    """Queries permissions for a user.

    :param str user_link: The link to the user entity.
    :param (str or dict) query:
    :param dict options: The request options for the request.
    :return: Query Iterable of Permissions.
    :rtype: query_iterable.QueryIterable
    """
    if options is None:
        options = {}

    path = base.GetPathFromLink(user_link, 'permissions')
    user_id = base.GetResourceIdOrFullNameFromLink(user_link)

    def fetch_fn(options):
        # Feed one page of results plus the headers of the last response.
        feed = self.__QueryFeed(path,
                                'permissions',
                                user_id,
                                lambda r: r['Permissions'],
                                lambda _, b: b,
                                query,
                                options)
        return feed, self.last_response_headers

    return query_iterable.QueryIterable(self, query, options, fetch_fn)
|
def policy_exists ( vhost , name , runas = None ) :
    '''Return whether the policy exists based on rabbitmqctl list_policies.

    Reference: http://www.rabbitmq.com/ha.html

    CLI Example:

    .. code-block:: bash

        salt '*' rabbitmq.policy_exists / HA
    '''
    if runas is None and not salt.utils.platform.is_windows():
        runas = salt.utils.user.get_user()
    policies = list_policies(runas=runas)
    if vhost not in policies:
        return False
    return name in policies[vhost]
|
def split_tarball_path ( path ) :
    '''split_tarball_path(path) yields a tuple (tarball, p) in which tarball is
    the path to the tarball referenced by path and p is the internal path that
    followed that tarball. If no tarball is included in the path, then
    (None, path) is returned. If no internal path is found following the
    tarball, then (path, '') is returned.
    '''
    lowered = path.lower()
    for ending in tarball_endings:
        # The path itself is a tarball with no internal path.
        if lowered.endswith(ending):
            return (path, '')
        marker = ending + ':'
        if marker not in lowered:
            continue
        pieces = path.split(marker)
        return (pieces[0] + ending, marker.join(pieces[1:]))
    return (None, path)
|
def as_graph_queue ( self , manifest , limit_to = None ) :
    """Return a queue over nodes in the graph that tracks progress of
    dependencies.
    """
    graph_nodes = self.graph.nodes() if limit_to is None else limit_to
    return GraphQueue(_subset_graph(self.graph, graph_nodes), manifest)
|
def retry ( exception_cls , max_tries = 10 , sleep = 0.05 ) :
    """Decorator for retrying a function if it throws an exception.

    :param exception_cls: an exception type or a parenthesized tuple of exception types
    :param max_tries: maximum number of times this function can be executed. Must be at least 1.
    :param sleep: number of seconds to sleep between function retries
    """
    assert max_tries > 0
    def with_max_retries_call ( delegate ) :
        # Call delegate up to max_tries times, sleeping between attempts;
        # on the final attempt the exception propagates.
        for i in xrange ( 0 , max_tries ) :
            try :
                return delegate ( )
            except exception_cls :
                if i + 1 == max_tries :
                    raise
                time . sleep ( sleep )
    def outer ( fn ) :
        is_generator = inspect . isgeneratorfunction ( fn )
        @ functools . wraps ( fn )
        def retry_fun ( * args , ** kwargs ) :
            return with_max_retries_call ( lambda : fn ( * args , ** kwargs ) )
        @ functools . wraps ( fn )
        def retry_generator_fun ( * args , ** kwargs ) :
            # For generators only retrieval of the FIRST item is retried:
            # once the generator has yielded, re-running it could duplicate
            # side effects, so later failures propagate unretried.
            def get_first_item ( ) :
                results = fn ( * args , ** kwargs )
                for first_result in results :
                    return [ first_result ] , results
                return [ ] , results
            cache , generator = with_max_retries_call ( get_first_item )
            for item in cache :
                yield item
            for item in generator :
                yield item
        if not is_generator : # so that qcore . inspection . get _ original _ fn can retrieve the original function
            retry_fun . fn = fn
            # Necessary for pickling of Cythonized functions to work . Cython ' s _ _ reduce _ _
            # method always returns the original name of the function .
            retry_fun . __reduce__ = lambda : fn . __name__
            return retry_fun
        else :
            retry_generator_fun . fn = fn
            retry_generator_fun . __reduce__ = lambda : fn . __name__
            return retry_generator_fun
    return outer
|
def __get_return_value_withargs ( self , index_list , * args , ** kwargs ) :
    """Pick the stub return value for a call that matched withArgs conditions.

    Pre-conditions:
        (1) The user has created a stub and specified the stub behaviour
        (2) The user has called the stub function with the specified "args" and "kwargs"
        (3) One or more 'withArgs' conditions were applicable in this case

    Args:
        index_list: list, the list of indices in conditions for which the user args/kwargs match
        args: tuple, the arguments inputed by the user
        kwargs: dictionary, the keyword arguments inputed by the user

    Returns:
        any type, the appropriate return value, based on the stub's behaviour setup and the user input
    """
    c = self . _conditions
    args_list = self . _wrapper . args_list
    kwargs_list = self . _wrapper . kwargs_list
    # indices with an arg and oncall have higher priority and should be checked first
    # (iterated in reverse so later-declared conditions win).
    indices_with_oncall = [ i for i in reversed ( index_list ) if c [ "oncall" ] [ i ] ]
    # if there are any combined withArgs + onCall conditions
    if indices_with_oncall :
        call_count = self . __get_call_count ( args , kwargs , args_list , kwargs_list )
        for i in indices_with_oncall :
            if c [ "oncall" ] [ i ] == call_count :
                return c [ "action" ] [ i ] ( * args , ** kwargs )
    # else if there are simple withArgs conditions
    indices_without_oncall = [ i for i in reversed ( index_list ) if not c [ "oncall" ] [ i ] ]
    if indices_without_oncall :
        # the highest index is the most recently registered condition
        max_index = max ( indices_without_oncall )
        return c [ "action" ] [ max_index ] ( * args , ** kwargs )
    # else all conditions did not match
    return c [ "default" ] ( * args , ** kwargs )
|
def create_property ( name , ptype ) :
    """Creates a custom property with a getter that performs computing
    functionality (if available) and raise a type error if setting
    with the wrong type.

    Note:
        By default, the setter attempts to convert the object to the
        correct type; a type error is raised if this fails.
    """
    # The backing attribute name where the data is stored (e.g. self._name).
    pname = '_' + name
    def getter ( self ) : # This will be where the data is store ( e . g . self . _ name )
        # This is the default property " getter " for container data objects .
        # If the property value is None , this function will check for a
        # convenience method with the signature , self . compute _ name ( ) and call
        # it prior to returning the property value .
        # NOTE(review): the compute method is looked up AND invoked via
        # ``self [ . . . ]`` indexing rather than getattr -- presumably the
        # container class defines __getitem__ accordingly; confirm.
        if not hasattr ( self , pname ) and hasattr ( self , '{}{}' . format ( self . _getter_prefix , pname ) ) :
            self [ '{}{}' . format ( self . _getter_prefix , pname ) ] ( )
        if not hasattr ( self , pname ) :
            raise AttributeError ( 'Please compute or set {} first.' . format ( name ) )
        return getattr ( self , pname )
    def setter ( self , obj ) : # This is the default property " setter " for container data objects .
        # Prior to setting a property value , this function checks that the
        # object ' s type is correct , converting it when possible .
        if not isinstance ( obj , ptype ) :
            try :
                obj = ptype ( obj )
            except Exception :
                raise TypeError ( 'Must be able to convert object {0} to {1} (or must be of type {1})' . format ( name , ptype ) )
        setattr ( self , pname , obj )
    def deleter ( self ) : # Deletes the property ' s value .
        # NOTE(review): deletion also goes through ``del self [ pname ]``
        # (container-style), not delattr -- confirm against the class.
        del self [ pname ]
    return property ( getter , setter , deleter )
|
def next_page ( self ) :
    """Advance to the next page.

    Uses the query object to fetch the next slice of items unless already
    on the last page, in which case nothing happens.
    """
    on_last = self.is_last_page()
    if not on_last:
        self.page = self.page + 1
        self.items = self.fetch_items()
    return not on_last
|
def clean ( self , value ) :
    """Propagate cleaning to list elements.

    :param value: the raw list value; may be None.
    :returns: the element-wise cleaned items, or None when the parent
        clean yields None.
    """
    value = super ( ListField , self ) . clean ( value )
    if value is not None :
        # NOTE(review): under Python 3 ``map`` returns a lazy iterator, not
        # a list -- confirm callers expect that (or that this code targets
        # Python 2).
        return map ( self . itemspec . clean , value )
|
def dirty ( self ) :
    '''The set of instances in this :class:`Session` which have
    been modified.
    '''
    per_model = (sm.dirty for sm in itervalues(self._models))
    return frozenset(chain.from_iterable(per_model))
|
def field_or_value ( clause ) :
    """For a clause that could be a field or value, create the right one
    and return it.
    """
    # Anything without getName(), or named "field", is treated as a field.
    is_field = not hasattr(clause, "getName") or clause.getName() == "field"
    if is_field:
        return Field(clause)
    return Value(resolve(clause))
|
def remove_this_predicateAnchor ( self , predAnch_id ) :
    """Remove the predicate anchor with the given identifier.

    @type predAnch_id: string
    @param predAnch_id: the predicate anchor identifier to be removed
    """
    for anchor in self.get_predicateAnchors():
        if anchor.get_id() != predAnch_id:
            continue
        self.node.remove(anchor.get_node())
        break
|
def mode ( inlist ) :
    """Return the modal (most common) score(s) in the passed list.

    If there is more than one such score, all are returned. The bin-count
    for the mode(s) is also returned.

    Usage:   lmode(inlist)
    Returns: bin-count for mode(s), a list of modal value(s)
    """
    scores = pstat.unique(inlist)
    scores.sort()
    # Frequency of each unique score, in ascending score order.
    freq = [inlist.count(item) for item in scores]
    maxfreq = max(freq)
    modes = [score for score, count in zip(scores, freq) if count == maxfreq]
    return maxfreq, modes
|
def _ssh_state ( chunks , st_kwargs , kwargs , test = False ) :
    '''Function to run a state with the given chunk via salt-ssh.

    :param chunks: the lowstate chunks to execute
    :param st_kwargs: keyword arguments for the salt-ssh Single invocation
    :param kwargs: caller kwargs; ``extra_filerefs`` is honored
    :param test: when True the remote state runs in test mode
    :returns: the decoded JSON result, or raw stdout if decoding fails
    '''
    # Collect the file references needed by the chunks (caller + master opts).
    file_refs = salt . client . ssh . state . lowstate_file_refs ( chunks , _merge_extra_filerefs ( kwargs . get ( 'extra_filerefs' , '' ) , __opts__ . get ( 'extra_filerefs' , '' ) ) )
    # Create the tar containing the state pkg and relevant files .
    trans_tar = salt . client . ssh . state . prep_trans_tar ( __context__ [ 'fileclient' ] , chunks , file_refs , __pillar__ , st_kwargs [ 'id_' ] )
    # Checksum lets the remote side verify the transferred tarball.
    trans_tar_sum = salt . utils . hashutils . get_hash ( trans_tar , __opts__ [ 'hash_type' ] )
    cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}' . format ( __opts__ [ 'thin_dir' ] , test , trans_tar_sum , __opts__ [ 'hash_type' ] )
    single = salt . client . ssh . Single ( __opts__ , cmd , fsclient = __context__ [ 'fileclient' ] , minion_opts = __salt__ . minion_opts , ** st_kwargs )
    single . shell . send ( trans_tar , '{0}/salt_state.tgz' . format ( __opts__ [ 'thin_dir' ] ) )
    stdout , stderr , _ = single . cmd_block ( )
    # Clean up our tar
    try :
        os . remove ( trans_tar )
    except ( OSError , IOError ) :
        pass
    # Read in the JSON data and return the data structure
    try :
        return salt . utils . data . decode ( salt . utils . json . loads ( stdout , object_hook = salt . utils . data . encode_dict ) )
    except Exception as e :
        log . error ( "JSON Render failed for: %s\n%s" , stdout , stderr )
        log . error ( str ( e ) )
    # If for some reason the json load fails , return the stdout
    return salt . utils . data . decode ( stdout )
|
def vpn_connections ( self ) :
    """Instance depends on the API version:

    * 2018-04-01: :class:`VpnConnectionsOperations<azure.mgmt.network.v2018_04_01.operations.VpnConnectionsOperations>`

    :raises NotImplementedError: when the configured API version has no
        VpnConnections operations class.
    """
    api_version = self . _get_api_version ( 'vpn_connections' )
    if api_version == '2018-04-01' :
        from . v2018_04_01 . operations import VpnConnectionsOperations as OperationClass
    else :
        raise NotImplementedError ( "APIVersion {} is not available" . format ( api_version ) )
    # Build the operations client with (de)serializers bound to the
    # models of the selected API version.
    return OperationClass ( self . _client , self . config , Serializer ( self . _models_dict ( api_version ) ) , Deserializer ( self . _models_dict ( api_version ) ) )
|
def add_parent(self, parent):
    """Attach this node to *parent*: registers self as a child of *parent*,
    records *parent* on self, and returns *parent* to allow chaining."""
    parent.add_child(self)
    self.parent = parent
    return parent
|
def del_all_svc_comments(self, service):
    """Delete every comment attached to a service.

    Format of the line that triggers function call::

        DEL_ALL_SVC_COMMENTS;<host_name>;<service_description>

    :param service: service to edit
    :type service: alignak.objects.service.Service
    :return: None
    """
    # Snapshot the keys first: del_comment mutates service.comments.
    for comment_id in list(service.comments.keys()):
        service.del_comment(comment_id)
        # Broadcast the updated service status after each removal.
        self.send_an_element(service.get_update_status_brok())
|
def positive(data):
    r"""Positivity operator.

    Keep only the positive coefficients of the input data; every negative
    coefficient is set to zero.

    Parameters
    ----------
    data : int, float, list, tuple or np.ndarray
        Input data

    Returns
    -------
    int or float, or np.ndarray with only positive coefficients

    Raises
    ------
    TypeError
        For invalid input type.

    Examples
    --------
    >>> from modopt.signal.positivity import positive
    >>> a = np.arange(9).reshape(3, 3) - 5
    >>> positive(a)
    array([[0, 0, 0],
           [0, 0, 0],
           [1, 2, 3]])
    """
    if not isinstance(data, (int, float, list, tuple, np.ndarray)):
        raise TypeError('Invalid data type, input must be `int`, `float`, '
                        '`list`, `tuple` or `np.ndarray`.')

    def _threshold(values):
        # Zero out negatives; (values > 0) acts as a 0/1 mask.
        return values * (values > 0)

    def _apply(values):
        # Recurse through object-dtype (ragged) arrays; threshold leaves.
        values = np.array(values)
        if values.dtype == 'O':
            return [_apply(element) for element in values]
        return list(_threshold(values))

    if isinstance(data, (int, float)):
        return _threshold(data)
    return np.array(_apply(data))
|
def parse_single_report(file_obj):
    """Parse the text of a samtools flagstat report.

    Returns a dict with ``'<lineName>_passed'`` / ``'<lineName>_failed'``
    (and percentage) entries, plus ``'flagstat_total'`` when both total
    counts were found.
    """
    parsed = {}
    group_names = ['passed', 'failed', 'passed_pct', 'failed_pct']
    for line_name, regex in flagstat_regexes.items():
        match = re.search(regex, file_obj, re.MULTILINE)
        if not match:
            continue
        for group_idx, group_name in enumerate(group_names):
            key = "{}_{}".format(line_name, group_name)
            try:
                raw = match.group(group_idx + 1).strip('% ')
                parsed[key] = float(raw) if ('.' in raw) else int(raw)
            except IndexError:
                # Not all regexes capture the percentage groups.
                pass
            except ValueError:
                parsed[key] = float('nan')
    # Derive the total read count when both counts were parsed.
    try:
        parsed['flagstat_total'] = parsed['total_passed'] + parsed['total_failed']
    except KeyError:
        pass
    return parsed
|
def get_canonical_encoding_name(name):
    # type: (str) -> str
    """Given an encoding name, get the canonical name from a codec lookup.

    :param str name: The name of the codec to lookup
    :return: The canonical version of the codec name
    :rtype: str
    """
    import codecs

    try:
        return codecs.lookup(name).name
    except LookupError:
        # Unknown codec: hand the caller's spelling back unchanged.
        return name
|
def ArgList(args, lparen=LParen(), rparen=RParen()):
    """A parenthesised argument list, used by Call()

    :param args: list of argument nodes to place inside the parentheses
    :param lparen: template "(" leaf; cloned below so the shared default
                   object is never mutated
    :param rparen: template ")" leaf; cloned likewise
    :return: a ``syms.trailer`` Node of the form ``( <args> )``
    """
    node = Node(syms.trailer, [lparen.clone(), rparen.clone()])
    if args:
        # Splice the argument list between the two parentheses.
        node.insert_child(1, Node(syms.arglist, args))
    return node
|
def registerViewType(self, cls, window=None):
    """Registers the inputed widget class as a potential view class.  If the
    optional window argument is supplied, then the registerToWindow method
    will be called for the class.

    :param      cls    | <subclass of XView>
                window | <QMainWindow> || <QDialog> || None
    """
    if cls in self._viewTypes:
        # Already registered: nothing to do (window hook not re-run either).
        return
    self._viewTypes.append(cls)
    if window:
        cls.registerToWindow(window)
|
def paragraph_ends(self):
    """The end positions of ``paragraphs`` layer elements, tokenizing the
    paragraphs layer first if it is not present yet."""
    tagged = self.is_tagged(PARAGRAPHS)
    if not tagged:
        self.tokenize_paragraphs()
    return self.ends(PARAGRAPHS)
|
def _query ( self , query_str , query_args = None , ** query_options ) :
"""* * query _ options - - dict
ignore _ result - - boolean - - true to not attempt to fetch results
fetchone - - boolean - - true to only fetch one result
count _ result - - boolean - - true to return the int count of rows affected"""
|
ret = True
# http : / / stackoverflow . com / questions / 6739355 / dictcursor - doesnt - seem - to - work - under - psycopg2
connection = query_options . get ( 'connection' , None )
with self . connection ( connection ) as connection :
cur = connection . cursor ( )
ignore_result = query_options . get ( 'ignore_result' , False )
count_result = query_options . get ( 'count_result' , False )
one_result = query_options . get ( 'fetchone' , query_options . get ( 'one_result' , False ) )
cursor_result = query_options . get ( 'cursor_result' , False )
try :
if query_args :
self . log ( "{}{}{}" , query_str , os . linesep , query_args )
cur . execute ( query_str , query_args )
else :
self . log ( query_str )
cur . execute ( query_str )
if cursor_result :
ret = cur
elif not ignore_result :
if one_result :
ret = self . _normalize_result_dict ( cur . fetchone ( ) )
elif count_result :
ret = cur . rowcount
else :
ret = self . _normalize_result_list ( cur . fetchall ( ) )
except Exception as e :
self . log ( e )
raise
return ret
|
def getAlgorithmInstance(self, layer="L2", column=0):
    """Returns an instance of the underlying algorithm. For example,
    layer=L2 and column=1 could return the actual instance of ColumnPooler
    that is responsible for column 1.
    """
    assert ((column >= 0) and (column < self.numColumns)), ("Column number not "
                                                            "in valid range")
    if layer == "L2":
        columns = self.L2Columns
    elif layer == "L4":
        columns = self.L4Columns
    else:
        raise Exception("Invalid layer. Must be 'L4' or 'L2'")
    return columns[column].getAlgorithmInstance()
|
def _get_ssl_attribute ( value , mapping , default_value , warning_message ) :
"""Get the TLS attribute based on the compatibility mapping .
If no valid attribute can be found , fall - back on default and
display a warning .
: param str value :
: param dict mapping : Dictionary based mapping
: param default _ value : Default fall - back value
: param str warning _ message : Warning message
: return :"""
|
for key in mapping :
if not key . endswith ( value . lower ( ) ) :
continue
return mapping [ key ]
LOGGER . warning ( warning_message , value )
return default_value
|
def _poll_vq_single(self, dname, use_devmode, ddresp):
    """Initiate a view query for a view located in a design document.

    :param ddresp: The design document to poll (as JSON)
    :return: True if successful, False if no views.
    """
    view_name = None
    query = None
    mr_views = ddresp.get('views', {})
    spatial_views = ddresp.get('spatial', {})
    # Prefer a map/reduce view; fall back to a spatial one.
    if mr_views:
        view_name = single_dict_key(mr_views)
        query = Query()
    elif spatial_views:
        view_name = single_dict_key(spatial_views)
        query = SpatialQuery()
    if not view_name:
        return False
    # Cheapest possible probe: accept stale data, fetch at most one row.
    query.stale = STALE_OK
    query.limit = 1
    for _row in self._cb.query(dname, view_name, use_devmode=use_devmode, query=query):
        pass
    return True
|
def hash(self):
    """Return md5 hash for current dataset (computed lazily, then cached)."""
    if self._hash is not None:
        return self._hash
    digest = hashlib.new('md5')
    if self._preprocessor is None:
        # No preprocessor: hash the raw numpy arrays themselves.
        digest.update(numpy_buffer(self._X_train))
        digest.update(numpy_buffer(self._y_train))
        if self._X_test is not None:
            digest.update(numpy_buffer(self._X_test))
        if self._y_test is not None:
            digest.update(numpy_buffer(self._y_test))
    elif callable(self._preprocessor):
        # User-defined preprocessor: hash its source code instead.
        digest.update(inspect.getsource(self._preprocessor).encode('utf-8'))
    self._hash = digest.hexdigest()
    return self._hash
|
def generate_script(self):
    """Create the SGE script that will run the jobs in the JobGroup, with
    the passed arguments.
    """
    self.script = ""   # the assembled SGE job script
    total_tasks = 1    # product of all argument list lengths
    # SGE_TASK_ID is 1-based; rebase it at zero as TASK_ID.
    self.script += """let "TASK_ID=$SGE_TASK_ID - 1"\n"""
    # Emit one shell array per argument; sort keys for stable ordering
    # (needed for Python 3.5 tests).
    for key in sorted(self.arguments.keys()):
        values = self.arguments[key]
        self.script += "%s_ARRAY=( %s )\n" % (key, "".join(v + " " for v in values))
        total_tasks *= len(values)
    self.script += "\n"
    # Decode TASK_ID into one index per argument (mixed-radix decomposition);
    # same sorted ordering as above.
    for key in sorted(self.arguments.keys()):
        count = len(self.arguments[key])
        self.script += """let "%s_INDEX=$TASK_ID %% %d"\n""" % (key, count)
        self.script += """%s=${%s_ARRAY[$%s_INDEX]}\n""" % (key, key, key)
        self.script += """let "TASK_ID=$TASK_ID / %d"\n""" % (count)
    # Finally run the actual command.
    self.script += "\n"
    self.script += self.command
    self.script += "\n"
    # Record how many tasks this group expands to.
    self.tasks = total_tasks
|
def dendrite_filter(n):
    '''Select only dendrites'''
    # A dendrite is either basal or apical.
    return n.type in (NeuriteType.basal_dendrite, NeuriteType.apical_dendrite)
|
def _rename_file(self, line=""):
    """Interactively rename a local ontology file.

    2016-04-11: not a direct command anymore.

    :param line: substring used to filter the list of local ontologies
    :return: None
    """
    if not self.all_ontologies:
        self._help_nofiles()
    else:
        # Filter the known ontologies by the given substring.
        out = []
        for each in self.all_ontologies:
            if line in each:
                out += [each]
        choice = self._selectFromList(out, line)
        if choice:
            fullpath = self.LOCAL_MODELS + "/" + choice
            print(fullpath)
            if os.path.isfile(fullpath):
                self._print("--------------")
                self._print("Please enter a new name for <%s>, including the extension (blank=abort)" % choice)
                var = input()
                if var:
                    try:
                        os.rename(fullpath, self.LOCAL_MODELS + "/" + var)
                        manager.rename_pickled_ontology(choice, var)
                        self._print("<%s> was renamed succesfully." % choice)
                        self.all_ontologies = manager.get_localontologies()
                    # FIX: was a bare `except:`, which also swallowed
                    # KeyboardInterrupt/SystemExit; narrowed to Exception.
                    except Exception:
                        self._print("Not a valid name. An error occurred.")
                        return
                else:
                    # Blank input: abort.
                    return
            else:
                self._print("File not found.")
            # delete: drop the cached "current" ontology if it pointed at
            # the file we just renamed.
            if self.current and self.current['fullpath'] == fullpath:
                self.current = None
                self.currentEntity = None
                self.prompt = _get_prompt()
    return
|
def ensure_contexted(func):
    """Decorator that makes *func* refuse to run unless the Evtx instance
    is being used inside a context statement, i.e. the `with` statement
    (or explicit ``__enter__()``/``__exit__()`` calls) has set up the buffer.
    """
    @wraps(func)
    def inner(self, *args, **kwargs):
        # A missing buffer means __enter__() was never called.
        if self._buf is None:
            raise TypeError("An Evtx object must be used with"
                            " a context (see the `with` statement).")
        return func(self, *args, **kwargs)
    return inner
|
def check_versions(self, conn):
    """
    :param conn: a DB API 2 connection
    :returns: a message with the versions that will be applied or None
    """
    applied = self.get_db_versions(conn)
    pending = [script['version'] for script in self.read_scripts(skip_versions=applied)]
    if pending:
        return ('Your database is not updated. You can update it by '
                'running oq engine --upgrade-db which will process the '
                'following new versions: %s' % pending)
|
def find_ss_regions(dssp_residues):
    """Separates parsed DSSP data into groups of secondary structure.

    Notes
    -----
    Example: all residues in a single helix/loop/strand will be gathered
    into a list, then the next secondary structure element will be
    gathered into a separate list, and so on.

    Parameters
    ----------
    dssp_residues : [list]
        Each internal list contains:
        [0] int Residue number
        [1] str Secondary structure type
        [2] str Chain identifier
        [3] str Residue type
        [4] float Phi torsion angle
        [5] float Psi torsion angle

    Returns
    -------
    fragments : [[list]]
        Lists grouped in continuous regions of secondary structure.
        Innermost list has the same format as above.
    """
    # DSSP codes merged into a single "loop" region regardless of code.
    loops = [' ', 'B', 'S', 'T']
    current_ele = None
    fragment = []
    fragments = []
    first = True
    for ele in dssp_residues:
        if first:
            first = False
            fragment.append(ele)
        elif current_ele in loops:
            if ele[1] in loops:
                fragment.append(ele)
            else:
                fragments.append(fragment)
                fragment = [ele]
        else:
            if ele[1] == current_ele:
                fragment.append(ele)
            else:
                fragments.append(fragment)
                fragment = [ele]
        current_ele = ele[1]
    # FIX: flush the trailing fragment, which was previously dropped,
    # losing the final secondary-structure region of the chain.
    if fragment:
        fragments.append(fragment)
    return fragments
|
def encodeRNAStructure(seq_vec, maxlen=None, seq_align="start", W=240, L=160, U=1, tmpdir="/tmp/RNAplfold/"):
    """Compute RNA secondary structure with RNAplfold implemented in
    Kazan et al 2010, [doi](https://doi.org/10.1371/journal.pcbi.1000832).

    # Note
        Secondary structure is represented as the probability to be in the
        following states:
        `["Pairedness", "Hairpin loop", "Internal loop", "Multi loop", "External region"]`

    # Arguments
        seq_vec: list of DNA/RNA sequences
        maxlen: Maximum sequence length. See `concise.preprocessing.pad_sequences`
        seq_align: How to align the sequences of variable lengths.
        W: Int; span-window length
        L: Int; maximum span
        U: Int; size of unpaired region
        tmpdir: Where to store the intermediary files of RNAplfold.
            Recommended parameters: human/mouse W,L,U = 240,160,1;
            fly/yeast W,L,U = 80,40,1.

    # Returns
        np.ndarray of shape `(len(seq_vec), maxlen, 5)`
    """
    # Unique per-call working directory so parallel calls don't collide.
    tmpdir = tmpdir + "/" + str(uuid4()) + "/"
    if not isinstance(seq_vec, list):
        seq_vec = seq_vec.tolist()
    if not os.path.exists(tmpdir):
        os.makedirs(tmpdir)
    # 1. write the sequences to a fasta file
    fasta_path = tmpdir + "/input.fasta"
    write_fasta(fasta_path, seq_vec)
    # 2. run RNAplfold on it
    run_RNAplfold(fasta_path, tmpdir, W=W, L=L, U=U)
    # 3. read the results back, padding with the "External region" state
    return read_RNAplfold(tmpdir, maxlen, seq_align=seq_align, pad_with="E")
|
def _import_module ( name , package = 'vlfd' , warn = True , prefix = '_py_' , ignore = '_' ) :
"""Try import all public attributes from module into global namespace .
Existing attributes with name clashes are renamed with prefix .
Attributes starting with underscore are ignored by default .
Return True on successful import ."""
|
import warnings
from importlib import import_module
try :
try :
module = import_module ( name )
except ImportError :
module = import_module ( '.' + name , package = package )
except ImportError :
if warn :
warnings . warn ( "failed to import module %s" % name )
else :
for attr in dir ( module ) :
if ignore and attr . startswith ( ignore ) :
continue
if prefix :
if attr in globals ( ) :
globals ( ) [ prefix + attr ] = globals ( ) [ attr ]
elif warn :
warnings . warn ( "no Python implementation of " + attr )
globals ( ) [ attr ] = getattr ( module , attr )
return True
|
def get(self, name):
    """Get the resource URI for a specified resource name.

    If an entry for the specified resource name does not exist in the
    Name-URI cache, the cache is refreshed from the HMC with all resources
    of the manager holding this cache.

    If an entry for the specified resource name still does not exist after
    that, ``NotFound`` is raised.
    """
    self.auto_invalidate()
    if name in self._uris:
        return self._uris[name]
    # Cache miss: refresh from the HMC and retry exactly once.
    self.refresh()
    try:
        return self._uris[name]
    except KeyError:
        raise NotFound({self._manager._name_prop: name}, self._manager)
|
def owner_to(dbname, ownername, user=None, host=None, port=None, password=None, runas=None):
    '''
    Set the owner of all schemas, functions, tables, views and sequences to
    the given username.

    CLI Example:

    .. code-block:: bash

        salt '*' postgres.owner_to 'dbname' 'username'
    '''
    # FIX: NamedTemporaryFile defaults to binary mode ('w+b'); writing the
    # str SQL fragments below would raise TypeError on Python 3, so open
    # the scratch file in text mode.
    sqlfile = tempfile.NamedTemporaryFile(mode='w')
    sqlfile.write('begin;\n')
    sqlfile.write('alter database "{0}" owner to "{1}";\n'.format(dbname, ownername))
    # Each entry: (ALTER statement template, query listing the objects to alter).
    queries = (
        # schemas
        ('alter schema {n} owner to {owner};',
         'select quote_ident(schema_name) as n from '
         'information_schema.schemata;'),
        # tables and views
        ('alter table {n} owner to {owner};',
         'select quote_ident(table_schema)||\'.\'||quote_ident(table_name) as '
         'n from information_schema.tables where table_schema not in '
         '(\'pg_catalog\', \'information_schema\');'),
        # functions
        ('alter function {n} owner to {owner};',
         'select p.oid::regprocedure::text as n from pg_catalog.pg_proc p '
         'join pg_catalog.pg_namespace ns on p.pronamespace=ns.oid where '
         'ns.nspname not in (\'pg_catalog\', \'information_schema\') '
         ' and not p.proisagg;'),
        # aggregate functions
        ('alter aggregate {n} owner to {owner};',
         'select p.oid::regprocedure::text as n from pg_catalog.pg_proc p '
         'join pg_catalog.pg_namespace ns on p.pronamespace=ns.oid where '
         'ns.nspname not in (\'pg_catalog\', \'information_schema\') '
         'and p.proisagg;'),
        # sequences
        ('alter sequence {n} owner to {owner};',
         'select quote_ident(sequence_schema)||\'.\'||'
         'quote_ident(sequence_name) as n from information_schema.sequences;'))
    for fmt, query in queries:
        ret = psql_query(query, user=user, host=host, port=port,
                         maintenance_db=dbname, password=password, runas=runas)
        for row in ret:
            sqlfile.write(fmt.format(owner=ownername, n=row['n']) + '\n')
    sqlfile.write('commit;\n')
    sqlfile.flush()
    # ensure psql can read the file
    os.chmod(sqlfile.name, 0o644)
    # run the generated sqlfile in the db
    cmdret = _psql_prepare_and_run(['-f', sqlfile.name],
                                   user=user, runas=runas, host=host,
                                   port=port, password=password,
                                   maintenance_db=dbname)
    return cmdret
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.