signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def load(self, source: Source) -> None:
    """Load all resources (i.e., bugs, blueprints, and tools) provided by a
    given source.

    If the source has already been loaded, its resources are first unloaded
    and then reloaded.

    :param source: the source whose manifests should be discovered and loaded.
    """
    logger.info('loading source %s at %s', source.name, source.location)
    # Reloading an already-registered source: drop its previous contents first.
    if source.name in self.__sources:
        self.unload(source)
    bugs = []
    blueprints = []
    tools = []
    # find and parse all bugzoo files
    # (the 'y*ml' glob covers both '.bugzoo.yml' and '.bugzoo.yaml'; the
    # endswith check below filters out any other 'y*ml' matches)
    glob_pattern = '{}/**/*.bugzoo.y*ml'.format(source.location)
    for fn in glob.iglob(glob_pattern, recursive=True):
        if fn.endswith('.yml') or fn.endswith('.yaml'):
            logger.debug('found manifest file: %s', fn)
            # __parse_file appends discovered items to the three lists in place
            self.__parse_file(source, fn, bugs, blueprints, tools)
            logger.debug('parsed manifest file: %s', fn)
    # register contents
    for bug in bugs:
        self.__installation.bugs.add(bug)
    for blueprint in blueprints:
        self.__installation.build.add(blueprint)
    for tool in tools:
        self.__installation.tools.add(tool)
    # record contents of source so a later unload can find them
    contents = SourceContents([b.name for b in blueprints],
                              [b.name for b in bugs],
                              [t.name for t in tools])
    self.__sources[source.name] = source
    self.__contents[source.name] = contents
    logger.info("loaded source: %s", source.name)
def on_receive(self, message=None, wire=None, event_origin=None):
    """Event handler bound to the receive event of the link the server is
    wired to.

    Arguments:
        - message (message.Message): incoming message

    Keyword arguments:
        - wire: link used to send the response back
        - event_origin (connection.Link)
    """
    self.trigger("before_call", message)
    fn_name = message.data
    pmsg = self.prepare_message
    try:
        # Let every registered handler pre-process the incoming message
        # before the target function is resolved.
        for handler in self.handlers:
            handler.incoming(message, self)
        fn = self.get_function(fn_name, message.path)
    except Exception as inst:
        # Resolution/handler failure: report the error to the caller and stop.
        wire.respond(message, ErrorMessage(str(inst)))
        return
    # Only call functions explicitly marked as exposed on the API.
    if callable(fn) and getattr(fn, "exposed", False):
        try:
            r = fn(*message.args, **message.kwargs)
            # Wrap plain return values in a Message before responding.
            if isinstance(r, Message):
                wire.respond(message, pmsg(r))
            else:
                wire.respond(message, pmsg(Message(r)))
        except Exception as inst:
            # In debug mode leak the full traceback; otherwise only the
            # exception text is sent back.
            if self.debug:
                wire.respond(message, pmsg(ErrorMessage(str(traceback.format_exc()))))
            else:
                wire.respond(message, pmsg(ErrorMessage(str(inst))))
    else:
        wire.respond(message, pmsg(ErrorMessage("action '%s' not exposed on API (%s)" % (fn_name, self.__class__.__name__))))
    self.trigger("after_call", message)
def Hooper2K(Di, Re, name=None, K1=None, Kinfty=None):
    r'''Returns loss coefficient for any various fittings, depending on the
    name input. Alternatively, the Hooper constants K1, Kinfty may be provided
    and used instead. Source of data is [1]_. Reviews of this model are
    favorable less favorable than the Darby method but superior to the
    constant-K method.

    .. math::
        K = \frac{K_1}{Re} + K_\infty\left(1 + \frac{1\text{ inch}}{D_{in}}\right)

    Note this model uses actual inside pipe diameter in inches.

    Parameters
    ----------
    Di : float
        Actual inside diameter of the pipe, [in]
    Re : float
        Reynolds number, [-]
    name : str, optional
        String from Hooper dict representing a fitting
    K1 : float, optional
        K1 parameter of Hooper model, optional [-]
    Kinfty : float, optional
        Kinfty parameter of Hooper model, optional [-]

    Returns
    -------
    K : float
        Loss coefficient [-]

    Notes
    -----
    Also described in Ludwig's Applied Process Design. Relatively uncommon to
    see it used.

    Examples
    --------
    >>> Hooper2K(Di=2., Re=10000., name='Valve, Globe, Standard')
    6.15
    >>> Hooper2K(Di=2., Re=10000., K1=900, Kinfty=4)
    6.09

    References
    ----------
    .. [1] Hooper, W. B., "The 2-K Method Predicts Head Losses in Pipe
       Fittings," Chem. Eng., p. 97, Aug. 24 (1981).
    .. [2] Hooper, William B. "Calculate Head Loss Caused by Change in Pipe
       Size." Chemical Engineering 95, no. 16 (November 7, 1988): 89.
    .. [3] Kayode Coker. Ludwig's Applied Process Design for Chemical and
       Petrochemical Plants. 4E. Amsterdam; Boston: Gulf Professional
       Publishing, 2007.
    '''
    if name:
        if name in Hooper:
            d = Hooper[name]
            K1, Kinfty = d['K1'], d['Kinfty']
        else:
            raise ValueError('Name of fitting not in list')
    # Compare against None explicitly: the previous truthiness test
    # (`elif K1 and Kinfty`) wrongly rejected legitimate zero-valued
    # constants such as K1=0.
    elif K1 is None or Kinfty is None:
        raise ValueError('Name of fitting or constants are required')
    return K1 / Re + Kinfty * (1. + 1. / Di)
def parse_lheading(self, m):
    """Parse a setext heading: an '=' underline yields level 1, a '-'
    underline yields level 2, and the heading text is passed to the renderer.
    """
    text = m.group(1)
    underline = m.group(2)
    heading_level = 1 if underline == '=' else 2
    self.renderer.heading(text, level=heading_level)
def _get_alignment_lines ( self ) : '''This function parses the Clustal Omega alignment output and returns the aligned sequences in a dict : sequence _ id - > sequence _ string . The special key - 1 is reserved for the match line ( e . g . ' . : * * * * * * ' ) .'''
# Strip the boilerplate lines lines = self . alignment_output . split ( "\n" ) assert ( lines [ 0 ] . startswith ( 'CLUSTAL' ) ) lines = '\n' . join ( lines [ 1 : ] ) . lstrip ( ) . split ( '\n' ) # The sequence IDs should be unique . Reassert this here assert ( len ( self . sequence_ids . values ( ) ) == len ( set ( self . sequence_ids . values ( ) ) ) ) # Create the list of sequence IDs id_list = [ v for k , v in sorted ( self . sequence_ids . iteritems ( ) ) ] # Determine the indentation level first_id = id_list [ 0 ] header_regex = re . compile ( "(.*?\s+)(.*)" ) alignment_regex = re . compile ( "^([A-Z\-]+)\s*$" ) mtchs = header_regex . match ( lines [ 0 ] ) assert ( mtchs . group ( 1 ) . strip ( ) == first_id ) indentation = len ( mtchs . group ( 1 ) ) sequence = mtchs . group ( 2 ) assert ( sequence ) assert ( alignment_regex . match ( sequence ) ) # Create empty lists for the sequences sequences = { } for id in id_list : sequences [ id ] = [ ] sequences [ - 1 ] = [ ] # Get the lists of sequences num_ids = len ( id_list ) for x in range ( 0 , len ( lines ) , num_ids + 2 ) : for y in range ( num_ids ) : id = id_list [ y ] assert ( lines [ x + y ] [ : indentation ] . strip ( ) == id ) assert ( lines [ x + y ] [ indentation - 1 ] == ' ' ) sequence = lines [ x + y ] [ indentation : ] . strip ( ) assert ( alignment_regex . match ( sequence ) ) sequences [ id ] . append ( sequence ) # Get the length of the sequence lines length_of_sequences = list ( set ( map ( len , [ v [ - 1 ] for k , v in sequences . iteritems ( ) if k != - 1 ] ) ) ) assert ( len ( length_of_sequences ) == 1 ) length_of_sequences = length_of_sequences [ 0 ] # Parse the Clustal match line assert ( lines [ x + num_ids ] [ : indentation ] . strip ( ) == '' ) match_sequence = lines [ x + num_ids ] [ indentation : indentation + length_of_sequences ] assert ( match_sequence . strip ( ) == lines [ x + num_ids ] . strip ( ) ) assert ( lines [ x + y ] [ indentation - 1 ] == ' ' ) sequences [ - 1 ] . 
append ( match_sequence ) # Check for the empty line assert ( lines [ x + num_ids + 1 ] . strip ( ) == '' ) # Create the sequences , making sure that all sequences are the same length lengths = set ( ) for k , v in sequences . iteritems ( ) : sequences [ k ] = "" . join ( v ) lengths . add ( len ( sequences [ k ] ) ) assert ( len ( lengths ) == 1 ) return sequences
def call_insert(tup):
    """Importable helper for multi-proc calling of ArtifactCache.insert on an
    ArtifactCache instance.

    See docstring on call_use_cached_files explaining why this is useful.

    :param tup: A 4-tuple of an ArtifactCache and the 3 args passed to
        ArtifactCache.insert: eg
        (some_cache_instance, cache_key, [some_file, another_file], False)
    :returns: whatever ``insert`` returns, or False on a nonfatal cache error.
    """
    cache, key, files, overwrite = tup
    try:
        return cache.insert(key, files, overwrite)
    except NonfatalArtifactCacheError as e:
        logger.warn('Error while inserting into artifact cache: {0}'.format(e))
        return False
def create_iplist_with_data(name, iplist):
    """Create an IPList with initial list contents.

    :param str name: name of IPList
    :param list iplist: list of IPList IP's, networks, etc
    :return: href of list location
    """
    created = IPList.create(name=name, iplist=iplist)
    return created
def _get_list_widget(
    self,
    filters,
    actions=None,
    order_column="",
    order_direction="",
    page=None,
    page_size=None,
    widgets=None,
    **args
):
    """Get joined base filter and current active filter for the query, run
    the query, and build the list widget from the result.

    :param filters: the currently active filter set; joined with the view's
        base filters before querying.
    :param actions: actions to expose on the widget; defaults to self.actions.
    :param order_column: column to order by; falls back to self.base_order.
    :param order_direction: 'asc'/'desc' companion to order_column.
    :param page: page number to fetch, or None.
    :param page_size: items per page; defaults to self.page_size.
    :param widgets: dict to populate; a new one is created if None.
    :return: the widgets dict with the 'list' entry filled in.
    """
    widgets = widgets or {}
    actions = actions or self.actions
    page_size = page_size or self.page_size
    # Apply the view's default ordering only when the caller gave none.
    if not order_column and self.base_order:
        order_column, order_direction = self.base_order
    joined_filters = filters.get_joined_filters(self._base_filters)
    count, lst = self.datamodel.query(
        joined_filters,
        order_column,
        order_direction,
        page=page,
        page_size=page_size,
    )
    pks = self.datamodel.get_keys(lst)
    # serialize composite pks
    pks = [self._serialize_pk_if_composite(pk) for pk in pks]
    widgets["list"] = self.list_widget(
        label_columns=self.label_columns,
        include_columns=self.list_columns,
        value_columns=self.datamodel.get_values(lst, self.list_columns),
        order_columns=self.order_columns,
        formatters_columns=self.formatters_columns,
        page=page,
        page_size=page_size,
        count=count,
        pks=pks,
        actions=actions,
        filters=filters,
        modelview_name=self.__class__.__name__,
    )
    return widgets
def assert_independent(package, *packages):
    """Make sure `package` does not depend on any of `packages`.

    :param package: Python name of a module/package
    :param packages: Python names of modules/packages
    :raises CodeDependencyError: if an imported module of `package` belongs
        to one of the forbidden `packages`.
    """
    assert packages, 'At least one package must be specified'
    # Import the package in a subprocess and collect everything it pulls in.
    code = ('from openquake.baselib.general import import_all\n'
            'print(import_all("%s"))' % package)
    imported_modules = run_in_process(code)
    for mod in imported_modules:
        for pkg in packages:
            if mod.startswith(pkg):
                raise CodeDependencyError('%s depends on %s' % (package, pkg))
def allocate(self, manager=None):
    """This function is called once we have completed the initialization of
    the :class:`Work`. It sets the manager of each task (if not already done)
    and defines the working directories of the tasks.

    Args:
        manager: :class:`TaskManager` object or None
    """
    for i, task in enumerate(self):
        if not hasattr(task, "manager"):
            # Set the manager
            # Use the one provided in input else the one of the work/flow.
            if manager is not None:
                task.set_manager(manager)
            else:
                # Look first in work and then in the flow.
                if hasattr(self, "manager"):
                    task.set_manager(self.manager)
                else:
                    task.set_manager(self.flow.manager)
        # Tasks get numbered subdirectories t0, t1, ... under the work dir.
        task_workdir = os.path.join(self.workdir, "t" + str(i))
        if not hasattr(task, "workdir"):
            task.set_workdir(task_workdir)
        else:
            # A task that already has a workdir must agree with the layout.
            if task.workdir != task_workdir:
                raise ValueError("task.workdir != task_workdir: %s, %s" % (task.workdir, task_workdir))
def update(self, text, revision=None):
    """Modifies the internal state based on a change to the content and
    returns the sets of words added and removed.

    :Parameters:
        text : str
            The text content of a revision
        revision : `mixed`
            Revision metadata

    :Returns:
        A triple of lists:

        current_tokens : `list` (:class:`~mwpersistence.Token`)
            A sequence of Tokens representing the revision that was just
            processed.
        tokens_added : `list` (:class:`~mwpersistence.Token`)
            Tokens that were added while updating state.
        tokens_removed : `list` (:class:`~mwpersistence.Token`)
            Tokens that were removed while updating state.
    """
    result = self._update(text=text, revision=revision)
    return result
def get_tree_from_sha(self, ref):
    '''Return a pygit2.Tree object matching a SHA, or None if the ref
    cannot be resolved to a tree.'''
    try:
        resolved = self.repo.revparse_single(ref)
        return resolved.tree
    except (KeyError, TypeError, ValueError, AttributeError):
        # Unresolvable or tree-less refs are reported as "not found".
        return None
def logical_cores(self):
    """Return the number of cpu cores as reported to the os.

    May be different from physical_cores if, ie, intel's hyperthreading is
    enabled. Falls back to 1 (with an error printed) if detection fails.
    """
    try:
        return self._logical_cores()
    except Exception as exc:
        # Detection is best-effort; never let it crash the caller.
        from rez.utils.logging_ import print_error
        print_error("Error detecting logical core count, defaulting to 1: %s" % str(exc))
        return 1
def ape ( self , ape_path = None ) : '''Open in ApE if ` ApE ` is in your command line path .'''
# TODO : simplify - make ApE look in PATH only cmd = 'ApE' if ape_path is None : # Check for ApE in PATH ape_executables = [ ] for path in os . environ [ 'PATH' ] . split ( os . pathsep ) : exepath = os . path . join ( path , cmd ) ape_executables . append ( os . access ( exepath , os . X_OK ) ) if not any ( ape_executables ) : raise Exception ( 'Ape not in PATH. Use ape_path kwarg.' ) else : cmd = ape_path # Check whether ApE exists in PATH tmp = tempfile . mkdtemp ( ) if self . name is not None and self . name : filename = os . path . join ( tmp , '{}.ape' . format ( self . name ) ) else : filename = os . path . join ( tmp , 'tmp.ape' ) coral . seqio . write_dna ( self , filename ) process = subprocess . Popen ( [ cmd , filename ] ) # Block until window is closed try : process . wait ( ) shutil . rmtree ( tmp ) except KeyboardInterrupt : shutil . rmtree ( tmp )
def sub_notes(docs):
    """Substitutes the special controls for notes, warnings, todos, and bugs
    with the corresponding div.
    """
    def substitute(match):
        note_kind = match.group(1)
        body = match.group(2)
        replacement = ("</p><div class=\"alert alert-{}\" role=\"alert\"><h4>{}</h4>"
                       "<p>{}</p></div>").format(
            NOTE_TYPE[note_kind.lower()], note_kind.capitalize(), body)
        # Some patterns capture a trailing group; when it is empty the
        # paragraph must be reopened after the div.
        if len(match.groups()) >= 4 and not match.group(4):
            replacement += '\n<p>'
        return replacement

    for regex in NOTE_RE:
        docs = regex.sub(substitute, docs)
    return docs
def context_processor(self, fn):
    """Registers a template context processor function.

    Registration is deferred until the app is available; returns ``fn`` so
    this can be used as a decorator.
    """
    def register(app):
        return app.context_processor(fn)
    self._defer(register)
    return fn
def _encode_config(conf_dict):
    """Encode `conf_dict` to string.

    Each boolean in `conf_dict`, taken in the order defined in
    settings._ALLOWED_MERGES, becomes one character: 't' or 'f'.
    """
    # get variables in order defined in settings._ALLOWED_MERGES
    flags = [conf_dict[var] for var in settings._ALLOWED_MERGES]
    # convert bools to chars
    return "".join("t" if flag else "f" for flag in flags)
def network_create_func(self, net):
    """Create network in database and dcnm.

    :param net: network dictionary
    """
    net_id = net['id']
    net_name = net.get('name')
    network_db_elem = self.get_network(net_id)
    # Check if the source of network creation is FW and if yes, skip
    # this event.
    # Check if there's a way to read the DB from service class
    # TODO(padkrish)
    if self.fw_api.is_network_source_fw(network_db_elem, net_name):
        LOG.info("Service network %s, returning", net_name)
        return
    # Cache the incoming network data for networks we have not seen yet.
    if not network_db_elem:
        self.network[net_id] = {}
        self.network[net_id].update(net)
    net_name = net.get('name')
    tenant_id = net.get('tenant_id')
    # Extract segmentation_id from the network name
    net_ext_name = self.cfg.dcnm.dcnm_net_ext
    nobj = re.search(net_ext_name, net_name)
    try:
        seg_id = int((net_name[nobj.start(0) + len(net_ext_name) - 1:] if nobj else None))
    except (IndexError, TypeError, ValueError):
        # Name does not carry a segmentation id suffix.
        seg_id = None
    # Check if network is already created.
    query_net = self.get_network_by_segid(seg_id) if seg_id else None
    if query_net:
        # The network is already created no need to process the event.
        if query_net.source.lower() == 'dcnm':
            # DCNM created the network. Only update network id in database.
            prev_id = query_net.network_id
            params = dict(columns=dict(network_id=net_id))
            self.update_network(prev_id, **params)
            # Update the network cache.
            prev_info = self.network.pop(prev_id)
            prev_info['id'] = net_id
            self.network[net_id] = prev_info
            # Update the network name. After extracting the segmentation_id
            # no need to keep it in the name. Removing it and update
            # the network.
            updated_net_name = (net_name[:nobj.start(0) + len(net_ext_name) - 1])
            try:
                body = {'network': {'name': updated_net_name, }}
                dcnm_net = self.neutronclient.update_network(net_id, body=body).get('network')
                LOG.debug('Updated network %(network)s', dcnm_net)
            except Exception as exc:
                LOG.exception('Failed to update network '
                              '%(network)s. Reason %(err)s.',
                              {'network': updated_net_name, 'err': str(exc)})
                return
        LOG.info('network_create_event: network %(name)s was created '
                 'by %(source)s. Ignoring processing the event.',
                 {'name': net_name, 'source': 'dcnm'})
        return
    # Already known to the DB (but not matched by segid above): nothing to do.
    if network_db_elem:
        LOG.debug("Network %s exists, not processing" % net_name)
        return
    # Check if project (i.e. tenant) exist.
    tenant_name = self.get_project_name(tenant_id)
    if not tenant_name:
        LOG.error('Failed to create network %(name)s. Project '
                  '%(tenant_id)s does not exist.',
                  {'name': net_name, 'tenant_id': tenant_id})
        return
    pseg_id = self.network[net_id].get('provider:segmentation_id')
    seg_id = self._get_segmentation_id(net_id, pseg_id, 'openstack')
    self.network[net_id]['segmentation_id'] = seg_id
    try:
        cfgp, fwd_mod = self.dcnm_client.get_config_profile_for_network(net.get('name'))
        self.network[net_id]['config_profile'] = cfgp
        self.network[net_id]['fwd_mod'] = fwd_mod
        self.add_network_db(net_id, self.network[net_id], 'openstack', constants.SUBNET_PENDING)
        LOG.debug('network_create_event: network=%s', self.network)
    except dexc.DfaClientRequestFailed:
        # Fail to get config profile from DCNM.
        # Save the network info with failure result and send the request
        # to DCNM later.
        self.add_network_db(net_id, self.network[net_id], 'openstack', constants.CREATE_FAIL)
        LOG.error('Failed to create network=%s.', self.network)
def account_overview(object):
    """Create layout for user profile: an avatar panel next to a panel of
    basic account fields."""
    avatar_panel = Panel(
        'Avatar',
        Img(src="{}{}".format(settings.MEDIA_URL, object.avatar)),
        collapse=True,
    )
    info_panel = Panel(
        'Account information',
        DescriptionList(
            'email',
            'first_name',
            'last_name',
        ),
    )
    return Layout(
        Container(
            Row(
                Column2(avatar_panel),
                Column10(info_panel),
            )
        )
    )
def _update_from_pb ( self , instance_pb ) : """Refresh self from the server - provided protobuf . Helper for : meth : ` from _ pb ` and : meth : ` reload ` ."""
if not instance_pb . display_name : # Simple field ( string ) raise ValueError ( "Instance protobuf does not contain display_name" ) self . display_name = instance_pb . display_name self . type_ = instance_pb . type self . labels = dict ( instance_pb . labels ) self . _state = instance_pb . state
def from_url(url, **options):
    """Downloads the contents of a given URL and loads it into a new TableFu
    instance.

    :param url: URL to fetch
    :param options: passed through to the TableFu constructor
    """
    response = urllib2.urlopen(url)
    return TableFu(response, **options)
def toString(self):
    """Returns time as string, keeping the leading sign only for negative
    values."""
    parts = self.toList()
    text = angle.slistStr(parts)
    # slistStr always emits a sign character; strip it unless negative.
    return text if parts[0] == '-' else text[1:]
def sample_t(self, n):
    """Generate stripping times (time since stripping), drawn uniformly on
    [0, t_disrupt).

    Simple implementation; could be replaced by more complicated
    distributions in sub-classes of streamdf.

    :param n: number of points to return
    :returns: array of n stripping times
    """
    return self._tdisrupt * numpy.random.uniform(size=n)
def process_experiences(self, current_info: AllBrainInfo, next_info: AllBrainInfo):
    """Checks agent histories for processing condition, and processes them
    as necessary. Records episode statistics for each agent that just
    finished and resets its running counters.

    :param current_info: Current AllBrainInfo
    :param next_info: Next AllBrainInfo
    """
    info_student = next_info[self.brain_name]
    for idx, done in enumerate(info_student.local_done):
        if not done:
            continue
        agent_id = info_student.agents[idx]
        self.stats['Environment/Cumulative Reward'].append(
            self.cumulative_rewards.get(agent_id, 0))
        self.stats['Environment/Episode Length'].append(
            self.episode_steps.get(agent_id, 0))
        # Episode finished: start the counters over for this agent.
        self.cumulative_rewards[agent_id] = 0
        self.episode_steps[agent_id] = 0
def _StatusUpdateThreadMain(self):
    """Main function of the status update thread.

    Loops until self._status_update_active is cleared, refreshing worker and
    foreman status, sampling the task queue, and invoking the optional
    status update callback once per interval.
    """
    while self._status_update_active:
        # Make a local copy of the PIDs in case the dict is changed by
        # the main thread.
        for pid in list(self._process_information_per_pid.keys()):
            self._CheckStatusWorkerProcess(pid)
        self._UpdateForemanProcessStatus()
        tasks_status = self._task_manager.GetStatusInformation()
        # Optional profiling of the task queue.
        if self._task_queue_profiler:
            self._task_queue_profiler.Sample(tasks_status)
        self._processing_status.UpdateTasksStatus(tasks_status)
        if self._status_update_callback:
            self._status_update_callback(self._processing_status)
        time.sleep(self._STATUS_UPDATE_INTERVAL)
def lint(ctx, scenario_name):  # pragma: no cover
    """Lint the role.

    Resolves the CLI args from the click context and dispatches the lint
    subcommand for the selected scenario.
    """
    cli_args = ctx.obj.get('args')
    subcmd = base._get_subcommand(__name__)
    base.execute_cmdline_scenarios(scenario_name, cli_args, {'subcommand': subcmd})
def find_index(model_meta, weights=None, verbosity=0):
    """Return a tuple of index metadata for the model metadata dict provided.

    Return value format is::

        (field_name,
         {'primary_key': bool,   # whether it's the primary key
          'unique': bool},       # whether it's a unique index
         score)

    NOTE(review): this code uses ``print`` statements and ``iteritems`` — it
    is Python 2 only.
    """
    weights = weights or find_index.default_weights
    # N: number of rows in the table, used for the uniqueness heuristic.
    N = model_meta['Meta'].get('count', 0)
    # First pass: an explicit primary key short-circuits everything else.
    for field_name, field_meta in model_meta.iteritems():
        if field_name == 'Meta':
            continue
        pkfield = field_meta.get('primary_key')
        if pkfield:
            if verbosity > 1:
                print pkfield
            # TODO: Allow more than one index per model/table
            return {field_name: {
                'primary_key': True,
                'unique': field_meta.get('unique') or (N >= 3 and field_meta.get('num_null') <= 1 and field_meta.get('num_distinct') == N),
            }}
    # Second pass: no explicit PK; score every field by the weighted features
    # and pick the best candidate.
    score_names = []
    for field_name, field_meta in model_meta.iteritems():
        score = 0
        for feature, weight in weights:
            # for categorical features (strings), need to look for a particular value
            value = field_meta.get(feature)
            if isinstance(weight, tuple):
                # NOTE(review): `value in (float, int)` tests whether value IS
                # the type object float/int, and `weight * value` with a tuple
                # weight would not produce a number — this branch looks broken
                # or dead; confirm the intended check (isinstance?).
                if value is not None and value in (float, int):
                    score += weight * value
                if callable(weight[1]):
                    # (base_weight, scoring_function) pair
                    score += weight[0] * weight[1](field_meta.get(feature))
                else:
                    # (base_weight, expected_value) pair
                    score += weight[0] * (field_meta.get(feature) == weight[1])
            else:
                feature_value = field_meta.get(feature)
                if feature_value is not None:
                    score += weight * field_meta.get(feature)
        score_names += [(score, field_name)]
    max_name = max(score_names)
    field_meta = model_meta[max_name[1]]
    return (max_name[1], {
        'primary_key': True,
        'unique': field_meta.get('unique') or (N >= 3 and field_meta.get('num_null') <= 1 and field_meta.get('num_distinct') == N),
    }, max_name[0],)
def kbtype(self, value):
    """Set kbtype.

    A None value leaves the current kbtype untouched; otherwise the first
    character of ``value`` selects 't' (taxonomy), 'd' (dynamic) or
    'w' (written_as, also the default for an empty string).
    """
    if value is None:
        # set the default value
        return
    # or set one of the available values
    selected = value[0] if len(value) > 0 else 'w'
    if selected not in ['t', 'd', 'w']:
        raise ValueError('unknown type "{value}", please use one of following values: "taxonomy", "dynamic" or "written_as"'.format(value=value))
    self._kbtype = selected
def initialize_ray():
    """Initializes ray based on environment variables and internal defaults.

    Reads MODIN_MEMORY (object store size) and MODIN_OUT_OF_CORE (spill the
    plasma store to a temp directory). Only the main thread performs
    ray.init(); the custom serializer is registered regardless.
    """
    if threading.current_thread().name == "MainThread":
        plasma_directory = None
        object_store_memory = os.environ.get("MODIN_MEMORY", None)
        if os.environ.get("MODIN_OUT_OF_CORE", "False").title() == "True":
            from tempfile import gettempdir

            plasma_directory = gettempdir()
            # We may have already set the memory from the environment variable, we don't
            # want to overwrite that value if we have.
            if object_store_memory is None:
                # Round down to the nearest Gigabyte.
                mem_bytes = ray.utils.get_system_memory() // 10 ** 9 * 10 ** 9
                # Default to 8x memory for out of core
                object_store_memory = 8 * mem_bytes
        # In case anything failed above, we can still improve the memory for Modin.
        if object_store_memory is None:
            # Round down to the nearest Gigabyte.
            object_store_memory = int(0.6 * ray.utils.get_system_memory() // 10 ** 9 * 10 ** 9)
            # If the memory pool is smaller than 2GB, just use the default in ray.
            if object_store_memory == 0:
                object_store_memory = None
        else:
            object_store_memory = int(object_store_memory)
        ray.init(
            include_webui=False,
            ignore_reinit_error=True,
            plasma_directory=plasma_directory,
            object_store_memory=object_store_memory,
        )
    # Register custom serializer for method objects to avoid warning message.
    # We serialize `MethodType` objects when we use AxisPartition operations.
    ray.register_custom_serializer(types.MethodType, use_pickle=True)
def data_kva_compare(db_data, user_data):
    """Validate key/value data in KeyValueArray.

    Args:
        db_data (list): The data store in Redis.
        user_data (dict): The user provided data.

    Returns:
        bool: True if any stored entry matches the user's key AND value.
    """
    target_key = user_data.get('key')
    target_value = user_data.get('value')
    return any(
        kv_data.get('key') == target_key and kv_data.get('value') == target_value
        for kv_data in db_data
    )
def init_with_context(self, context):
    """Please refer to
    :meth:`~admin_tools.menu.items.MenuItem.init_with_context` documentation
    from :class:`~admin_tools.menu.items.MenuItem` class.

    Builds one child menu item per bookmark of the requesting user and
    disables the menu when there are none.
    """
    from admin_tools.menu.models import Bookmark

    user = context['request'].user
    for bookmark in Bookmark.objects.filter(user=user):
        self.children.append(MenuItem(mark_safe(bookmark.title), bookmark.url))
    if not self.children:
        self.enabled = False
def is_dtype_equal(source, target):
    """Check if two dtypes are equal.

    Parameters
    ----------
    source : The first dtype to compare
    target : The second dtype to compare

    Returns
    -------
    boolean
        Whether or not the two dtypes are equal.

    Examples
    --------
    >>> is_dtype_equal(int, float)
    False
    >>> is_dtype_equal("int", int)
    True
    >>> is_dtype_equal(object, "category")
    False
    >>> is_dtype_equal(CategoricalDtype(), "category")
    True
    >>> is_dtype_equal(DatetimeTZDtype(), "datetime64")
    False
    """
    try:
        return _get_dtype(source) == _get_dtype(target)
    except (TypeError, AttributeError):
        # invalid comparison
        # object == category will hit this
        return False
def add_threat_list(self, threat_list):
    """Add threat list entry if it does not exist (INSERT OR IGNORE)."""
    params = [
        threat_list.threat_type,
        threat_list.platform_type,
        threat_list.threat_entry_type,
    ]
    q = '''INSERT OR IGNORE INTO threat_list
                (threat_type, platform_type, threat_entry_type, timestamp)
            VALUES
                (?, ?, ?, current_timestamp)
    '''
    with self.get_cursor() as dbc:
        dbc.execute(q, params)
def interface(cls):
    '''Marks the decorated class as an abstract interface.

    Injects following classmethods:

    .. py:method:: .all(context)

        Returns a list of instances of each component in the ``context``
        implementing this ``@interface``

        :param context: context to look in
        :type context: :class:`Context`
        :returns: list(``cls``)

    .. py:method:: .any(context)

        Returns the first suitable instance implementing this ``@interface``
        or raises :exc:`NoImplementationError` if none is available.

        :param context: context to look in
        :type context: :class:`Context`
        :returns: ``cls``

    .. py:method:: .classes()

        Returns a list of classes implementing this ``@interface``

        :returns: list(class)
    '''
    if not cls:
        return None
    # Registry of implementing classes, filled in elsewhere on registration.
    cls.implementations = []
    # Inject methods
    # Note: `_f.__get__(cls)` binds the function to the class OBJECT itself,
    # so the injected attributes behave like classmethods (cls is passed
    # automatically as the first argument).
    def _all(cls, context, ignore_exceptions=False):
        return list(context.get_components(cls, ignore_exceptions=ignore_exceptions))
    cls.all = _all.__get__(cls)
    def _any(cls, context):
        instances = cls.all(context)
        if instances:
            return instances[0]
        raise NoImplementationError(cls)
    cls.any = _any.__get__(cls)
    def _classes(cls):
        return list(cls.implementations)
    cls.classes = _classes.__get__(cls)
    log.debug('Registering [%s] (interface)', get_fqdn(cls))
    return cls
def filter_channels_by_status(
        channel_states: List[NettingChannelState],
        exclude_states: Optional[List[str]] = None,
) -> List[NettingChannelState]:
    """Filter the list of channels by excluding ones for which the state
    exists in `exclude_states`.
    """
    excluded = exclude_states if exclude_states is not None else []
    return [
        channel_state
        for channel_state in channel_states
        if channel.get_status(channel_state) not in excluded
    ]
def translate_changes(initial_change):
    """Translate rope.base.change.Change instances to dictionaries.

    See Refactor.get_changes for an explanation of the resulting dictionary.
    """
    # Breadth-first walk: ChangeSets are flattened into the agenda, every
    # leaf change is converted into exactly one result dict.
    agenda = [initial_change]
    result = []
    while agenda:
        change = agenda.pop(0)
        if isinstance(change, rope_change.ChangeSet):
            agenda.extend(change.changes)
        elif isinstance(change, rope_change.ChangeContents):
            result.append({'action': 'change',
                           'file': change.resource.real_path,
                           'contents': change.new_contents,
                           'diff': change.get_description()})
        elif isinstance(change, rope_change.CreateFile):
            result.append({'action': 'create',
                           'type': 'file',
                           'file': change.resource.real_path})
        elif isinstance(change, rope_change.CreateFolder):
            result.append({'action': 'create',
                           'type': 'directory',
                           'path': change.resource.real_path})
        elif isinstance(change, rope_change.MoveResource):
            result.append({'action': 'move',
                           'type': ('directory'
                                    if change.new_resource.is_folder()
                                    else 'file'),
                           'source': change.resource.real_path,
                           'destination': change.new_resource.real_path})
        elif isinstance(change, rope_change.RemoveResource):
            # Deletions distinguish files from directories via different keys.
            if change.resource.is_folder():
                result.append({'action': 'delete',
                               'type': 'directory',
                               'path': change.resource.real_path})
            else:
                result.append({'action': 'delete',
                               'type': 'file',
                               'file': change.resource.real_path})
    return result
def delete_prefix(self, name):  # noqa: D302
    r"""Delete hierarchy levels from all nodes in the tree.

    :param name: Prefix to delete
    :type name: :ref:`NodeName`

    :raises:
     * RuntimeError (Argument \`name\` is not a valid prefix)
     * RuntimeError (Argument \`name\` is not valid)
    """
    # _validate_node_name returns truthy when the name is malformed.
    if self._validate_node_name(name):
        raise RuntimeError("Argument `name` is not valid")
    # The prefix must be a proper (strict) prefix of the root node name.
    is_proper_prefix = self.root_name.startswith(name) and self.root_name != name
    if not is_proper_prefix:
        raise RuntimeError("Argument `name` is not a valid prefix")
    self._delete_prefix(name)
def rotate_quat(attitude, roll, pitch, yaw):
    '''Return the quaternion rotated by the given Euler angles.

    :param attitude: quaternion [w, x, y, z]
    :param roll: rotation in rad
    :param pitch: rotation in rad
    :param yaw: rotation in rad
    :returns: quaternion [w, x, y, z]
    '''
    rotation = Quaternion([roll, pitch, yaw])
    rotated = rotation * Quaternion(attitude)
    return rotated.q
def is_waiting_for_input(self):
    """Return a truthy value when this unit is blocked waiting for input.

    Waiting on a ``forking.SwitchOnValue`` or on a base type does not count
    as waiting for input.
    """
    pending = self.waiting_for
    # Short-circuits exactly like the original single-expression form, so a
    # falsy ``waiting_for`` (e.g. None) is returned unchanged.
    return (pending
            and not isinstance(pending, forking.SwitchOnValue)
            and not is_base_type(pending))
def get_correction(self, entry):
    """Compute the BandFilling correction for a defect entry.

    Reads ``eigenvalues``, ``kpoint_weights``, ``potalign``, ``vbm`` and
    ``cbm`` from ``entry.parameters``, stores a copy of this corrector's
    metadata under ``entry.parameters["bandfilling_meta"]`` and returns the
    correction keyed by ``"bandfilling"``.
    """
    params = entry.parameters
    bf_corr = self.perform_bandfill_corr(
        params["eigenvalues"],
        params["kpoint_weights"],
        params["potalign"],
        params["vbm"],
        params["cbm"],
    )
    entry.parameters["bandfilling_meta"] = dict(self.metadata)
    return {"bandfilling": bf_corr}
def transform(self, X, y=None, copy=None):
    """Perform standardization by centering and scaling using the parameters.

    :param X: Data matrix to scale.
    :type X: numpy.ndarray, shape [n_samples, n_features]
    :param y: Passthrough for scikit-learn ``Pipeline`` compatibility.
    :type y: None
    :param bool copy: Copy the X matrix.
    :return: Scaled version of the X data matrix.
    :rtype: numpy.ndarray, shape [n_samples, n_features]
    """
    # Fitting must have populated scale_ before transform may be called.
    check_is_fitted(self, 'scale_')
    # Per-call copy flag overrides the estimator-level default.
    copy = copy if copy is not None else self.copy
    X = check_array(X, accept_sparse='csr', copy=copy, warn_on_dtype=True,
                    estimator=self, dtype=FLOAT_DTYPES)
    if sparse.issparse(X):
        # Centering would densify a sparse matrix, so it is disallowed.
        if self.with_mean:
            raise ValueError(
                "Cannot center sparse matrices: pass `with_mean=False` "
                "instead. See docstring for motivation and alternatives.")
        if self.scale_ is not None:
            # Scale each column in place without densifying.
            inplace_column_scale(X, 1 / self.scale_)
    else:
        if self.with_mean:
            X -= self.mean_
        if self.with_std:
            X /= self.scale_
    return X
def add_rna_args(parser, min_mapping_quality_default=MIN_READ_MAPPING_QUALITY):
    """Extends an ArgumentParser instance with the following commandline arguments:
        --bam
        --min-mapping-quality
        --use-duplicate-reads
        --drop-secondary-alignments

    NOTE(review): the original docstring also listed ``--min-reads``, but no
    such argument is registered below -- confirm whether it is added elsewhere.

    Returns the created "RNA" argument group.
    """
    rna_group = parser.add_argument_group("RNA")
    rna_group.add_argument(
        "--bam",
        required=True,
        help="BAM file containing RNAseq reads")
    rna_group.add_argument(
        "--min-mapping-quality",
        type=int,
        default=min_mapping_quality_default,
        help="Minimum MAPQ value to allow for a read (default %(default)s)")
    # Boolean flags default to False and are switched on by their presence.
    rna_group.add_argument(
        "--use-duplicate-reads",
        default=False,
        action="store_true")
    rna_group.add_argument(
        "--drop-secondary-alignments",
        default=False,
        action="store_true")
    return rna_group
def compiled_quil(self):
    """If the Quil program associated with the Job was compiled (e.g., to
    translate it to the QPU's natural gateset) return this compiled program.

    :rtype: Optional[Program]
    """
    prog = self._raw.get("program", {}).get("compiled-quil", None)
    if prog is not None:
        return parse_program(prog)
    else:
        # if we failed too early to even get a "compiled-quil" field,
        # then alert the user to that problem instead
        if self._raw['status'] == 'ERROR':
            # NOTE(review): presumably ``result()`` raises for an errored job,
            # surfacing the failure to the caller -- confirm its contract.
            return self.result()
def update_room(self, stream_id, room_definition):
    '''Update a room definition.

    :param stream_id: identifier of the room's stream
    :param room_definition: dict describing the room; sent as the JSON body
    :return: tuple of (status_code, response) from the REST layer
    '''
    endpoint = 'pod/v2/room/%s/update' % stream_id
    payload = json.dumps(room_definition)
    status_code, response = self.__rest__.POST_query(endpoint, payload)
    self.logger.debug('%s: %s' % (status_code, response))
    return status_code, response
def do_signal(self, signame: str) -> None:
    """Send a Unix signal to the current process.

    :param signame: name of a constant in the ``signal`` module (e.g.
        ``"SIGHUP"``); unknown names are reported on ``self._sout``.
    """
    signum = getattr(signal, signame, None)
    if signum is None:
        self._sout.write('Unknown signal %s\n' % signame)
    else:
        os.kill(os.getpid(), signum)
def dump_registers_peek(registers, data, separator=' ', width=16):
    """Dump data pointed to by the given registers, if any.

    @type  registers: dict( str S{->} int )
    @param registers: Dictionary mapping register names to their values
        (as returned by L{Thread.get_context}).

    @type  data: dict( str S{->} str )
    @param data: Dictionary mapping register names to the data they point to
        (as returned by L{Thread.peek_pointers_in_registers}).

    @rtype: str
    @return: Text suitable for logging; empty string if either input is None.
    """
    if registers is None or data is None:
        return ''
    lines = []
    # Sort for a stable, readable ordering of register names.
    for reg_name in sorted(compat.keys(data)):
        dumped = HexDump.hexline(data[reg_name], separator, width)
        lines.append('%s -> %s\n' % (reg_name.lower(), dumped))
    return ''.join(lines)
def sendSomeData ( self , howMany ) : """Send some DATA commands to my peer ( s ) to relay some data . @ param howMany : an int , the number of chunks to send out ."""
# print ' sending some data ' , howMany if self . transport is None : return peer = self . transport . getQ2QPeer ( ) while howMany > 0 : # sort transloads so that the least - frequently - serviced ones will # come first tloads = [ ( findin ( tl . name , self . sentTransloads ) , tl ) for tl in self . nexus . transloadsForPeer ( peer ) ] tloads . sort ( ) tloads = [ tl for ( idx , tl ) in tloads if tl . peerNeedsData ( peer ) ] if not tloads : break wasHowMany = howMany for myTransload in tloads : # move this transload to the end so it will be sorted last next # time . name = myTransload . name if name in self . sentTransloads : self . sentTransloads . remove ( name ) self . sentTransloads . append ( name ) knowledge = myTransload . peers [ peer ] chunkNumber , chunkData = myTransload . selectOptimalChunk ( peer ) if chunkNumber is None : continue peerToIntroduce = knowledge . selectPeerToIntroduce ( myTransload . peers . keys ( ) ) if peerToIntroduce is not None : self . introduce ( myTransload . name , peerToIntroduce ) self . data ( name , chunkNumber , chunkData ) # Don ' t re - send that chunk again unless they explicitly tell us # they need it for some reason knowledge . mask [ chunkNumber ] = 1 howMany -= 1 if howMany <= 0 : break if wasHowMany == howMany : # couldn ' t find anything to send . break
def tuple_to_schema(tuple_):
    """Convert a tuple representing an XML data structure into a schema tuple.

    The result can be used in the ``.schema`` property of a sub-class of
    PREMISElement.
    """
    converted = []
    for element in tuple_:
        if isinstance(element, (tuple, list)):
            try:
                # A string in the second slot marks a leaf: keep only the tag.
                # Otherwise recurse into the nested structure.
                nested = ((element[0],)
                          if isinstance(element[1], six.string_types)
                          else tuple_to_schema(element))
            except IndexError:
                # One-element tuples are leaves too.
                nested = (element[0],)
            converted.append(nested)
        else:
            converted.append(element)
    return tuple(converted)
def calc_qa_v1(self):
    """Calculate outflow.

    The working equation is the analytical solution of the linear storage
    equation under the assumption of constant change in inflow during
    the simulation time step.

    Required flux sequence:
      |RK|

    Required state sequence:
      |QZ|

    Updated state sequence:
      |QA|

    Basic equation:
      :math:`QA_{neu} = QA_{alt} +
      (QZ_{alt}-QA_{alt}) \\cdot (1-exp(-RK^{-1})) +
      (QZ_{neu}-QZ_{alt}) \\cdot (1-RK\\cdot(1-exp(-RK^{-1})))`

    Examples:

        A normal test case:

        >>> from hydpy.models.lstream import *
        >>> parameterstep()
        >>> fluxes.rk(0.1)
        >>> states.qz.old = 2.0
        >>> states.qz.new = 4.0
        >>> states.qa.old = 3.0
        >>> model.calc_qa_v1()
        >>> states.qa
        qa(3.800054)

        First extreme test case (zero division is circumvented):

        >>> fluxes.rk(0.0)
        >>> model.calc_qa_v1()
        >>> states.qa
        qa(4.0)

        Second extreme test case (numerical overflow is circumvented):

        >>> fluxes.rk(1e201)
        >>> model.calc_qa_v1()
        >>> states.qa
        qa(5.0)
    """
    flu = self.sequences.fluxes.fastaccess
    old = self.sequences.states.fastaccess_old
    new = self.sequences.states.fastaccess_new
    aid = self.sequences.aides.fastaccess
    if flu.rk <= 0.:
        # No retention: outflow equals inflow directly.
        new.qa = new.qz
    elif flu.rk > 1e200:
        # Huge retention constant: avoid overflow in exp(); pass the inflow
        # increment straight through.
        new.qa = old.qa + new.qz - old.qz
    else:
        aid.temp = (1.-modelutils.exp(-1./flu.rk))
        new.qa = (old.qa +
                  (old.qz-old.qa)*aid.temp +
                  (new.qz-old.qz)*(1.-flu.rk*aid.temp))
def add_path(self, path):
    # type: (_BaseSourcePaths, str) -> None
    """Add a local path, normalized to a :class:`pathlib.Path`.

    :param _BaseSourcePaths self: this
    :param str path: path to add; existing Path objects are stored as-is
    """
    normalized = path if isinstance(path, pathlib.Path) else pathlib.Path(path)
    self._paths.append(normalized)
def resolve_alias(self, name):
    """Resolve a calendar alias for retrieval.

    Parameters
    ----------
    name : str
        The name of the requested calendar.

    Returns
    -------
    canonical_name : str
        The real name of the calendar to create/return.
    """
    chain = []
    while name in self._aliases:
        chain.append(name)
        name = self._aliases[name]
        # O(N**2) scan, but alias chains longer than 2 are already unusual;
        # this only guards against a cycle in the alias mapping.
        if name in chain:
            chain.append(name)
            raise CyclicCalendarAlias(
                cycle=" -> ".join(repr(k) for k in chain))
    return name
def append_styles(self, tag, attrs):
    """Record the class and id selectors found on an HTML element.

    Because we haven't built the tree, we aren't using the `tag` parameter
    for now.

    @param <string> tag
        The HTML tag we're parsing
    @param <tuple> attrs
        A tuple of HTML element attributes such as 'class', 'id', 'style',
        etc., of the form ('html_attribute', 'attr1', 'attr2', ... 'attrN')
    """
    dattrs = dict(attrs)
    if 'class' in dattrs:
        class_names = dattrs['class'].split()
        # ``map`` returns an iterator on Python 3 and has no ``.sort()``;
        # build the sorted list explicitly.
        dotted_names = sorted(prepend_dot(name) for name in class_names)
        # ``append`` (not ``extend``): extending a list with a string would
        # add it one character at a time.
        self.used_classes.append(' '.join(dotted_names))
        self.unchained_classes.extend(dotted_names)
    if 'id' in dattrs:
        # Same append-vs-extend fix for the single id selector string.
        self.used_ids.append(prepend_hash(dattrs['id'].strip()))
def resolve_variables ( self , provided_variables ) : """Resolve the values of the blueprint variables . This will resolve the values of the template parameters with values from the env file , the config , and any lookups resolved . The resolution is run twice , in case the blueprint is jinja2 templated and requires provided variables to render . Args : provided _ variables ( list of : class : ` stacker . variables . Variable ` ) : list of provided variables"""
# Pass 1 to set resolved _ variables to provided variables self . resolved_variables = { } variable_dict = dict ( ( var . name , var ) for var in provided_variables ) for var_name , _var_def in variable_dict . items ( ) : value = resolve_variable ( variable_dict . get ( var_name ) , self . name ) if value is not None : self . resolved_variables [ var_name ] = value # Pass 2 to render the blueprint and set resolved _ variables according # to defined variables defined_variables = self . get_parameter_definitions ( ) self . resolved_variables = { } variable_dict = dict ( ( var . name , var ) for var in provided_variables ) for var_name , _var_def in defined_variables . items ( ) : value = resolve_variable ( variable_dict . get ( var_name ) , self . name ) if value is not None : self . resolved_variables [ var_name ] = value
def sum_layout_dimensions(dimensions):
    """Sum a list of :class:`.LayoutDimension` instances.

    ``None`` minimums/maximums are skipped; ``preferred`` is always summed.
    """
    total_min = sum(d.min for d in dimensions if d.min is not None)
    total_max = sum(d.max for d in dimensions if d.max is not None)
    total_preferred = sum(d.preferred for d in dimensions)
    return LayoutDimension(
        min=total_min, max=total_max, preferred=total_preferred)
def check(self, results_id):
    """Check for results of a membership request.

    :param str results_id: the ID of a membership request
    :return: successfully created memberships
    :rtype: :class:`list`
    :raises groupy.exceptions.ResultsNotReady: if the results are not ready
    :raises groupy.exceptions.ResultsExpired: if the results have expired
    """
    url = utils.urljoin(self.url, 'results/{}'.format(results_id))
    response = self.session.get(url)
    # The API signals state via status codes rather than the payload.
    if response.status_code == 503:
        raise exceptions.ResultsNotReady(response)
    if response.status_code == 404:
        raise exceptions.ResultsExpired(response)
    return response.data['members']
def get_vnetwork_vms_input_last_rcvd_instance(self, **kwargs):
    """Auto Generated Code"""
    # NOTE: mirrors the generated original -- the initial <config> element is
    # discarded and <get_vnetwork_vms> is what gets sent to the callback.
    get_vnetwork_vms = ET.Element("get_vnetwork_vms")
    config = get_vnetwork_vms
    input_elem = ET.SubElement(get_vnetwork_vms, "input")
    last_rcvd = ET.SubElement(input_elem, "last-rcvd-instance")
    last_rcvd.text = kwargs.pop('last_rcvd_instance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def full_route(self):
    '''The full :attr:`route` for this :class:`.Router`.

    Includes the :attr:`parent` portion of the route when a parent router
    is available.
    '''
    if not self._parent:
        return self._route
    return self._parent.full_route + self._route
def on_connect(client):
    """Default on-connect actions: register nick and user info with the server."""
    user = client.user
    client.nick(user.nick)
    client.userinfo(user.username, user.realname)
def atmos(ctx, atmo, contrast, bias, jobs, out_dtype,
          src_path, dst_path, creation_options, as_color):
    """Atmospheric correction"""
    if as_color:
        # Just print the equivalent `rio color` command line and exit.
        click.echo("rio color {} {} {}".format(
            src_path, dst_path, simple_atmo_opstring(atmo, contrast, bias)))
        exit(0)
    # Read the profile and block windows up front, then close the source.
    with rasterio.open(src_path) as src:
        opts = src.profile.copy()
        windows = [(window, ij) for ij, window in src.block_windows()]
    opts.update(**creation_options)
    opts["transform"] = guard_transform(opts["transform"])
    # Fall back to the source dtype when none was requested.
    out_dtype = out_dtype if out_dtype else opts["dtype"]
    opts["dtype"] = out_dtype
    args = {"atmo": atmo, "contrast": contrast,
            "bias": bias, "out_dtype": out_dtype}
    jobs = check_jobs(jobs)
    if jobs > 1:
        # Parallel path: riomucho fans the block windows out over workers.
        with riomucho.RioMucho(
                [src_path], dst_path, atmos_worker,
                windows=windows, options=opts, global_args=args,
                mode="manual_read") as mucho:
            mucho.run(jobs)
    else:
        # Single-process path: run the same worker over each window.
        with rasterio.open(dst_path, "w", **opts) as dest:
            with rasterio.open(src_path) as src:
                rasters = [src]
                for window, ij in windows:
                    arr = atmos_worker(rasters, window, ij, args)
                    dest.write(arr, window=window)
def createContactItem(self, person, label, number):
    """Create a L{PhoneNumber} item for C{number}, associated with C{person}.

    @type person: L{Person}

    @param label: The value to use for the I{label} attribute of the new
        L{PhoneNumber} item.
    @type label: C{unicode}

    @param number: The value to use for the I{number} attribute of the new
        L{PhoneNumber} item. If C{''}, no item will be created.
    @type number: C{unicode}

    @rtype: L{PhoneNumber} or C{NoneType}
    """
    if not number:
        # An empty number means "do not create anything".
        return None
    return PhoneNumber(
        store=person.store, person=person, label=label, number=number)
def list_contrib ( name = None , ret = False , _debug = False ) : """Show the list of all existing contribs . Params : - name : filter to search the contribs - ret : whether the function should return a dict instead of printing it"""
# _ debug : checks that all contrib modules have correctly defined : # # scapy . contrib . description = [ . . . ] # # scapy . contrib . status = [ . . . ] # # scapy . contrib . name = [ . . . ] ( optional ) # or set the flag : # # scapy . contrib . description = skip # to skip the file if name is None : name = "*.py" elif "*" not in name and "?" not in name and not name . endswith ( ".py" ) : name += ".py" results = [ ] dir_path = os . path . join ( os . path . dirname ( __file__ ) , "contrib" ) if sys . version_info >= ( 3 , 5 ) : name = os . path . join ( dir_path , "**" , name ) iterator = glob . iglob ( name , recursive = True ) else : name = os . path . join ( dir_path , name ) iterator = glob . iglob ( name ) for f in iterator : mod = f . replace ( os . path . sep , "." ) . partition ( "contrib." ) [ 2 ] if mod . startswith ( "__" ) : continue if mod . endswith ( ".py" ) : mod = mod [ : - 3 ] desc = { "description" : None , "status" : None , "name" : mod } for l in io . open ( f , errors = "replace" ) : if l [ 0 ] != "#" : continue p = l . find ( "scapy.contrib." ) if p >= 0 : p += 14 q = l . find ( "=" , p ) key = l [ p : q ] . strip ( ) value = l [ q + 1 : ] . strip ( ) desc [ key ] = value if desc [ "status" ] == "skip" : break if desc [ "description" ] and desc [ "status" ] : results . append ( desc ) break if _debug : if desc [ "status" ] == "skip" : pass elif not desc [ "description" ] or not desc [ "status" ] : raise Scapy_Exception ( "Module %s is missing its " "contrib infos !" % mod ) results . sort ( key = lambda x : x [ "name" ] ) if ret : return results else : for desc in results : print ( "%(name)-20s: %(description)-40s status=%(status)s" % desc )
def safeRef(target, onDelete=None):
    """Return a *safe* weak reference to a callable target.

    target -- the object to be weakly referenced, if it's a bound method
        reference, will create a BoundMethodWeakref, otherwise creates a
        simple weakref.
    onDelete -- if provided, will have a hard reference stored to the
        callable to be called after the safe reference goes out of scope
        with the reference object, (either a weakref or a
        BoundMethodWeakref) as argument.
    """
    # NOTE(review): ``im_self`` / ``im_func`` are presumably module-level
    # constants holding the attribute names ('im_self'/'im_func' on Python 2,
    # '__self__'/'__func__' on Python 3) -- confirm they are defined at the
    # top of this module.
    if hasattr(target, im_self):
        if getattr(target, im_self) is not None:
            # Turn a bound method into a BoundMethodWeakref instance.
            # Keep track of these instances for lookup by disconnect().
            assert hasattr(target, im_func), \
                """safeRef target %r has %s, but no %s, don't know how to create reference""" % (
                    target, im_self, im_func)
            reference = BoundMethodWeakref(target=target, onDelete=onDelete)
            return reference
    if onDelete is not None:
        return weakref.ref(target, onDelete)
    else:
        return weakref.ref(target)
def or_list(items: Sequence[str]) -> Optional[str]:
    """Given [A, B, C] return 'A, B, or C'.

    Raises ValueError for an empty sequence. At most ``MAX_LENGTH`` items
    are included in the resulting phrase.
    """
    if not items:
        raise ValueError
    if len(items) == 1:
        return items[0]
    if len(items) == 2:
        return items[0] + " or " + items[1]
    *selected, last_item = items[:MAX_LENGTH]
    # Oxford comma: the docstring promises 'A, B, or C', so keep the comma
    # before the final 'or'.
    return ", ".join(selected) + ", or " + last_item
def extract_helices_dssp(in_pdb):
    """Uses DSSP to find alpha-helices and extracts helices from a pdb file.

    Returns a length 3 list with a helix id, the chain id and a dict
    containing the coordinates of each residues CA.

    Parameters
    ----------
    in_pdb : string
        Path to a PDB file.
    """
    from ampal.pdb_parser import split_pdb_lines
    dssp_out = subprocess.check_output(
        [global_settings['dssp']['path'], in_pdb])
    helix = 0
    helices = []
    h_on = False
    for line in dssp_out.splitlines():
        dssp_line = line.split()
        try:
            # Column 4 is the DSSP secondary-structure code; 'H' = alpha helix.
            if dssp_line[4] == 'H':
                if helix not in [x[0] for x in helices]:
                    # Start a new helix record: [id, chain, {res_num: CA}].
                    helices.append(
                        [helix, dssp_line[2], {int(dssp_line[1]): None}])
                else:
                    helices[helix][2][int(dssp_line[1])] = None
                h_on = True
            else:
                # Leaving a helical stretch: bump the helix id.
                if h_on:
                    helix += 1
                    h_on = False
        except IndexError:
            # Header/short lines don't have enough columns; skip them.
            pass
    # Fill in the CA coordinates for every helical residue from the PDB.
    with open(in_pdb, 'r') as pdb:
        pdb_atoms = split_pdb_lines(pdb.read())
    for atom in pdb_atoms:
        for helix in helices:
            if (atom[2] == "CA") and (
                    atom[5] == helix[1]) and (
                    atom[6] in helix[2].keys()):
                helix[2][atom[6]] = tuple(atom[8:11])
    return helices
def _is_exception_rule ( self , element ) : """Check for " exception rule " . Address elements will be appended onto a new line on the lable except for when the penultimate lable line fulfils certain criteria , in which case the element will be concatenated onto the penultimate line . This method checks for those criteria . i ) First and last characters of the Building Name are numeric ( eg ' 1to1 ' or ' 100:1 ' ) ii ) First and penultimate characters are numeric , last character is alphabetic ( eg ' 12A ' ) iii ) Building Name has only one character ( eg ' A ' )"""
if element [ 0 ] . isdigit ( ) and element [ - 1 ] . isdigit ( ) : return True if len ( element ) > 1 and element [ 0 ] . isdigit ( ) and element [ - 2 ] . isdigit ( ) and element [ - 1 ] . isalpha ( ) : return True if len ( element ) == 1 and element . isalpha ( ) : return True return False
def delete(self, key):
    '''Removes the object named by `key`.

    The deletion is applied to both ``cache_datastore`` and
    ``child_datastore``.
    '''
    for store in (self.cache_datastore, self.child_datastore):
        store.delete(key)
def _do_analysis_cross_validation(self):
    """Find the best model (fit) based on cross-valiation (leave one out)"""
    # Leave-one-out refits the model once per datapoint per candidate term,
    # so the sample size is capped to keep the run time bounded.
    assert len(self.df) < 15, "Cross-validation is not implemented if your sample contains more than 15 datapoints"
    # initialization: first model is the mean, but compute cv correctly.
    errors = []
    response_term = [Term([LookupFactor(self.y)])]
    model_terms = [Term([])]  # empty term is the intercept
    model_desc = ModelDesc(response_term, model_terms)
    for i in self.df.index:
        # make new_fit, compute cross-validation and store error
        df_ = self.df.drop(i, axis=0)
        fit = fm.ols(model_desc, data=df_).fit()
        cross_prediction = self._predict(fit=fit, df=self.df.loc[[i], :])
        errors.append(cross_prediction['predicted'] - cross_prediction[self.y])
    self._list_of_fits = [fm.ols(model_desc, data=self.df).fit()]
    self.list_of_cverrors = [np.mean(np.abs(np.array(errors)))]
    # try to improve the model until no improvements can be found
    all_model_terms_dict = {x: Term([LookupFactor(x)])
                            for x in self.list_of_x}
    while all_model_terms_dict:
        # try each x in all_exog and overwrite if we find a better one
        # at the end of iteration (and not earlier), save the best of the
        # iteration
        better_model_found = False
        best = dict(fit=self._list_of_fits[-1],
                    cverror=self.list_of_cverrors[-1])
        for x, term in all_model_terms_dict.items():
            # Candidate model: current best formula plus this one extra term.
            model_desc = ModelDesc(
                response_term,
                self._list_of_fits[-1].model.formula.rhs_termlist + [term])
            # cross_validation, currently only implemented for monthly data
            # compute the mean error for a given formula based on
            # leave-one-out.
            errors = []
            for i in self.df.index:
                # make new_fit, compute cross-validation and store error
                df_ = self.df.drop(i, axis=0)
                fit = fm.ols(model_desc, data=df_).fit()
                cross_prediction = self._predict(
                    fit=fit, df=self.df.loc[[i], :])
                errors.append(
                    cross_prediction['predicted'] - cross_prediction[self.y])
            cverror = np.mean(np.abs(np.array(errors)))
            # compare the model with the current fit
            if cverror < best['cverror']:
                # better model, keep it
                # first, reidentify using all the datapoints
                best['fit'] = fm.ols(model_desc, data=self.df).fit()
                best['cverror'] = cverror
                better_model_found = True
                best_x = x
        if better_model_found:
            self._list_of_fits.append(best['fit'])
            self.list_of_cverrors.append(best['cverror'])
        else:
            # if we did not find a better model, exit
            break
        # next iteration with the found exog removed
        all_model_terms_dict.pop(best_x)
    self._fit = self._list_of_fits[-1]
def require_scalar(self, *args: Type) -> None:
    """Require the node to be a scalar.

    If additional arguments are passed, these are taken as a list of valid
    types; if the node matches one of these, then it is accepted.

    Example:
        # Match either an int or a string node.
        require_scalar(int, str)

    Arguments:
        args: One or more types to match one of.
    """
    node = Node(self.yaml_node)
    if not args:
        # No types given: any scalar is acceptable.
        if not node.is_scalar():
            raise RecognitionError(('{}{}A scalar is required').format(
                self.yaml_node.start_mark, os.linesep))
        return
    for typ in args:
        if node.is_scalar(typ):
            return
    raise RecognitionError(
        ('{}{}A scalar of type {} is required').format(
            self.yaml_node.start_mark, os.linesep, args))
def load_unicode(self, resource_path):
    """Return the content of a packaged resource decoded as UTF-8 text."""
    raw = pkg_resources.resource_string(self.module_name, resource_path)
    return raw.decode('utf-8')
def matches(self, properties):
    """Tests if the given criterion matches this LDAP criterion.

    :param properties: A dictionary of properties
    :return: True if the properties matches this criterion, else False
    """
    try:
        candidate = properties[self.name]
    except KeyError:
        # Criterion key is not in the properties
        return False
    # Delegate the actual comparison to the configured comparator.
    return self.comparator(self.value, candidate)
def done(self):
    """Returns True if the call was successfully cancelled or finished
    running, False otherwise.

    This function updates the executionQueue so it receives all the
    awaiting messages.
    """
    try:
        # Flush this future out of the local buffer (potential deadlock
        # otherwise) and push it to the broker.
        scoop._control.execQueue.remove(self)
        scoop._control.execQueue.socket.sendFuture(self)
    except ValueError:
        # Future was not in the local queue; nothing to flush.
        pass
    # Process any buffered messages before reporting the state.
    scoop._control.execQueue.updateQueue()
    return self._ended()
def _add_two_way_unqualified_edge(self, u: BaseEntity, v: BaseEntity, relation: str) -> str:
    """Add an unqualified edge in both directions.

    The reverse (v -> u) edge is added first; the key of the forward
    (u -> v) edge is returned.
    """
    for source, target in ((v, u), (u, v)):
        key = self.add_unqualified_edge(source, target, relation)
    return key
def print_errors(function):
    """Decorator that prints the exceptions raised by *function* without
    interfering (the exception is always re-raised). For debugging purposes.

    :param function: the callable to wrap
    :return: a wrapped callable with the same behavior and metadata
    """
    import functools

    # functools.wraps preserves __name__/__doc__ etc. so the wrapped
    # function still introspects correctly.
    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        try:
            return function(*args, **kwargs)
        except BaseException as e:
            # Report and re-raise: this decorator must not change behavior.
            print("Exception raise calling %s: %s"
                  % (reflect.canonical_name(function),
                     get_exception_message(e)))
            raise
    return wrapper
def getInfoMutator(self):
    """Returns a info mutator.

    The mutator is built lazily from the font.info of every source (layer
    sources are skipped) and cached on the instance.
    """
    if self._infoMutator:
        # Already built; reuse the cached mutator.
        return self._infoMutator
    infoItems = []
    for sourceDescriptor in self.sources:
        if sourceDescriptor.layerName is not None:
            # Layer sources do not contribute font info.
            continue
        loc = Location(sourceDescriptor.location)
        sourceFont = self.fonts[sourceDescriptor.name]
        if sourceFont is None:
            continue
        # Prefer the font's own math-info conversion when available.
        if hasattr(sourceFont.info, "toMathInfo"):
            infoItems.append((loc, sourceFont.info.toMathInfo()))
        else:
            infoItems.append((loc, self.mathInfoClass(sourceFont.info)))
    bias, self._infoMutator = self.getVariationModel(
        infoItems, axes=self.serializedAxes, bias=self.newDefaultLocation())
    return self._infoMutator
def _delete(self, obj, **kwargs):
    """Delete the object directly.

    .. code-block:: python

        DBSession.sacrud(Users)._delete(UserObj)

    If you no needed commit session

    .. code-block:: python

        DBSession.sacrud(Users, commit=False)._delete(UserObj)
    """
    if isinstance(obj, sqlalchemy.orm.query.Query):
        # A Query was passed instead of an instance; resolve it to one row.
        obj = obj.one()
    # NOTE(review): ``preprocessing(...).delete()`` is expected to return the
    # (possibly transformed) object to remove -- confirm against the
    # preprocessing hook's contract.
    obj = self.preprocessing(obj=obj).delete()
    self.session.delete(obj)
    if kwargs.get('commit', self.commit) is True:
        try:
            self.session.commit()
        except AssertionError:
            # Session is managed externally (e.g. zope.transaction);
            # commit through the transaction manager instead.
            transaction.commit()
    return True
def setup_logfile_raw(self, logfile, mode='w'):
    '''Start logging raw bytes to the given logfile, without timestamps.

    :param logfile: path of the file to open for raw logging
    :param mode: file open mode (default ``'w'``)
    '''
    self.logfile_raw = open(logfile, mode=mode)
def state_view_for_block(block_wrapper, state_view_factory):
    """Returns the state view for an arbitrary block.

    Args:
        block_wrapper (BlockWrapper): The block for which a state
            view is to be returned; may be None.
        state_view_factory (StateViewFactory): The state view factory
            used to create the StateView object

    Returns:
        StateView object associated with the block
    """
    if block_wrapper is None:
        root_hash = None
    else:
        root_hash = block_wrapper.state_root_hash
    return state_view_factory.create_view(root_hash)
def create_host_only_network_interface(self):
    """Creates a new adapter for Host Only Networking.

    out host_interface of type :class:`IHostNetworkInterface`
        Created host interface object.

    return progress of type :class:`IProgress`
        Progress object to track the operation completion.

    raises :class:`OleErrorInvalidarg`
        Host network interface @a name already exists.
    """
    raw_progress, raw_interface = self._call(
        "createHostOnlyNetworkInterface")
    return (IProgress(raw_progress), IHostNetworkInterface(raw_interface))
def _paste_using_mouse_button_2(self):
    """Paste via the X11 middle mouse button.

    Synthesizes a Button2 press followed by a release on the window that
    currently holds the input focus.
    """
    focus = self.localDisplay.get_input_focus().focus
    for event_type in (X.ButtonPress, X.ButtonRelease):
        xtest.fake_input(focus, event_type, X.Button2)
    logger.debug("Mouse Button2 event sent.")
def colorize_errors(event, colored):
    """Highlights some commonly known Lambda error cases in red:

    - Nodejs process crashes
    - Lambda function timeouts
    """
    known_errors = (
        "Process exited before completing request",
        "Task timed out",
    )
    if any(marker in event.message for marker in known_errors):
        event.message = colored.red(event.message)
    return event
def is_socket_closed(sock):  # pragma nocover
    """Check if socket ``sock`` is closed.

    Any error while probing (including poll/select being unavailable in an
    unexpected way) is treated as "closed".
    """
    if not sock:
        return True
    try:
        if not poll:  # pragma nocover
            if not select:
                # Neither poll nor select is available; assume still open.
                return False
            try:
                # Readability on a zero-timeout select means either buffered
                # data or a dropped connection.
                return bool(select([sock], [], [], 0.0)[0])
            except socket.error:
                return True
        # This version is better on platforms that support it.
        p = poll()
        p.register(sock, POLLIN)
        for (fno, ev) in p.poll(0.0):
            if fno == sock.fileno():
                # Either data is buffered (bad), or the connection is dropped.
                return True
    except Exception:
        return True
def recursive_file_count(files, item=None, checksum=False):
    """Given a filepath or list of filepaths, return the total number of files.

    Args:
        files: a single filepath, a list/set of filepaths, a dict (or list of
            ``(name, path)`` tuples) mapping remote names to local paths, or
            file-like objects.
        item: object whose ``files`` attribute lists existing files with their
            ``md5``; required when ``checksum`` is True.
        checksum (bool): when True, files whose md5 already appears in
            ``item.files`` are skipped (not counted).

    Returns:
        int: the total number of (new) files.
    """
    # md5s of files already present; used to skip duplicates when checksum=True.
    if checksum is True:
        md5s = [f.get('md5') for f in item.files]
    else:
        md5s = list()

    # Normalize the many accepted input shapes down to an iterable of paths.
    # Bug fix: the dict check must happen before wrapping in a list (the
    # original wrapped first, making its dict branch unreachable), and sets
    # must be converted to lists because they do not support indexing.
    if isinstance(files, dict):
        # make sure to use local filenames.
        _files = files.values()
    else:
        if isinstance(files, set):
            files = list(files)
        elif not isinstance(files, list):
            files = [files]
        if files and isinstance(files[0], tuple):
            _files = dict(files).values()
        else:
            _files = files

    total_files = 0
    for f in _files:
        try:
            is_dir = os.path.isdir(f)
        except TypeError:
            # ``f`` may be a (name, path) tuple; retry with its first element.
            try:
                f = f[0]
                is_dir = os.path.isdir(f)
            except (AttributeError, TypeError):
                is_dir = False
        if is_dir:
            for x, _ in iter_directory(f):
                # Bug fix: close the handle instead of leaking it (the
                # original passed an anonymous open() that was never closed).
                with open(x, 'rb') as fh:
                    lmd5 = get_md5(fh)
                if lmd5 not in md5s:
                    total_files += 1
        else:
            try:
                with open(f, 'rb') as fh:
                    lmd5 = get_md5(fh)
            except TypeError:
                # Support file-like objects.
                lmd5 = get_md5(f)
            if lmd5 not in md5s:
                total_files += 1
    return total_files
def parse_all_arguments(func):
    """determine all positional and named arguments as a dict"""
    defaults = dict()
    if sys.version_info < (3, 0):
        # Legacy Python 2 path: getargspec pairs the trailing args with the
        # defaults tuple.
        spec = inspect.getargspec(func)
        if spec.defaults is not None:
            count = len(spec.defaults)
            defaults = dict(zip(spec.args[-count:], spec.defaults))
    else:
        # Python 3 path: skip the first parameter, collect every parameter
        # that declares a default value.
        signature = inspect.signature(func)
        for name in list(signature.parameters)[1:]:
            parameter = signature.parameters[name]
            if parameter.default is not parameter.empty:
                defaults[parameter.name] = parameter.default
    return defaults
def to_dict(self, val=UNSET):
    """Creates dict object from dict2 object

    Args:
        val (:obj:`dict2`): Value to create from

    Returns:
        Equivalent dict object.
    """
    if val is UNSET:
        val = self
    # Recursively convert mappings and lists; leave scalars untouched.
    if isinstance(val, (dict2, dict)):
        return {key: self.to_dict(value) for key, value in val.items()}
    if isinstance(val, list):
        return [self.to_dict(entry) for entry in val]
    return val
def _maybe_log_technical_terms(global_options, tool_options):
    """Log technical terms as appropriate if the user requested it.

    As a side effect, if --log-technical-terms-to is passed to the linter
    then open up the file specified (or create it) and then merge the set of
    technical words that we have now with the technical words already in it.

    :param global_options: mapping that may contain ``log_technical_terms_to``
        (a filesystem path).
    :param tool_options: mapping that may contain ``log_technical_terms_to``
        (a queue of term sets to drain).
    """
    log_technical_terms_to_path = global_options.get("log_technical_terms_to",
                                                     None)
    log_technical_terms_to_queue = tool_options.get("log_technical_terms_to",
                                                    None)
    if log_technical_terms_to_path:
        # If a path was requested, a queue must have been supplied alongside.
        assert log_technical_terms_to_queue is not None
        # Ensure the parent directory exists; tolerate it already existing.
        try:
            os.makedirs(os.path.dirname(log_technical_terms_to_path))
        except OSError as error:
            if error.errno != errno.EEXIST:
                raise error

        if not log_technical_terms_to_queue.empty():
            # Open read-write, creating the file if needed, so existing terms
            # can be merged with the newly collected ones.
            with closing(os.fdopen(os.open(log_technical_terms_to_path,
                                           os.O_RDWR | os.O_CREAT),
                                   "r+")) as terms_file:
                # pychecker can't see through the handle returned by closing
                # so we need to suppress these warnings.
                terms = set(terms_file.read().splitlines())
                # Union all term sets drained from the queue.
                new_terms = set(freduce(lambda x, y: x | y,
                                        _drain(log_technical_terms_to_queue)))
                # Only rewrite the file when there is something new to add.
                if not terms.issuperset(new_terms):
                    terms_file.seek(0)
                    terms_file.truncate(0)
                    terms_file.write("\n".join(list(terms | set(new_terms))))
def _folder_item_method ( self , analysis_brain , item ) : """Fills the analysis ' method to the item passed in . : param analysis _ brain : Brain that represents an analysis : param item : analysis ' dictionary counterpart that represents a row"""
is_editable = self . is_analysis_edition_allowed ( analysis_brain ) method_title = analysis_brain . getMethodTitle item [ 'Method' ] = method_title or '' if is_editable : method_vocabulary = self . get_methods_vocabulary ( analysis_brain ) if method_vocabulary : item [ 'Method' ] = analysis_brain . getMethodUID item [ 'choices' ] [ 'Method' ] = method_vocabulary item [ 'allow_edit' ] . append ( 'Method' ) self . show_methodinstr_columns = True elif method_title : item [ 'replace' ] [ 'Method' ] = get_link ( analysis_brain . getMethodURL , method_title ) self . show_methodinstr_columns = True
def upload_numpy_to_s3_shards(num_shards, s3, bucket, key_prefix, array,
                              labels=None):
    """Upload the training ``array`` and ``labels`` arrays to ``num_shards``
    s3 objects, stored in "s3://``bucket``/``key_prefix``/".

    :param num_shards: number of objects to split the data into.
    :param s3: S3 resource used to create the objects (boto3-style API —
        ``s3.Object(bucket, key).put(...)`` — TODO confirm against caller).
    :param bucket: destination bucket name.
    :param key_prefix: key prefix the shard objects are stored under.
    :param array: training data, written as dense tensor records.
    :param labels: optional labels, sharded in lockstep with ``array``.
    :return: the S3 URI of the generated ``.amazon.manifest`` object.
    """
    shards = _build_shards(num_shards, array)
    if labels is not None:
        label_shards = _build_shards(num_shards, labels)
    uploaded_files = []
    # Normalize the prefix so key concatenation below yields valid keys.
    if key_prefix[-1] != '/':
        key_prefix = key_prefix + '/'
    try:
        for shard_index, shard in enumerate(shards):
            # Serialize each shard to a temp file, then stream it to S3.
            with tempfile.TemporaryFile() as file:
                if labels is not None:
                    write_numpy_to_dense_tensor(file, shard,
                                                label_shards[shard_index])
                else:
                    write_numpy_to_dense_tensor(file, shard)
                file.seek(0)
                # Zero-pad the index so shard keys sort lexicographically.
                shard_index_string = str(shard_index).zfill(len(str(len(shards))))
                file_name = "matrix_{}.pbr".format(shard_index_string)
                key = key_prefix + file_name
                logger.debug("Creating object {} in bucket {}".format(key, bucket))
                s3.Object(bucket, key).put(Body=file)
                uploaded_files.append(file_name)
        # Write a manifest listing the prefix plus every uploaded shard name.
        manifest_key = key_prefix + ".amazon.manifest"
        manifest_str = json.dumps(
            [{'prefix': 's3://{}/{}'.format(bucket, key_prefix)}] + uploaded_files)
        s3.Object(bucket, manifest_key).put(Body=manifest_str.encode('utf-8'))
        return "s3://{}/{}".format(bucket, manifest_key)
    except Exception as ex:  # pylint: disable=broad-except
        # Best-effort cleanup of partially uploaded shards before re-raising.
        try:
            for file in uploaded_files:
                s3.Object(bucket, key_prefix + file).delete()
        finally:
            raise ex
def Initialize(config=None,
               external_hostname=None,
               admin_password=None,
               redownload_templates=False,
               repack_templates=True,
               token=None):
    """Initialize or update a GRR configuration.

    Walks the operator through importing an old configuration, setting basic
    parameters (datastore, URLs, emails), generating keys, and finalizing the
    install. Interactive: prompts on stdin and prints progress to stdout.

    :param config: the GRR config object to update (must be writeable).
    :param external_hostname: externally visible hostname for URL setup.
    :param admin_password: admin password forwarded to FinalizeConfigInit.
    :param redownload_templates: forwarded to FinalizeConfigInit.
    :param repack_templates: forwarded to FinalizeConfigInit.
    :param token: auth token forwarded to FinalizeConfigInit.
    :raises IOError: if the writeback config file is not writeable.
    """
    # Fail fast if we cannot persist any changes at all.
    print("Checking write access on config %s" % config["Config.writeback"])
    if not os.access(config.parser.filename, os.W_OK):
        raise IOError("Config not writeable (need sudo?)")

    print("\nStep 0: Importing Configuration from previous installation.")
    options_imported = 0
    prev_config_file = config.Get("ConfigUpdater.old_config", default=None)
    if prev_config_file and os.access(prev_config_file, os.R_OK):
        print("Found config file %s." % prev_config_file)
        # pytype: disable=wrong-arg-count
        if builtins.input("Do you want to import this configuration? "
                          "[yN]: ").upper() == "Y":
            options_imported = ImportConfig(prev_config_file, config)
        # pytype: enable=wrong-arg-count
    else:
        print("No old config file found.")

    print("\nStep 1: Setting Basic Configuration Parameters")
    print("We are now going to configure the server using a bunch of questions.")
    ConfigureDatastore(config)
    ConfigureUrls(config, external_hostname=external_hostname)
    ConfigureEmails(config)

    print("\nStep 2: Key Generation")
    if config.Get("PrivateKeys.server_key", default=None):
        # Keys already exist (possibly just imported); only overwrite on an
        # explicit "Y" — the empty-input default is "N".
        if options_imported > 0:
            print("Since you have imported keys from another installation in the "
                  "last step,\nyou probably do not want to generate new keys now.")
        # pytype: disable=wrong-arg-count
        if (builtins.input("You already have keys in your config, do you want to"
                           " overwrite them? [yN]: ").upper() or "N") == "Y":
            config_updater_keys_util.GenerateKeys(config, overwrite_keys=True)
        # pytype: enable=wrong-arg-count
    else:
        config_updater_keys_util.GenerateKeys(config)

    FinalizeConfigInit(
        config,
        token,
        admin_password=admin_password,
        redownload_templates=redownload_templates,
        repack_templates=repack_templates,
        prompt=True)
def mousePressEvent(self, event):
    """Handle file link clicks."""
    super(OutputWindow, self).mousePressEvent(event)
    match = self._link_match
    if not match:
        return
    path = match.group('url')
    line_text = match.group('line')
    # Convert the 1-based line from the link into a 0-based editor line;
    # default to the first line when the link carries no line number.
    line = int(line_text) - 1 if line_text is not None else 0
    self.open_file_requested.emit(path, line)
def process_alias_export_namespace(namespace):
    """Validate input arguments when the user invokes 'az alias export'.

    Normalizes ``namespace.export_path`` to an absolute path, refuses to
    overwrite an existing file, and creates missing parent directories. When
    the path is an existing directory, the default alias file name is
    appended.

    Args:
        namespace: argparse namespace object.

    Raises:
        CLIError: if the (resolved) export path already exists as a file.
    """
    namespace.export_path = os.path.abspath(namespace.export_path)
    if os.path.isfile(namespace.export_path):
        raise CLIError(FILE_ALREADY_EXISTS_ERROR.format(namespace.export_path))

    export_path_dir = os.path.dirname(namespace.export_path)
    if not os.path.isdir(export_path_dir):
        os.makedirs(export_path_dir)

    if os.path.isdir(namespace.export_path):
        namespace.export_path = os.path.join(namespace.export_path,
                                             ALIAS_FILE_NAME)
        # Bug fix: the original skipped this re-check after appending the
        # default file name, silently clobbering an existing alias file.
        if os.path.isfile(namespace.export_path):
            raise CLIError(
                FILE_ALREADY_EXISTS_ERROR.format(namespace.export_path))
def strxor(s1, s2):
    """Returns the binary XOR of the 2 provided strings s1 and s2. s1 and s2
    must be of same length.
    """
    # XOR byte-by-byte; zip pairs the two byte strings element-wise.
    return b"".join(chb(orb(c1) ^ orb(c2)) for c1, c2 in zip(s1, s2))
def MergeData(self, merge_data, raw_data=None):
    """Merges data read from a config file into the current config.

    Recurses into context clauses (nested dicts whose key is a known
    context), and writes plain options into ``raw_data`` after validation.

    :param merge_data: mapping of option names (or context names) to values.
    :param raw_data: destination mapping; defaults to ``self.raw_data``.
    :raises InvalidContextError: if a nested dict uses an unknown context.
    :raises MissingConfigDefinitionError: if an option has no descriptor and
        missing definitions are disallowed by flags.
    :raises ConstModificationError: if a constant is modified after init.
    """
    # Any cached derived values are stale once we merge new data.
    self.FlushCache()
    if raw_data is None:
        raw_data = self.raw_data

    for k, v in iteritems(merge_data):
        # A context clause.
        if isinstance(v, dict) and k not in self.type_infos:
            if k not in self.valid_contexts:
                raise InvalidContextError("Invalid context specified: %s" % k)

            # Merge recursively into the (possibly new) context sub-dict.
            context_data = raw_data.setdefault(k, collections.OrderedDict())
            self.MergeData(v, context_data)
        else:
            # Find the descriptor for this field.
            descriptor = self.type_infos.get(k)
            if descriptor is None:
                msg = ("Missing config definition for %s. This option is likely "
                       "deprecated or renamed. Check the release notes." % k)
                if flags.FLAGS.disallow_missing_config_definitions:
                    raise MissingConfigDefinitionError(msg)

            if isinstance(v, string_types):
                v = v.strip()

            # If we are already initialized and someone tries to modify a constant
            # value (e.g. via Set()), break loudly.
            if self.initialized and k in self.constants:
                raise ConstModificationError(
                    "Attempting to modify constant value %s" % k)

            raw_data[k] = v
def add_lat_lon(self, lat, lon, precision=1e7):
    """Add lat, lon to gps (lat, lon in float)."""
    gps_ifd = self._ef["GPS"]
    # Hemisphere references are derived from the sign of each coordinate.
    gps_ifd[piexif.GPSIFD.GPSLatitudeRef] = "N" if lat > 0 else "S"
    gps_ifd[piexif.GPSIFD.GPSLongitudeRef] = "E" if lon > 0 else "W"
    # Magnitudes are stored as degrees/minutes/seconds rationals.
    gps_ifd[piexif.GPSIFD.GPSLongitude] = decimal_to_dms(abs(lon), int(precision))
    gps_ifd[piexif.GPSIFD.GPSLatitude] = decimal_to_dms(abs(lat), int(precision))
def _refine_enc ( enc ) : '''Return the properly formatted ssh value for the authorized encryption key type . ecdsa defaults to 256 bits , must give full ecdsa enc schema string if using higher enc . If the type is not found , raise CommandExecutionError .'''
rsa = [ 'r' , 'rsa' , 'ssh-rsa' ] dss = [ 'd' , 'dsa' , 'dss' , 'ssh-dss' ] ecdsa = [ 'e' , 'ecdsa' , 'ecdsa-sha2-nistp521' , 'ecdsa-sha2-nistp384' , 'ecdsa-sha2-nistp256' ] ed25519 = [ 'ed25519' , 'ssh-ed25519' ] if enc in rsa : return 'ssh-rsa' elif enc in dss : return 'ssh-dss' elif enc in ecdsa : # ecdsa defaults to ecdsa - sha2 - nistp256 # otherwise enc string is actual encoding string if enc in [ 'e' , 'ecdsa' ] : return 'ecdsa-sha2-nistp256' return enc elif enc in ed25519 : return 'ssh-ed25519' else : raise CommandExecutionError ( 'Incorrect encryption key type \'{0}\'.' . format ( enc ) )
def _set_get_vnetwork_portgroups(self, v, load=False):
    """Setter method for get_vnetwork_portgroups, mapped from YANG variable
    /brocade_vswitch_rpc/get_vnetwork_portgroups (rpc)

    If this variable is read-only (config: false) in the source YANG file,
    then _set_get_vnetwork_portgroups is considered as a private method.
    Backends looking to populate this variable should do so via calling
    thisObj._set_get_vnetwork_portgroups() directly.

    YANG Description: Shows discovered PortGroups
    """
    # NOTE: generated pyangbind setter — the YANGDynClass arguments mirror
    # the YANG model and must stay in sync with the generated-type string.
    if hasattr(v, "_utype"):
        # Unwrap typed values produced by pyangbind before re-validating.
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=get_vnetwork_portgroups.get_vnetwork_portgroups, is_leaf=True, yang_name="get-vnetwork-portgroups", rest_name="get-vnetwork-portgroups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'pg-name'}}, namespace='urn:brocade.com:mgmt:brocade-vswitch', defining_module='brocade-vswitch', yang_type='rpc', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with a structured error describing the expected type.
        raise ValueError({
            'error-string': """get_vnetwork_portgroups must be of a type compatible with rpc""",
            'defined-type': "rpc",
            'generated-type': """YANGDynClass(base=get_vnetwork_portgroups.get_vnetwork_portgroups, is_leaf=True, yang_name="get-vnetwork-portgroups", rest_name="get-vnetwork-portgroups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'pg-name'}}, namespace='urn:brocade.com:mgmt:brocade-vswitch', defining_module='brocade-vswitch', yang_type='rpc', is_config=True)""",
        })

    self.__get_vnetwork_portgroups = t
    if hasattr(self, '_set'):
        self._set()
def remove(self):
    """Remove this node from the list of children of its current parent, if
    the current parent is not ``None``, otherwise do nothing.

    .. versionadded:: 1.7.0
    """
    if self.parent is None:
        return
    # Locate this exact node (identity, not equality) among the parent's
    # children, detach it, and clear the back-reference.
    for index, sibling in enumerate(self.parent.children):
        if sibling is self:
            self.parent.remove_child(index)
            self.parent = None
            break
def dynamics_bs(times, masses, smas, eccs, incls, per0s, long_ans, mean_anoms,
                t0=0.0, vgamma=0.0, stepsize=0.01, orbiterror=1e-16,
                ltte=False, return_roche_euler=False):
    """Burlisch-Stoer integration of orbits to give positions and velocities
    of any given number of stars in hierarchical orbits.

    This code currently uses the NBody code in Josh Carter's photodynam code
    available here: [[TODO: include link]]. If using the Nbody mode in PHOEBE,
    please cite him as well: [[TODO: include citation]]

    See :func:`dynamics_from_bundle` for a wrapper around this function which
    automatically handles passing everything in the correct order and in the
    correct units.

    For each iterable input, stars and orbits should be listed in order from
    primary -> secondary for each nested hierarchy level. Each iterable for
    orbits should have length one less than those for each star (ie if 3
    masses are provided, then 2 smas, eccs, etc need to be provided).

    Args:
        times: (iterable) times at which to compute positions and velocities
            for each star
        masses: (iterable) mass for each star in [solMass]
        smas: (iterable) semi-major axis for each orbit [AU]
        eccs: (iterable) eccentricities for each orbit
        incls: (iterable) inclinations for each orbit [rad]
        per0s: (iterable) longitudes of periastron for each orbit [rad]
        long_ans: (iterable) longitudes of the ascending node for each
            orbit [rad]
        mean_anoms: (iterable) mean anomalies for each orbit
        t0: (float) time at which to start the integrations
        vgamma: (float) systemic velocity (currently NOT applied — see the
            TODO below)
        stepsize: (float, optional) stepsize of the integrations
            [default: 0.01]
        orbiterror: (float, optional) orbiterror of the integrations
            [default: 1e-16]
        ltte: (bool, default False) whether to account for light travel time
            effects.
        return_roche_euler: (bool, default False) whether to also return
            Roche/euler quantities (ds, Fs, ethetas, elongans, eincls).

    Returns:
        t, xs, ys, zs, vxs, vys, vzs. t is a numpy array of all times, the
        remaining are a list of numpy arrays (a numpy array per star - in
        order given by b.hierarchy.get_stars()) for the cartesian positions
        and velocities of each star at those same times.

    Raises:
        ImportError: if photodynam is not installed.
    """
    if not _can_bs:
        raise ImportError("photodynam is not installed (http://github.com/phoebe-project/photodynam)")

    # photodynam expects plain tuples for every array-like argument.
    times = _ensure_tuple(times)
    masses = _ensure_tuple(masses)
    smas = _ensure_tuple(smas)
    eccs = _ensure_tuple(eccs)
    incls = _ensure_tuple(incls)
    per0s = _ensure_tuple(per0s)
    long_ans = _ensure_tuple(long_ans)
    mean_anoms = _ensure_tuple(mean_anoms)

    # TODO: include vgamma!!!!
    d = photodynam.do_dynamics(times, masses, smas, eccs, incls, per0s, long_ans, mean_anoms, t0, stepsize, orbiterror, ltte, return_roche_euler)
    # d is in the format: {'t': (...), 'x': ((1,2,3), (1,2,3), ...), 'y': ..., 'z': ...}

    nobjects = len(masses)
    ntimes = len(times)

    # TODO: need to return euler angles... if that even makes sense?? Or maybe we
    # need to make a new place in orbit??

    # Conversion factor from AU (photodynam units) to solar radii.
    au_to_solrad = (1 * u.AU).to(u.solRad).value

    # Positions/velocities: x and y axes are flipped relative to photodynam's
    # convention (hence the -1 factors); z is kept as-is.
    ts = np.array(d['t'])
    xs = [(-1 * np.array([d['x'][ti][oi] for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]
    ys = [(-1 * np.array([d['y'][ti][oi] for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]
    zs = [(np.array([d['z'][ti][oi] for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]
    vxs = [(-1 * np.array([d['vx'][ti][oi] for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]
    vys = [(-1 * np.array([d['vy'][ti][oi] for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]
    vzs = [(np.array([d['vz'][ti][oi] for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]

    if return_roche_euler:
        # a (sma), e (ecc), in (incl), o (per0?), ln (long_an?), m (mean_anom?)
        ds = [(np.array([d['kepl_a'][ti][oi] for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]

        # TODO: fix this
        Fs = [(np.array([1.0 for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]

        # TODO: check to make sure this is the right angle
        # TODO: need to add np.pi for secondary component?
        # true anomaly + periastron
        # NOTE(review): the three angle arrays below are multiplied by
        # au_to_solrad (a length conversion) — looks suspicious for
        # quantities in radians; confirm against photodynam's output units
        # before changing.
        ethetas = [(np.array([d['kepl_o'][ti][oi] + d['kepl_m'][ti][oi] + np.pi / 2 for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]

        elongans = [(np.array([d['kepl_ln'][ti][oi] + long_ans[0 if oi == 0 else oi - 1] for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]

        eincls = [(np.array([d['kepl_in'][ti][oi] + np.pi - incls[0 if oi == 0 else oi - 1] for ti in range(ntimes)]) * au_to_solrad) for oi in range(nobjects)]

        # d, solRad, solRad/d, rad
        return ts, xs, ys, zs, vxs, vys, vzs, ds, Fs, ethetas, elongans, eincls

    else:
        # d, solRad, solRad/d
        return ts, xs, ys, zs, vxs, vys, vzs
def send_feature_report(self, data, report_id=0x00):
    """Send a Feature report to a HID device.

    Feature reports are sent over the Control endpoint as a Set_Report
    transfer.

    Parameters:
        data       The data to send
        report_id  Report ID prepended to the payload (default 0x00)

    Returns:
        This function returns the actual number of bytes written
    """
    if not self._is_open:
        raise HIDException("HIDDevice not open")

    # The report ID is always the first byte of the transferred payload.
    payload = bytearray([report_id]) + bytearray(data)
    buf = ffi.new("const unsigned char[]", bytes(payload))
    written = hidapi.hid_send_feature_report(self._device, buf, len(payload))
    if written == -1:
        raise HIDException("Failed to send feature report to HID device")
    return written
def binaryTree_nodeNames(binaryTree):
    """creates names for the leave and internal nodes of the newick tree from
    the leaf labels
    """
    labels = [None] * binaryTree.traversalID.midEnd

    def assign(node):
        # Post-order walk: leaves are labelled with their iD, internal nodes
        # with "<left-label>_<right-label>".
        mid = node.traversalID.mid
        if node.internal:
            left_label = assign(node.left)
            right_label = assign(node.right)
            labels[mid] = left_label + "_" + right_label
        else:
            labels[mid] = str(node.iD)
        return labels[mid]

    assign(binaryTree)
    return labels
def done(self):
    """Return True the future is done, False otherwise.

    This still returns True in failure cases; checking :meth:`result` or
    :meth:`exception` is the canonical way to assess success or failure.
    """
    # Either slot having been set away from the sentinel marks completion.
    has_exception = self._exception != self._SENTINEL
    has_result = self._result != self._SENTINEL
    return has_exception or has_result