def populate_tabs(self):
    """Populating tabs based on layer metadata."""
    self.delete_tabs()
    layer_purpose = self.metadata.get('layer_purpose')
    if not layer_purpose:
        message = tr(
            'Key layer_purpose is not found in the layer {layer_name}').format(
                layer_name=self.layer.name())
        raise KeywordNotFoundError(message)

    if layer_purpose == layer_purpose_exposure['key']:
        layer_subcategory = self.metadata.get('exposure')
    elif layer_purpose == layer_purpose_hazard['key']:
        layer_subcategory = self.metadata.get('hazard')
    else:
        layer_subcategory = None

    field_groups = get_field_groups(layer_purpose, layer_subcategory)
    for field_group in field_groups:
        tab = FieldMappingTab(field_group, self, self.iface)
        tab.set_layer(self.layer, self.metadata)
        self.addTab(tab, field_group['name'])
        self.tabs.append(tab)
def wait_until_queue_empty(self, channels, report=True, clear_end=True):
    """Waits until all queues of channels are empty."""
    state = {'message': ''}
    self.logger.debug("wait_until_queue_empty: report=%s %s" % (
        str(report),
        str([channel + ':' + str(len(self.queues[channel])) for channel in channels]),
    ))
    queues = []
    for channel in channels:
        queues += self.queues[channel][:]

    def print_progress():
        if report:
            self.logger.debug("all_empty=%s" % (str(all_empty),))
            sys.__stderr__.write('\b' * len(state['message']))
            sys.__stderr__.write("\033[K")
            state['message'] = "%.2f kB/s // %.2fkB of %.2fkB // %.2f%%" % (
                self.bytes_speed / 1024,
                self.bytes_sent / 1024,
                self.bytes_total / 1024,
                (self.bytes_sent / self.bytes_total * 100) if self.bytes_total else 0)
            sys.__stderr__.write(state['message'])
            sys.__stderr__.flush()

    while True:
        all_empty = all(m['_sent'] for m in queues)
        print_progress()
        if all_empty:
            break
        time.sleep(0.2)

    print_progress()
    if report and clear_end:
        sys.__stderr__.write('\b' * len(state['message']))
        sys.__stderr__.write("\033[K")
        sys.__stderr__.flush()
def list_tags(self):
    """List tags and return the JSON-encoded result."""
    try:
        tags = Tags.list()
    except NipapError as e:
        return json.dumps({'error': 1, 'message': e.args,
                           'type': type(e).__name__})
    return json.dumps(tags, cls=NipapJSONEncoder)
def _load(contract_name):
    """Retrieve the contract instance for `contract_name` that represents the
    smart contract in the keeper network.

    :param contract_name: str name of the solidity keeper contract without
        the network name.
    :return: web3.eth.Contract instance
    """
    contract_definition = ContractHandler.get_contract_dict_by_name(contract_name)
    address = Web3Provider.get_web3().toChecksumAddress(contract_definition['address'])
    abi = contract_definition['abi']
    contract = Web3Provider.get_web3().eth.contract(address=address, abi=abi)
    ContractHandler._contracts[contract_name] = (contract, ConciseContract(contract))
    return ContractHandler._contracts[contract_name]
def send(self, command):
    """Sends commands to this hypervisor.

    :param command: a uBridge hypervisor command
    :returns: results as a list
    """
    # uBridge responses are of the form:
    #   1xx yyyyy\r\n
    #   1xx yyyyy\r\n
    #   100-yyyy\r\n
    # or
    #   2xx-yyyy\r\n
    # Where 1xx is a code from 100-199 for a success or 200-299 for an error.
    # The result might be multiple lines and might be less than the buffer size
    # but still have more data. The only thing we know for sure is the last line
    # will begin with '100-' or a '2xx-' and end with '\r\n'.
    if self._writer is None or self._reader is None:
        raise UbridgeError("Not connected")

    try:
        command = command.strip() + '\n'
        log.debug("sending {}".format(command))
        self._writer.write(command.encode())
        yield from self._writer.drain()
    except OSError as e:
        raise UbridgeError("Lost communication with {host}:{port} :{error}, "
                           "Dynamips process running: {run}".format(
                               host=self._host, port=self._port, error=e,
                               run=self.is_running()))

    # Now retrieve the result
    data = []
    buf = ''
    retries = 0
    max_retries = 10
    while True:
        try:
            try:
                chunk = yield from self._reader.read(1024)
            except asyncio.CancelledError:
                # Task has been canceled but continue to read
                # any remaining data sent by the hypervisor.
                continue
            except ConnectionResetError as e:
                # Sometimes WinError 64 (ERROR_NETNAME_DELETED) is returned here on Windows.
                # These happen if connection reset is received before IOCP could complete
                # a previous operation. Ignore and try again...
                log.warning("Connection reset received while reading uBridge response: {}".format(e))
                continue
            if not chunk:
                if retries > max_retries:
                    raise UbridgeError("No data returned from {host}:{port}, "
                                       "uBridge process running: {run}".format(
                                           host=self._host, port=self._port,
                                           run=self.is_running()))
                else:
                    retries += 1
                    yield from asyncio.sleep(0.1)
                    continue
            retries = 0
            buf += chunk.decode("utf-8")
        except OSError as e:
            raise UbridgeError("Lost communication with {host}:{port} :{error}, "
                               "uBridge process running: {run}".format(
                                   host=self._host, port=self._port, error=e,
                                   run=self.is_running()))

        # If the buffer doesn't end in '\n' then we can't be done
        try:
            if buf[-1] != '\n':
                continue
        except IndexError:
            raise UbridgeError("Could not communicate with {host}:{port}, "
                               "uBridge process running: {run}".format(
                                   host=self._host, port=self._port,
                                   run=self.is_running()))

        data += buf.split('\r\n')
        if data[-1] == '':
            data.pop()
        buf = ''

        # Does it contain an error code?
        if self.error_re.search(data[-1]):
            raise UbridgeError(data[-1][4:])

        # Or does the last line begin with '100-'? Then we are done!
        if data[-1][:4] == '100-':
            data[-1] = data[-1][4:]
            if data[-1] == 'OK':
                data.pop()
            break

    # Remove success response codes
    for index in range(len(data)):
        if self.success_re.search(data[index]):
            data[index] = data[index][4:]

    log.debug("returned result {}".format(data))
    return data
def _spawn_minions(self, timeout=60):
    '''
    Spawn all the coroutines which will sign in to masters
    '''
    # Run master discovery over SSDP. This may modify the whole configuration,
    # depending on the networking and sets of masters. If match is 'any' we let
    # eval_master handle the discovery instead so disconnections can also
    # handle discovery.
    if isinstance(self.opts['discovery'], dict) and self.opts['discovery'].get('multimaster'):
        self._discover_masters()
    masters = self.opts['master']
    if (self.opts['master_type'] in ('failover', 'distributed')) or not isinstance(self.opts['master'], list):
        masters = [masters]
    for master in masters:
        s_opts = copy.deepcopy(self.opts)
        s_opts['master'] = master
        s_opts['multimaster'] = True
        minion = self._create_minion_object(s_opts,
                                            s_opts['auth_timeout'],
                                            False,
                                            io_loop=self.io_loop,
                                            loaded_base_name='salt.loader.{0}'.format(s_opts['master']),
                                            jid_queue=self.jid_queue)
        self.io_loop.spawn_callback(self._connect_minion, minion)
    self.io_loop.call_later(timeout, self._check_minions)
def resources_preparing_factory(app, wrapper):
    """Factory which wraps all resources from the settings."""
    settings = app.app.registry.settings
    config = settings.get(CONFIG_RESOURCES, None)
    if not config:
        return
    resources = [(k, [wrapper(r, GroupResource(k, v)) for r in v])
                 for k, v in config]
    settings[CONFIG_RESOURCES] = resources
def check_recipe_choices(self):
    '''Checks what recipes are being built to see which of the alternative
    and optional dependencies are being used, and returns a list of these.'''
    recipes = []
    built_recipes = self.ctx.recipe_build_order
    for recipe in self.depends:
        if isinstance(recipe, (tuple, list)):
            for alternative in recipe:
                if alternative in built_recipes:
                    recipes.append(alternative)
                    break
    for recipe in self.opt_depends:
        if recipe in built_recipes:
            recipes.append(recipe)
    return sorted(recipes)
def on_valid(valid_content_type, on_invalid=json):
    """Renders as the specified content type only if no errors are found in
    the provided data object."""
    invalid_kwargs = introspect.generate_accepted_kwargs(on_invalid, 'request', 'response')
    invalid_takes_response = introspect.takes_all_arguments(on_invalid, 'response')

    def wrapper(function):
        valid_kwargs = introspect.generate_accepted_kwargs(function, 'request', 'response')
        valid_takes_response = introspect.takes_all_arguments(function, 'response')

        @content_type(valid_content_type)
        @wraps(function)
        def output_content(content, response, **kwargs):
            if type(content) == dict and 'errors' in content:
                response.content_type = on_invalid.content_type
                if invalid_takes_response:
                    kwargs['response'] = response
                return on_invalid(content, **invalid_kwargs(kwargs))

            if valid_takes_response:
                kwargs['response'] = response
            return function(content, **valid_kwargs(kwargs))
        return output_content
    return wrapper
def run(self):
    """Build the package and fix the source order list per the checksum."""
    self.files_exist()
    self.info_file()
    sources = self.sources
    if len(sources) > 1 and self.sbo_sources != sources:
        # If the list does not have the same order, use the order from
        # the .info file.
        sources = self.sbo_sources
    BuildPackage(self.script, sources, self.path, auto=True).build()
    raise SystemExit()
def use_active_assessment_part_view(self):
    """Pass through to provider AssessmentPartLookupSession.use_active_assessment_part_view"""
    self._operable_views['assessment_part'] = ACTIVE
    # self._get_provider_session('assessment_part_lookup_session')  # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_active_assessment_part_view()
        except AttributeError:
            pass
def definitiondir(self, filetype, **kwargs):
    """Returns definition subdirectory in :envvar:`PLATELIST_DIR` of the
    form: ``NNNNXX``.

    Parameters
    ----------
    filetype : str
        File type parameter.
    designid : int or str
        Design ID number. Will be converted to int internally.

    Returns
    -------
    definitiondir : str
        Definition directory in the format ``NNNNXX``.
    """
    designid = int(kwargs['designid'])
    designid100 = designid // 100
    subdir = "{:0>4d}".format(designid100) + "XX"
    return subdir
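# Usage sketch (illustrative, not from the original source): the directory
# name is the design ID truncated to hundreds and zero-padded to four digits.
# Assuming an instance `sdss_path` of the class above:
#
#     sdss_path.definitiondir('plateDefinition', designid=8405)  # -> '0084XX'
#     sdss_path.definitiondir('plateDefinition', designid=42)    # -> '0000XX'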
def getCiphertextLen(self, ciphertext):
    """Given a ``ciphertext`` with a valid header, returns the length of the
    ciphertext inclusive of ciphertext expansion."""
    plaintext_length = self.getPlaintextLen(ciphertext)
    ciphertext_length = plaintext_length + Encrypter._CTXT_EXPANSION
    return ciphertext_length
def get_combined_dim(combination: str, tensor_dims: List[int]) -> int:
    """
    For use with :func:`combine_tensors`. This function computes the resultant
    dimension when calling ``combine_tensors(combination, tensors)``, when the
    tensor dimension is known. This is necessary for knowing the sizes of
    weight matrices when building models that use ``combine_tensors``.

    Parameters
    ----------
    combination : ``str``
        A comma-separated list of combination pieces, like ``"1,2,1*2"``,
        specified identically to ``combination`` in :func:`combine_tensors`.
    tensor_dims : ``List[int]``
        A list of tensor dimensions, where each dimension is from the
        `last axis` of the tensors that will be input to :func:`combine_tensors`.
    """
    if len(tensor_dims) > 9:
        raise ConfigurationError("Double-digit tensor lists not currently supported")
    combination = combination.replace('x', '1').replace('y', '2')
    return sum([_get_combination_dim(piece, tensor_dims) for piece in combination.split(',')])
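# Usage sketch (illustrative, not from the original source): with two input
# tensors whose last axes are both of size 5, the combination "x,y,x*y"
# concatenates both tensors and their elementwise product:
#
#     get_combined_dim("x,y,x*y", [5, 5])  # -> 15 (5 + 5 + 5)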
def _vis_calibrate(self, data, key):
    """VIS channel calibration."""
    # Radiance to reflectance taken as in mipp/xrit/MSG.py;
    # again, the FCI User Guide is not clear on how to do this.
    sirr = self.nc['/data/{}/measured/channel_effective_solar_irradiance'.format(key.name)][...]
    # reflectance = radiance / sirr * 100
    data.data[:] /= sirr
    data.data[:] *= 100
def wrap(self, methodName, types, skip=2):
    """Create a message handler that invokes a wrapper method with the
    in-order message fields as parameters, skipping over the first ``skip``
    fields, and parsed according to the ``types`` list."""

    def handler(fields):
        try:
            args = [
                field if typ is str else
                int(field or 0) if typ is int else
                float(field or 0) if typ is float else
                bool(int(field or 0))
                for (typ, field) in zip(types, fields[skip:])]
            method(*args)
        except Exception:
            self.logger.exception(f'Error for {methodName}:')

    method = getattr(self.wrapper, methodName, None)
    return handler if method else lambda *args: None
def load_uri(uri, base_uri=None, loader=None, jsonschema=False, load_on_repr=True):
    """Load JSON data from ``uri`` with JSON references proxied to their
    referent data.

    :param uri: URI to fetch the JSON from
    :param kwargs: This function takes any of the keyword arguments from
        :meth:`JsonRef.replace_refs`
    """
    if loader is None:
        loader = jsonloader
    if base_uri is None:
        base_uri = uri
    return JsonRef.replace_refs(
        loader(uri),
        base_uri=base_uri,
        loader=loader,
        jsonschema=jsonschema,
        load_on_repr=load_on_repr,
    )
def read_blitzorg_csv(f=None):
    """Read csv data downloaded from Blitzorg's historical data section.

    Time is in POSIX timestamps (x100000). An example is kept in
    stormstats/egdata/archive_2_raw.txt. If no data file is specified the
    function will assume you want to read this example data. A GeoPandas
    dataframe will be returned.

    :param f: optional string giving path/filename of csv data

    :Example:

    >>> stormstats.storm.read_blitzorg_csv(f=None)
    """
    factor = 1000000000  # Don't change this magic number! It's from Bzorg.
    if f:
        tmp = pd.read_csv(f)
    else:
        f = pkg.resource_filename('stormstats', "egdata/archive_2_raw.txt")
        tmp = pd.read_csv(f)
    dt_list = [dt.datetime.fromtimestamp(ts / factor).strftime('%Y-%m-%d %H:%M:%S:%f')
               for ts in tmp.time]
    tmp_list = [[Point(lon, lat), ts]
                for lon, lat, ts in zip(tmp.lon, tmp.lat, dt_list)]
    df = gpd.GeoDataFrame(tmp_list, columns=['geometry', 'dt'])
    return df
def deserialize_namespace(data):
    '''Deserialize a Namespace object.

    :param data: bytes or str
    :return: namespace
    '''
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    kvs = data.split()
    uri_to_prefix = {}
    for kv in kvs:
        i = kv.rfind(':')
        if i == -1:
            raise ValueError('no colon in namespace '
                             'field {}'.format(repr(kv)))
        uri, prefix = kv[0:i], kv[i + 1:]
        if not is_valid_schema_uri(uri):
            # Currently this can't happen because the only invalid URIs
            # are those which contain a space
            raise ValueError('invalid URI {} in namespace '
                             'field {}'.format(repr(uri), repr(kv)))
        if not is_valid_prefix(prefix):
            raise ValueError('invalid prefix {} in namespace field'
                             ' {}'.format(repr(prefix), repr(kv)))
        if uri in uri_to_prefix:
            raise ValueError('duplicate URI {} in '
                             'namespace {}'.format(repr(uri), repr(data)))
        uri_to_prefix[uri] = prefix
    return Namespace(uri_to_prefix)
def browse_clicked(self, widget, data=None):
    """Set the selected directory on the given entry widget."""
    text = self.gui_helper.create_file_chooser_dialog(
        "Please select directory", self.path_window)
    if text is not None:
        data.set_text(text)
async def async_set_qs_value(self, qsid, val, success_cb=None):
    """Push state to QSUSB, retry with backoff."""
    set_url = URL_SET.format(self._url, qsid, val)
    for _repeat in range(1, 6):
        set_result = await self.get_json(set_url, 2)
        if set_result and set_result.get('data', 'NO REPLY') != 'NO REPLY':
            if success_cb:
                success_cb()
            return True
        await asyncio.sleep(0.01 * _repeat)
    _LOGGER.error("Unable to set %s", set_url)
    return False
def normalize(dt, tz):
    """Given an object with a timezone, return a datetime object normalized
    to the proper timezone. This takes the given localized datetime and
    returns it normalized to match the specified timezone."""
    if not isinstance(tz, tzinfo):
        tz = pytz.timezone(tz)
    dt = tz.normalize(dt)
    return dt
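# Usage sketch (illustrative, not from the original source): pytz arithmetic
# can leave a datetime with a stale UTC offset after crossing a DST boundary;
# normalize() corrects the offset.
#
#     import datetime
#     import pytz
#
#     eastern = pytz.timezone('US/Eastern')
#     dt = eastern.localize(datetime.datetime(2021, 3, 13, 12, 0))
#     shifted = dt + datetime.timedelta(days=1)   # crosses the spring DST jump
#     normalize(shifted, 'US/Eastern')            # offset corrected to EDT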
def delayed_assattr(self, node):
    """Visit an AssAttr node

    This adds the name to locals and handles member definitions.
    """
    try:
        frame = node.frame()
        for inferred in node.expr.infer():
            if inferred is util.Uninferable:
                continue
            try:
                if inferred.__class__ is bases.Instance:
                    inferred = inferred._proxied
                    iattrs = inferred.instance_attrs
                    if not _can_assign_attr(inferred, node.attrname):
                        continue
                elif isinstance(inferred, bases.Instance):
                    # Const, Tuple, ... we may be wrong, may be not, but
                    # anyway we don't want to pollute builtins' namespace.
                    continue
                elif inferred.is_function:
                    iattrs = inferred.instance_attrs
                else:
                    iattrs = inferred.locals
            except AttributeError:
                # XXX log error
                continue
            values = iattrs.setdefault(node.attrname, [])
            if node in values:
                continue
            # Get assignments in __init__ first. XXX useful?
            if (frame.name == "__init__" and values
                    and values[0].frame().name != "__init__"):
                values.insert(0, node)
            else:
                values.append(node)
    except exceptions.InferenceError:
        pass
def data(self, value):
    """Setter for the _data attribute. Should be set from response.read().

    :param value: The body of the response object for the LRSResponse
    :type value: unicode
    """
    if value is not None and not isinstance(value, unicode):
        value = value.decode('utf-8')
    self._data = value
def get_type_data(name):
    """Return a dictionary representation of the type.

    Can be used to initialize primordium.type.primitives.Type
    """
    name = name.upper()
    if name in CELESTIAL_TIME_TYPES:
        namespace = 'time'
        domain = 'Celestial Time Systems'
        time_name = CELESTIAL_TIME_TYPES[name]
    elif name in EARTH_TIME_TYPES:
        namespace = 'time'
        domain = 'Earth Time Systems'
        time_name = EARTH_TIME_TYPES[name]
    elif name in SUPER_FUN_TIME_TYPES:
        namespace = 'time'
        domain = 'Alternative Time Systems'
        time_name = SUPER_FUN_TIME_TYPES[name]
    else:
        raise NotFound('Time Type: ' + name)

    return {
        'authority': 'okapia.net',
        'namespace': namespace,
        'identifier': name,
        'domain': domain,
        'display_name': time_name + ' Time Type',
        'display_label': time_name,
        'description': ('The time type for ' + time_name + ' time.'),
    }
def predict(model_dir, images):
    """Local instant prediction."""
    results = _tf_predict(model_dir, images)
    predicted_and_scores = [(predicted, label_scores[list(labels).index(predicted)])
                            for predicted, labels, label_scores in results]
    return predicted_and_scores
def copy(self, name: str) -> 'Selection':
    """Return a new |Selection| object with the given name and copies of the
    handled |Nodes| and |Elements| objects, based on method |Devices.copy|."""
    return type(self)(name, copy.copy(self.nodes), copy.copy(self.elements))
def management_command(self, command, *args, **kwargs):
    """Runs a Django management command"""
    self.setup_django()
    if 'verbosity' not in kwargs:
        kwargs['verbosity'] = self.verbosity
    if not self.use_colour:
        kwargs['no_color'] = False
    self.debug(self.yellow_style('$ manage.py %s' % command))
    return call_command(command, *args, **kwargs)
def cache_request_user(user_cls, request, user_id):
    """Helper function to cache the currently logged-in user.

    The user is cached at `request._user`. Caching happens only if the user
    is not already cached or if the cached user's pk does not match `user_id`.

    :param user_cls: User model class to use for user lookup.
    :param request: Pyramid Request instance.
    :param user_id: Current user primary key field value.
    """
    pk_field = user_cls.pk_field()
    user = getattr(request, '_user', None)
    if user is None or getattr(user, pk_field, None) != user_id:
        request._user = user_cls.get_item(**{pk_field: user_id})
def build_messages_metrics(messages):
    """Build the report's metrics"""
    count_types = collections.Counter(line.get('type') or None for line in messages)
    count_modules = collections.Counter(line.get('module') or None for line in messages)
    count_symbols = collections.Counter(line.get('symbol') or None for line in messages)
    count_paths = collections.Counter(line.get('path') or None for line in messages)
    return {
        'types': count_types,
        'modules': count_modules,
        'symbols': count_symbols,
        'paths': count_paths,
    }
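# Usage sketch (illustrative, not from the original source): each metric is a
# collections.Counter keyed by the corresponding message field.
#
#     messages = [{'type': 'error', 'module': 'a', 'symbol': 's1', 'path': 'a.py'},
#                 {'type': 'error', 'module': 'b', 'symbol': 's2', 'path': 'b.py'}]
#     build_messages_metrics(messages)['types']  # -> Counter({'error': 2})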
def fit(self, X, y=None, groups=None, **fit_params):
    """Run fit with all sets of parameters.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]
        Training vector, where n_samples is the number of samples and
        n_features is the number of features.
    y : array-like, shape = [n_samples] or [n_samples, n_output], optional
        Target relative to X for classification or regression;
        None for unsupervised learning.
    groups : array-like, shape = [n_samples], optional
        Group labels for the samples used while splitting the dataset into
        train/test set.
    **fit_params
        Parameters passed to the ``fit`` method of the estimator
    """
    estimator = self.estimator
    from sklearn.metrics.scorer import _check_multimetric_scoring
    scorer, multimetric = _check_multimetric_scoring(estimator, scoring=self.scoring)
    if not multimetric:
        scorer = scorer["score"]
    self.multimetric_ = multimetric

    if self.multimetric_:
        if self.refit is not False and (
                not isinstance(self.refit, str)
                # This will work for both dict / list (tuple)
                or self.refit not in scorer):
            raise ValueError(
                "For multi-metric scoring, the parameter "
                "refit must be set to a scorer key "
                "to refit an estimator with the best "
                "parameter setting on the whole data and "
                "make the best_* attributes "
                "available for that metric. If this is "
                "not needed, refit should be set to "
                "False explicitly. %r was passed." % self.refit)

    self.scorer_ = scorer

    error_score = self.error_score
    if not (isinstance(error_score, numbers.Number) or error_score == "raise"):
        raise ValueError("error_score must be the string 'raise' or a"
                         " numeric value.")

    dsk, keys, n_splits = build_graph(
        estimator,
        self.cv,
        self.scorer_,
        list(self._get_param_iterator()),
        X,
        y,
        groups,
        fit_params,
        iid=self.iid,
        refit=self.refit,
        error_score=error_score,
        return_train_score=self.return_train_score,
        cache_cv=self.cache_cv,
        multimetric=multimetric,
    )
    self.dask_graph_ = dsk
    self.n_splits_ = n_splits

    n_jobs = _normalize_n_jobs(self.n_jobs)
    scheduler = dask.base.get_scheduler(scheduler=self.scheduler)
    if not scheduler:
        scheduler = dask.threaded.get
    if scheduler is dask.threaded.get and n_jobs == 1:
        scheduler = dask.local.get_sync

    out = scheduler(dsk, keys, num_workers=n_jobs)
    results = handle_deprecated_train_score(out[0], self.return_train_score)
    self.cv_results_ = results

    if self.refit:
        if self.multimetric_:
            key = self.refit
        else:
            key = "score"
        self.best_index_ = np.flatnonzero(
            results["rank_test_{}".format(key)] == 1)[0]
        self.best_estimator_ = out[1]

    return self
def query(query_string, secure=False, container='namedtuple', verbose=False,
          user_agent=api.USER_AGENT, no_redirect=False, no_html=False,
          skip_disambig=False):
    """Generates and sends a query to DuckDuckGo API.

    Args:
        query_string: Query to be passed to DuckDuckGo API.
        secure: Use secure SSL/TLS connection. Default - False.
            Syntactic sugar is the secure_query function which is passed the
            same parameters.
        container: Indicates how dict-like objects are serialized. There are
            two possible options: namedtuple and dict. If 'namedtuple' is
            passed the objects will be serialized to a namedtuple instance of
            a certain class. If 'dict' is passed the objects won't be
            deserialized. Default value: 'namedtuple'.
        verbose: Don't raise any exception if an error occurs.
            Default value: False.
        user_agent: User-Agent header of HTTP requests to DuckDuckGo API.
            Default value: 'duckduckpy 0.2'.
        no_redirect: Skip HTTP redirects (for !bang commands).
            Default value: False.
        no_html: Remove HTML from text, e.g. bold and italics.
            Default value: False.
        skip_disambig: Skip disambiguation (D) Type. Default value: False.

    Raises:
        DuckDuckDeserializeError: JSON serialization failed.
        DuckDuckConnectionError: Something went wrong with client operation.
        DuckDuckArgumentError: Passed argument is wrong.

    Returns:
        Container depends on the container parameter. Each field in the
        response is converted to the so-called snake case.

    Usage:
        >>> import duckduckpy
        >>> # Namedtuple is used as a container:
        >>> response = duckduckpy.query('Python')
        >>> response
        Response(redirect=u'', definition=u'', image_width=0, ...}
        >>> type(response)
        <class 'duckduckpy.api.Response'>
        >>> response.related_topics[0]
        Result(first_url=u'https://duckduckgo.com/Python', text=...)
        >>> type(response.related_topics[0])
        <class 'duckduckpy.api.Result'>

        >>> # Dict is used as a container:
        >>> response = duckduckpy.query('Python', container='dict')
        >>> type(response)
        <type 'dict'>
        >>> response
        {u'abstract': u'', u'results': [], u'image_is_logo': 0, ...}
        >>> type(response['related_topics'][0])
        <type 'dict'>
        >>> response['related_topics'][0]
        {u'first_url': u'https://duckduckgo.com/Python', u'text': ...}
    """
    if container not in Hook.containers:
        raise exc.DuckDuckArgumentError(
            "Argument 'container' must be one of the values: "
            "{0}".format(', '.join(Hook.containers)))

    headers = {"User-Agent": user_agent}
    url = url_assembler(
        query_string,
        no_redirect=no_redirect,
        no_html=no_html,
        skip_disambig=skip_disambig)

    if secure:
        conn = http_client.HTTPSConnection(api.SERVER_HOST)
    else:
        conn = http_client.HTTPConnection(api.SERVER_HOST)

    try:
        conn.request("GET", url, "", headers)
        resp = conn.getresponse()
        data = decoder(resp.read())
    except socket.gaierror as e:
        raise exc.DuckDuckConnectionError(e.strerror)
    finally:
        conn.close()

    hook = Hook(container, verbose=verbose)
    try:
        obj = json.loads(data, object_hook=hook)
    except ValueError:
        raise exc.DuckDuckDeserializeError(
            "Unable to deserialize response to an object")

    return obj
def gen_lines_from_textfiles(files: Iterable[TextIO]) -> Generator[str, None, None]:
    """Generates lines from file-like objects.

    Args:
        files: iterable of :class:`TextIO` objects

    Yields:
        each line of all the files
    """
    for file in files:
        for line in file:
            yield line
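# Usage sketch (illustrative, not from the original source): any file-like
# text objects work, e.g. io.StringIO instances.
#
#     import io
#     files = [io.StringIO("a\nb\n"), io.StringIO("c\n")]
#     list(gen_lines_from_textfiles(files))  # -> ['a\n', 'b\n', 'c\n']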
def addUsage_Label(self, usage_label):
    '''Appends one Usage_Label to usage_labels'''
    if isinstance(usage_label, Usage_Label):
        self.usage_labels.append(usage_label)
    else:
        raise Usage_LabelError('usage_label Type should be Usage_Label, not %s'
                               % type(usage_label))
def tupleize(element, ignore_types=(str, bytes)):
    """Wrap a single element in a tuple; iterables (other than the ignored
    types) are returned unchanged."""
    if hasattr(element, '__iter__') and not isinstance(element, ignore_types):
        return element
    else:
        return tuple((element,))
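# Usage sketch (illustrative, not from the original source): strings are
# treated as scalars because of the default ignore_types.
#
#     tupleize(3)        # -> (3,)
#     tupleize('abc')    # -> ('abc',)
#     tupleize([1, 2])   # -> [1, 2] (already iterable, returned unchanged)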
def get_editor_style_by_name(name):
    """Get Style class.

    This raises `pygments.util.ClassNotFound` when there is no style with
    this name.
    """
    if name == 'vim':
        vim_style = Style.from_dict(default_vim_style)
    else:
        vim_style = style_from_pygments_cls(get_style_by_name(name))

    return merge_styles([
        vim_style,
        Style.from_dict(style_extensions),
    ])
def pack(self):
    """Pack the frame into a string according to the following scheme:

    |F|R|R|R| opcode|M| Payload len |    Extended payload length     |
    |I|S|S|S|  (4)  |A|     (7)     |            (16/64)             |
    |N|V|V|V|       |S|             |  (if payload len == 126/127)   |
    | |1|2|3|       |K|             |                                |
    |   Extended payload length continued, if payload len == 127    |
    |                               | Masking-key, if MASK set to 1  |
    | Masking-key (continued)       |          Payload Data          |
    :                     Payload Data continued ...                 :
    |                     Payload Data continued ...                 |
    """
    header = struct.pack('!B', (self.final << 7) | (self.rsv1 << 6) |
                         (self.rsv2 << 5) | (self.rsv3 << 4) |
                         (self.opcode & 0xf))
    mask = bool(self.masking_key) << 7
    payload_len = len(self.payload)
    if payload_len <= 125:
        header += struct.pack('!B', mask | payload_len)
    elif payload_len < (1 << 16):
        header += struct.pack('!BH', mask | 126, payload_len)
    elif payload_len < (1 << 63):
        header += struct.pack('!BQ', mask | 127, payload_len)
    else:
        # FIXME: RFC 6455 defines an action for this...
        raise Exception('the payload length is too damn high!')

    if mask:
        return header + self.masking_key + self.mask_payload()
    return header + self.payload
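# Usage sketch (illustrative, not from the original source): an unmasked text
# frame (final=1, opcode=1) with the two-byte payload b'Hi' packs to
# b'\x81\x02Hi' -- 0x81 encodes FIN|opcode, and 0x02 encodes a clear mask bit
# plus payload length 2, per RFC 6455.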
def Handle(self, args, token=None):
    """Renders list of descriptors for all the flows."""
    if data_store.RelationalDBEnabled():
        flow_iterator = iteritems(registry.FlowRegistry.FLOW_REGISTRY)
    else:
        flow_iterator = iteritems(registry.AFF4FlowRegistry.FLOW_REGISTRY)

    result = []
    for name, cls in sorted(flow_iterator):
        # Flows without a category do not show up in the GUI.
        if not getattr(cls, "category", None):
            continue

        # Only show flows that the user is allowed to start.
        try:
            if self.access_check_fn:
                self.access_check_fn(token.username, name)
        except access_control.UnauthorizedAccess:
            continue

        result.append(ApiFlowDescriptor().InitFromFlowClass(cls, token=token))

    return ApiListFlowDescriptorsResult(items=result)
def group_with(self, to_user, project, bypass_limit=False):
    """Join the users in a group."""
    from_user = self
    from_assoc = from_user.fetch_group_assoc(project)
    to_assoc = to_user.fetch_group_assoc(project)
    if from_user == to_user or from_assoc == to_assoc and from_assoc:
        raise GroupWithException('You are already part of that group.')
    if not from_assoc and not to_assoc:
        to_assoc = UserToGroup(group=Group(project=project),
                               project=project, user=to_user)
        Session.add(to_assoc)
        from_count = 1
    elif not to_assoc:
        from_assoc, to_assoc = to_assoc, from_assoc
        from_user, to_user = to_user, from_user
        from_count = 1
    elif not from_assoc:
        from_count = 1
    elif to_assoc.user_count > from_assoc.user_count:
        from_assoc, to_assoc = to_assoc, from_assoc
        from_user, to_user = to_user, from_user
        from_count = from_assoc.user_count
    else:
        from_count = from_assoc.user_count
    if not bypass_limit and project.group_max < to_assoc.user_count + from_count:
        raise GroupWithException('There are too many users to join that '
                                 'group.')
    if from_assoc:
        # Move the submissions and users
        old_group = from_assoc.group
        for submission in from_assoc.group.submissions[:]:
            submission.group = to_assoc.group
        for assoc in from_assoc.group.group_assocs[:]:
            assoc.group = to_assoc.group
        if to_assoc.group.viewed_at is None:
            to_assoc.group.viewed_at = old_group.viewed_at
        elif old_group.viewed_at:
            to_assoc.group.viewed_at = max(old_group.viewed_at,
                                           to_assoc.group.viewed_at)
        Session.delete(old_group)
    else:
        # Add the user to the group
        from_assoc = UserToGroup(group=to_assoc.group, project=project,
                                 user=from_user)
        Session.add(from_assoc)
    # Update the group's submissions' files' permissions
    files = set(assoc.file for sub in to_assoc.group.submissions
                for assoc in sub.files)
    for user in to_assoc.group.users:
        user.files.update(files)
    return to_assoc.group
def add_transition_view_for_model(self, transition_m, parent_state_m):
    """Creates a `TransitionView` and adds it to the canvas

    The method creates a `TransitionView` from the given `TransitionModel`
    `transition_m` and adds it to the canvas.

    :param TransitionModel transition_m: The transition for which a view is
        to be created
    :param ContainerStateModel parent_state_m: The parental `StateModel` of
        the transition
    """
    parent_state_v = self.canvas.get_view_for_model(parent_state_m)
    hierarchy_level = parent_state_v.hierarchy_level
    transition_v = TransitionView(transition_m, hierarchy_level)
    # Draw the transition above all other state elements
    self.canvas.add(transition_v, parent_state_v, index=None)
    self._connect_transition_to_ports(transition_m, transition_v,
                                      parent_state_m, parent_state_v)
    return transition_v
def InitAgeCheck(self):
    """Make an interactive grid in which users can edit ages."""
    age_df = self.contribution.tables['ages'].df
    self.panel = wx.Panel(self, style=wx.SIMPLE_BORDER)
    self.grid_frame = grid_frame3.GridFrame(self.contribution, self.WD,
                                            'ages', 'ages', self.panel,
                                            main_frame=self.main_frame)
    self.grid_frame.exitButton.SetLabel('Save and continue')
    grid = self.grid_frame.grid
    self.grid_frame.Bind(wx.EVT_BUTTON,
                         lambda event: self.onContinue(event, grid, None),
                         self.grid_frame.exitButton)
    # Add back button
    self.backButton = wx.Button(self.grid_frame.panel, id=-1, label='Back',
                                name='back_btn')
    self.Bind(wx.EVT_BUTTON,
              lambda event: self.onbackButton(event, self.InitLocCheck),
              self.backButton)
    self.grid_frame.main_btn_vbox.Add(self.backButton, flag=wx.ALL, border=5)
    # Re-do fit
    self.grid_frame.do_fit(None, self.min_size)
    # Center
    self.grid_frame.Centre()
    return
def find_good(control_board, actuation_steps, resistor_index, start_index,
              end_index):
    '''Use a binary search over the range of provided actuation_steps to find
    the maximum actuation voltage that is measured by the board feedback
    circuit using the specified feedback resistor.'''
    lower = start_index
    upper = end_index
    while lower < upper - 1:
        index = lower + (upper - lower) // 2
        v = actuation_steps[index]
        control_board.set_waveform_voltage(v)
        data = measure_board_rms(control_board)
        valid_data = data[data['divider resistor index'] >= 0]
        if (valid_data['divider resistor index'] < resistor_index).sum():
            # We have some measurements from another resistor.
            upper = index
        else:
            lower = index
    control_board.set_waveform_voltage(actuation_steps[lower])
    data = measure_board_rms(control_board)
    return lower, data
def walk_dict(data):
    """Generates pairs ``(keys, value)`` for each item in given dictionary,
    including nested dictionaries. Each pair contains:

    `keys`
        a tuple of 1..n keys, e.g. ``('foo',)`` for a key on root level or
        ``('foo', 'bar')`` for a key in a nested dictionary.

    `value`
        the value of given key or ``None`` if it is a nested dictionary and
        therefore can be further unwrapped.
    """
    assert hasattr(data, '__getitem__')
    for key, value in data.items():
        if isinstance(value, dict):
            yield (key,), None
            for keys, value in walk_dict(value):
                path = (key,) + keys
                yield path, value
        else:
            yield (key,), value
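# Usage sketch (illustrative, not from the original source): nested
# dictionaries yield a (keys, None) pair before their children.
#
#     list(walk_dict({'a': 1, 'b': {'c': 2}}))
#     # -> [(('a',), 1), (('b',), None), (('b', 'c'), 2)]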
def register_ops_if_needed(graph_ops):
    """Register graph ops absent in op_def_registry, if present in c++ registry.

    Args:
        graph_ops: set with graph op names to register.

    Raises:
        RuntimeError: if `graph_ops` contains ops that are not in either
            python or c++ registry.
    """
    missing_ops = graph_ops - set(op_def_registry.get_registered_ops().keys())

    if not missing_ops:
        return

    p_buffer = c_api.TF_GetAllOpList()
    cpp_op_list = op_def_pb2.OpList()
    cpp_op_list.ParseFromString(c_api.TF_GetBuffer(p_buffer))
    cpp_registry_ops = {op.name: op for op in cpp_op_list.op}

    missing_op_list = op_def_pb2.OpList()
    for missing_op in missing_ops:
        if missing_op not in cpp_registry_ops:
            logging.info(
                "Op %s is missing from both the python and C++ registry.",
                missing_op)
        else:
            missing_op_list.op.extend([cpp_registry_ops[missing_op]])
            logging.info(
                "Adding op %s from c++ registry to python registry.",
                missing_op)

    op_def_registry.register_op_list(missing_op_list)

    # Note: Only raise missing op ValueError after trying to load ops.
    # This allows the test to exercise all the calls into TensorFlow
    # without having to write a C + python test.
    if not missing_ops <= set(cpp_registry_ops.keys()):
        raise RuntimeError(
            "Graph ops missing from the python registry (%s) are also absent "
            "from the c++ registry."
            % missing_ops.difference(set(cpp_registry_ops.keys())))
def freeze_extensions(self):
    """Freeze the set of extensions into a single file.

    Freezing extensions can speed up the extension loading process on
    machines with slow file systems since it requires only a single file
    to store all of the extensions.

    Calling this method will save a file into the current virtual
    environment that stores a list of all currently found extensions that
    have been installed as entry_points. Future calls to `load_extensions`
    will only search the one single file containing frozen extensions
    rather than enumerating all installed distributions.
    """
    output_path = os.path.join(_registry_folder(), 'frozen_extensions.json')
    with open(output_path, "w") as outfile:
        json.dump(self._dump_extensions(), outfile)
def _MergeDifferentId(self):
    """Tries to merge all possible combinations of entities.

    This tries to merge every entity in the old schedule with every entity in
    the new schedule. Unlike _MergeSameId, the ids do not need to match.
    However, _MergeDifferentId is much slower than _MergeSameId.

    This method makes use of various methods like _Merge and _Migrate which
    are not implemented in the abstract DataSetMerger class. These methods
    should be overwritten in a subclass to allow _MergeSameId to work with
    different entity types.

    Returns:
        The number of merged entities.
    """
    # TODO: The same entity from A could merge with multiple from B.
    # This should either generate an error or should be prevented from
    # happening.
    for a in self._GetIter(self.feed_merger.a_schedule):
        for b in self._GetIter(self.feed_merger.b_schedule):
            try:
                self._Add(a, b, self._MergeEntities(a, b))
                self._num_merged += 1
            except MergeError:
                continue

    for a in self._GetIter(self.feed_merger.a_schedule):
        if a not in self.feed_merger.a_merge_map:
            self._num_not_merged_a += 1
            newid = self._HasId(self.feed_merger.b_schedule, self._GetId(a))
            self._Add(a, None,
                      self._Migrate(a, self.feed_merger.a_schedule, newid))

    for b in self._GetIter(self.feed_merger.b_schedule):
        if b not in self.feed_merger.b_merge_map:
            self._num_not_merged_b += 1
            newid = self._HasId(self.feed_merger.a_schedule, self._GetId(b))
            self._Add(None, b,
                      self._Migrate(b, self.feed_merger.b_schedule, newid))

    return self._num_merged
def is_a_valid_coordination_geometry(self, mp_symbol=None, IUPAC_symbol=None,
                                     IUCr_symbol=None, name=None, cn=None):
    """Checks whether a given coordination geometry is valid (exists) and
    whether the parameters are coherent with each other.

    :param mp_symbol: The mp_symbol of the coordination geometry.
    :param IUPAC_symbol:
    :param IUCr_symbol:
    :param name:
    :param cn:
    """
    if name is not None:
        raise NotImplementedError(
            'is_a_valid_coordination_geometry not implemented for the name')
    if mp_symbol is None and IUPAC_symbol is None and IUCr_symbol is None:
        raise SyntaxError(
            'missing argument for is_a_valid_coordination_geometry : at least '
            'one of mp_symbol, IUPAC_symbol and IUCr_symbol must be passed to '
            'the function')
    if mp_symbol is not None:
        try:
            cg = self.get_geometry_from_mp_symbol(mp_symbol)
            if IUPAC_symbol is not None:
                if IUPAC_symbol != cg.IUPAC_symbol:
                    return False
            if IUCr_symbol is not None:
                if IUCr_symbol != cg.IUCr_symbol:
                    return False
            if cn is not None:
                if int(cn) != int(cg.coordination_number):
                    return False
            return True
        except LookupError:
            return False
    elif IUPAC_symbol is not None:
        try:
            cg = self.get_geometry_from_IUPAC_symbol(IUPAC_symbol)
            if IUCr_symbol is not None:
                if IUCr_symbol != cg.IUCr_symbol:
                    return False
            if cn is not None:
                if cn != cg.coordination_number:
                    return False
            return True
        except LookupError:
            return False
    elif IUCr_symbol is not None:
        try:
            cg = self.get_geometry_from_IUCr_symbol(IUCr_symbol)
            if cn is not None:
                if cn != cg.coordination_number:
                    return False
            return True
        except LookupError:
            return True
    raise Exception('Should not be here!')
def get_form(self, request, obj=None, **kwargs):
    """Returns a Form class for use in the admin add view.

    This is used by add_view and change_view.
    """
    parent_id = request.REQUEST.get('parent_id', None)
    if parent_id:
        return FolderForm
    else:
        folder_form = super(FolderAdmin, self).get_form(request, obj=None, **kwargs)

        def folder_form_clean(form_obj):
            cleaned_data = form_obj.cleaned_data
            folders_with_same_name = Folder.objects.filter(
                parent=form_obj.instance.parent,
                name=cleaned_data['name'])
            if form_obj.instance.pk:
                folders_with_same_name = folders_with_same_name.exclude(
                    pk=form_obj.instance.pk)
            if folders_with_same_name.exists():
                raise ValidationError('Folder with this name already exists.')
            return cleaned_data

        # Attach clean to the default form rather than defining a new form class
        folder_form.clean = folder_form_clean
        return folder_form
def validate_count_api(rule_payload, endpoint):
    """Ensures that the counts API is set correctly in a payload."""
    rule = (rule_payload if isinstance(rule_payload, dict)
            else json.loads(rule_payload))
    bucket = rule.get('bucket')
    counts = set(endpoint.split("/")) & {"counts.json"}
    if len(counts) == 0:
        if bucket is not None:
            msg = ("""There is a count bucket present in your payload,
                   but you are not using the counts API.
                   Please check your endpoints and try again""")
            logger.error(msg)
            raise ValueError(msg)
def _to_dict(self):
    """Return a json dictionary representing this model."""
    _dict = {}
    if hasattr(self, 'key') and self.key is not None:
        _dict['key'] = self.key._to_dict()
    if hasattr(self, 'value') and self.value is not None:
        _dict['value'] = self.value._to_dict()
    return _dict
def CreateFromDocument(xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a Python
    instance.

    @param xml_text An XML document. This should be data (Python 2 str or
    Python 3 bytes), or a text (Python 2 unicode or Python 3 str) in the
    L{pyxb._InputEncoding} encoding.

    @keyword default_namespace The L{pyxb.Namespace} instance to use as the
    default namespace where there is no default namespace in scope. If
    unspecified or C{None}, the namespace of the module containing this
    function will be used.

    @keyword location_base: An object to be recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser. You might pass the URI from which the
    document was obtained.
    """
    if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement,
                             default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    saxer = pyxb.binding.saxer.make_parser(
        fallback_namespace=default_namespace, location_base=location_base)
    handler = saxer.getContentHandler()
    xmld = xml_text
    if isinstance(xmld, pyxb.utils.six.text_type):
        xmld = xmld.encode(pyxb._InputEncoding)
    saxer.parse(io.BytesIO(xmld))
    instance = handler.rootObject()
    return instance
def update_label(self, old_label, new_label):
    """Update a label: replace 'old_label' by 'new_label'."""
    logger.info("%s : Updating label ([%s] -> [%s])"
                % (str(self), old_label.name, new_label.name))
    labels = self.labels
    try:
        labels.remove(old_label)
    except ValueError:
        # This document doesn't have this label
        return

    logger.info("%s : Updating label ([%s] -> [%s])"
                % (str(self), old_label.name, new_label.name))
    labels.append(new_label)
    with self.fs.open(self.fs.join(self.path, self.LABEL_FILE), 'w') as file_desc:
        for label in labels:
            file_desc.write("%s,%s\n" % (label.name, label.get_color_str()))
def _check_area_bbox(self):
    """The method checks if the area bounding box is completely inside the
    OSM grid. That means that its latitudes must be contained in the
    interval (-85.0511, 85.0511).

    :raises: ValueError
    """
    for coord in self.area_bbox:
        if abs(coord) > self.POP_WEB_MAX:
            raise ValueError('OsmTileSplitter only works for areas which have '
                             'latitude in interval (-85.0511, 85.0511)')
def chassis_name(self, **kwargs):
    """Get device's chassis name/model.

    Args:
        rbridge_id (str): The rbridge ID of the device.
        callback (function): A function executed upon completion of the
            method. The only parameter passed to `callback` will be the
            ``ElementTree`` `config`.

    Returns:
        Return value of `callback`.

    Raises:
        KeyError: if `rbridge_id` is not specified.

    Examples:
        >>> import pynos.device
        >>> conn = ('10.24.39.211', '22')
        >>> auth = ('admin', 'password')
        >>> with pynos.device.Device(conn=conn, auth=auth) as dev:
        ...     output = dev.system.chassis_name(rbridge_id='225')
        ...     assert output == 'VDX6740'
    """
    namespace = "urn:brocade.com:mgmt:brocade-rbridge"
    rbridge_id = kwargs.pop('rbridge_id', '1')
    chassis_name = ' '
    callback = kwargs.pop('callback', self._callback)
    rid_args = dict(rbridge_id=rbridge_id, chassis_name=chassis_name)
    rid = getattr(self._rbridge, 'rbridge_id_switch_attributes_chassis_name')
    config = rid(**rid_args)
    output = callback(config, handler='get_config')
    chassis_name = output.data.find('.//{%s}chassis-name' % namespace).text
    return chassis_name
def create_packet(self, primary_ip_address, vlan_id=None):
    """Prepare a VRRP packet.

    Returns a newly created ryu.lib.packet.packet.Packet object with
    appropriate protocol header objects added by add_protocol(). It's the
    caller's responsibility to serialize(). The serialized packet would look
    like the ones described in the following sections:

    * RFC 3768 5.1. VRRP Packet Format
    * RFC 5798 5.1. VRRP Packet Format

    Argument            Description
    primary_ip_address  Source IP address
    vlan_id             VLAN ID. None for no VLAN.
    """
    if self.is_ipv6:
        traffic_class = 0xc0  # set tos to internetwork control
        flow_label = 0
        payload_length = ipv6.ipv6._MIN_LEN + len(self)  # XXX _MIN_LEN
        e = ethernet.ethernet(VRRP_IPV6_DST_MAC_ADDRESS,
                              vrrp_ipv6_src_mac_address(self.vrid),
                              ether.ETH_TYPE_IPV6)
        ip = ipv6.ipv6(6, traffic_class, flow_label, payload_length,
                       inet.IPPROTO_VRRP, VRRP_IPV6_HOP_LIMIT,
                       primary_ip_address, VRRP_IPV6_DST_ADDRESS)
    else:
        header_length = ipv4.ipv4._MIN_LEN // 4  # XXX _MIN_LEN
        total_length = 0
        tos = 0xc0  # set tos to internetwork control
        identification = self.get_identification()
        e = ethernet.ethernet(VRRP_IPV4_DST_MAC_ADDRESS,
                              vrrp_ipv4_src_mac_address(self.vrid),
                              ether.ETH_TYPE_IP)
        ip = ipv4.ipv4(4, header_length, tos, total_length, identification,
                       0, 0, VRRP_IPV4_TTL, inet.IPPROTO_VRRP, 0,
                       primary_ip_address, VRRP_IPV4_DST_ADDRESS)

    p = packet.Packet()
    p.add_protocol(e)
    if vlan_id is not None:
        vlan_ = vlan.vlan(0, 0, vlan_id, e.ethertype)
        e.ethertype = ether.ETH_TYPE_8021Q
        p.add_protocol(vlan_)
    p.add_protocol(ip)
    p.add_protocol(self)
    return p
def _getCurrentj9Dict():
    """Download and parse all the webpages.

    For the backend.
    """
    urls = j9urlGenerator()
    j9Dict = {}
    for url in urls:
        d = _getDict(urllib.request.urlopen(url))
        if len(d) == 0:
            raise RuntimeError("Parsing failed; this could require an update of the parser.")
        j9Dict.update(d)
    return j9Dict
def generate_models(config, raml_resources):
    """Generate model for each resource in :raml_resources:

    The DB model name is generated using the singular, titled version of the
    current resource's url. E.g. for a resource under url '/stories', a model
    with name 'Story' will be generated.

    :param config: Pyramid Configurator instance.
    :param raml_resources: List of ramlfications.raml.ResourceNode.
    """
    from .models import handle_model_generation
    if not raml_resources:
        return
    for raml_resource in raml_resources:
        # No need to generate models for dynamic resources
        if is_dynamic_uri(raml_resource.path):
            continue
        # Since POST resources must define a schema, use only POST
        # resources to generate models
        if raml_resource.method.upper() != 'POST':
            continue
        # Generate the DB model. If this is an attribute resource we don't
        # need to generate a model.
        resource_uri = get_resource_uri(raml_resource)
        route_name = get_route_name(resource_uri)
        if not attr_subresource(raml_resource, route_name):
            log.info('Configuring model for route `{}`'.format(route_name))
            model_cls, is_auth_model = handle_model_generation(config, raml_resource)
            if is_auth_model:
                config.registry.auth_model = model_cls
def discover_system_effect(self, pedalboard_info):
    """Generate the system effect based on pedalboard_info.

    :param dict pedalboard_info: To obtain this, see
        :meth:`~pluginsmanager.util.mod_pedalboard_converter.ModPedalboardConvert.get_pedalboard_info()`
    :return SystemEffect: SystemEffect generated based on pedalboard_info
    """
    # MOD swaps ins and outs!!!
    hardware = pedalboard_info['hardware']

    total_audio_outs = hardware['audio_ins']
    total_audio_ins = hardware['audio_outs']

    outputs = ['capture_{}'.format(i) for i in range(1, total_audio_outs + 1)]
    inputs = ['playback_{}'.format(i) for i in range(1, total_audio_ins + 1)]

    midi_inputs = [
        'serial_midi_out' if hardware['serial_midi_out'] else midi_out['symbol']
        for midi_out in hardware['midi_outs'] if midi_out['valid']
    ]
    midi_outputs = [
        'serial_midi_in' if hardware['serial_midi_in'] else midi_in['symbol']
        for midi_in in hardware['midi_ins'] if midi_in['valid']
    ]

    return SystemEffect('system', outputs, inputs, midi_outputs, midi_inputs)
def endpoint_class(collection):
    """Return the :class:`sandman.model.Model` associated with the endpoint
    *collection*.

    :param string collection: a :class:`sandman.model.Model` endpoint
    :rtype: :class:`sandman.model.Model`
    """
    with app.app_context():
        try:
            cls = current_app.class_references[collection]
        except KeyError:
            raise InvalidAPIUsage(404)
    return cls
def set_zones_device_assignment(self, internal_devices, external_devices) -> dict:
    """Sets the devices for the security zones.

    Args:
        internal_devices (List[Device]): the devices which should be used
            for the internal zone
        external_devices (List[Device]): the devices which should be used
            for the external (hull) zone

    Returns:
        the result of _restCall
    """
    internal = [x.id for x in internal_devices]
    external = [x.id for x in external_devices]
    data = {"zonesDeviceAssignment": {"INTERNAL": internal,
                                      "EXTERNAL": external}}
    return self._restCall("home/security/setZonesDeviceAssignment",
                          body=json.dumps(data))
def simple_returns(prices):
    """Compute simple returns from a timeseries of prices.

    Parameters
    ----------
    prices : pd.Series, pd.DataFrame or np.ndarray
        Prices of assets in wide-format, with assets as columns, and
        indexed by datetimes.

    Returns
    -------
    returns : array-like
        Returns of assets in wide-format, with assets as columns, and
        index coerced to be tz-aware.
    """
    if isinstance(prices, (pd.DataFrame, pd.Series)):
        out = prices.pct_change().iloc[1:]
    else:
        # Assume np.ndarray
        out = np.diff(prices, axis=0)
        np.divide(out, prices[:-1], out=out)
    return out
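# Usage sketch (illustrative, not from the original source): each return is
# p_t / p_{t-1} - 1.
#
#     import numpy as np
#     simple_returns(np.array([100.0, 110.0, 99.0]))  # -> array([ 0.1, -0.1])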
def get_or_create(self, login):
    """Get the qid of the item by its external id, or create it if it
    doesn't exist.

    :param login: WDLogin item
    :return: tuple of (qid, list of warnings (strings), success (True if
        success, returns the Exception otherwise))
    """
    if self.p:
        try:
            return self.p.get_or_create(login)
        except Exception as e:
            return None, self.p.warnings, e
    else:
        return None, [], self.e
def expires_at(self):
    """A :py:obj:`~datetime.datetime` of when this signature expires, if a
    signature expiration date is specified. Otherwise, ``None``."""
    if 'SignatureExpirationTime' in self._signature.subpackets:
        expd = next(iter(self._signature.subpackets['SignatureExpirationTime'])).expires
        return self.created + expd
    return None
def run_validators(self, value):
    """Test the given value against all the validators on the field, and
    either raise a `ValidationError` or simply return."""
    errors = []
    for validator in self.validators:
        if hasattr(validator, 'set_context'):
            validator.set_context(self)
        try:
            validator(value)
        except ValidationError as exc:
            # If the validation error contains a mapping of fields to
            # errors then simply raise it immediately rather than
            # attempting to accumulate a list of errors.
            if isinstance(exc.detail, dict):
                raise
            errors.extend(exc.detail)
        except DjangoValidationError as exc:
            errors.extend(exc.messages)
    if errors:
        raise ValidationError(errors)
def safe_date(self, x):
    """Transform x[self.col_name] into a date string.

    Args:
        x (dict-like / pandas.Series): Row containing data to cast safely.

    Returns:
        str
    """
    t = x[self.col_name]
    if np.isnan(t):
        return t
    elif np.isposinf(t):
        t = sys.maxsize
    elif np.isneginf(t):
        t = -sys.maxsize
    tmp = time.localtime(float(t) / 1e9)
    return time.strftime(self.date_format, tmp)
def check_xlim_change(self):
    '''check for new X bounds'''
    if self.xlim_pipe is None:
        return None
    xlim = None
    while self.xlim_pipe[0].poll():
        try:
            xlim = self.xlim_pipe[0].recv()
        except EOFError:
            return None
    if xlim != self.xlim:
        return xlim
    return None
def get_mode(path, follow_symlinks=True):
    '''
    Return the mode of a file

    path
        file or directory of which to get the mode

    follow_symlinks
        indicates if symlinks should be followed

    CLI Example:

    .. code-block:: bash

        salt '*' file.get_mode /etc/passwd

    .. versionchanged:: 2014.1.0
        ``follow_symlinks`` option added
    '''
    return stats(os.path.expanduser(path), follow_symlinks=follow_symlinks).get('mode', '')
def setup_and_get_default_path(self, jar_base_filename):
    """Determine the user-specific install path for the Stanford Dependencies
    jar if the jar_url is not specified and ensure that it is writable (that
    is, make sure the directory exists). Returns the full path for where the
    jar file should be installed."""
    import os
    import errno
    install_dir = os.path.expanduser(INSTALL_DIR)
    try:
        os.makedirs(install_dir)
    except OSError as ose:
        if ose.errno != errno.EEXIST:
            raise ose
    jar_filename = os.path.join(install_dir, jar_base_filename)
    return jar_filename
def merge_figure(fig, subfig):
    """Merge a sub-figure into a parent figure.

    Note: this function mutates the input fig dict, but it does not mutate
    the subfig dict.

    Parameters
    ----------
    fig : dict
        The plotly figure dict into which the sub figure will be merged
    subfig : dict
        The plotly figure dict that will be copied and then merged into `fig`
    """
    # traces
    data = fig.setdefault('data', [])
    data.extend(copy.deepcopy(subfig.get('data', [])))

    # layout
    layout = fig.setdefault('layout', {})
    _merge_layout_objs(layout, subfig.get('layout', {}))
def _open_response(self, objects, namespace, pull_type, **params): """Build an open... response once the objects have been extracted from the repository."""
max_obj_cnt = params [ 'MaxObjectCount' ] if max_obj_cnt is None : max_obj_cnt = _DEFAULT_MAX_OBJECT_COUNT default_server_timeout = 40 timeout = default_server_timeout if params [ 'OperationTimeout' ] is None else params [ 'OperationTimeout' ] if len ( objects ) <= max_obj_cnt : eos = u'TRUE' context_id = "" rtn_inst_names = objects else : eos = u'FALSE' context_id = self . _create_contextid ( ) # TODO : ks Future . Use the timeout along with response delay . Then # user could timeout pulls . This means adding timer test to # pulls and close . Timer should be used to close old contexts # also . self . enumeration_contexts [ context_id ] = { 'pull_type' : pull_type , 'data' : objects , 'namespace' : namespace , 'time' : time . clock ( ) , 'interoptimeout' : timeout } rtn_inst_names = objects [ 0 : max_obj_cnt ] del objects [ 0 : max_obj_cnt ] return self . _make_pull_imethod_resp ( rtn_inst_names , eos , context_id )
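Stripped of the CIM details, the open response is a slice-and-stash pagination scheme: return the first batch, and park the remainder under a fresh context id for later pulls. A minimal sketch; the plain dict here stands in for the real enumeration_contexts bookkeeping:

import uuid

contexts = {}

def open_response(objects, max_object_count=100):
    if len(objects) <= max_object_count:
        return objects, True, ''              # batch, end-of-sequence, no context
    context_id = str(uuid.uuid4())
    contexts[context_id] = objects[max_object_count:]
    return objects[:max_object_count], False, context_id

batch, eos, ctx = open_response(list(range(250)))
print(len(batch), eos, len(contexts[ctx]))    # 100 False 150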
def withdraw(self, amount): """Withdraws the specified number of neopoints from the user's account. Parameters: amount (int) -- Amount of neopoints to withdraw. Returns: bool - True if successful, False otherwise. Raises: notEnoughBalance"""
pg = self . usr . getPage ( "http://www.neopets.com/bank.phtml" ) try : results = pg . find ( text = "Account Type:" ) . parent . parent . parent . find_all ( "td" , align = "center" ) self . balance = results [ 1 ] . text . replace ( " NP" , "" ) except Exception : logging . getLogger ( "neolib.user" ) . exception ( "Could not parse user's bank balance." , { 'pg' : pg } ) if int ( amount ) > int ( self . balance . replace ( "," , "" ) ) : raise notEnoughBalance form = pg . form ( action = "process_bank.phtml" ) form . update ( { 'type' : 'withdraw' , 'amount' : str ( amount ) } ) form . usePin = True pg = form . submit ( ) # Success redirects to bank page if "It's great to see you again" in pg . content : self . __loadDetails ( pg ) return True else : logging . getLogger ( "neolib.user" ) . info ( "Failed to withdraw NPs for unknown reason. User NPs: " + str ( self . usr . nps ) + ". Amount: " + str ( amount ) , { 'pg' : pg } ) return False
def getcompress(self): """Retrieves info about dataset compression type and mode.
Args: no argument
Returns: tuple holding:
  - compression type (one of the SDC.COMP_xxx constants)
  - optional values, depending on the compression type:
      COMP_NONE     no additional value
      COMP_SKPHUFF  1 value:  skip size
      COMP_DEFLATE  1 value:  gzip compression level (1 to 9)
      COMP_SZIP     5 values: options mask, pixels per block (2 to 32),
                    pixels per scanline, bits per pixel (number of bits in
                    the SDS datatype), pixels (number of elements in the SDS)
Note: in the context of an SDS, the word "pixel" should really be understood as meaning "data element", e.g. a cell value inside a multidimensional grid. Test the options mask against the constants SDC.COMP_SZIP_NN and SDC.COMP_SZIP_EC, e.g.: if optionMask & SDC.COMP_SZIP_EC: print "EC encoding scheme used". An exception is raised if the dataset is not compressed.
.. note:: Starting with v0.8, an exception is always raised if pyhdf was installed with the NOCOMPRESS macro set.
C library equivalent: SDgetcompress"""
status , comp_type , value , v2 , v3 , v4 , v5 = _C . _SDgetcompress ( self . _id ) _checkErr ( 'getcompress' , status , 'no compression' ) if comp_type == SDC . COMP_NONE : return ( comp_type , ) elif comp_type == SDC . COMP_SZIP : return comp_type , value , v2 , v3 , v4 , v5 else : return comp_type , value
def search ( self ) : """Return list of cells to be removed ."""
matches = [ ] for index , cell in enumerate ( self . cells ) : for pattern in Config . patterns : if ismatch ( cell , pattern ) : matches . append ( index ) break return matches
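A self-contained sketch of the same first-match-wins scan, with fnmatch standing in for the real ismatch() and Config.patterns (both are assumptions here):

import fnmatch

def search(cells, patterns):
    matches = []
    for index, cell in enumerate(cells):
        for pattern in patterns:
            if fnmatch.fnmatch(cell, pattern):
                matches.append(index)
                break  # one matching pattern per cell is enough
    return matches

print(search(['keep', 'drop_me', 'scratch_1'], ['drop_*', 'scratch_*']))  # [1, 2]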
def add_permission ( self , topic , label , account_ids , actions ) : """Adds a statement to a topic ' s access control policy , granting access for the specified AWS accounts to the specified actions . : type topic : string : param topic : The ARN of the topic . : type label : string : param label : A unique identifier for the new policy statement . : type account _ ids : list of strings : param account _ ids : The AWS account ids of the users who will be give access to the specified actions . : type actions : list of strings : param actions : The actions you want to allow for each of the specified principal ( s ) ."""
params = { 'ContentType' : 'JSON' , 'TopicArn' : topic , 'Label' : label } self . build_list_params ( params , account_ids , 'AWSAccountId' ) self . build_list_params ( params , actions , 'ActionName' ) response = self . make_request ( 'AddPermission' , params , '/' , 'GET' ) body = response . read ( ) if response . status == 200 : return json . loads ( body ) else : boto . log . error ( '%s %s' % ( response . status , response . reason ) ) boto . log . error ( '%s' % body ) raise self . ResponseError ( response . status , response . reason , body )
def get_css_classes ( cls , instance ) : """Returns a list of CSS classes to be added as class = " . . . " to the current HTML tag ."""
css_classes = [ ] if hasattr ( cls , 'default_css_class' ) : css_classes . append ( cls . default_css_class ) for attr in getattr ( cls , 'default_css_attributes' , [ ] ) : css_class = instance . glossary . get ( attr ) if isinstance ( css_class , six . string_types ) : css_classes . append ( css_class ) elif isinstance ( css_class , list ) : css_classes . extend ( css_class ) return css_classes
def item_afdeling_adapter ( obj , request ) : """Adapter for rendering an object of : class : ` crabpy . gateway . capakey . Afdeling ` to json ."""
return { 'id' : obj . id , 'naam' : obj . naam , 'gemeente' : { 'id' : obj . gemeente . id , 'naam' : obj . gemeente . naam } , 'centroid' : obj . centroid , 'bounding_box' : obj . bounding_box }
def destroy_all_models_in_dict(target_dict): """Runs the prepare_destruction method of models that are assumed to be stored in lists or tuples as the values of the given dict"""
if target_dict : for model_list in target_dict . values ( ) : if isinstance ( model_list , ( list , tuple ) ) : for model in model_list : model . prepare_destruction ( ) if model . _parent : model . _parent = None else : raise Exception ( "wrong data in clipboard" )
def replace_country_by_id(cls, country_id, country, **kwargs): """Replace Country: replace all attributes of a Country. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, pass async=True: >>> thread = api.replace_country_by_id(country_id, country, async=True) >>> result = thread.get() :param async bool: :param str country_id: ID of country to replace (required) :param Country country: Attributes of country to replace (required) :return: Country. If the method is called asynchronously, returns the request thread."""
kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'async' ) : return cls . _replace_country_by_id_with_http_info ( country_id , country , ** kwargs ) else : ( data ) = cls . _replace_country_by_id_with_http_info ( country_id , country , ** kwargs ) return data
def get_games ( ctx ) : """Prints out games owned by a Steam user ."""
username = ctx.obj['username']
games = User(username).get_games_owned()
for game in sorted(games.values(), key=itemgetter('title')):
    click.echo('%s [appid: %s]' % (game['title'], game['appid']))
click.secho('Total games owned by `%s`: %d' % (username, len(games)), fg='green')
def exit_with_error ( self , error , ** kwargs ) : """Report an error and exit . This raises a SystemExit exception to ask the interpreter to quit . Parameters error : string The error to report before quitting ."""
self . error ( error , ** kwargs ) raise SystemExit ( error )
def unit_tophat_ee ( x ) : """Tophat function on the unit interval , left - exclusive and right - exclusive . Returns 1 if 0 < x < 1 , 0 otherwise ."""
x = np.asarray(x)
x1 = np.atleast_1d(x)
r = ((0 < x1) & (x1 < 1)).astype(x.dtype)
if x.ndim == 0:
    return r.item()  # np.asscalar() was removed in NumPy 1.23
return r
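Usage sketch, assuming the function above is in scope; note the scalar-in/scalar-out behaviour and the excluded endpoints:

import numpy as np

xs = np.linspace(-0.5, 1.5, 5)   # [-0.5, 0.0, 0.5, 1.0, 1.5]
print(unit_tophat_ee(xs))        # [0. 0. 1. 0. 0.] -- both endpoints excluded
print(unit_tophat_ee(0.5))       # 1.0 (scalar in, scalar out)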
def dicttoxml(obj, root=True, custom_root='root', ids=False, attr_type=True, item_func=default_item_func, cdata=False): """Converts a python object into XML.
Arguments:
- root specifies whether the output is wrapped in an XML root element. Default is True.
- custom_root allows you to specify a custom root element. Default is 'root'.
- ids specifies whether elements get unique ids. Default is False.
- attr_type specifies whether elements get a data type attribute. Default is True.
- item_func specifies what function should generate the element name for items in a list. Default is 'item'.
- cdata specifies whether string values should be wrapped in CDATA sections. Default is False."""
LOG . info ( 'Inside dicttoxml(): type(obj) is: "%s", obj="%s"' % ( type ( obj ) . __name__ , unicode_me ( obj ) ) ) output = [ ] addline = output . append if root == True : addline ( '<?xml version="1.0" encoding="UTF-8" ?>' ) addline ( '<%s>%s</%s>' % ( custom_root , convert ( obj , ids , attr_type , item_func , cdata , parent = custom_root ) , custom_root , ) ) else : addline ( convert ( obj , ids , attr_type , item_func , cdata , parent = '' ) ) return '' . join ( output ) . encode ( 'utf-8' )
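A usage sketch; the exact serialization is delegated to convert(), but with attr_type=False the output comes out along these lines:

doc = {'name': 'widget', 'tags': ['a', 'b']}
xml = dicttoxml(doc, custom_root='item', attr_type=False)
print(xml.decode('utf-8'))
# roughly: <?xml version="1.0" encoding="UTF-8" ?>
#          <item><name>widget</name><tags><item>a</item><item>b</item></tags></item>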
def _remove ( self , args ) : '''Remove a package'''
if len ( args ) < 2 : raise SPMInvocationError ( 'A package must be specified' ) packages = args [ 1 : ] msg = 'Removing packages:\n\t{0}' . format ( '\n\t' . join ( packages ) ) if not self . opts [ 'assume_yes' ] : self . ui . confirm ( msg ) for package in packages : self . ui . status ( '... removing {0}' . format ( package ) ) if not self . _pkgdb_fun ( 'db_exists' , self . opts [ 'spm_db' ] ) : raise SPMDatabaseError ( 'No database at {0}, cannot remove {1}' . format ( self . opts [ 'spm_db' ] , package ) ) # Look at local repo index pkg_info = self . _pkgdb_fun ( 'info' , package , self . db_conn ) if pkg_info is None : raise SPMInvocationError ( 'Package {0} not installed' . format ( package ) ) # Find files that have not changed and remove them files = self . _pkgdb_fun ( 'list_files' , package , self . db_conn ) dirs = [ ] for filerow in files : if self . _pkgfiles_fun ( 'path_isdir' , filerow [ 0 ] ) : dirs . append ( filerow [ 0 ] ) continue file_hash = hashlib . sha1 ( ) digest = self . _pkgfiles_fun ( 'hash_file' , filerow [ 0 ] , file_hash , self . files_conn ) if filerow [ 1 ] == digest : self . _verbose ( 'Removing file {0}' . format ( filerow [ 0 ] ) , log . trace ) self . _pkgfiles_fun ( 'remove_file' , filerow [ 0 ] , self . files_conn ) else : self . _verbose ( 'Not removing file {0}' . format ( filerow [ 0 ] ) , log . trace ) self . _pkgdb_fun ( 'unregister_file' , filerow [ 0 ] , package , self . db_conn ) # Clean up directories for dir_ in sorted ( dirs , reverse = True ) : self . _pkgdb_fun ( 'unregister_file' , dir_ , package , self . db_conn ) try : self . _verbose ( 'Removing directory {0}' . format ( dir_ ) , log . trace ) os . rmdir ( dir_ ) except OSError : # Leave directories in place that still have files in them self . _verbose ( 'Cannot remove directory {0}, probably not empty' . format ( dir_ ) , log . trace ) self . _pkgdb_fun ( 'unregister_pkg' , package , self . db_conn )
def create ( self , name , ** kwargs ) : """Create a new node"""
# These arguments are required self . required ( 'create' , kwargs , [ 'hostname' , 'port' , 'storage_hostname' , 'volume_type_name' , 'size' ] ) kwargs [ 'name' ] = name return self . http_post ( '/nodes' , params = kwargs )
def save_colormap ( self , name = None ) : """Saves the colormap with the specified name . None means use internal name . ( See get _ name ( ) )"""
if name is None:
    name = self.get_name()
if name == "" or not isinstance(name, str):
    return "Error: invalid name."

# get the colormaps directory and make sure it exists
colormaps = _os.path.join(_settings.path_home, 'colormaps')
_settings.MakeDir(colormaps)

# assemble the path to the colormap
path = _os.path.join(_settings.path_home, 'colormaps', name + ".cmap")

# open the file and overwrite
f = open(path, 'w')
f.write(str(self._colorpoint_list))
f.close()
return self
def getCitiesDrawingXML ( points ) : '''Build an XML string that contains a square for each city'''
xml = "" for p in points : x = str ( p . x ) z = str ( p . y ) xml += '<DrawBlock x="' + x + '" y="7" z="' + z + '" type="beacon"/>' xml += '<DrawItem x="' + x + '" y="10" z="' + z + '" type="ender_pearl"/>' return xml
def spawn_uwsgi(self, only=None): """Spawns uWSGI process(es) which will use configuration(s) from the module. Returns a list of tuples: (configuration_alias, uwsgi_process_id). If only one configuration is found, the current process (uwsgiconf) is replaced with a new one (uWSGI); otherwise a number of new detached processes is spawned. :param str|unicode only: Configuration alias to run from the module. If not set, uWSGI will be spawned for every configuration found in the module. :rtype: list"""
spawned = [ ] configs = self . configurations if len ( configs ) == 1 : alias = configs [ 0 ] . alias UwsgiRunner ( ) . spawn ( self . fpath , alias , replace = True ) spawned . append ( ( alias , os . getpid ( ) ) ) else : for config in configs : # type : Configuration alias = config . alias if only is None or alias == only : pid = UwsgiRunner ( ) . spawn ( self . fpath , alias ) spawned . append ( ( alias , pid ) ) return spawned
def write ( self , string ) : """Write string to file ."""
self . make_dir ( ) with open ( self . path , "w" ) as f : if not string . endswith ( "\n" ) : return f . write ( string + "\n" ) else : return f . write ( string )
def insert ( self , val ) : """Inserts a value and returns a : class : ` Pair < Pair > ` . If the generated key exists or memcache cannot store it , a : class : ` KeyInsertError < shorten . KeyInsertError > ` is raised ( or a : class : ` TokenInsertError < shorten . TokenInsertError > ` if a token exists or cannot be stored ) ."""
key , token , formatted_key , formatted_token = self . next_formatted_pair ( ) if self . has_key ( key ) : raise KeyInsertError ( key ) if self . has_token ( token ) : raise TokenInsertError ( token ) # Memcache is down or read - only if not self . _mc . add ( formatted_key , ( val , token ) ) : raise KeyInsertError ( key , 'key could not be stored' ) if not self . _mc . add ( formatted_token , key ) : raise TokenInsertError ( token , 'token could not be stored' ) return Pair ( key , token )
def data ( self ) : """Data for packet creation ."""
header = struct . pack ( '>BLB' , 4 , # version self . created , # creation self . algo_id ) # public key algorithm ID oid = util . prefix_len ( '>B' , self . curve_info [ 'oid' ] ) blob = self . curve_info [ 'serialize' ] ( self . verifying_key ) return header + oid + blob + self . ecdh_packet
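The '>BLB' format packs the fixed part of the key packet header: a 1-byte version, a 4-byte big-endian creation timestamp, and a 1-byte algorithm id. A standalone sketch with illustrative values:

import struct

created, algo_id = 1700000000, 22  # sample values; 22 is OpenPGP's EdDSA id
header = struct.pack('>BLB', 4, created, algo_id)
print(header.hex(), len(header))   # 046553f10016 6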
def filter ( self , table , column_slice = None ) : """Use the current Query object to create a mask ( a boolean array ) for ` table ` . Parameters table : NumPy structured array , astropy Table , etc . column _ slice : Column to return . Default is None ( return all columns ) . Returns table : filtered table"""
if self . _operator is None and self . _operands is None : return table if column_slice is None else self . _get_table_column ( table , column_slice ) if self . _operator == 'AND' and column_slice is None : for op in self . _operands : table = op . filter ( table ) return table return self . _mask_table ( table if column_slice is None else self . _get_table_column ( table , column_slice ) , self . mask ( table ) )
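The end result of filter() is ordinary boolean-mask indexing; a self-contained sketch on a NumPy structured array, with the Query/_operands machinery elided:

import numpy as np

table = np.array([(1, 0.5), (2, 1.5), (3, 2.5)],
                 dtype=[('id', 'i4'), ('val', 'f8')])
mask = table['val'] > 1.0       # what Query.mask(table) would produce
print(table[mask])              # [(2, 1.5) (3, 2.5)]
print(table['id'][mask])        # [2 3] -- the column_slice case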
def usage ( self , subcommand ) : """Return a brief description of how to use this command , by default from the attribute ` ` self . help ` ` ."""
if len ( self . option_list ) > 0 : usage = '%%prog %s [options] %s' % ( subcommand , self . args ) else : usage = '%%prog %s %s' % ( subcommand , self . args ) if self . help : return '%s\n\n%s' % ( usage , self . help ) else : return usage
def itemtypes(cls, mapped_types, coll_type, visitor): """Like :py:meth:`normalize.visitor.VisitorPattern.aggregate`, but returns the mapped values directly (unwrapped when there is only one). This will normally only get called with a single type."""
rv = list ( v for k , v in mapped_types ) return rv [ 0 ] if len ( rv ) == 1 else rv
def read_hypergraph ( string ) : """Read a hypergraph from a string in dot format . Nodes and edges specified in the input will be added to the current hypergraph . @ type string : string @ param string : Input string in dot format specifying a graph . @ rtype : hypergraph @ return : Hypergraph"""
hgr = hypergraph ( ) dotG = pydot . graph_from_dot_data ( string ) # Read the hypernode nodes . . . # Note 1 : We need to assume that all of the nodes are listed since we need to know if they # are a hyperedge or a normal node # Note 2 : We should read in all of the nodes before putting in the links for each_node in dotG . get_nodes ( ) : if 'hypernode' == each_node . get ( 'hyper_node_type' ) : hgr . add_node ( each_node . get_name ( ) ) elif 'hyperedge' == each_node . get ( 'hyper_node_type' ) : hgr . add_hyperedge ( each_node . get_name ( ) ) # Now read in the links to connect the hyperedges for each_link in dotG . get_edges ( ) : if hgr . has_node ( each_link . get_source ( ) ) : link_hypernode = each_link . get_source ( ) link_hyperedge = each_link . get_destination ( ) elif hgr . has_node ( each_link . get_destination ( ) ) : link_hypernode = each_link . get_destination ( ) link_hyperedge = each_link . get_source ( ) hgr . link ( link_hypernode , link_hyperedge ) return hgr
def result ( self , timeout = None ) : """Return the result of the call that the future represents . Args : timeout : The number of seconds to wait for the result if the future isn ' t done . If None , then there is no limit on the wait time . Returns : The result of the call that the future represents . Raises : TimeoutError : If the future didn ' t finish executing before the given timeout . exceptions . Exception : If the call raised then that exception will be raised ."""
if self . _state == self . RUNNING : self . _context . wait_all_futures ( [ self ] , timeout ) return self . __get_result ( )
def tell(self): """Returns the current position of the write head.
Examples
    >>> record = mx.recordio.MXIndexedRecordIO('tmp.idx', 'tmp.rec', 'w')
    >>> print(record.tell())
    0
    >>> for i in range(5):
    ...     record.write_idx(i, 'record_%d' % i)
    ...     print(record.tell())
    16
    32
    48
    64
    80"""
assert self.writable
pos = ctypes.c_size_t()
check_call(_LIB.MXRecordIOWriterTell(self.handle, ctypes.byref(pos)))
return pos.value
def shell(commands, splitlines=False, ignore_errors=False): '''Subprocess based implementation of pyinfra/api/ssh.py's ``run_shell_command``. Args: commands (string, list): command or list of commands to execute; splitlines (bool): optionally have the output split by lines; ignore_errors (bool): ignore errors when executing these commands'''
if isinstance ( commands , six . string_types ) : commands = [ commands ] all_stdout = [ ] # Checking for pseudo _ state means this function works outside a deploy # eg the vagrant connector . print_output = ( pseudo_state . print_output if pseudo_state . isset ( ) else False ) for command in commands : print_prefix = 'localhost: ' if print_output : print ( '{0}>>> {1}' . format ( print_prefix , command ) ) process = Popen ( command , shell = True , stdout = PIPE , stderr = STDOUT ) stdout = read_buffer ( process . stdout , print_output = print_output , print_func = lambda line : '{0}{1}' . format ( print_prefix , line ) , ) # Get & check result result = process . wait ( ) # Close any open file descriptor process . stdout . close ( ) if result > 0 and not ignore_errors : raise PyinfraError ( 'Local command failed: {0}\n{1}' . format ( command , stdout ) , ) all_stdout . extend ( stdout ) if not splitlines : return '\n' . join ( all_stdout ) return all_stdout
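The core Popen pattern, minus the pyinfra state and output printing, fits in a few lines; a standalone sketch in which RuntimeError stands in for PyinfraError:

from subprocess import PIPE, STDOUT, Popen

def run_local(command, ignore_errors=False):
    process = Popen(command, shell=True, stdout=PIPE, stderr=STDOUT)
    stdout = [line.decode().rstrip('\n') for line in process.stdout]
    result = process.wait()
    process.stdout.close()
    if result > 0 and not ignore_errors:
        raise RuntimeError('Local command failed: {0}\n{1}'.format(command, '\n'.join(stdout)))
    return stdout

print(run_local('echo hello && echo world'))  # ['hello', 'world']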
def insert(self, index, text): """Insert a line into the document"""
if index < 0 or index > self . _doc . blockCount ( ) : raise IndexError ( 'Invalid block index' , index ) if index == 0 : # first cursor = QTextCursor ( self . _doc . firstBlock ( ) ) cursor . insertText ( text ) cursor . insertBlock ( ) elif index != self . _doc . blockCount ( ) : # not the last cursor = QTextCursor ( self . _doc . findBlockByNumber ( index ) . previous ( ) ) cursor . movePosition ( QTextCursor . EndOfBlock ) cursor . insertBlock ( ) cursor . insertText ( text ) else : # last append to the end self . append ( text )
def close(self): """In write mode, closing the handle adds the sentinel value to the queue and joins the thread executing the HTTP request. In read mode, this clears out the read response object so there are no references to it and its resources can be reclaimed."""
if self . _mode . find ( 'w' ) >= 0 : self . _queue . put ( self . _sentinel ) self . _thread . join ( timeout = self . _timeout ) if self . _thread . is_alive ( ) : raise RemoteFileException ( "Closing file timed out." ) response = self . _response_queue . get_nowait ( ) try : response . raise_for_status ( ) except Exception as e : raise RestApiError ( cause = e ) else : self . _read_response = None
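In write mode the shutdown above is the classic sentinel-and-join handshake; a self-contained sketch with a plain queue and thread:

import queue
import threading

SENTINEL = object()
q = queue.Queue()

def worker():
    while True:
        item = q.get()
        if item is SENTINEL:
            return                 # sentinel seen: stop cleanly
        # ... upload `item` over HTTP here ...

t = threading.Thread(target=worker)
t.start()
q.put(b'chunk-1')
q.put(SENTINEL)
t.join(timeout=5.0)
if t.is_alive():
    raise RuntimeError('Closing file timed out.')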
def countries(self): """Return a dictionary of countries, modified by any overriding options. The result is cached so future lookups are cheap."""
if not hasattr ( self , "_countries" ) : only = self . get_option ( "only" ) if only : only_choices = True if not isinstance ( only , dict ) : for item in only : if isinstance ( item , six . string_types ) : only_choices = False break if only and only_choices : self . _countries = dict ( only ) else : # Local import so that countries aren ' t loaded into memory # until first used . from django_countries . data import COUNTRIES self . _countries = dict ( COUNTRIES ) if self . get_option ( "common_names" ) : self . _countries . update ( self . COMMON_NAMES ) override = self . get_option ( "override" ) if override : self . _countries . update ( override ) self . _countries = dict ( ( code , name ) for code , name in self . _countries . items ( ) if name is not None ) if only and not only_choices : countries = { } for item in only : if isinstance ( item , six . string_types ) : countries [ item ] = self . _countries [ item ] else : key , value = item countries [ key ] = value self . _countries = countries self . countries_first = [ ] first = self . get_option ( "first" ) or [ ] for code in first : code = self . alpha2 ( code ) if code in self . _countries : self . countries_first . append ( code ) return self . _countries