signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def close(self):
    """Force a final progress-bar update based on the latest value of ``n``."""
    if self.leave:
        # Redraw only if progress advanced since the last printed update.
        if self.last_print_n < self.n:
            now = time.time()
            self.sp.print_status(format_meter(
                self.n, self.total, now - self.start_t, self.ncols,
                self.prefix, self.unit, self.unit_format, self.ascii))
        self.file.write('\n')
    else:
        # Erase the bar and return the cursor to the start of the line.
        self.sp.print_status('')
        self.file.write('\r')
def importalma(asdm, ms):
    """Convert an ALMA low-level ASDM dataset to Measurement Set format.

    asdm (str): path to the input ASDM dataset.
    ms (str): path to the output MS dataset.

    The "tbuff" parameter is inferred automatically by the helper script.

    Example::

        from pwkit.environments.casa import tasks
        tasks.importalma('myalma.asdm', 'myalma.ms')
    """
    from .scripting import CasapyScript

    helper = os.path.join(os.path.dirname(__file__), 'cscript_importalma.py')
    # The script does all the work; nothing to do inside the context.
    with CasapyScript(helper, asdm=asdm, ms=ms):
        pass
def create_response_object(self, service_id, version_number, name,
                           status="200", response="OK", content="",
                           request_condition=None, cache_condition=None):
    """Create a new Response Object on the given service version.

    Returns the created FastlyResponseObject.
    """
    body = self._formdata({
        "name": name,
        "status": status,
        "response": response,
        "content": content,
        "request_condition": request_condition,
        "cache_condition": cache_condition,
    }, FastlyResponseObject.FIELDS)
    endpoint = "/service/%s/version/%d/response_object" % (service_id,
                                                           version_number)
    payload = self._fetch(endpoint, method="POST", body=body)
    return FastlyResponseObject(self, payload)
def mdf_path(self):
    """Absolute path of the MDF file; empty string if the file is not present."""
    # Lazily cache so that has_abiext runs at most once after a successful hit.
    try:
        return self._mdf_path
    except AttributeError:
        located = self.outdir.has_abiext("MDF.nc")
        if located:
            self._mdf_path = located
        return located
def enable(soft_fail=False):
    """Enable ufw.

    :param soft_fail: if True, silently disable IPv6 support in ufw when it
        is broken; otherwise a UFWIPv6Error exception is raised.
    :returns: True if ufw is successfully enabled
    """
    if is_enabled():
        return True

    if not is_ipv6_ok(soft_fail):
        disable_ipv6()

    # Force an English locale so the success-message match below is reliable.
    output = subprocess.check_output(
        ['ufw', 'enable'],
        universal_newlines=True,
        env={'LANG': 'en_US', 'PATH': os.environ['PATH']})

    matched = re.findall('^Firewall is active and enabled on system startup\n',
                         output, re.M)
    hookenv.log(output, level='DEBUG')

    if not matched:
        hookenv.log("ufw couldn't be enabled", level='WARN')
        return False
    hookenv.log("ufw enabled", level='INFO')
    return True
def args_parser(self, parser_func: CommandHandler_T) -> CommandHandler_T:
    """Decorator to register a function as the arguments parser of the
    corresponding command.

    Returns the function unchanged so it can be stacked with other
    decorators.
    """
    # Record the parser on the underlying command object.
    self.cmd.args_parser_func = parser_func
    return parser_func
def delete_with_casper_admin_save(self, pkg):
    """Delete a pkg from the distribution server.

    Args:
        pkg: a jss.Package object, an int ID of a package, or a filename.
    """
    # The POST endpoint requires the numeric package ID.
    if pkg.__class__.__name__ == "Package":
        package_id = pkg.id
    elif isinstance(pkg, int):
        package_id = pkg
    elif isinstance(pkg, str):
        package_id = self.connection["jss"].Package(pkg).id
    else:
        raise TypeError

    jss = self.connection["jss"]
    payload = {"username": jss.user,
               "password": jss.password,
               "deletedPackageID": package_id}
    jss.session.post(url=self.connection["delete_url"], data=payload)
def replace_nones(list_, repl=-1):
    r"""Recursively replace every ``None`` in ``list_`` and its nested
    sublists with ``repl``.

    Args:
        list_ (list): possibly nested list to clean.
        repl (obj): replacement value for ``None``.

    Returns:
        list: a new list with all ``None`` values substituted.

    Example:
        >>> replace_nones([None, 0, 1, 2], -1)
        [-1, 0, 1, 2]
    """
    cleaned = []
    for element in list_:
        if element is None:
            cleaned.append(repl)
        elif isinstance(element, list):
            cleaned.append(replace_nones(element, repl))
        else:
            cleaned.append(element)
    return cleaned
def translate_request(request):
    """Build a WSGI-compatible environ dictionary from a sanic request."""

    class AwaitablePayload(object):
        # Wraps the body bytes so WSGI apps can ``await`` reads from it.
        def __init__(self, payload):
            self.payload = payload or b''

        async def read(self, length=None):
            if length is None:
                chunk, self.payload = self.payload, b''
            else:
                chunk = self.payload[:length]
                self.payload = self.payload[length:]
            return chunk

    parts = urlsplit(request.url)
    environ = {
        'wsgi.input': AwaitablePayload(request.body),
        'wsgi.errors': sys.stderr,
        'wsgi.version': (1, 0),
        'wsgi.async': True,
        'wsgi.multithread': False,
        'wsgi.multiprocess': False,
        'wsgi.run_once': False,
        'SERVER_SOFTWARE': 'sanic',
        'REQUEST_METHOD': request.method,
        'QUERY_STRING': parts.query or '',
        'RAW_URI': request.url,
        'SERVER_PROTOCOL': 'HTTP/' + request.version,
        'REMOTE_ADDR': '127.0.0.1',
        'REMOTE_PORT': '0',
        'SERVER_NAME': 'sanic',
        'SERVER_PORT': '0',
        'sanic.request': request,
    }

    for name, value in request.headers.items():
        name = name.upper()
        if name == 'CONTENT-TYPE':
            environ['CONTENT_TYPE'] = value
        elif name == 'CONTENT-LENGTH':
            environ['CONTENT_LENGTH'] = value
        else:
            key = 'HTTP_%s' % name.replace('-', '_')
            if key in environ:
                # Repeated headers are folded into a comma-separated value.
                value = '%s,%s' % (environ[key], value)
            environ[key] = value

    environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
    environ['PATH_INFO'] = parts.path
    environ['SCRIPT_NAME'] = ''
    return environ
def write_structure(times=None):
    """Produce a formatted record of a times data structure.

    Args:
        times (Times, optional): defaults to the current root timer.

    Returns:
        str: timer tree hierarchy in a formatted string.

    Raises:
        TypeError: if ``times`` is provided but is not a Times object.
    """
    if times is None:
        return report_loc.write_structure(f.root.times)
    if not isinstance(times, Times):
        raise TypeError("Expected Times instance for param 'times' (default is root).")
    return report_loc.write_structure(times)
def migrator(state):
    """Rebuild a Cleverbot instance from saved state.

    Nameless conversations will be lost.
    """
    bot_kwargs, conversation_kwargs = state
    bot = Cleverbot(**bot_kwargs)
    for kwargs in conversation_kwargs:
        bot.conversation(**kwargs)
    return bot
def columnCount(self, index=QModelIndex()):
    """DataFrame column number."""
    if self.axis == 0:
        # Report at most the number of columns loaded so far.
        return min(self.total_cols, self.cols_loaded)
    return max(1, self._shape[1])
def _ops_to_requests(self, ops):
    """Translate etcd3.transactions operations into gRPC RequestOp messages.

    Accepts Put, Get, Delete and (nested) Txn transaction objects; any
    other type raises.
    """
    requests = []
    for op in ops:
        if isinstance(op, transactions.Put):
            put = self._build_put_request(op.key, op.value, op.lease,
                                          op.prev_kv)
            requests.append(etcdrpc.RequestOp(request_put=put))
        elif isinstance(op, transactions.Get):
            rng = self._build_get_range_request(op.key, op.range_end)
            requests.append(etcdrpc.RequestOp(request_range=rng))
        elif isinstance(op, transactions.Delete):
            delete = self._build_delete_request(op.key, op.range_end,
                                                op.prev_kv)
            requests.append(etcdrpc.RequestOp(request_delete_range=delete))
        elif isinstance(op, transactions.Txn):
            # Nested transactions recurse through the same conversion.
            txn = etcdrpc.TxnRequest(
                compare=[c.build_message() for c in op.compare],
                success=self._ops_to_requests(op.success),
                failure=self._ops_to_requests(op.failure))
            requests.append(etcdrpc.RequestOp(request_txn=txn))
        else:
            raise Exception('Unknown request class {}'.format(op.__class__))
    return requests
def _save_password_in_keyring(credential_id, username, password):
    '''Save the provider password in the system keyring.

    Returns False (and logs an error) when no keyring module is available.
    '''
    try:
        import keyring  # pylint: disable=import-error
    except ImportError:
        log.error('Tried to store password in keyring, but no keyring module is installed')
        return False
    return keyring.set_password(credential_id, username, password)
def namedb_get_num_blockstack_ops_at(db, block_id):
    """Count the name/namespace/token operations recorded at a given block."""
    cur = db.cursor()

    # Preorders created at this block.
    num_preorders = namedb_select_count_rows(
        cur,
        "SELECT COUNT(*) FROM preorders WHERE block_number = ?;",
        (block_id,))

    # Committed operations at this block; take the single COUNT(*) row.
    rows = namedb_query_execute(
        cur, "SELECT COUNT(*) FROM history WHERE block_id = ?;", (block_id,))
    num_history = 0
    for row in rows:
        num_history = row['COUNT(*)']
        break

    log.debug("{} preorders; {} history rows at {}".format(
        num_preorders, num_history, block_id))
    return num_history + num_preorders
def process_cgmlst_results(df):
    """Append informative fields to a cgMLST330 BLAST results DataFrame.

    The `qseqid` column must contain cgMLST330 query IDs in
    `{marker name}|{allele number}` format; the parsed marker names and
    allele numbers are appended as new columns. `is_perfect` marks rows
    with 100% identity and full coverage; `has_perfect_match` marks
    markers that have at least one perfect allele.

    Args:
        df (pandas.DataFrame): DataFrame of cgMLST330 BLAST results

    Returns:
        pandas.DataFrame: input DataFrame with extra fields (`marker`,
        `allele`, `is_perfect`, `has_perfect_match`, ...)
    """
    assert isinstance(df, pd.DataFrame)
    # Split "marker|allele" query IDs into parallel lists.
    markers = []
    alleles = []
    for x in df['qseqid']:
        marker, allele = x.split('|')
        markers.append(marker)
        alleles.append(int(allele))
    df.loc[:, 'marker'] = markers
    df.loc[:, 'allele'] = alleles
    # A "match" requires full coverage, >=90% identity and no truncation.
    df.loc[:, 'is_match'] = (df['coverage'] >= 1.0) & (df['pident'] >= 90.0) & ~(df['is_trunc'])
    # Gaps are stripped from the subject sequence before naming the allele.
    df.loc[:, 'allele_name'] = df.apply(lambda x: allele_name(x.sseq.replace('-', '')), axis=1)
    df.loc[:, 'is_perfect'] = (df['coverage'] == 1.0) & (df['pident'] == 100.0)
    df_perf = df[df['is_perfect']]
    perf_markers = df_perf['marker'].unique()
    df.loc[:, 'has_perfect_match'] = df['marker'].isin(perf_markers)
    # Subject-match extension bookkeeping (semantics defined by
    # extend_subj_match_vec elsewhere in the project).
    start_idxs, end_idxs, needs_revcomps, trunc, is_extended = extend_subj_match_vec(df)
    df.loc[:, 'start_idx'] = start_idxs
    df.loc[:, 'end_idx'] = end_idxs
    df.loc[:, 'needs_revcomp'] = needs_revcomps
    df.loc[:, 'trunc'] = trunc
    df.loc[:, 'is_extended'] = is_extended
    # Gap counters start at zero -- presumably filled in by a later MSA
    # step; TODO confirm against the caller.
    df.loc[:, 'sseq_msa_gaps'] = np.zeros(df.shape[0], dtype=np.int64)
    df.loc[:, 'sseq_msa_p_gaps'] = np.zeros(df.shape[0], dtype=np.float64)
    df.loc[:, 'too_many_gaps'] = trunc
    return df
def handle_error(self, error):
    """Render *error* as a minimal HTML error page and write it to the
    transport.

    Called when an error is raised during a responder's on_data()
    function. Subclasses are expected to overload this.

    Parameters
    ----------
    error : Exception
        Exception thrown during code execution.
    """
    if isinstance(error, HTTPError):
        # HTTP-aware errors carry their own status code and message.
        err_code = error.code
        err_msg = error.msg
        err_info = ''
    else:
        err_code = 500
        err_msg = "Server Error"
        err_info = "%s" % error
        print("Unexpected Server Error", file=stderr)
        traceback.print_tb(error.__traceback__, file=stderr)

    err_str = ("<html>"
               "<head></head>"
               "<body><h1>HTTP Error : {code} {message}</h1><p>{info}</p></body>"
               "</html>\n").format(code=err_code, message=err_msg,
                                   info=err_info)

    header_info = {'code': err_code,
                   'msg': err_msg,
                   'date': HTTPResponse.get_current_time(),
                   'length': len(err_str.encode()),
                   'contents': err_str}

    response = '\r\n'.join(("HTTP/1.1 {code} {msg}",
                            "Content-Type: text/html; charset=UTF-8",
                            "Content-Length: {length}",
                            "Date: {date}",
                            "",
                            "{contents}")).format(**header_info)
    self.transport.write(response.encode())
def _format_lat ( self , lat ) : '''Format latitude to fit the image name'''
if self . ppd in [ 4 , 8 , 16 , 32 , 64 ] : latcenter = '000N' elif self . ppd in [ 128 ] : if lat < 0 : latcenter = '450S' else : latcenter = '450N' return latcenter
def shape(self):
    """Tuple of array dimensions.

    Examples
    --------
    >>> x = mx.nd.array([1, 2, 3, 4])
    >>> x.shape
    (4L,)
    >>> y = mx.nd.zeros((2, 3, 4))
    >>> y.shape
    (2L, 3L, 4L)
    """
    ndim = mx_int()
    dims = ctypes.POINTER(mx_int)()
    check_call(_LIB.MXNDArrayGetShapeEx(
        self.handle, ctypes.byref(ndim), ctypes.byref(dims)))
    # An ndim of -1 signals an unknown shape.
    return None if ndim.value == -1 else tuple(dims[:ndim.value])
def iter_next ( self ) -> bool : """True if the iterator can return another batch ."""
# Read batch _ size lines from the source stream sources_sentences = [ [ ] for x in self . sources_sentences ] # type : List [ List [ str ] ] target_sentences = [ ] # type : List [ str ] num_read = 0 for num_read , ( sources , target ) in enumerate ( parallel_iterate ( self . sources_iters , self . target_iter , skip_blanks = False ) , 1 ) : source_len = 0 if sources [ 0 ] is None else len ( sources [ 0 ] ) target_len = 0 if target is None else len ( target ) if source_len > self . max_len_source : logger . info ( "Trimming source sentence {} ({} -> {})" . format ( self . sentno + num_read , source_len , self . max_len_source ) ) sources = [ source [ 0 : self . max_len_source ] for source in sources ] if target_len > self . max_len_target : logger . info ( "Trimming target sentence {} ({} -> {})" . format ( self . sentno + num_read , target_len , self . max_len_target ) ) target = target [ 0 : self . max_len_target ] for i , source in enumerate ( sources ) : sources_sentences [ i ] . append ( source ) target_sentences . append ( target ) if num_read == self . batch_size : break self . sentno += num_read if num_read == 0 : self . next_batch = None return False # The final batch may be underfilled , so mark it num_pad = self . batch_size - num_read dataset = self . data_loader . load ( sources_sentences , target_sentences , [ num_read ] ) . fill_up ( self . bucket_batch_sizes ) data = [ dataset . source [ 0 ] , dataset . target [ 0 ] ] label = dataset . label provide_data = [ mx . io . DataDesc ( name = n , shape = x . shape , layout = C . BATCH_MAJOR ) for n , x in zip ( self . data_names , data ) ] provide_label = [ mx . io . DataDesc ( name = n , shape = x . shape , layout = C . BATCH_MAJOR ) for n , x in zip ( self . label_names , label ) ] self . next_batch = mx . io . DataBatch ( data , label , pad = num_pad , index = None , bucket_key = self . buckets [ 0 ] , provide_data = provide_data , provide_label = provide_label ) return True
def getcomponentdetails(self, product, component, force_refresh=False):
    """Helper for accessing a single component's info.

    Thin wrapper around getcomponentsdetails; see that method for an
    explanation.
    """
    details = self.getcomponentsdetails(product, force_refresh)
    return details[component]
def init_widget(self):
    """Configure the popup window from its declaration and set the
    dismiss listeners."""
    w = self.window
    d = self.declaration
    self.set_background_color(d.background_color)
    self.set_touchable(d.touchable)
    self.set_outside_touchable(d.outside_touchable)
    # Listen for events
    # NOTE(review): passing w.getId() where a listener is usually expected
    # looks suspicious -- confirm against the bridge API.
    w.setOnDismissListener(w.getId())
    w.onDismiss.connect(self.on_dismiss)
    super(AndroidPopupWindow, self).init_widget()
def iterate(self, start_line=None, parse_attr=True, headers=False, comments=False):
    """Iterate over a GFF3 file, yielding GFF3Entry objects.

    Args:
        start_line (str): next GFF3 entry; if the handle has been
            partially read, pass the next entry here to resume iteration.
        parse_attr (bool): parse the attributes column
            ("tag1=value1;tag2=value2") into an OrderedDict.
        headers (bool): yield header lines ("##...") as str if True,
            else skip them.
        comments (bool): yield comment lines ("#...") as str if True,
            else skip them.

    Yields:
        GFF3Entry: parsed entry (str for header/comment lines when the
        corresponding option is enabled).
    """
    handle = self.handle

    # Speed tricks: reduces function calls
    split = str.split
    strip = str.strip

    if start_line is None:
        line = next(handle)  # Read first GFF3
    else:
        line = start_line  # Set header to given header

    # Check if input is text or bytestream
    if (isinstance(line, bytes)):
        def next_line(i):
            return next(i).decode('utf-8')

        line = strip(line.decode('utf-8'))
    else:
        next_line = next
        line = strip(line)

    # Manual 'for' loop isn't needed to read the file properly and
    # quickly, unlike fasta_iter and fastq_iter, but it is necessary to
    # begin iterating partway through a file when the user gives a
    # starting line.
    try:
        while True:  # Loop until StopIteration Exception raised
            self.current_line += 1

            data = GFF3Entry()  # Initialize early to prevent access error

            if line.startswith('##FASTA'):  # Skip FASTA entries
                raise FastaFound

            if line.startswith('##') and not headers:
                line = strip(next_line(handle))
                continue
            elif line.startswith('##') and headers:
                yield line
                line = strip(next_line(handle))
                continue

            if line.startswith('#') and not comments:
                line = strip(next_line(handle))
                continue
            elif line.startswith('#') and comments:
                yield line
                line = strip(next_line(handle))
                continue

            split_line = split(line, '\t')

            data.origline = line
            data.seqid = split_line[0]
            data.source = split_line[1]
            data.type = split_line[2]
            data.start = int(split_line[3])
            data.end = int(split_line[4])
            try:  # Make float unless dot
                data.score = float(split_line[5])
            except ValueError:
                data.score = split_line[5]
            data._score_str = split_line[5]
            data.strand = split_line[6]
            try:  # Get phase as int unless phase not given
                data.phase = int(split_line[7])
            except ValueError:
                data.phase = split_line[7]
            data.attributes = split_line[8]

            if parse_attr:
                attributes = split(data.attributes, ';')
                data.attributes = OrderedDict()
                for attribute in attributes:
                    split_attribute = attribute.split('=')
                    key = split_attribute[0]
                    # Comma-separated attribute values become a list.
                    value = split_attribute[-1].split(',') if ',' in split_attribute[-1] else split_attribute[-1]
                    if not key == '':  # Avoid semicolon split at end
                        data.attributes[key] = value

            line = strip(next_line(handle))  # Raises StopIteration at EOF

            yield data

    except StopIteration:  # Yield last GFF3 entry
        if data.origline:
            yield data
        else:  # handle case where GFF ends in comment
            pass

    except FastaFound:  # When FASTA found, last entry is repeat so pass
        pass
def calculate_squared_sum(numbers):
    """Sum the squares of the given numbers, rounding each number up
    (to its ceiling) before squaring.

    Parameters:
        numbers (List): the list of numbers.

    Returns:
        The sum of the squares of the rounded-up numbers.

    Examples:
        For numbers = [1, 2, 3] the output should be 14
        For numbers = [1, 4, 9] the output should be 98
        For numbers = [1, 3, 5, 7] the output should be 84
        For numbers = [1.4, 4.2, 0] the output should be 29
        For numbers = [-2.4, 1, 1] the output should be 6
    """
    import math
    return sum(math.ceil(value) ** 2 for value in numbers)
def writeDate(self, n):
    """Write a C{datetime} instance to the AMF3 stream.

    @type n: L{datetime}
    @param n: The C{Date} data to be encoded to the AMF3 data stream.
    """
    if isinstance(n, datetime.time):
        raise pyamf.EncodeError('A datetime.time instance was found but '
            'AMF3 has no way to encode time objects. Please use '
            'datetime.datetime instead (got:%r)' % (n,))

    self.stream.write(TYPE_DATE)

    # Reuse a back-reference if this object was already written.
    ref = self.context.getObjectReference(n)
    if ref != -1:
        self._writeInteger(ref << 1)
        return

    self.context.addObject(n)
    self.stream.write_uchar(REFERENCE_BIT)

    if self.timezone_offset is not None:
        n -= self.timezone_offset

    ms = util.get_timestamp(n)
    self.stream.write_double(ms * 1000.0)
def _default(self):
    """Determine the default value for this field, honoring context flags.

    Returns NOT_SET (optionally recording a "missing" error) when no
    usable default exists.
    """
    if self.ctx.ignore_default:
        # Defaults are disabled entirely; treat the value as missing.
        if not self.ctx.ignore_missing:
            self.ctx.errors.missing()
        return NOT_SET
    if self.default is NOT_SET:
        if not self.ctx.ignore_missing:
            self.ctx.errors.missing()
        return NOT_SET
    if self.default in IGNORE:
        return self.default
    if isinstance(self.default, Hook):
        # A truthy Hook is invoked; a falsy one counts as missing.
        if self.default:
            return self.default()
        if not self.ctx.ignore_missing:
            self.ctx.errors.missing()
        return NOT_SET
    if isinstance(self.default, type) or callable(self.default):
        # Classes and callables are invoked to produce the default value.
        return self.default()
    return self.default
def check_plugins(self):
    """Collect plugins from entry point 'frosted.plugins' and run their
    check() method, passing the filename."""
    plugins = {}
    for entry_point in pkg_resources.iter_entry_points(group='frosted.plugins'):
        plugins[entry_point.name] = entry_point.load()

    for plugin_name, plugin in plugins.items():
        if self.filename != '(none)':
            for message, loc, args, kwargs in plugin.check(self.filename):
                self.report(message, loc, *args, **kwargs)
def create_layout_params(self, child, layout):
    """Override as there is no (width, height) constructor.

    Fragments use the inherited behavior; decor views get explicitly
    populated ViewPagerLayoutParams.
    """
    from .android_fragment import AndroidFragment
    if isinstance(child, AndroidFragment):
        return super(AndroidViewPager, self).create_layout_params(child,
                                                                  layout)

    # Only apply to decor views
    dp = self.dp
    width = coerce_size(layout.get('width', 'match_parent'))
    height = coerce_size(layout.get('height', 'wrap_content'))
    # Negative values are special constants (match_parent/wrap_content)
    # and pass through unscaled.
    width = width if width < 0 else int(width * dp)
    height = height if height < 0 else int(height * dp)

    # No (w, h) constructor
    params = ViewPagerLayoutParams()
    params.width = width
    params.height = height
    params.isDecor = True
    return params
def get_route_io_data_types(self):
    # type: () -> typing.List[UserDefined]
    """Return all user-defined data types referenced as an argument,
    result, or error of any route.

    If a List or Nullable data type is referenced, the contained data
    type is returned, assuming it's a user-defined type.
    """
    referenced = set()  # type: typing.Set[UserDefined]
    for route in self.routes:
        referenced |= self.get_route_io_data_types_for_route(route)
    return sorted(referenced, key=lambda dt: dt.name)
def correlation_matrix(corrdf, title, **kwargs):
    """Plot an image of a matrix correlation.

    Parameters
    ----------
    corrdf : DataFrame
        The matrix correlation to plot.
    title : str
        The matrix title.

    Returns
    -------
    str
        The resulting image encoded as a data-URI string.
    """
    buffer = BytesIO()
    fig_cor, axes_cor = plt.subplots(1, 1)
    labels = corrdf.columns
    image = axes_cor.imshow(corrdf, vmin=-1, vmax=1,
                            interpolation="nearest", cmap='bwr')
    plt.title(title, size=18)
    plt.colorbar(image)
    axes_cor.set_xticks(np.arange(0, corrdf.shape[0],
                                  corrdf.shape[0] * 1.0 / len(labels)))
    axes_cor.set_yticks(np.arange(0, corrdf.shape[1],
                                  corrdf.shape[1] * 1.0 / len(labels)))
    axes_cor.set_xticklabels(labels, rotation=90)
    axes_cor.set_yticklabels(labels)
    image.figure.savefig(buffer, bbox_inches='tight')
    buffer.seek(0)
    result = 'data:image/png;base64,' + quote(base64.b64encode(buffer.getvalue()))
    # Close the figure so repeated calls don't leak matplotlib state.
    plt.close(image.figure)
    return result
def wrap_with(wrapper_cls):
    """Use a custom `Wrapper` to apply annotations to the decorated
    function.

    :param wrapper_cls: Custom `Wrapper` subclass
    """
    def decorator(func):
        annotate(func, 'wrapper', [{'wrapper': wrapper_cls}])
        return activate(func)
    return decorator
def check_password(method, password, hashed_password, charset):
    """Check that ``password`` matches ``hashed_password`` using ``method``,
    assuming the encoding is ``charset``.

    :param str method: one of ``"crypt"``, ``"ldap"``, ``"hex_md5"``,
        ``"hex_sha1"``, ``"hex_sha224"``, ``"hex_sha256"``,
        ``"hex_sha384"``, ``"hex_sha512"``, ``"plain"``
    :param password: the user-supplied password
    :type password: :obj:`str` or :obj:`unicode`
    :param hashed_password: the hashed password as stored in the database
    :type hashed_password: :obj:`str` or :obj:`unicode`
    :param str charset: the character encoding used internally; it must be
        valid for the charset ``password`` was encoded with initially
    :return: True if ``password`` matches ``hashed_password`` using
        ``method``, False otherwise
    :rtype: bool
    :raises ValueError: for an unknown method or an unsupported crypt salt
    """
    # Normalize both values to bytes before comparing.
    if not isinstance(password, six.binary_type):
        password = password.encode(charset)
    if not isinstance(hashed_password, six.binary_type):
        hashed_password = hashed_password.encode(charset)
    if method == "plain":
        return password == hashed_password
    elif method == "crypt":
        # Extract the salt: "$id$salt$" modular format, "_" BSD extended
        # format, or the legacy two-character DES salt.
        if hashed_password.startswith(b'$'):
            salt = b'$'.join(hashed_password.split(b'$', 3)[:-1])
        elif hashed_password.startswith(b'_'):  # pragma: no cover old BSD format not supported
            salt = hashed_password[:9]
        else:
            salt = hashed_password[:2]
        if six.PY3:
            # crypt.crypt on py3 works on str, not bytes.
            password = password.decode(charset)
            salt = salt.decode(charset)
            hashed_password = hashed_password.decode(charset)
        if not crypt_salt_is_valid(salt):
            raise ValueError("System crypt implementation do not support the salt %r" % salt)
        crypted_password = crypt.crypt(password, salt)
        return crypted_password == hashed_password
    elif method == "ldap":
        # Re-hash with the stored scheme and salt, then compare.
        scheme = LdapHashUserPassword.get_scheme(hashed_password)
        salt = LdapHashUserPassword.get_salt(hashed_password)
        return LdapHashUserPassword.hash(scheme, password, salt, charset=charset) == hashed_password
    elif (
        method.startswith("hex_") and
        method[4:] in {"md5", "sha1", "sha224", "sha256", "sha384", "sha512"}
    ):
        return getattr(hashlib, method[4:])(password).hexdigest().encode("ascii") == hashed_password.lower()
    else:
        raise ValueError("Unknown password method check %r" % method)
def parse_mapping(self, map_path, source=None, dotfiles=None):
    """Parse a dotfile mapping file.

    Each mapping line uses a colon to separate the source file name from
    the target path ("source: target"); a bare name maps to itself.
    "#include" lines pull in another mapping file recursively.

    Args:
        map_path: path to a mapping file, or a directory containing a
            '.dotfiles' or 'dotfiles' mapping file.
        source: base directory for relative source paths (defaults to
            the mapping file's directory).
        dotfiles: optional OrderedDict of existing mappings to extend.

    Returns:
        OrderedDict mapping source paths to target paths.

    Raises:
        ValueError: if no mapping file can be found in ``map_path``.
    """
    include_re = re.compile(r"""^\s*#include\s+(".+"|'.+')""", re.I)
    mapping_re = re.compile(r"""^("[^"]+"|\'[^\']+\'|[^\'":]+)\s*(?::\s*(.*)\s*)?$""")

    filename = None
    map_path = path.realpath(path.expanduser(map_path))
    if path.isfile(map_path):
        filename = map_path
    elif path.isdir(map_path):
        # try finding a mapping in the target directory
        for map_name in '.dotfiles', 'dotfiles':
            candidate = path.join(map_path, map_name)
            if path.isfile(candidate):
                filename = candidate
                break

    if filename is None:
        raise ValueError('No dotfile mapping found in %s' % map_path)

    if source is None:
        source = path.dirname(map_path)
    if dotfiles is None:
        dotfiles = OrderedDict()

    lineno = 0
    with open(filename) as fh:
        for line in fh:
            lineno += 1
            content = line.strip()

            match = include_re.match(content)
            if match:
                include_path = match.group(1).strip('\'"')
                if (include_path.startswith('/')
                        or include_path.startswith('~')):
                    include_path = path.realpath(path.expanduser(include_path))
                else:
                    # Relative includes resolve next to this mapping file.
                    include_path = path.join(path.dirname(filename),
                                             include_path)
                if path.exists(include_path):
                    self.log.debug('Recursively parsing mapping in %s',
                                   include_path)
                    dotfiles = self.parse_mapping(include_path,
                                                  dotfiles=dotfiles)
                else:
                    self.log.warning('Include command points to file or '
                                     'directory that does not exist, "%s",'
                                     ' on line %d', include_path, lineno)

            if not content or content.startswith('#'):
                # comment line or empty line (includes also start with '#')
                continue

            match = mapping_re.match(content)
            if match:
                source_path, target_path = match.groups()
                source_path = path.join(source, source_path.strip('\'"'))
                if source_path in dotfiles:
                    # BUG FIX: the original passed only ``lineno`` for the
                    # two format placeholders, breaking this log call.
                    self.log.warning('Duplicate dotfile source "%s" '
                                     'on line #%d', source_path, lineno)
                    continue
                if target_path is None:
                    target_path = source_path
                dotfiles[source_path] = target_path
            else:
                self.log.warning('Dotfile mapping regex failed on line '
                                 '#%d', lineno)
    return dotfiles
def CreateFromDOM (node, default_namespace=None):
    """Create a Python binding instance from the given DOM node.

    The node tag must correspond to an element declaration in this module.

    @deprecated: Forcing use of DOM interface is unnecessary; use
    L{CreateFromDocument}.
    """
    ns = Namespace.fallbackNamespace() if default_namespace is None else default_namespace
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, ns)
def EU27as (self, to='name_short'):
    """Return EU27 countries in the specified classification.

    Parameters
    ----------
    to : str, optional
        Output classification (valid str for an index of the country_data
        file), default: name_short

    Returns
    -------
    Pandas DataFrame
    """
    columns = [to] if isinstance(to, str) else to
    # EU accession year < 2013 selects the EU27 membership set
    eu27_rows = self.data[self.data.EU < 2013]
    return eu27_rows[columns]
def temperature_data_to_csv (temperature_data, path_or_buf):
    """Write a temperature series to CSV.

    See also :any:`pandas.DataFrame.to_csv`.

    Parameters
    ----------
    temperature_data : :any:`pandas.Series`
        Temperature data series with :any:`pandas.DatetimeIndex`.
    path_or_buf : :any:`str` or file handle, default None
        File path or object; if None, the result is returned as a string.
    """
    series = temperature_data
    # Fill in conventional names so the CSV header is always present.
    if series.index.name is None:
        series.index.name = "dt"
    if series.name is None:
        series.name = "temperature"
    return series.to_frame().to_csv(path_or_buf, index=True)
def _validate_api_params (params):
    '''Validate the API params as specified in the config file.

    Both ``api_page_id`` and ``api_key`` are mandatory and must be
    string/unicode values.
    '''
    text_types = (six.string_types, six.text_type)
    page_id_ok = isinstance(params['api_page_id'], text_types)
    api_key_ok = isinstance(params['api_key'], text_types)
    return page_id_ok and api_key_ok
def register_model (self, model, bundle):
    """Registers a bundle as the main bundle for a model.

    Used when we need to lookup urls by a model.

    :param model: the model class to register the bundle under.
    :param bundle: the bundle that becomes the primary bundle for ``model``.
    :raises AlreadyRegistered: if ``model`` already has a primary bundle.
    :raises Exception: if ``bundle`` declares dynamic url parameters, which
        a primary bundle may not have.
    """
    # Each model may have at most one primary bundle.
    if model in self._model_registry:
        raise AlreadyRegistered('The model %s is already registered' % model)
    # Primary bundles are looked up by model alone, so their URL must be
    # static (no dynamic url_params).
    if bundle.url_params:
        raise Exception("A primary model bundle cannot have dynamic \
url_parameters")
    self._model_registry[model] = bundle
def _compute_merkle_root (self, required_state_root):
    """Computes the merkle root of the state changes in the context
    corresponding with _last_valid_batch_c_id as applied to
    _previous_state_hash.

    Args:
        required_state_root (str): The merkle root that these txns
            should equal.

    Returns:
        state_hash (str): The merkle root calculated from the previous
            state hash and the state changes from the context_id, or
            ``None`` when there is no valid previous batch context.
    """
    state_hash = None
    if self._previous_valid_batch_c_id is not None:
        # Persist/clean-up immediately when always persisting (genesis) or
        # when there is no required root to validate against (publishing).
        publishing_or_genesis = self._always_persist or required_state_root is None
        state_hash = self._squash(
            state_root=self._previous_state_hash,
            context_ids=[self._previous_valid_batch_c_id],
            persist=self._always_persist,
            clean_up=publishing_or_genesis)
        if self._always_persist is True:
            return state_hash
        # Only when the computed root matches the expected one do we commit
        # the state changes for real (persist=True) and clean up the context.
        if state_hash == required_state_root:
            self._squash(
                state_root=self._previous_state_hash,
                context_ids=[self._previous_valid_batch_c_id],
                persist=True,
                clean_up=True)
    return state_hash
def is_valid (self, max_age=None):
    '''Determine whether the cache file is still fresh.

    :param max_age: maximum age in seconds; defaults to
        ``self.cache_max_age``.
    :return: ``True`` when the cache file exists and has not expired,
        ``False`` otherwise.
    '''
    if max_age is None:
        max_age = self.cache_max_age
    if not os.path.isfile(self.cache_path_cache):
        return False
    expiry = os.path.getmtime(self.cache_path_cache) + max_age
    return expiry > time()
async def send (self, content=None, *, tts=False, embed=None, file=None,
                files=None, delete_after=None, nonce=None):
    """|coro|

    Sends a message to the destination with the content given.

    ``content`` is converted through ``str(content)``; when it is ``None``
    an ``embed`` must be supplied instead. Exactly one of ``file`` (a single
    :class:`.File`) or ``files`` (a list of up to 10 :class:`.File`) may be
    given. Setting ``delete_after`` schedules a silent background deletion
    of the sent message after that many seconds.

    Raises :exc:`.HTTPException` on send failure, :exc:`.Forbidden` when
    lacking permissions, and :exc:`.InvalidArgument` for bad ``file``/
    ``files`` combinations. Returns the sent :class:`.Message`.
    """
    channel = await self._get_channel()
    state = self._state
    if content is not None:
        content = str(content)
    if embed is not None:
        embed = embed.to_dict()

    if file is not None and files is not None:
        raise InvalidArgument('cannot pass both file and files parameter to send()')

    if file is not None:
        if not isinstance(file, File):
            raise InvalidArgument('file parameter must be File')
        try:
            data = await state.http.send_files(
                channel.id, files=[file], content=content,
                tts=tts, embed=embed, nonce=nonce)
        finally:
            # always release the file handle, even on HTTP failure
            file.close()
    elif files is not None:
        if len(files) > 10:
            raise InvalidArgument('files parameter must be a list of up to 10 elements')
        if not all(isinstance(f, File) for f in files):
            raise InvalidArgument('files parameter must be a list of File')
        try:
            data = await state.http.send_files(
                channel.id, files=files, content=content,
                tts=tts, embed=embed, nonce=nonce)
        finally:
            for f in files:
                f.close()
    else:
        data = await state.http.send_message(
            channel.id, content, tts=tts, embed=embed, nonce=nonce)

    message = state.create_message(channel=channel, data=data)
    if delete_after is not None:
        await message.delete(delay=delete_after)
    return message
def convert_datetime (value, parameter):
    '''Converts to datetime.datetime:
    '', '-', None convert to the parameter default.
    The first matching format in settings.DATETIME_INPUT_FORMATS converts
    to datetime.
    '''
    value = _check_default(value, parameter, ('', '-', None))
    # Already converted (or defaulted to None): nothing to parse.
    if value is None or isinstance(value, datetime.datetime):
        return value
    for fmt in settings.DATETIME_INPUT_FORMATS:
        try:
            return datetime.datetime.strptime(value, fmt)
        except (ValueError, TypeError):
            pass
    raise ValueError("`{}` does not match a format in settings.DATETIME_INPUT_FORMATS".format(value))
def handle (self, key, value):
    '''Processes a valid zookeeper request.

    Parses the request out of ``key``, applies the requested change to the
    Zookeeper-held YAML configuration, writes it back, and acks the result
    to Kafka.

    @param key: The key that matched the request
        (``<prefix>:<action>:<domain>:<appid>``)
    @param value: The JSON value associated with the key
    '''
    # break down key
    elements = key.split(":")
    # renamed from `dict` -- the original name shadowed the builtin
    request = {}
    request['action'] = elements[1]
    request['domain'] = elements[2]
    request['appid'] = elements[3]
    value = ujson.loads(value)

    # the master dict to return
    master = {}
    master['uuid'] = value['uuid']
    master['server_time'] = int(self.get_current_time())
    master['action'] = request['action']
    master['domain'] = request['domain']
    master['appid'] = request['appid']

    # log we received the info message
    extras = self.get_log_dict(request['action'], appid=request['appid'],
                               uuid=master['uuid'])
    self.logger.info('Received zookeeper request', extra=extras)

    # get the current zk configuration
    data = None
    try:
        data = self.zoo_client.get(self.path)[0]
    except ZookeeperError:
        e = "Unable to load Zookeeper config"
        self.logger.error(e)
        master['error'] = e

    the_dict = {}
    if data is not None and len(data) > 0:
        the_dict = yaml.safe_load(data)

    # update the configuration, ensuring both top-level keys exist
    if "domains" not in the_dict:
        the_dict['domains'] = {}
    if "blacklist" not in the_dict:
        the_dict['blacklist'] = []

    if request['action'] == 'domain-update':
        the_dict['domains'][request['domain']] = {
            "window": value['window'],
            "hits": value['hits'],
            "scale": value['scale'],
        }
    elif request['action'] == 'domain-remove':
        if request['domain'] in the_dict['domains']:
            del the_dict['domains'][request['domain']]
    elif request['action'] == 'blacklist-update':
        the_dict['blacklist'].append(request['domain'])
        # de-duplicate the blacklist
        the_dict['blacklist'] = list(set(the_dict['blacklist']))
    elif request['action'] == 'blacklist-remove':
        if request['domain'] in the_dict['blacklist']:
            the_dict['blacklist'].remove(request['domain'])
    else:
        # warning() instead of the deprecated warn() alias
        self.logger.warning("Unknown command given to Zookeeper Monitor")

    # write the configuration back to zookeeper
    the_string = yaml.dump(the_dict, default_flow_style=False)
    try:
        self.zoo_client.set(self.path, the_string)
    except ZookeeperError:
        e = "Unable to store Zookeeper config"
        self.logger.error(e)
        master['error'] = e

    # ack the data back to kafka
    if self._send_to_kafka(master):
        extras['success'] = True
        self.logger.info('Sent zookeeper update to kafka', extra=extras)
    else:
        extras['success'] = False
        self.logger.error('Failed to send zookeeper update to kafka', extra=extras)
def _FracInt ( x , y , z , a , b , c , tau , n ) : """Returns 1 x ^ 2 y ^ 2 z ^ 2 - - - - - ( 1 - - - - - - - - - - - - - - - - - - ) ^ n sqrt ( tau + a ) ( tau + b ) ( tau + c ) ) tau + a tau + b tau + c"""
denom = np . sqrt ( ( a + tau ) * ( b + tau ) * ( c + tau ) ) return ( 1. - x ** 2 / ( a + tau ) - y ** 2 / ( b + tau ) - z ** 2 / ( c + tau ) ) ** n / denom
def get_where_clause_from_dict (dictionary, join_operator='AND'):
    """Builds a where clause from a dictionary of column -> value."""
    CoyoteDb.escape_dictionary(dictionary)
    fragments = []
    for k, v in dictionary.iteritems():
        # NULL comparisons need the IS operator instead of `=`
        template = ' {k} is {v} ' if str(v).lower() == 'null' else ' {k} = {v} '
        fragments.append(template.format(k=k, v=v))
    return join_operator.join(fragments)
def _assemble_and_send_request ( self ) : """Fires off the Fedex request . @ warning : NEVER CALL THIS METHOD DIRECTLY . CALL send _ request ( ) , WHICH RESIDES ON FedexBaseService AND IS INHERITED ."""
# Fire off the query . return self . client . service . uploadDocuments ( WebAuthenticationDetail = self . WebAuthenticationDetail , ClientDetail = self . ClientDetail , TransactionDetail = self . TransactionDetail , Version = self . VersionId , Documents = self . Documents , Usage = self . Usage , OriginCountryCode = self . OriginCountryCode , DestinationCountryCode = self . DestinationCountryCode , )
def semidetached (b, component, solve_for=None, **kwargs):
    """Create a constraint forcing requiv to its critical (semidetached) value."""
    comp_ps = b.get_component(component=component)
    requiv = comp_ps.get_parameter(qualifier='requiv')
    requiv_max = comp_ps.get_parameter(qualifier='requiv_max')
    # Only solving for requiv itself is supported.
    if solve_for not in [requiv, None]:
        raise NotImplementedError
    return requiv, 1.0 * requiv_max, {'component': component}
def create_simple_binding (jboss_config, binding_name, value, profile=None):
    '''Create a simple jndi binding in the running jboss instance.

    jboss_config
        Configuration dictionary with connection properties.
    binding_name
        Binding name to be created.
    value
        Binding value.
    profile
        The profile name (JBoss domain mode only).

    CLI Example:

    .. code-block:: bash

        salt '*' jboss7.create_simple_binding '{"cli_path": "...", "controller": "10.11.12.13:9999", "cli_user": "jbossadm", "cli_password": "jbossadm"}' my_binding_name my_binding_value
    '''
    log.debug("======================== MODULE FUNCTION: jboss7.create_simple_binding, binding_name=%s, value=%s, profile=%s",
              binding_name, value, profile)
    # domain mode prefixes the operation with the profile path
    prefix = '' if profile is None else '/profile="{profile}"'.format(profile=profile)
    operation = prefix + '/subsystem=naming/binding="{binding_name}":add(binding-type=simple, value="{value}")'.format(
        binding_name=binding_name,
        value=__escape_binding_value(value))
    return __salt__['jboss7_cli.run_operation'](jboss_config, operation)
def is_up_to_date (self, server_version):
    """Determine if the client (on the local user's machine) is up to date
    with the version provided on the server.

    Parameters
    ----------
    server_version : str
        The server version string to compare to the host.

    Returns
    -------
    bool
        True when the client version is >= the server version (or is a
        beta build), False otherwise.
    """
    client_split = self.__version__.split('.')
    server_split = server_version.split('.')
    # Pad the shorter version with '0' components so both align.
    length = max(len(client_split), len(server_split))
    client_split += ['0'] * (length - len(client_split))
    server_split += ['0'] * (length - len(server_split))
    # FIX: compare over the full padded length. The original looped only
    # over the client's original component count, so trailing server
    # components were ignored (e.g. client 1.2 vs server 1.2.5 was
    # wrongly reported as up to date).
    for client_part, server_part in zip(client_split, server_split):
        if 'b' in client_part:
            # Using a beta version, don't check
            return True
        client = int(client_part)
        server = int(server_part)
        if client < server:
            return False
        if server < client:
            return True
    return True
def register_route (self, app):
    """Register the api route function with the app."""
    options = self.params["options"]
    if "url" not in options:
        raise Exception("Component does not have a URL property")
    if not hasattr(self.route_func, "__call__"):
        raise Exception("No app route function supplied")
    url = options["url"]
    # the URL doubles as the rule's endpoint name
    app.add_url_rule(url, url, self.route_func)
def _raw ( cls , vertices , edges , out_edges , in_edges , head , tail ) : """Private constructor for direct construction of an ObjectGraph from its attributes . vertices is the collection of vertices out _ edges and in _ edges map vertices to lists of edges head and tail map edges to objects ."""
self = object . __new__ ( cls ) self . _out_edges = out_edges self . _in_edges = in_edges self . _head = head self . _tail = tail self . _vertices = vertices self . _edges = edges return self
def get (self, key):
    """Return the value for ``key``, or ``None`` when absent.

    **Warning: the returned value is a clone of the stored value;
    modifying it does not change the map. Put the modified value back
    to make changes visible to all nodes.**

    **Warning 2: lookup uses __hash__ and __eq__ of the binary form of
    the key, not the implementations defined on the key's class.**

    :param key: (object), the specified key.
    :return: (object), the value for the specified key.
    """
    check_not_none(key, "key can't be None")
    return self._get_internal(self._to_data(key))
def stop (self):
    '''Stop nsh input and restore the normal prompt.'''
    self.mpstate.rl.set_prompt(self.status.flightmode + "> ")
    self.mpstate.functions.input_handler = None
    self.started = False
    # unlock the serial passthrough port by sending zeroed settings
    mavlink = self.master.mav
    mavlink.serial_control_send(self.serial_settings.port,
                                0, 0,
                                self.serial_settings.baudrate,
                                0, [0] * 70)
def authenticate (self, username=None, password=None, **kwargs):
    """Authenticate against Stormpath with the given credentials.

    Returns a user model when Stormpath authentication succeeds, or
    ``None`` otherwise. Expects STORMPATH_ID, STORMPATH_SECRET and
    STORMPATH_APPLICATION to be defined in Django settings.
    """
    if username is None:
        # fall back to the user model's configured username field
        UserModel = get_user_model()
        username = kwargs.get(UserModel.USERNAME_FIELD)
    account = self._stormpath_authenticate(username, password)
    return None if account is None else self._create_or_get_user(account)
def _to_dict ( self ) : """Return a json dictionary representing this model ."""
_dict = { } if hasattr ( self , 'values' ) and self . values is not None : _dict [ 'values' ] = [ x . _to_dict ( ) for x in self . values ] if hasattr ( self , 'pagination' ) and self . pagination is not None : _dict [ 'pagination' ] = self . pagination . _to_dict ( ) return _dict
def _done_handler ( self , task ) : """Called when the main task ( : meth : ` _ run ` , : attr : ` _ task ` ) returns ."""
try : task . result ( ) except asyncio . CancelledError : # normal termination pass except Exception as err : try : if self . _sm_enabled : self . _xmlstream . abort ( ) else : self . _xmlstream . close ( ) except Exception : pass self . on_failure ( err ) self . _logger . exception ( "broker task failed" )
def sg_sugar_func (func):
    r"""Decorates `func` so that it can be used as a chainable sugar function.

    Args:
      func: function to decorate

    Returns:
      A sugar function.
    """
    @wraps(func)
    def wrapper (tensor, **kwargs):
        # invoke the wrapped sugar function with packed options
        out = func(tensor, tf.sg_opt(kwargs))
        # record enough info on the output node to replay it via sg_reuse
        out._sugar = tf.sg_opt(func=func,
                               arg=tf.sg_opt(kwargs) + sg_get_context(),
                               prev=tensor)
        out.sg_reuse = types.MethodType(sg_reuse, out)
        return out
    return wrapper
def _set_show_mpls_ldp_fec (self, v, load=False):
    """Setter method for show_mpls_ldp_fec, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_ldp_fec (rpc).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_show_mpls_ldp_fec is considered as a private method. Backends
    looking to populate this variable should do so via calling
    thisObj._set_show_mpls_ldp_fec() directly.

    NOTE: this is pyangbind-generated code; the long literals below mirror
    the YANG model and must not be edited by hand.
    """
    # unwrap values that carry their own YANG type converter
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # validate/coerce the value into the generated YANG class
        t = YANGDynClass(v, base=show_mpls_ldp_fec.show_mpls_ldp_fec, is_leaf=True, yang_name="show-mpls-ldp-fec", rest_name="show-mpls-ldp-fec", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsLdpFec'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)
    except (TypeError, ValueError):
        # surface a structured error describing the expected generated type
        raise ValueError({
            'error-string': """show_mpls_ldp_fec must be of a type compatible with rpc""",
            'defined-type': "rpc",
            'generated-type': """YANGDynClass(base=show_mpls_ldp_fec.show_mpls_ldp_fec, is_leaf=True, yang_name="show-mpls-ldp-fec", rest_name="show-mpls-ldp-fec", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsLdpFec'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)""",
        })
    self.__show_mpls_ldp_fec = t
    # notify the containing object, when it supports change notification
    if hasattr(self, '_set'):
        self._set()
def invoke_script (self, script, id=None, endpoint=None):
    """Invokes a script that has been assembled.

    Args:
        script: (str) a hexlified string of a contract invocation script.
        id: (int, optional) id to use for response tracking.
        endpoint: (RPCEndpoint, optional) endpoint to use.

    Returns:
        json object of the result or the error encountered in the RPC call.
    """
    params = [script]
    return self._call_endpoint(INVOKE_SCRIPT, params=params, id=id, endpoint=endpoint)
def action (act, config):
    """CLI action preprocessor.

    Dispatches ``act`` to the matching module-level ``do_<act>`` handler,
    passing the config name and its directory under ``CONFIG_ROOT``.
    A falsy ``config`` is a no-op.
    """
    if not config:
        # nothing to act on without a config name
        return
    # FIX: the original compared with `act is "list"` -- identity comparison
    # against a string literal depends on interning and is not guaranteed;
    # compare by value instead.
    if act == "list":
        do_list()
    else:
        config_dir = os.path.join(CONFIG_ROOT, config)
        globals()["do_" + act](config, config_dir)
def index_agreement (s, o):
    """Index of agreement.

    input:
        s: simulated
        o: observed
    output:
        ia: index of agreement
    """
    observed_mean = np.mean(o)
    squared_error = np.sum((o - s) ** 2)
    potential_error = np.sum((np.abs(s - observed_mean) + np.abs(o - observed_mean)) ** 2)
    return 1 - squared_error / potential_error
def fdr (y, z):
    """False discovery rate ``fp / (tp + fp)``."""
    tp, _tn, fp, _fn = contingency_table(y, z)
    return fp / (tp + fp)
def _get_features ( self , i , word , context , prev , prev2 ) : '''Map tokens into a feature representation , implemented as a { hashable : float } dict . If the features change , a new model must be trained .'''
def add ( name , * args ) : features [ ' ' . join ( ( name , ) + tuple ( args ) ) ] += 1 i += len ( self . START ) features = defaultdict ( int ) # It ' s useful to have a constant feature , which acts sort of like a prior add ( 'bias' ) add ( 'i suffix' , word [ - 3 : ] ) add ( 'i pref1' , word [ 0 ] ) add ( 'i-1 tag' , prev ) add ( 'i-2 tag' , prev2 ) add ( 'i tag+i-2 tag' , prev , prev2 ) add ( 'i word' , context [ i ] ) add ( 'i-1 tag+i word' , prev , context [ i ] ) add ( 'i-1 word' , context [ i - 1 ] ) add ( 'i-1 suffix' , context [ i - 1 ] [ - 3 : ] ) add ( 'i-2 word' , context [ i - 2 ] ) add ( 'i+1 word' , context [ i + 1 ] ) add ( 'i+1 suffix' , context [ i + 1 ] [ - 3 : ] ) add ( 'i+2 word' , context [ i + 2 ] ) return features
def _load_entities ( self ) : """Must load all the entities it needs from cache , and return ` ` False ` ` if it could not find all of them ."""
if not self . _chat_peer : return True # Nothing to load ( e . g . MessageDeleted ) self . _chat , self . _input_chat = self . _get_entity_pair ( self . chat_id ) return self . _input_chat is not None
def _restore_meta (self):
    """Restore the model from a TensorFlow ``.meta`` checkpoint file.

    'RESTORE_META_FILE' is expected to end with ``.meta``; the graph is
    imported from it and the weights are restored from the matching
    checkpoint prefix (the filename without the ``.meta`` extension).
    """
    restore_meta_file = self._get_restore_meta_file()
    # FIX: reuse the value fetched above instead of calling the helper a
    # second time.
    filename = self.flags['RESTORE_DIRECTORY'] + restore_meta_file
    new_saver = tf.train.import_meta_graph(filename)
    # strip the trailing '.meta' (5 characters) to get the checkpoint prefix
    new_saver.restore(self.sess, filename[:-5])
    print("Model restored from %s" % restore_meta_file)
def show_env (self, env):
    """Show environment variables."""
    dialog = RemoteEnvDialog(env, parent=self)
    self.dialog_manager.show(dialog)
def _blank_within ( self , perimeter ) : """Blank all the pixels within the given perimeter . Parameters perimeter : list The perimeter of the region ."""
# Method : # scan around the perimeter filling ' up ' from each pixel # stopping when we reach the other boundary for p in perimeter : # if we are on the edge of the data then there is nothing to fill if p [ 0 ] >= self . data . shape [ 0 ] or p [ 1 ] >= self . data . shape [ 1 ] : continue # if this pixel is blank then don ' t fill if self . data [ p ] == 0 : continue # blank this pixel self . data [ p ] = 0 # blank until we reach the other perimeter for i in range ( p [ 1 ] + 1 , self . data . shape [ 1 ] ) : q = p [ 0 ] , i # stop when we reach another part of the perimeter if q in perimeter : break # fill everything in between , even inclusions self . data [ q ] = 0 return
def get_backend_path (service):
    """Return the dotted path of the first backend allowing ``service``,
    or ``None`` when no backend matches."""
    for backend in _get_backends():
        cls = backend.__class__
        try:
            allowed = backend.service_allowed(service)
        except AttributeError:
            # backend does not implement the required hook
            raise NotImplementedError(
                "%s.%s.service_allowed() not implemented" % (cls.__module__, cls.__name__))
        if allowed:
            return "%s.%s" % (cls.__module__, cls.__name__)
    return None
def serialize (self):
    """Convert the parameter into a dictionary.

    :return: The parameter dictionary.
    :rtype: dict
    """
    state = super(ResourceParameter, self).serialize()
    state['frequency'] = self.frequency
    state['unit'] = self._unit.serialize()
    return state
def add_notification (self, notification):
    """Add a notification into the actions list.

    :param notification: notification to add
    :type notification: alignak.notification.Notification
    :return: None
    """
    if notification.uuid in self.actions:
        logger.warning("Already existing notification: %s", notification)
        return
    logger.debug("Adding a notification: %s", notification)
    self.actions[notification.uuid] = notification
    self.nb_notifications += 1
    # a non-master notification (one with a contact) asks for a brok
    if notification.contact is not None:
        self.add(notification.get_initial_status_brok())
def major_tick_mark (self):
    """Read/write :ref:`XlTickMark` value specifying the type of major
    tick mark to display on this axis."""
    mark = self._element.majorTickMark
    # XML omits the element for the default tick mark (CROSS)
    return XL_TICK_MARK.CROSS if mark is None else mark.val
def yaml (self):
    """Return the YAML serialization of this mapping."""
    data = OrderedDict(self)
    return ordered_dump(data, Dumper=yaml.SafeDumper, default_flow_style=False)
def _G ( self , x , p ) : """analytic solution of the 2d projected mass integral integral : 2 * pi * x * kappa * dx : param x : : param p : : return :"""
prefactor = ( p + p ** 3 ) ** - 1 * p if isinstance ( x , np . ndarray ) : inds0 = np . where ( x * p == 1 ) inds1 = np . where ( x * p < 1 ) inds2 = np . where ( x * p > 1 ) func = np . ones_like ( x ) func [ inds0 ] = np . log ( 0.25 * x [ inds0 ] ** 2 * p ** 2 ) + np . pi * p * ( self . _u ( x [ inds0 ] ) - 1 ) + 2 * p ** 2 * ( self . _u ( x [ inds0 ] ) * np . arctanh ( self . _u ( x [ inds0 ] ) ** - 1 ) + np . log ( 0.5 * x [ inds0 ] ) ) func [ inds1 ] = np . log ( 0.25 * x [ inds1 ] ** 2 * p ** 2 ) + np . pi * p * ( self . _u ( x [ inds1 ] ) - 1 ) + 2 * p ** 2 * ( self . _u ( x [ inds1 ] ) * np . arctanh ( self . _u ( x [ inds1 ] ) ** - 1 ) + np . log ( 0.5 * x [ inds1 ] ) ) + 2 * self . _g ( x [ inds1 ] , p ) * np . arctanh ( self . _g ( x [ inds1 ] , p ) ) func [ inds2 ] = np . log ( 0.25 * x [ inds2 ] ** 2 * p ** 2 ) + np . pi * p * ( self . _u ( x [ inds2 ] ) - 1 ) + 2 * p ** 2 * ( self . _u ( x [ inds2 ] ) * np . arctanh ( self . _u ( x [ inds2 ] ) ** - 1 ) + np . log ( 0.5 * x [ inds2 ] ) ) - 2 * self . _f ( x [ inds2 ] , p ) * np . arctan ( self . _f ( x [ inds2 ] , p ) ) else : if x * p == 1 : func = np . log ( 0.25 * x ** 2 * p ** 2 ) + np . pi * p * ( self . _u ( x ) - 1 ) + 2 * p ** 2 * ( self . _u ( x ) * np . arctanh ( self . _u ( x ) ** - 1 ) + np . log ( 0.5 * x ) ) elif x * p < 1 : func = np . log ( 0.25 * x ** 2 * p ** 2 ) + np . pi * p * ( self . _u ( x ) - 1 ) + 2 * p ** 2 * ( self . _u ( x ) * np . arctanh ( self . _u ( x ) ** - 1 ) + np . log ( 0.5 * x ) ) + 2 * self . _g ( x , p ) * np . arctanh ( self . _g ( x , p ) ) else : func = np . log ( 0.25 * x ** 2 * p ** 2 ) + np . pi * p * ( self . _u ( x ) - 1 ) + 2 * p ** 2 * ( self . _u ( x ) * np . arctanh ( self . _u ( x ) ** - 1 ) + np . log ( 0.5 * x ) ) - 2 * self . _f ( x , p ) * np . arctan ( self . _f ( x , p ) ) return func * prefactor
def scaleSignal (img, fitParams=None, backgroundToZero=False, reference=None):
    '''Scale the image.

    backgroundToZero=True  -> scale between 0 (average background) and
                              1 (maximum signal)
    backgroundToZero=False -> scale to signal +- 3 std
    reference              -> reference image; scale image to fit this one

    returns: scaled image
    '''
    img = imread(img)
    if reference is not None:
        # map this image's signal range onto the reference's range
        low, high = signalRange(img, fitParams)
        ref_low, ref_high = signalRange(reference)
        gain = (ref_high - ref_low) / (high - low)
        scaled = np.asfarray(img)
        scaled -= low
        scaled *= gain
        scaled += ref_low
        return scaled
    offs, div = scaleParams(img, fitParams, backgroundToZero)
    scaled = np.asfarray(img) - offs
    scaled /= div
    print('offset: %s, divident: %s' % (offs, div))
    return scaled
def _get_hover_data (self, data, element, dimensions=None):
    """Initializes hover data based on Element dimension values.
    If empty initializes with no data.
    """
    if 'hover' not in self.handles or self.static_source:
        return
    for dimension in (dimensions or element.dimensions()):
        name = util.dimension_sanitizer(dimension.name)
        if name not in data:
            data[name] = element.dimension_values(dimension)
        values = np.asarray(data[name])
        # datetime-like columns also get pretty-printed string copies
        is_datetime = (values.dtype.kind == 'M'
                       or (len(values) and isinstance(values[0], util.datetime_types)))
        if is_datetime:
            data[name + '_dt_strings'] = [dimension.pprint_value(v) for v in values]
    # broadcast any overlay dimension values to the column length
    for overlay_dim, overlay_val in self.overlay_dims.items():
        name = util.dimension_sanitizer(overlay_dim.name)
        if name not in data:
            data[name] = [overlay_val for _ in range(len(list(data.values())[0]))]
def asyncio_main_run (root_runner: BaseRunner):
    """Create an ``asyncio`` event loop in the main thread and run
    ``root_runner`` to completion or failure.

    Using ``asyncio`` to handle subprocesses requires a specific loop type
    running in the main thread; this sets up the correct loop portably.

    .. seealso::
        Issue #8 <https://github.com/MatterMiners/cobald/issues/8>
        for details.
    """
    # FIX: `assert` is stripped under `python -O`, silently skipping this
    # required precondition; raise explicitly instead (same exception type
    # as before, so callers are unaffected).
    if threading.current_thread() != threading.main_thread():
        raise AssertionError('only main thread can accept asyncio subprocesses')
    if sys.platform == 'win32':
        # subprocess support on Windows requires the proactor event loop
        event_loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(event_loop)
    else:
        event_loop = asyncio.get_event_loop()
        # the child watcher must be attached to the loop running subprocesses
        asyncio.get_child_watcher().attach_loop(event_loop)
    event_loop.run_until_complete(awaitable_runner(root_runner))
def build_tqdm_inner (self, desc, total):
    """Extension point: override to customise inner (batch-loop)
    progress bars.

    :param desc: Description
    :param total: Number of batches
    :return: new progress bar
    """
    options = {'desc': desc, 'total': total, 'leave': self.leave_inner}
    return self.tqdm(**options)
def run_migrations_online ():
    """Run migrations in 'online' mode.

    In this scenario an Engine is created and a live connection is
    associated with the migration context.
    """
    # Prevent an empty auto-migration from being generated when the schema
    # has no changes.
    # reference: https://alembic.sqlalchemy.org/en/latest/cookbook.html
    def process_revision_directives (context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)
    connection = engine.connect()

    # sqlite/mysql need per-migration transactions
    configure_kwargs = {}
    if engine.name in ('sqlite', 'mysql'):
        configure_kwargs = {
            'transaction_per_migration': True,
            'transactional_ddl': True,
        }
    configure_args = current_app.extensions['migrate'].configure_args
    if configure_args:
        configure_kwargs.update(configure_args)

    context.configure(connection=connection,
                      target_metadata=target_metadata,
                      process_revision_directives=process_revision_directives,
                      **configure_kwargs)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()
def GetRegion(region_name):
    """Converts region name string into boto Region object.

    :param region_name: EC2 region name, e.g. ``'us-east-1'``.
    :returns: the matching boto region object.
    :raises ValueError: if ``region_name`` does not match any known region.
    """
    regions = boto_ec2.regions()
    valid_region_names = [r.name for r in regions]
    for r in regions:
        if r.name == region_name:
            return r
    # No match: log the valid choices, then fail loudly.  The original code
    # used assert(False) here, which is stripped under ``python -O`` and
    # would have silently returned None; raise a real exception instead.
    logging.info('invalid region name: %s ' % (region_name))
    logging.info('Try one of these:\n %s' % ('\n'.join(valid_region_names)))
    raise ValueError('invalid region name: %s' % region_name)
def loads(cls, str):
    """Loads a :class:`~pypot.primitive.move.Move` from a json string."""
    # Parse and hand straight off to the factory.
    return cls.create(json.loads(str))
def get_units(self, *args, **kwargs):
    """Returns the units of a Dimension"""
    # Look up the units for every requested dimension; a single argument
    # yields a bare value rather than a one-element list.
    units = [self.spike_times.get_label(arg).units for arg in args]
    if len(args) == 1:
        return units[0]
    return units
def Create(self, name, description=None):
    """Creates a new group under this group.

    >>> clc.v2.Datacenter(location="WA1").RootGroup().Create("Test3","Description3")
    <clc.APIv2.group.Group object at 0x10cc76c90>
    """
    # The description defaults to the group name when not supplied.
    payload = {
        'name': name,
        'description': description if description else name,
        'parentGroupId': self.id,
    }
    response = clc.v2.API.Call('POST', 'groups/%s' % (self.alias), payload,
                               session=self.session)
    return Group(id=response['id'], alias=self.alias, group_obj=response,
                 session=self.session)
def possible_moves(self, position):
    """Finds out the locations of possible moves given board.

    :pre: get_location is on board and piece at specified get_location
        on position
    :type: position: Board
    :rtype: generator
    """
    # ``yield from`` delegates to the chained move generators directly
    # instead of re-yielding each element in an explicit loop.
    yield from itertools.chain(self.forward_moves(position),
                               self.capture_moves(position),
                               self.en_passant_moves(position))
def setUsers(self, *args, **kwargs):
    """Adds the active users for this branch to a 'users' field.

    Returns the number of requests done to Mambu.

    .. todo:: since pagination logic was added, is not always true that
              just 1 request was done. It may be more! But since request
              counter singleton holds true information about how many
              requests were done to Mambu, in fact this return value may
              be obsolete
    """
    # Lazily resolve the MambuUsers class on first use; the import lives
    # here to avoid a circular import at module load time.  The try is
    # narrowed to the attribute access only, so the user-fetch query is
    # no longer duplicated (and an unrelated AttributeError raised while
    # iterating can no longer be silently swallowed).
    try:
        users_class = self.mambuusersclass
    except AttributeError:
        from .mambuuser import MambuUsers
        self.mambuusersclass = users_class = MambuUsers
    # Keep only the users currently active in this branch.
    usrs = [us for us in users_class(branchId=self['id'], *args, **kwargs)
            if us['userState'] == "ACTIVE"]
    self['users'] = usrs
    return 1
def _interpolated_template(self, templateid):
    """Return an interpolator for the given template"""
    phase, y = self._get_template_by_id(templateid)

    # Template phases must lie within the unit interval.
    assert phase.min() >= 0
    assert phase.max() <= 1

    # Pad roughly five samples on either side so the spline and its
    # derivatives wrap smoothly across the periodic boundary at phase 0/1.
    wrapped_phase = np.concatenate([phase[-5:] - 1, phase, phase[:5] + 1])
    wrapped_y = np.concatenate([y[-5:], y, y[:5]])

    # UnivariateSpline supports derivative evaluation; s=0 forces exact
    # interpolation through the template samples.
    return UnivariateSpline(wrapped_phase, wrapped_y, s=0, k=5)
def _extract_authors(pub, idx, _root):
    """Create a concatenated string of author names. Separate names with
    semi-colons.

    :param any pub: Publication author structure is ambiguous
    :param int idx: Index number of Pub
    """
    logger_ts.info("enter extract_authors")
    # Prefer DOI author data; fall back to manually entered author data.
    try:
        names = pub['author']
    except KeyError:
        try:
            names = pub['authors']
        except KeyError as e:
            # No author data at all; skip it altogether.
            names = False
            logger_ts.info("extract_authors: KeyError: author data not provided, {}".format(e))

    if names:
        # Authors may arrive as a list of strings, a list of dicts, or a
        # single string.
        if isinstance(names, list):
            parts = []
            for entry in names:
                if isinstance(entry, str):
                    parts.append(entry)
                elif isinstance(entry, dict):
                    parts.extend(entry.values())
            auth = ';'.join(parts) + ';' if parts else ''
        elif isinstance(names, str):
            auth = names
        else:
            auth = ''
        # NOTE(review): the trailing slice trims the final ';' for list
        # input, but also drops the last character when ``names`` is a
        # plain string -- behavior preserved as-is.
        _root['pub' + str(idx + 1) + '_author'] = auth[:-1]
    return _root
def p_nonfluent_list(self, p):
    '''nonfluent_list : nonfluent_list domain_section
        | nonfluent_list objects_section
        | nonfluent_list init_non_fluent_section
        | empty'''
    # NOTE: the docstring above is the PLY grammar rule for this parser
    # production -- it is runtime-meaningful and must not be reworded.
    if p[1] is None:
        # Base case ("empty" production): start a fresh accumulator dict.
        p[0] = dict()
    else:
        # Each section production reduces to a (name, section) pair;
        # fold it into the dict accumulated so far.
        name, section = p[2]
        p[1][name] = section
        p[0] = p[1]
def _iter_templates ( ) : """uses reflection to yield the Cheetah templates under this module"""
# pylint : disable = W0406 # needed for introspection import javatools . cheetah from Cheetah . Template import Template for _ , name , _ in iter_modules ( __path__ ) : __import__ ( "javatools.cheetah." + name ) found = getattr ( getattr ( javatools . cheetah , name ) , name ) if issubclass ( found , Template ) : yield found
def validate(config):
    """validate config file"""
    with open(config) as fp:
        raw = fp.read()

    # Parse the YAML first so syntax errors are reported distinctly from
    # schema violations.
    try:
        data = yaml.safe_load(raw)
    except Exception:
        log.error("config file: %s is not valid yaml", config)
        raise

    # Then check the parsed document against the schema.
    try:
        jsonschema.validate(data, CONFIG_SCHEMA)
    except Exception:
        log.error("config file: %s is not valid", config)
        raise

    log.info("config file valid, accounts:%d", len(data['accounts']))
    return data
def copy_file(self):
    """Demonstrates copying a file within a share and polling the copy
    operation until it completes (or times out)."""
    share_name = self._create_share()
    directory_name = self._create_directory(share_name)
    source_file_name = self._get_file_reference()
    self.service.create_file(share_name, directory_name, source_file_name, 512)

    # Basic
    # Copy the file from the directory to the root of the share
    source = self.service.make_file_url(share_name, directory_name, source_file_name)
    copy = self.service.copy_file(share_name, None, 'file1copy', source)

    # Poll for copy completion
    count = 0  # bug fix: counter was never initialized (NameError on first poll)
    while copy.status != 'success':
        count = count + 1
        if count > 5:
            print('Timed out waiting for async copy to complete.')
            break  # bug fix: previously kept polling forever after the timeout message
        time.sleep(30)
        # bug fix: previously referenced undefined dir_name/file_name;
        # poll the destination we copied to (share root, 'file1copy').
        copy = self.service.get_file_properties(share_name, None, 'file1copy').properties.copy

    # With SAS from a remote account to local file
    # Commented out as remote share, directory, file, and sas would need to be created
    '''
    source_file_url = self.service.make_file_url(
        remote_share_name,
        remote_directory_name,
        remote_file_name,
        sas_token=remote_sas_token,
    )
    copy = self.service.copy_file(destination_sharename,
                                  destination_directory_name,
                                  destination_file_name,
                                  source_file_url)
    '''

    # Abort copy
    # Commented out as this involves timing the abort to be sent while the copy is still running
    # Abort copy is useful to do along with polling
    # self.service.abort_copy_file(share_name, dir_name, file_name, copy.id)

    self.service.delete_share(share_name)
def cursorPositionChangedEvent ( self ) : """Update the highlighting . This is an overloaded version of the native Qt slot of ` ` QTextEdit ` ` . In this class , the purpose of this slot is to check if the character to the right of the cursor needs highlighting , assuming there is a second character to pair with it . | Args | * * * None * * | Returns | * * * None * * | Raises | * * * None * *"""
# Determine the sender and cursor position . qteWidget = self . sender ( ) tc = qteWidget . textCursor ( ) origin = tc . position ( ) # Remove all the highlighting . Since this will move the # cursor , first disconnect this very routine to avoid an # infinite recursion . qteWidget . cursorPositionChanged . disconnect ( self . cursorPositionChangedEvent ) self . qteRemoveHighlighting ( qteWidget ) qteWidget . cursorPositionChanged . connect ( self . cursorPositionChangedEvent ) # If we are beyond the last character ( for instance because # the cursor was explicitly moved to the end of the buffer ) # then there is no character to the right and will result in # an error when trying to fetch it . if origin >= len ( qteWidget . toPlainText ( ) ) : return else : # It is save to retrieve the character to the right of the # cursor . char = qteWidget . toPlainText ( ) [ origin ] # Return if the character is not in the matching list . if char not in self . charToHighlight : return # Disconnect the ' cursorPositionChanged ' signal from this # function because it will make changes to the cursor position # and would therefore immediately trigger itself , resulting in # an infinite recursion . qteWidget . cursorPositionChanged . disconnect ( self . cursorPositionChangedEvent ) # If we got until here " char " must be one of the two # characters to highlight . if char == self . charToHighlight [ 0 ] : start = origin # Found the first character , so now look for the second # one . If this second character does not exist the # function returns ' - 1 ' which is safe because the # ` ` self . highlightCharacter ` ` method can deal with this . stop = qteWidget . toPlainText ( ) . find ( self . charToHighlight [ 1 ] , start + 1 ) else : # Found the second character so the start index is indeed # the stop index . stop = origin # Search for the preceeding first character . start = qteWidget . toPlainText ( ) . rfind ( self . charToHighlight [ 0 ] , 0 , stop ) # Highlight the characters . 
oldCharFormats = self . highlightCharacters ( qteWidget , ( start , stop ) , QtCore . Qt . blue , 100 ) # Store the positions of the changed character in the # macroData structure of this widget . data = self . qteMacroData ( qteWidget ) data . matchingPositions = ( start , stop ) data . oldCharFormats = oldCharFormats self . qteSaveMacroData ( data , qteWidget ) # Reconnect the ' cursorPositionChanged ' signal . qteWidget . cursorPositionChanged . connect ( self . cursorPositionChangedEvent )
def get_chat_member(self, *args, **kwargs):
    """See :func:`get_chat_member`"""
    # Apply instance-level overrides to the keyword arguments, then
    # build and run the request.
    merged = self._merge_overrides(**kwargs)
    request = get_chat_member(*args, **merged)
    return request.run()
def enable(self, enable_password):
    """Change to the privilege mode."""
    # A trailing '#' in the prompt marks privileged mode; nothing to do.
    if self.device.prompt[-1] == '#':
        self.log("Device is already in privileged mode")
        return

    # Event list indices (0..3) are referenced by the transition table
    # below: password prompt, device prompt, timeout, EOF.
    events = [self.password_re, self.device.prompt_re, pexpect.TIMEOUT,
              pexpect.EOF]
    # Transition tuples: (event, valid-from-states, next-state,
    # action-or-exception, timeout).  A second password prompt (state 1)
    # means the password was rejected.
    transitions = [
        (self.password_re, [0], 1,
         partial(a_send_password, enable_password), 10),
        (self.password_re, [1], -1,
         ConnectionAuthenticationError("Incorrect enable password",
                                       self.device.hostname), 0),
        (self.device.prompt_re, [0, 1, 2, 3], -1, a_expected_prompt, 0),
        (pexpect.TIMEOUT, [0, 1, 2], -1,
         ConnectionAuthenticationError("Unable to get privileged mode",
                                       self.device.hostname), 0),
        (pexpect.EOF, [0, 1, 2], -1,
         ConnectionError("Device disconnected"), 0),
    ]
    # Send the enable command, then drive the interaction with the FSM.
    self.device.ctrl.send_command(self.enable_cmd)
    fsm = FSM("IOS-ENABLE", self.device, events, transitions,
              timeout=10, max_transitions=5)
    fsm.run()
    # Final sanity check: the prompt must now indicate privileged mode.
    if self.device.prompt[-1] != '#':
        raise ConnectionAuthenticationError("Privileged mode not set",
                                            self.device.hostname)
def cmd_func_name(self, command: str) -> str:
    """Get the method name associated with a given command.

    :param command: command to look up method name which implements it
    :return: method name which implements the given command
    """
    candidate = COMMAND_FUNC_PREFIX + command
    # Only report the name if the attribute exists and is callable.
    if callable(getattr(self, candidate, None)):
        return candidate
    return ''
def _extractSupportedAssociationType(self, server_error, endpoint, assoc_type):
    """Handle ServerErrors resulting from association requests.

    @returns: If server replied with an C{unsupported-type} error,
        return a tuple of supported C{association_type}, C{session_type}.
        Otherwise logs the error and returns None.

    @rtype: tuple or None
    """
    # Any error message whose code is not 'unsupported-type'
    # should be considered a total failure.
    unsupported = server_error.error_code == 'unsupported-type'
    if not unsupported or server_error.message.isOpenID1():
        logging.error(
            'Server error when requesting an association from %r: %s'
            % (endpoint.server_url, server_error.error_text))
        return None

    # The server rejected the association/session type we sent, but it
    # may have told us which combination it supports instead.
    logging.error('Unsupported association type %s: %s' % (assoc_type,
                                                           server_error.error_text,))

    # Pull the server's suggested fallback out of the error message.
    assoc_type = server_error.message.getArg(OPENID_NS, 'assoc_type')
    session_type = server_error.message.getArg(OPENID_NS, 'session_type')

    if assoc_type is None or session_type is None:
        logging.error('Server responded with unsupported association '
                      'session but did not supply a fallback.')
        return None

    if not self.negotiator.isAllowed(assoc_type, session_type):
        fmt = ('Server sent unsupported session/association type: '
               'session_type=%s, assoc_type=%s')
        logging.error(fmt % (session_type, assoc_type))
        return None

    return assoc_type, session_type
def restore(backup_file):
    '''
    Restore a Cozy

    backup_file: path to .tar.gz
    '''
    # Guard clause: nothing to do without the backup archive.
    if not os.path.isfile(backup_file) and not os.path.islink(backup_file):
        # print() call form works identically on Python 2 and 3 for a
        # single argument (the original used the Py2-only statement form).
        print('Missing backup file: {}'.format(backup_file))
        return

    couchdb_path = _get_couchdb_path()
    print('Restore Cozy:')
    # Stop the stack, wipe the old CouchDB design/replicator state,
    # unpack the archive over /, then bring CouchDB and nginx back up.
    cmd = 'supervisorctl stop cozy-controller ; sleep 10'
    cmd += ' ; service couchdb stop ; service nginx stop'
    cmd += ' ; rm -rf {couchdb_path}/.cozy_design'
    cmd += ' {couchdb_path}/_replicator.couch'
    cmd += ' ; tar xvzf {backup_file} -C /'
    cmd += ' ; service couchdb start ; service nginx start'
    cmd = cmd.format(backup_file=backup_file, couchdb_path=couchdb_path)
    helpers.cmd_exec(cmd, show_output=True)
    # Give CouchDB time to come up before restarting the controller.
    helpers.wait_couchdb(10)
    cmd = 'supervisorctl start cozy-controller'
    helpers.cmd_exec(cmd, show_output=True)
def get_idd_code(self, ip):
    '''Get idd_code'''
    record = self.get_all(ip)
    if record:
        return record.idd_code
    # Propagate the falsy lookup result (e.g. None) unchanged.
    return record
def save(self):
    """:return: save this environment on Ariane server (create or update)"""
    LOGGER.debug("Environment.save")
    post_payload = {}
    consolidated_osi_id = []

    # Only include fields that are actually set; a missing 'environmentID'
    # makes the server create a new environment instead of updating.
    if self.id is not None:
        post_payload['environmentID'] = self.id
    if self.name is not None:
        post_payload['environmentName'] = self.name
    if self.description is not None:
        post_payload['environmentDescription'] = self.description
    if self.color_code is not None:
        post_payload['environmentColorCode'] = self.color_code

    # Consolidate the OS-instance id list: start from the current ids,
    # drop the ones queued for removal and append the ones queued for
    # addition (saving/syncing them first so they have server-side ids).
    if self.osi_ids is not None:
        consolidated_osi_id = copy.deepcopy(self.osi_ids)
        if self.osi_2_rm is not None:
            for osi_2_rm in self.osi_2_rm:
                if osi_2_rm.id is None:
                    osi_2_rm.sync()
                consolidated_osi_id.remove(osi_2_rm.id)
    if self.osi_2_add is not None:
        for osi_id_2_add in self.osi_2_add:
            if osi_id_2_add.id is None:
                osi_id_2_add.save()
            consolidated_osi_id.append(osi_id_2_add.id)
    post_payload['environmentOSInstancesID'] = consolidated_osi_id

    args = {'http_operation': 'POST', 'operation_path': '',
            'parameters': {'payload': json.dumps(post_payload)}}
    response = EnvironmentService.requester.call(args)
    if response.rc != 0:
        LOGGER.warning('Environment.save - Problem while saving environment ' +
                       self.name + '. Reason: ' +
                       str(response.response_content) + '-' +
                       str(response.error_message) +
                       " (" + str(response.rc) + ")")
    else:
        # Persisted successfully: adopt the server-assigned id and bring
        # the touched OS instances back in sync with the server state.
        self.id = response.response_content['environmentID']
        if self.osi_2_add is not None:
            for osi_2_add in self.osi_2_add:
                osi_2_add.sync()
        if self.osi_2_rm is not None:
            for osi_2_rm in self.osi_2_rm:
                osi_2_rm.sync()
        # Pending add/remove queues have been applied; clear them and
        # refresh this object from the server.
        self.osi_2_add.clear()
        self.osi_2_rm.clear()
        self.sync()
    return self
def validate_digit(value, start, end):
    '''validate if a digit is valid'''
    # int(value) is only evaluated after isdigit() succeeds, so
    # non-numeric input never reaches the conversion.
    is_valid = str(value).isdigit() and start <= int(value) <= end
    if not is_valid:
        raise ValueError('%s must be a digit from %s to %s' % (value, start, end))
def field(self, name, fieldType="C", size="50", decimal=0):
    """Adds a dbf field descriptor to the shapefile."""
    # Date and logical fields have fixed storage widths in the dbf
    # format, overriding whatever the caller passed.
    fixed_widths = {"D": ("8", 0), "L": ("1", 0)}
    if fieldType in fixed_widths:
        size, decimal = fixed_widths[fieldType]
    if len(self.fields) >= 2046:
        raise ShapefileException(
            "Shapefile Writer reached maximum number of fields: 2046.")
    self.fields.append((name, fieldType, size, decimal))