signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
---|---|
def version(self, value):
    """Version setter.

    Stores ``value`` (or 0 when None/falsy) as a single unsigned byte in
    the slice of the backing buffer reserved for the version field.
    """
    # c_uint8 coerces the value into the 0-255 range; wrapping it in
    # bytearray() yields its single raw byte.
    raw = bytearray(c_uint8(value or 0))
    self.bytearray[self._get_slicers(1)] = raw
def _set_dict_value_by_path ( d , val , path ) :
"""Given a dict ( ` ` d ` ` ) , a value ( ` ` val ` ` ) , and a list specifying the
hierarchical path to a key in that dict ( ` ` path ` ` ) , set the value in ` ` d ` `
at ` ` path ` ` to ` ` val ` ` .
: param d : the dict to search in
: type d : dict
: param path : the path to the key in the dict
: type path : list
: raises : TypeError if the path is too short
: returns : the modified dict"""
|
tmp_path = deepcopy ( path )
tmp_d = deepcopy ( d )
result = tmp_d
while len ( tmp_path ) > 0 :
if len ( tmp_path ) == 1 :
result [ tmp_path [ 0 ] ] = val
break
k = tmp_path . pop ( 0 )
result = result [ k ]
return tmp_d
|
def get_port_profile_for_vlan(self, vlan_id, device_id):
    """Return the port-profile id associated with the given VLAN/device.

    :param vlan_id: VLAN id to look up
    :param device_id: UCSM device id to look up
    :returns: the matching ``profile_id``, or None when no row matches
    """
    query = self.session.query(ucsm_model.PortProfile)
    entry = query.filter_by(vlan_id=vlan_id, device_id=device_id).first()
    if not entry:
        return None
    return entry.profile_id
def upgrade(options=None, fatal=False, dist=False):
    """Upgrade all packages via yum.

    :param options: extra command-line options passed to yum, or None
    :param fatal: forwarded to ``_run_yum_command``
    :param dist: unused here; kept for signature parity with the apt helper
    """
    cmd = ['yum', '--assumeyes']
    if options is not None:
        cmd += options
    cmd += ['upgrade']
    log("Upgrading with options: {}".format(options))
    _run_yum_command(cmd, fatal)
def get(self, name, *df):
    """Get the value of a property by I{name}.
    @param name: The property name.
    @type name: str
    @param df: An optional value to be returned when the value
        is not set
    @type df: [1].
    @return: The stored value, or I{df[0]} if not set.
    @rtype: any"""
    # NOTE: ``__get`` is name-mangled at compile time to the enclosing
    # class, so this calls the private accessor of that class hierarchy
    # on whatever object ``self.provider(name)`` returns.
    return self.provider(name).__get(name, *df)
def list_folders(kwargs=None, call=None):
    '''List all the folders for this VMware environment.

    CLI Example:

    .. code-block:: bash

        salt-cloud -f list_folders my-vmware-config
    '''
    # Guard against being invoked as an action instead of a function.
    if call != 'function':
        raise SaltCloudSystemExit(
            'The list_folders function must be called with '
            '-f or --function.'
        )
    folders = salt.utils.vmware.list_folders(_get_si())
    return {'Folders': folders}
def get_std_start_date(self):
    """Return the start datetime formatted as ``%Y-%m-%d %H:%M:%S``.

    ``datetime.min``/``datetime.max`` act as sentinels for a non-custom
    date range; an empty string is returned for them.
    """
    start, _ = self._val
    if start in (datetime.min, datetime.max):
        return ""
    return start.strftime("%Y-%m-%d %H:%M:%S")
def do_transform(self):
    """Apply the transformation (if it exists) to the latest_value.

    On a failed transformation the expression is logged and cleared so it
    is not retried on every update.
    """
    if not self.transform:
        return
    try:
        self.latest_value = utils.Transform(
            expr=self.transform,
            value=self.latest_value,
            timedelta=self.time_between_updates().total_seconds(),
        ).result()
    except (TypeError, ValueError):
        # BUG FIX: the original logged ``self.transfrom`` (typo), which
        # raised AttributeError inside this handler instead of logging.
        # Also use warning() instead of the deprecated warn() alias.
        logger.warning("Invalid transformation '%s' for metric %s",
                       self.transform, self.pk)
        self.transform = ''
def load_hha(self, idx):
    """Load HHA features from Gupta et al. ECCV14.

    See https://github.com/s-gupta/rcnn-depth/blob/master/rcnn/saveHHA.m

    :param idx: image index used to build the file name
    :returns: float32 array with channels moved to the first axis
    """
    path = '{}/data/hha/img_{}.png'.format(self.nyud_dir, idx)
    hha = np.array(Image.open(path), dtype=np.float32)
    hha -= self.mean_hha
    # HWC -> CHW (channel-first layout).
    return hha.transpose((2, 0, 1))
def funname(file):
    """Return variable names from file names.

    :param file: a single path string or a list of path strings
    :returns: the basename without extension; a single string for string
        input, a list for list input
    """
    single = isinstance(file, str)
    files = [file] if single else file
    names = [os.path.splitext(os.path.basename(f))[0] for f in files]
    return names[0] if single else names
def inference(self, dataRDD, feed_timeout=600, qname='input'):
    """*For InputMode.SPARK only*: feed Spark RDD partitions into the
    TensorFlow worker nodes and return an RDD of inference results.

    The TensorFlow "main" function must interpret the RDD rows and emit
    valid output rows; it should be capable of inferencing since the
    distributed TensorFlow cluster is used. Per Spark design, the output
    RDD is lazily executed only when a Spark action is invoked on it.

    Args:
      :dataRDD: input data as a Spark RDD
      :feed_timeout: number of seconds after which data feeding times out (600 sec default)
      :qname: *INTERNAL_USE*

    Returns:
      A Spark RDD representing the output of the TensorFlow inferencing
    """
    logging.info("Feeding inference data")
    assert self.input_mode == InputMode.SPARK, "TFCluster.inference() requires InputMode.SPARK"
    assert qname in self.queues, "Unknown queue: {}".format(qname)
    feeder = TFSparkNode.inference(self.cluster_info, feed_timeout=feed_timeout, qname=qname)
    return dataRDD.mapPartitions(feeder)
def from_json(cls, data, result=None):
    """Create a new Relation element from JSON data.

    :param data: Element data from JSON
    :type data: Dict
    :param result: The result this element belongs to
    :type result: overpy.Result
    :return: New instance of Relation
    :rtype: overpy.Relation
    :raises overpy.exception.ElementDataWrongType: If the type value of
        the passed JSON data does not match.
    """
    if data.get("type") != cls._type_value:
        raise exception.ElementDataWrongType(
            type_expected=cls._type_value,
            type_provided=data.get("type"),
        )

    rel_id = data.get("id")
    tags = data.get("tags", {})
    (center_lat, center_lon) = cls.get_center_from_json(data=data)

    # Wrap each member with the relation-member class matching its "type".
    supported_members = [RelationNode, RelationWay, RelationRelation]
    members = [
        member_cls.from_json(member, result=result)
        for member in data.get("members", [])
        for member_cls in supported_members
        if member_cls._type_value == member.get("type")
    ]

    # Everything that is not a well-known key is kept as a raw attribute.
    ignore = ("id", "members", "tags", "type")
    attributes = {n: v for n, v in data.items() if n not in ignore}

    return cls(rel_id=rel_id, attributes=attributes, center_lat=center_lat,
               center_lon=center_lon, members=members, tags=tags, result=result)
def _decode_sensor_data(properties):
    """Decode, decompress, and parse the data from the history API.

    :param properties: response properties whose 'payload' is a sequence
        of base64 string fragments
    :returns: list of dicts with timestamp/temperature/humidity/airQuality
    """
    # The payload arrives split into string fragments; reassemble first.
    b64_input = "".join(properties.get('payload'))
    data = zlib.decompress(base64.b64decode(b64_input))

    # Each record is a fixed-width 22-byte frame with fields at fixed offsets.
    points = []
    for offset in range(0, len(data), 22):
        points.append({
            'timestamp': int(1e3 * ArloBaseStation._parse_statistic(data[offset:offset + 4], 0)),
            'temperature': ArloBaseStation._parse_statistic(data[offset + 8:offset + 10], 1),
            'humidity': ArloBaseStation._parse_statistic(data[offset + 14:offset + 16], 1),
            'airQuality': ArloBaseStation._parse_statistic(data[offset + 20:offset + 22], 1),
        })
    return points
def cigar(self, x, rot=0, cond=1e6, noise=0):
    """Cigar test objective function.

    :param x: a single point (array) or a population (list of arrays)
    :param rot: when truthy, rotate ``x`` first
    :param cond: condition number applied to all but the first coordinate
    :param noise: multiplicative log-normal noise strength
    """
    if rot:
        x = rotate(x)
    # Promote a single point into a one-element population.
    pop = [x] if isscalar(x[0]) else x
    f = [
        (xi[0] ** 2 + cond * sum(xi[1:] ** 2))
        * np.exp(noise * np.random.randn(1)[0] / len(xi))
        for xi in pop
    ]
    # Unwrap a 1-element result list back into a scalar.
    return f if len(f) > 1 else f[0]
def pvwatts_ac(pdc, pdc0, eta_inv_nom=0.96, eta_inv_ref=0.9637):
    r"""Implements NREL's PVWatts inverter model [1]_.

    .. math::

        \eta = \frac{\eta_{nom}}{\eta_{ref}} (-0.0162\zeta - \frac{0.0059}{\zeta} + 0.9858)

    .. math::

        P_{ac} = \min(\eta P_{dc}, P_{ac0})

    where :math:`\zeta=P_{dc}/P_{dc0}` and :math:`P_{dc0}=P_{ac0}/\eta_{nom}`.

    Parameters
    ----------
    pdc : numeric
        DC power.
    pdc0 : numeric
        Nameplate DC rating.
    eta_inv_nom : numeric, default 0.96
        Nominal inverter efficiency.
    eta_inv_ref : numeric, default 0.9637
        Reference inverter efficiency. PVWatts defines it to be 0.9637
        and is included here for flexibility.

    Returns
    -------
    pac : numeric
        AC power.

    References
    ----------
    .. [1] A. P. Dobos, "PVWatts Version 5 Manual,"
           http://pvwatts.nrel.gov/downloads/pvwattsv5.pdf (2014).
    """
    pac0 = eta_inv_nom * pdc0
    zeta = pdc / pdc0

    # The zero-filled buffer doubles as the ``out=`` target so that the
    # 0.0059/zeta term stays 0 wherever pdc == 0, avoiding divide-by-zero
    # for both scalars and arrays.
    eta = np.zeros_like(pdc, dtype=float)
    nonzero = ~np.equal(pdc, 0)
    # eta < 0 if zeta < 0.006; pac is clamped to >= 0 below. GH 541
    eta = eta_inv_nom / eta_inv_ref * (
        -0.0162 * zeta - np.divide(0.0059, zeta, out=eta, where=nonzero) + 0.9858)

    pac = eta * pdc
    # Clip to [0, pac0]; the lower clamp handles eta < 0. GH 541
    pac = np.maximum(0, np.minimum(pac0, pac))
    return pac
def _make_variant_locus_id ( gene_id , allele_id ) :
"""A convenience method to uniformly create variant loci .
If we want to materialize these in the monarch space ,
then we wrap with the self . make _ id function .
: param gene _ id :
: param allele _ id :
: return :"""
|
varloci = '-' . join ( ( gene_id , allele_id ) )
varloci = '_:' + re . sub ( r'(ZFIN)?:' , '' , varloci )
return varloci
|
def get_row_at_index(self, index):
    """Return a table row by its index.

    :param int index: the zero-indexed position of the row in the table
    :returns: the constructed row object, or None when ``index`` is None
        or the request yields no usable response
    """
    if index is None:
        return None
    url = self.build_url(self._endpoints.get('get_row_index'))
    response = self.session.post(url, data={'index': index})
    if not response:
        return None
    payload = {self._cloud_data_key: response.json()}
    return self.row_constructor(parent=self, **payload)
async def delete(self, force=False):
    '''Delete a node from the cortex.

    The following tear-down operations occur in order:

    * validate that you have permissions to delete the node
    * validate that you have permissions to delete all tags
    * validate that there are no remaining references to the node.

    * delete all the tags (bottom up)
        * fire onDelTag() handlers
        * delete tag properties from storage
        * log tag:del splices

    * delete all secondary properties
        * fire onDelProp handler
        * delete secondary property from storage
        * log prop:del splices

    * delete the primary property
        * fire onDel handlers for the node
        * delete primary property from storage
        * log node:del splices

    :param force: when True, skip the referential-integrity checks that
        would otherwise refuse the delete.
    '''
    formname, formvalu = self.ndef

    # Runt (runtime-only) nodes are not backed by storage and cannot be deleted.
    if self.isrunt:
        raise s_exc.IsRuntForm(mesg='Cannot delete runt nodes',
                               form=formname, valu=formvalu)

    # Pair each tag with its length so deeper tags can be removed first.
    tags = [(len(t), t) for t in self.tags.keys()]

    # check for tag permissions
    # TODO

    # check for any nodes which reference us...
    if not force:
        # refuse to delete tag nodes with existing tags
        if self.form.name == 'syn:tag':
            async for _ in self.snap._getNodesByTag(self.ndef[1]):  # NOQA
                mesg = 'Nodes still have this tag.'
                return await self.snap._raiseOnStrict(s_exc.CantDelNode, mesg, form=formname)

        # Any other node of this type/value (besides ourselves) still refers to us.
        async for refr in self.snap._getNodesByType(formname, formvalu, addform=False):
            if refr.buid == self.buid:
                continue
            mesg = 'Other nodes still refer to this node.'
            return await self.snap._raiseOnStrict(s_exc.CantDelNode, mesg, form=formname)

    # Delete tags deepest-first (bottom up).
    for size, tag in sorted(tags, reverse=True):
        await self.delTag(tag, init=True)

    # Pop all secondary properties (list() copies keys while mutating).
    for name in list(self.props.keys()):
        await self.pop(name, init=True)

    # Build the storage delete operations and the node:del splice, then apply.
    sops = self.form.getDelOps(self.buid)
    splice = self.snap.splice('node:del', ndef=self.ndef)
    await self.snap.stor(sops, [splice])

    # Drop from the live-node cache, decrement the form counter, fire handlers.
    self.snap.livenodes.pop(self.buid)
    self.snap.core.pokeFormCount(formname, -1)
    await self.form.wasDeleted(self)
def run_mesh(self, mesh=100.0, shift=None, is_time_reversal=True,
             is_mesh_symmetry=True, with_eigenvectors=False,
             with_group_velocities=False, is_gamma_center=False):
    """Run mesh sampling phonon calculation.

    See the parameter details in Phonopy.init_mesh().
    """
    # Initialize the mesh with the forwarded options, then execute it.
    self.init_mesh(mesh=mesh,
                   shift=shift,
                   is_time_reversal=is_time_reversal,
                   is_mesh_symmetry=is_mesh_symmetry,
                   with_eigenvectors=with_eigenvectors,
                   with_group_velocities=with_group_velocities,
                   is_gamma_center=is_gamma_center)
    self._mesh.run()
def is_active(cache, token):
    """Accepts the cache and ID token and checks to see if the profile is
    currently logged in. If so, return the profile, otherwise throw a
    NotAuthenticatedException.

    :param cache: mapping-like store of token -> profile
    :param token: the ID token to look up
    :return: the cached profile for the token
    """
    profile = cache.get(token)
    if profile:
        return profile
    raise exceptions.NotAuthenticatedException(
        'The token is good, but you are not logged in. Please '
        'try logging in again.'
    )
def create_glir_message(commands, array_serialization=None):
    """Create a JSON-serializable message of GLIR commands. NumPy arrays
    are serialized according to the specified method.

    Arguments
    ---------
    commands : list
        List of GLIR commands.
    array_serialization : string or None
        Serialization method for NumPy arrays. Possible values are:
            'binary' (default): use a binary string
            'base64': base64 encoded string of the array
    """
    # Default serialization method for NumPy arrays.
    if array_serialization is None:
        array_serialization = 'binary'
    # Pull the NumPy buffers out of the commands.
    commands_modified, buffers = _extract_buffers(commands)
    # Serialize the commands (now holding buffer pointers) and the buffers.
    commands_serialized = [_serialize_command(c) for c in commands_modified]
    buffers_serialized = [_serialize_buffer(b, array_serialization) for b in buffers]
    # Assemble the final message.
    return {
        'msg_type': 'glir_commands',
        'commands': commands_serialized,
        'buffers': buffers_serialized,
    }
def show_warnings(self):
    """SHOW WARNINGS

    Tornado-style generator coroutine: issues a ``SHOW WARNINGS`` query,
    reads the result set, and hands back the warning rows.
    """
    yield self._execute_command(COMMAND.COM_QUERY, "SHOW WARNINGS")
    result = MySQLResult(self)
    yield result.read()
    # Legacy (pre `return value`) way to return a value from a tornado coroutine.
    raise gen.Return(result.rows)
def update_topic_counter(sender, topic, user, request, response, **kwargs):
    """Handles the update of the views counter associated with topics."""
    # F() expression -> a single atomic UPDATE, no read-modify-write race.
    manager = topic.__class__._default_manager
    manager.filter(id=topic.id).update(views_count=F('views_count') + 1)
def decline_noun(self, noun, gender, mimation=True):
    """Return a list of all possible declined forms given any form
    of a noun and its gender.

    :param noun: a form of the noun to decline
    :param gender: gender key into the endings table ('m' per the branches)
    :param mimation: unused here; kept for interface compatibility
    :return: list of (form, {'case': ..., 'number': ...}) tuples
    """
    stem = self.stemmer.get_stem(noun, gender)
    declension = []

    # Singular: masculine appends the full ending; other genders drop the
    # ending's first character ([1:]).
    for case in self.endings[gender]['singular']:
        if gender == 'm':
            form = stem + self.endings[gender]['singular'][case]
        else:
            form = stem + self.endings[gender]['singular'][case][1:]
        declension.append((form, {'case': case, 'number': 'singular'}))

    # Dual: same masculine/non-masculine split as the singular.
    for case in self.endings[gender]['dual']:
        if gender == 'm':
            form = stem + self.endings[gender]['dual'][case]
        else:
            form = stem + self.endings[gender]['dual'][case][1:]
        declension.append((form, {'case': case, 'number': 'dual'}))

    # Plural: non-masculine endings are selected by the stem's theme vowel.
    for case in self.endings[gender]['plural']:
        if gender == 'm':
            form = stem + self.endings[gender]['plural'][case]
        else:
            # Theme vowel: long vowel third from the stem's end, else 'ā'.
            if stem[-3] in self.akkadian['macron_vowels']:
                theme_vowel = stem[-3]
            else:
                theme_vowel = 'ā'
            # Pick the plural ending starting with that theme vowel.
            ending = [x for x in self.endings[gender]['plural'][case] if x[0] == theme_vowel]
            # How much of the stem tail to replace depends on its final letters.
            if stem[-2] in self.akkadian['short_vowels']:
                form = stem[:-2] + ending[0]
            elif stem[-1] in self.akkadian['consonants'] and stem[-2] in self.akkadian['macron_vowels']:
                form = stem + ending[0]
            else:
                form = stem[:-1] + ending[0]
        declension.append((form, {'case': case, 'number': 'plural'}))

    return declension
def weighted_sum(groupe, var):
    '''Compute the weighted sum of variable ``var`` within a group, using
    the group's 'pondmen' column as weights.'''
    values = groupe[var]
    weights = groupe['pondmen']
    return (values * weights).sum()
def kldiv_cs_model(prediction, fm):
    """Computes Chao-Shen corrected KL-divergence between prediction
    and fdm made from fixations in fm.

    Parameters :
        prediction : np.ndarray
            a fixation density map
        fm : FixMat object

    Returns np.NaN when the fixmat contains no fixations.
    """
    # compute histogram of fixations needed for ChaoShen corrected kl-div
    # image category must exist (>-1) and image_size must be non-empty
    assert (len(fm.image_size) == 2 and (fm.image_size[0] > 0) and (fm.image_size[1] > 0))
    assert (-1 not in fm.category)
    # check whether fixmat contains fixations
    if len(fm.x) == 0:
        return np.NaN
    (scale_factor, _) = calc_resize_factor(prediction, fm.image_size)
    # this specifies left edges of the histogram bins, i.e. fixations between
    # ]0 binedge[0]] are included. --> fixations are ceiled
    e_y = np.arange(0, np.round(scale_factor * fm.image_size[0] + 1))
    e_x = np.arange(0, np.round(scale_factor * fm.image_size[1] + 1))
    # (y, x) sample pairs, rescaled to the prediction's resolution.
    samples = np.array(list(zip((scale_factor * fm.y), (scale_factor * fm.x))))
    (fdm, _) = np.histogramdd(samples, (e_y, e_x))
    # compute ChaoShen corrected kl-div
    q = np.array(prediction, copy=True)
    # Replace exact zeros with machine epsilon so log2(q) stays finite.
    q[q == 0] = np.finfo(q.dtype).eps
    q /= np.sum(q)
    (H, pa, la) = chao_shen(fdm)
    # Only cells that actually received fixations contribute.
    q = q[fdm > 0]
    cross_entropy = -np.sum((pa * np.log2(q)) / la)
    # KL = cross-entropy minus the (Chao-Shen corrected) entropy H.
    return (cross_entropy - H)
def reset_scan_stats(self):
    """Clears the scan event statistics and updates the last reset time."""
    # Zero every scalar counter in one pass.
    for counter in ('_scan_event_count', '_v1_scan_count',
                    '_v1_scan_response_count', '_v2_scan_count'):
        setattr(self, counter, 0)
    self._device_scan_counts = {}
    self._last_reset_time = time.time()
def _verify_dict_list ( self , values , keys , name ) :
'''Validate a list of ` dict ` , ensuring it has specific keys
and no others .
: param values : A list of ` dict ` to validate .
: param keys : A list of keys to validate each ` dict ` against .
: param name : Name describing the values , to show in error messages .'''
|
keys = set ( keys )
name = name . title ( )
for value in values :
if not isinstance ( value , Mapping ) :
raise MessageError ( 'Invalid {0} value' . format ( name ) )
for key in keys :
if key not in value :
err = '{0} must contain "{1}"'
raise MessageError ( err . format ( name , key ) )
if set ( value ) - keys :
err = '{0} must contain only {1}'
words = [ '"{0}"' . format ( r ) for r in sorted ( keys ) ]
words = ' and ' . join ( words )
raise MessageError ( err . format ( name , words ) )
|
def segmentlistdict_to_short_string(seglists):
    """Return a string representation of a segmentlistdict object.

    Each segmentlist in the dictionary is encoded using to_range_strings()
    with "," used to delimit segments. The keys are converted to strings
    and paired with the string representations of their segmentlists using
    "=" as a delimiter. Finally the key=value strings are combined using
    "/" to delimit them.

    Example:

    >>> from pycbc_glue.segments import *
    >>> segs = segmentlistdict({"H1": segmentlist([segment(0, 10), segment(35, 35), segment(100, infinity())]), "L1": segmentlist([segment(5, 15), segment(45, 60)])})
    >>> segmentlistdict_to_short_string(segs)
    'H1=0:10,35,100:/L1=5:15,45:60'

    This function, and its inverse segmentlistdict_from_short_string(),
    are intended to be used to allow small segmentlistdict objects to be
    encoded in command line options and config files. For large
    segmentlistdict objects or when multiple sets of segmentlists are
    required, the LIGO Light Weight XML encoding available through the
    pycbc_glue.ligolw library should be used.
    """
    parts = []
    for key, value in seglists.items():
        parts.append("%s=%s" % (str(key), ",".join(to_range_strings(value))))
    return "/".join(parts)
def get_dns_zone_ids(env='dev', facing='internal'):
    """Get Route 53 Hosted Zone IDs for _env_.

    Args:
        env (str): Deployment environment.
        facing (str): Type of ELB, external or internal.

    Returns:
        list: Hosted Zone IDs for _env_. Only *PrivateZone* when _facing_
        is internal.
    """
    client = boto3.Session(profile_name=env).client('route53')
    zones = client.list_hosted_zones_by_name(DNSName='.'.join([env, DOMAIN]))

    zone_ids = []
    for zone in zones['HostedZones']:
        LOG.debug('Found Hosted Zone: %s', zone)
        # External lookups accept every zone; internal ones only private zones.
        if facing == 'external' or zone['Config']['PrivateZone']:
            LOG.info('Using %(Id)s for "%(Name)s", %(Config)s', zone)
            zone_ids.append(zone['Id'])

    LOG.debug('Zone IDs: %s', zone_ids)
    return zone_ids
def write(self, s):
    """Write wrapper.

    Parameters
    ----------
    s : bytes
        Bytes to write

    Raises
    ------
    IOError
        When the underlying handle raises OSError (device disconnected).
    """
    try:
        count = self.handle.write(s)
    except OSError:
        print()
        print("Piksi disconnected")
        print()
        raise IOError
    return count
def bass(self, bass):
    """Set the speaker's bass level.

    The value is coerced to int and clamped into [-10, 10] before being
    sent to the rendering-control service.
    """
    level = max(-10, min(int(bass), 10))
    self.renderingControl.SetBass([('InstanceID', 0), ('DesiredBass', level)])
def get_path(self, path, query=None):
    """Make a GET request, optionally including a query, to a relative path.

    The path of the request includes a path on top of the base URL
    assigned to the endpoint.

    Parameters
    ----------
    path : str
        The path to request, relative to the endpoint
    query : DataQuery, optional
        The query to pass when making the request

    Returns
    -------
    resp : requests.Response
        The server's response to the request

    See Also
    --------
    get_query, get, url_path
    """
    full_url = self.url_path(path)
    return self.get(full_url, query)
def MGMT_COMM_GET(self, Addr='ff02::1', TLVs=[]):
    """send MGMT_COMM_GET command

    :param Addr: destination address (unused here; kept for API parity)
    :param TLVs: list of TLV type numbers to request, hex-encoded into the command

    Returns:
        True: successful to send MGMT_COMM_GET
        False: fail to send MGMT_COMM_GET
    """
    # NOTE: Python 2 module (print statement, `except Exception, e`).
    # NOTE(review): mutable default argument TLVs=[]; appears safe only
    # because TLVs is never mutated here.
    print '%s call MGMT_COMM_GET' % self.port
    try:
        cmd = 'commissioner mgmtget'
        if len(TLVs) != 0:
            # Encode each TLV type as a two-digit hex byte (no 0x prefix).
            tlvs = "".join(hex(tlv).lstrip("0x").zfill(2) for tlv in TLVs)
            cmd += ' binary '
            cmd += tlvs
        print cmd
        return self.__sendCommand(cmd)[0] == 'Done'
    except Exception, e:
        # On failure, log and implicitly return None (falsy).
        ModuleHelper.WriteIntoDebugLogger("MGMT_COMM_GET() Error: " + str(e))
def delete(self, id=None):
    """Delete a record from the database.

    :param id: The id of the row to delete
    :type id: mixed

    :return: The number of rows deleted
    :rtype: int
    """
    # Narrow the query to the given primary key when one is supplied.
    if id is not None:
        self.where('id', '=', id)

    sql = self._grammar.compile_delete(self)
    return self._connection.delete(sql, self.get_bindings())
def compose_tree_path(tree, issn=False):
    """Compose absolute path for given `tree`.

    Args:
        tree (obj): :class:`.Tree` instance.
        issn (bool, default False): Compose URL using ISSN.

    Returns:
        str: Absolute path of the tree, without server's address and protocol.
    """
    if issn:
        return join("/", ISSN_DOWNLOAD_KEY, basename(tree.issn))

    # Quote the path but keep literal slashes as separators.
    quoted = quote_plus(tree.path).replace("%2F", "/")
    return join("/", PATH_DOWNLOAD_KEY, quoted)
def _deserialize(self, value, attr, obj):
    """Deserializes a string into a Pendulum object.

    Returns the value untouched when date conversion is disabled via the
    schema context or the value is empty; otherwise parses via the parent
    field and validates the configured timezone, if any.
    """
    if not self.context.get('convert_dates', True) or not value:
        return value
    # NOTE(review): the parent is called with ``value`` as the third
    # argument rather than ``obj`` — preserved as-is; confirm intended.
    value = super(PendulumField, self)._deserialize(value, attr, value)
    timezone = self.get_field_value('timezone')
    target = pendulum.instance(value)
    # Reject datetimes whose textual rendering changes under the
    # configured zone, i.e. values not expressed in that timezone.
    if (timezone and (text_type(target) != text_type(target.in_timezone(timezone)))):
        raise ValidationError("The provided datetime is not in the "
                              "{} timezone.".format(timezone))
    return target
def server(self, default_not_found=True, base_url=None):
    """Returns a WSGI compatible API server for the given Hug API module.

    :param default_not_found: when True, use the generated documentation
        404 page as the default not-found handler; falsy disables it
    :param base_url: overrides the API's configured base URL when given
    """
    falcon_api = falcon.API(middleware=self.middleware)
    default_not_found = self.documentation_404() if default_not_found is True else None
    base_url = self.base_url if base_url is None else base_url

    not_found_handler = default_not_found
    self.api._ensure_started()
    if self.not_found_handlers:
        if len(self.not_found_handlers) == 1 and None in self.not_found_handlers:
            # Single unversioned handler: use it directly.
            not_found_handler = self.not_found_handlers[None]
        else:
            # Versioned handlers: dispatch through the version router.
            not_found_handler = partial(self.version_router, api_version=False,
                                        versions=self.not_found_handlers, not_found=default_not_found)
            not_found_handler.interface = True

    if not_found_handler:
        falcon_api.add_sink(not_found_handler)
        self._not_found = not_found_handler

    # Register sinks (catch-all handlers that receive the trailing path).
    for sink_base_url, sinks in self.sinks.items():
        for url, extra_sink in sinks.items():
            falcon_api.add_sink(extra_sink, sink_base_url + url + '(?P<path>.*)')

    # Build one responder object per route URL with on_<method> entries.
    for router_base_url, routes in self.routes.items():
        for url, methods in routes.items():
            router = {}
            for method, versions in methods.items():
                method_function = "on_{0}".format(method.lower())
                if len(versions) == 1 and None in versions.keys():
                    router[method_function] = versions[None]
                else:
                    router[method_function] = partial(self.version_router, versions=versions,
                                                      not_found=not_found_handler)
            # Falcon expects an object exposing on_* methods; a namedtuple suffices.
            router = namedtuple('Router', router.keys())(**router)
            falcon_api.add_route(router_base_url + url, router)
            if self.versions and self.versions != (None, ):
                # Also expose the explicitly versioned URL form.
                falcon_api.add_route(router_base_url + '/v{api_version}' + url, router)

    def error_serializer(request, response, error):
        # Serialize falcon errors using the API's configured output format.
        response.content_type = self.output_format.content_type
        response.body = self.output_format({"errors": {error.title: error.description}},
                                           request, response)

    falcon_api.set_error_serializer(error_serializer)
    return falcon_api
def num_buttons(self):
    """The number of buttons on a device with the
    :attr:`~libinput.constant.DeviceCapability.TABLET_PAD` capability.

    Buttons on a pad device are numbered sequentially, see
    `Tablet pad button numbers`_ for details.

    Returns:
        int: The number of buttons supported by the device.
    Raises:
        AttributeError: if the device is not a tablet pad device.
    """
    count = self._libinput.libinput_device_tablet_pad_get_num_buttons(self._handle)
    # A negative count signals that this is not a tablet pad device.
    if count < 0:
        raise AttributeError('This device is not a tablet pad device')
    return count
def _create_diff_action ( diff , diff_key , key , value ) :
'''DRY to build diff parts ( added , removed , updated ) .'''
|
if diff_key not in diff . keys ( ) :
diff [ diff_key ] = { }
diff [ diff_key ] [ key ] = value
|
def maybe_coroutine(decide):
    """Either be a coroutine or not.

    Use as a decorator::

        @maybe_coroutine(lambda maybeAPromise: isinstance(maybeAPromise, Promise))
        def foo(maybeAPromise):
            result = yield maybeAPromise
            print("hello")
            return result

    The decorated function should be a generator yielding either only
    Promises or whatever you feel like.

    ``decide`` is called with the same arguments as the decorated function
    and determines, per call, whether the function behaves as a coroutine
    (Promises get resolved asynchronously and their results pushed back
    into the generator) or as a plain synchronous function (values are
    pushed back into the generator unchanged, as ``no_coroutine`` does).
    """
    def _maybe_coroutine(f):
        @functools.wraps(f)
        def __maybe_coroutine(*args, **kwargs):
            # Choose the execution strategy at call time from the arguments.
            runner = coroutine(f) if decide(*args, **kwargs) else no_coroutine(f)
            return runner(*args, **kwargs)
        return __maybe_coroutine
    return _maybe_coroutine
def with_query(self, *args, **kwargs):
    """Return a new URL with query part replaced.

    Accepts any Mapping (e.g. dict, multidict.MultiDict instances)
    or str, autoencode the argument if needed.

    A sequence of (key, value) pairs is supported as well.

    It also can take an arbitrary number of keyword arguments.

    Clear query if None is passed.
    """
    # N.B. doesn't cleanup query/fragment
    new_query = self._get_str_query(*args, **kwargs)
    new_val = self._val._replace(path=self._val.path, query=new_query)
    return URL(new_val, encoded=True)
def get_id(brain_or_object):
    """Get the Plone ID for this object.

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: Plone ID
    :rtype: string
    """
    # Brains expose the ID as catalog metadata; prefer it to avoid
    # waking up the full content object.
    if is_brain(brain_or_object) and base_hasattr(brain_or_object, "getId"):
        return brain_or_object.getId
    return get_object(brain_or_object).getId()
def _expected_condition_value_in_element_attribute(self, element_attribute_value):
    """Tries to find the element and checks that it contains the requested
    attribute with the expected value, but does not throw an exception if
    the element is not found.

    :param element_attribute_value: Tuple with 3 items where:
        [0] element: PageElement or element locator as a tuple (locator_type, locator_value) to be found
        [1] attribute: element's attribute where to check its value
        [2] value: expected value for the element's attribute
    :returns: the web element if it contains the expected value for the requested attribute or False
    :rtype: selenium.webdriver.remote.webelement.WebElement or appium.webdriver.webelement.WebElement
    """
    element, attribute, expected = element_attribute_value
    web_element = self._expected_condition_find_element(element)
    try:
        if web_element and web_element.get_attribute(attribute) == expected:
            return web_element
        return False
    except StaleElementReferenceException:
        # Element went stale between lookup and the attribute read.
        return False
def from_value(cls, value: str) -> T:
    """Create instance from symbol.

    :param value: unique symbol
    :return: the member whose ``value[0]`` equals the symbol
    :raises IndexError: when no member matches

    Usage:

        >>> from owlmixin.samples import Animal
        >>> Animal.from_value('cat').crow()
        mewing
    """
    matches = [m for m in cls.__members__.values() if m.value[0] == value]
    return matches[0]
def setNreps(self, nreps):
    """Sets the number of reps before the raster plot resets."""
    # Propagate the setting to every response plot.
    for response_plot in self.responsePlots.values():
        response_plot.setNreps(nreps)
def dump(props, output):
    """Dumps a dict of properties to the specified open stream or file path.

    :API: public

    :param props: mapping of property names to values
    :param output: a writable object, or a path string to open with 'w+'
    :raises TypeError: when ``output`` is neither writable nor a path
    """
    def _escape(token):
        # Backslash-escape '=', ':' and whitespace in keys and values.
        return re.sub(r'([=:\s])', r'\\\1', token)

    def _emit(stream):
        for key, val in props.items():
            stream.write('%s=%s\n' % (_escape(str(key)), _escape(str(val))))

    if hasattr(output, 'write') and callable(output.write):
        _emit(output)
    elif isinstance(output, six.string_types):
        with open(output, 'w+') as stream:
            _emit(stream)
    else:
        raise TypeError('Can only dump data to a path or a writable object, given: %s' % output)
def set_interrupt(self, method=None, **kwargs):
    """Decorator that turns a function or controller method into an action interrupt.

    Usable bare (``@set_interrupt``) or with keyword arguments
    (``action_id``, ``name``, ``controller_class``).
    """
    def action_wrap(f):
        action_id = kwargs.get("action_id", f.__name__)
        name = kwargs.get("name", action_id)
        if inspect.ismethod(f):  # not "." in f.__qualname__:
            # Bound method: install the interrupt directly on this instance.
            self._interrupt = _ActionInterrupt(f)
            self._ui_parameters["interrupt_enabled"] = True
            return self._interrupt
        else:
            # Plain function / unbound method: register by qualified name.
            qual_name = getattr(f, "__qualname__", None)
            owner_class = kwargs.get("controller_class", None)
            if owner_class:
                qual_name = owner_class + "." + f.__name__
            if qual_name:
                Actions.add_unbound_interrupt(qual_name, self)
            else:
                # No __qualname__ available (e.g. MicroPython) and no
                # controller_class given: nothing to register under.
                print("using upython? if yes you need to pass the name of the controller class via the controller_class parameter.")
            return f
    if method:
        # Used as a bare decorator: wrap immediately.
        return action_wrap(method)
    else:
        # Used with arguments: return the decorator itself.
        return action_wrap
def info(cls, name, message, *args):
    """Convenience function to log a message at the INFO level.

    :param name: The name of the logger instance in the VSG namespace (VSG.<name>)
    :param message: A message format string.
    :param args: The arguments that are merged into msg using the string formatting operator.
    :..note: The native logger's `kwargs` are not used in this function.
    """
    logger = cls.getLogger(name)
    logger.info(message, *args)
|
def generate_block_from_parent_header_and_coinbase(cls, parent_header: BlockHeader, coinbase: Address) -> BaseBlock:
    """Generate block from parent header and coinbase."""
    # Child header is stamped one second after the parent and credits the
    # given coinbase address.
    header = generate_header_from_parent_header(
        cls.compute_difficulty,
        parent_header,
        coinbase,
        timestamp=parent_header.timestamp + 1,
    )
    block_class = cls.get_block_class()
    return block_class(header, transactions=[], uncles=[])
|
def columns(self):
    """Return the columns of the result set."""
    # The executed query's schema carries one field object per column.
    schema = self.query.result().schema
    return [field.name for field in schema]
|
def get_host_template(resource_root, name, cluster_name):
    """Lookup a host template by name in the specified cluster.

    @param resource_root: The root Resource object.
    @param name: Host template name.
    @param cluster_name: Cluster name.
    @return: An ApiHostTemplate object.
    @since: API v3
    """
    path = HOST_TEMPLATE_PATH % (cluster_name, name)
    return call(resource_root.get, path, ApiHostTemplate, api_version=3)
|
def openstack_undercloud_install(self):
    """Deploy an undercloud on the host.

    Applies known-bug workarounds (BZ1298189, BZ1297796) around the
    ``openstack undercloud install`` run, then sanity-checks the deployment
    by sourcing stackrc and listing heat stacks as the stack user.
    """
    # Detect the installed instack-undercloud build to decide whether the
    # BZ1298189 puppet ordering workaround is needed.
    instack_undercloud_ver, _ = self.run('repoquery --whatprovides /usr/share/instack-undercloud/puppet-stack-config/puppet-stack-config.pp')
    if instack_undercloud_ver.rstrip('\n') == 'instack-undercloud-0:2.2.0-1.el7ost.noarch':
        LOG.warn('Workaround for BZ1298189')
        # Patch the puppet manifest in place to fix resource ordering.
        self.run("sed -i \"s/.*Keystone_domain\['heat_domain'\].*/Service\['keystone'\] -> Class\['::keystone::roles::admin'\] -> Class\['::heat::keystone::domain'\]/\" /usr/share/instack-undercloud/puppet-stack-config/puppet-stack-config.pp")
    self.run('OS_PASSWORD=bob openstack undercloud install', user='stack')
    # NOTE(Gonéri): we also need this after the overcloud deployment
    if self.run('rpm -qa openstack-ironic-api')[0].rstrip('\n') == 'openstack-ironic-api-4.2.2-3.el7ost.noarch':
        # BZ1297796: this ironic-api build does not start its service itself.
        LOG.warn('Workaround for BZ1297796')
        self.run('systemctl start openstack-ironic-api.service')
    self.add_environment_file(user='stack', filename='stackrc')
    # Smoke test: verify the freshly installed undercloud answers heat calls.
    self.run('heat stack-list', user='stack')
|
def format_code(source, preferred_quote="'"):
    """Return source code with quotes unified."""
    result = source
    try:
        result = _format_code(source, preferred_quote)
    except (tokenize.TokenError, IndentationError):
        # Unparsable input: hand the source back untouched.
        pass
    return result
|
def password(self, length: int = 8, hashed: bool = False) -> str:
    """Generate a password or hash of password.

    :param length: Length of password.
    :param hashed: MD5 hash.
    :return: Password or hash of password.

    :Example:
        k6dv2odff9#4h
    """
    # Draw each character from letters, digits and punctuation.
    alphabet = ascii_letters + digits + punctuation
    raw = ''.join(self.random.choice(alphabet) for _ in range(length))
    if not hashed:
        return raw
    digest = hashlib.md5()
    digest.update(raw.encode())
    return digest.hexdigest()
|
def take_branch(self, example):
    """Returns a `DecisionTreeNode` instance that can better classify
    `example` based on the selectors value.

    If there are no more branches (ie, this node is a leaf) or the
    attribute gives a value for an unexistent branch then this method
    returns None.
    """
    if self.attribute is None:
        # Leaf node: nothing further to descend into.
        return None
    branch_key = self.attribute(example)
    return self.branches.get(branch_key)
|
def produce_scattertext_html(term_doc_matrix, category, category_name, not_category_name, protocol='https', minimum_term_frequency=DEFAULT_MINIMUM_TERM_FREQUENCY, pmi_threshold_coefficient=DEFAULT_PMI_THRESHOLD_COEFFICIENT, max_terms=None, filter_unigrams=False, height_in_pixels=None, width_in_pixels=None, term_ranker=termranking.AbsoluteFrequencyRanker):
    '''Returns html code of visualization.

    Parameters
    ----------
    term_doc_matrix : TermDocMatrix
        Corpus to use
    category : str
        name of category column
    category_name : str
        name of category to mine for
    not_category_name : str
        name of everything that isn't in category
    protocol : str
        optional, used protocol, http or https
    minimum_term_frequency : int, optional
        Minimum number of times word needs to appear to make it into visualization.
    pmi_threshold_coefficient : int, optional
        Filter out bigrams with a PMI of < 2 * pmi_threshold_coefficient. Default is 6.
    max_terms : int, optional
        Maximum number of terms to include in visualization.
    filter_unigrams : bool
        default False, do we filter unigrams that only occur in one bigram
    width_in_pixels : int
        width of viz in pixels, if None, default to JS's choice
    height_in_pixels : int
        height of viz in pixels, if None, default to JS's choice
    term_ranker : TermRanker
        TermRanker class for determining term frequency ranks.

    Returns
    -------
    str, html of visualization
    '''
    # Build the chart data, wrap it in the JS plotting structure, render HTML.
    chart = ScatterChart(term_doc_matrix=term_doc_matrix, minimum_term_frequency=minimum_term_frequency, pmi_threshold_coefficient=pmi_threshold_coefficient, filter_unigrams=filter_unigrams, max_terms=max_terms, term_ranker=term_ranker)
    chart_data = chart.to_dict(category=category, category_name=category_name, not_category_name=not_category_name, transform=percentile_alphabetical)
    plot_structure = ScatterplotStructure(VizDataAdapter(chart_data), width_in_pixels, height_in_pixels)
    return BasicHTMLFromScatterplotStructure(plot_structure).to_html(protocol=protocol)
|
def _client(self, id, secret):
    """Performs client login with the provided credentials"""
    url = self.api_url + self.auth_token_url
    # HTTP Basic auth header built from "id:secret".
    credentials = base64.b64encode(('%s:%s' % (id, secret)).encode()).decode()
    headers = {
        'Authorization': "Basic " + credentials,
        'Content-Type': "application/x-www-form-urlencoded",
    }
    params = {'grant_type': 'client_credentials', 'response_type': 'token'}
    return self.session.post(url, params=params, headers=headers)
|
def get_standings(date):
    """Return the standings file for current standings (given current date).

    :param date: a ``datetime.date``/``datetime`` giving the standings day.
    :return: the open HTTP response for the standings file.
    :raises ValueError: if mlb.com does not provide the standings file.
    """
    try:
        return urlopen(STANDINGS_URL.format(date.year, date.strftime('%Y/%m/%d')))
    except HTTPError:
        # BUG FIX: the ValueError was previously constructed but never raised,
        # so a failed download silently returned None to the caller.
        raise ValueError('Could not find the standings file. '
                         'mlb.com does not provide the file that '
                         'mlbgame needs to perform this operation.')
|
def weighted_n(self):
    """float count of returned rows adjusted for weighting."""
    if not self.is_weighted:
        # Unweighted cube: the raw row count is already the answer.
        return float(self.unweighted_n)
    counts = self._cube_dict["result"]["measures"]["count"]["data"]
    return float(sum(counts))
|
def open_in_browser(file_location):
    """Attempt to open file located at file_location in the default web
    browser."""
    # If just the name of the file was given, check if it's in the Current
    # Working Directory.
    if not os.path.isfile(file_location):
        file_location = os.path.join(os.getcwd(), file_location)
    if not os.path.isfile(file_location):
        raise IOError("\n\nFile not found.")
    # For some reason OSX requires this adjustment (tested on 10.10.4)
    if sys.platform == "darwin":
        file_location = "file:///" + file_location
    # new=2 asks the browser to open in a new tab, if possible
    webbrowser.get().open(file_location, new=2)
|
def parse_compound(compound_def, context=None):
    """Parse a structured compound definition as obtained from a YAML file.

    Returns a CompoundEntry.
    """
    _check_id(compound_def.get('id'), 'Compound')
    # No line/column information is available for parsed YAML nodes.
    return CompoundEntry(compound_def, FileMark(context, None, None))
|
def eval_jacobian(self, ordered_parameters=[], **parameters):
    """Jacobian of :math:`S` in the
    :class:`~symfit.core.argument.Parameter`'s (:math:`\\nabla_\\vec{p} S`).

    :param parameters: values of the
        :class:`~symfit.core.argument.Parameter`'s to evaluate :math:`\\nabla_\\vec{p} S` at.
    :return: ``np.array`` of length equal to the number of parameters..
    """
    # NOTE(review): mutable default `ordered_parameters=[]` is shared across
    # calls; appears safe only because it is never mutated here.
    evaluated_func = super(LeastSquares, self).__call__(ordered_parameters, **parameters)
    evaluated_jac = super(LeastSquares, self).eval_jacobian(ordered_parameters, **parameters)
    result = 0
    # Accumulate one gradient contribution per dependent variable.
    for var, f, jac_comp in zip(self.model.dependent_vars, evaluated_func, evaluated_jac):
        y = self.dependent_data[var]
        sigma_var = self.model.sigmas[var]
        if y is not None:
            # Components without observed data do not contribute.
            sigma = self.sigma_data[sigma_var]
            # Weighted residual times jacobian component; the leading axis of
            # jac_comp is the parameter axis, broadcast over the data axes.
            pre_sum = jac_comp * ((y - f) / sigma ** 2)[np.newaxis, ...]
            # Sum over all data axes, keeping only the parameter axis.
            axes = tuple(range(1, len(pre_sum.shape)))
            result -= np.sum(pre_sum, axis=axes, keepdims=False)
    return np.atleast_1d(np.squeeze(np.array(result)))
|
def reader(fname):
    '''Helper function to open the results file (coords file) and create alignment objects with the values in it'''
    handle = pyfastaq.utils.open_file_read(fname)
    for line in handle:
        # Skip header lines ('[...]') and anything that is not tab-delimited.
        if line.startswith('[') or '\t' not in line:
            continue
        yield alignment.Alignment(line)
    pyfastaq.utils.close(handle)
|
def keyReleaseEvent(self, event):
    """Reimplement Qt method.

    Handle "most recent used" tab behavior: when ctrl is released and the
    tab switcher is visible, the tab is changed.
    """
    if self.isVisible():
        shortcut = get_shortcut(context='Editor', name='Go to next file')
        released = event.key()
        for part in shortcut.split('+'):
            part = part.lower()
            # Select when the modifier used by the shortcut is the key released.
            if ((part == 'ctrl' and released == Qt.Key_Control) or
                    (part == 'alt' and released == Qt.Key_Alt)):
                self.item_selected()
    event.accept()
|
def _get_selected_cipher_suite(server_connectivity: ServerConnectivityInfo, ssl_version: OpenSslVersionEnum, openssl_cipher_str: str, should_use_legacy_openssl: Optional[bool]) -> 'AcceptedCipherSuite':
    """Given an OpenSSL cipher string (which may specify multiple cipher suites), return the cipher suite that was
    selected by the server during the SSL handshake.
    """
    connection = server_connectivity.get_preconfigured_ssl_connection(override_ssl_version=ssl_version, should_use_legacy_openssl=should_use_legacy_openssl)
    connection.ssl_client.set_cipher_list(openssl_cipher_str)
    try:
        # Perform the SSL handshake
        connection.connect()
        selected_cipher = AcceptedCipherSuite.from_ongoing_ssl_connection(connection, ssl_version)
    except ClientCertificateRequested:
        # The handshake got far enough to know which cipher the server picked.
        selected_cipher = AcceptedCipherSuite.from_ongoing_ssl_connection(connection, ssl_version)
    finally:
        connection.close()
    return selected_cipher
|
def apply(self, x):
    """Apply Householder transformation to vector x.

    Applies the Householder transformation efficiently to the given vector.
    """
    # make sure that x is a (N,*) matrix
    if len(x.shape) != 2:
        raise ArgumentError('x is not a matrix of shape (N,*)')
    if self.beta == 0:
        # Identity transformation: nothing to reflect.
        return x
    projection = numpy.dot(self.v.T.conj(), x)
    return x - self.beta * self.v * projection
|
def get_message(self, method, args, kwargs, options=None):
    """Get the soap message for the specified method, args and soapheaders.

    This is the entry point for creating the outbound soap message.

    @param method: The method being invoked.
    @type method: I{service.Method}
    @param args: A list of args for the method invoked.
    @type args: list
    @param kwargs: Named (keyword) args for the method invoked.
    @type kwargs: dict
    @return: The soap envelope.
    @rtype: L{Document}
    """
    header = self.header(self.headercontent(method, options=options))
    body = self.body(self.bodycontent(method, args, kwargs))
    env = self.envelope(header, body)
    # Either normalize and promote prefixes to the envelope, or strip
    # redundant ones, depending on the configured option.
    if self.options().prefixes:
        body.normalizePrefixes()
        env.promotePrefixes()
    else:
        env.refitPrefixes()
    return Document(env)
|
def position(self):
    """Read/write :ref:`XlDataLabelPosition` member specifying the position
    of this data label with respect to its data point, or |None| if no
    position is specified. Assigning |None| causes PowerPoint to choose
    the default position, which varies by chart type.
    """
    dLbl = self._dLbl
    if dLbl is None:
        # No data-label element at all: no position recorded.
        return None
    pos_element = dLbl.dLblPos
    return None if pos_element is None else pos_element.val
|
def list_current_orders(self, bet_ids=None, market_ids=None, order_projection=None, customer_order_refs=None, customer_strategy_refs=None, date_range=time_range(), order_by=None, sort_dir=None, from_record=None, record_count=None, session=None, lightweight=None):
    """Returns a list of your current orders.

    :param list bet_ids: If you ask for orders, restricts the results to orders with the specified bet IDs
    :param list market_ids: One or more market ids
    :param str order_projection: Optionally restricts the results to the specified order status
    :param list customer_order_refs: Optionally restricts the results to the specified customer order references
    :param list customer_strategy_refs: Optionally restricts the results to the specified customer strategy
        references
    :param dict date_range: Optionally restricts the results to be from/to the specified date, these dates
        are contextual to the orders being returned and therefore the dates used to filter on will change
        to placed, matched, voided or settled dates depending on the orderBy
    :param str order_by: Specifies how the results will be ordered. If no value is passed in, it defaults to BY_BET
    :param str sort_dir: Specifies the direction the results will be sorted in
    :param int from_record: Specifies the first record that will be returned
    :param int record_count: Specifies how many records will be returned from the index position 'fromRecord'
    :param requests.session session: Requests session object
    :param bool lightweight: If True will return dict not a resource

    :rtype: resources.CurrentOrders
    """
    # NOTE(review): default `date_range=time_range()` is evaluated once at
    # import time and shared across calls -- confirm time_range() is inert.
    # clean_locals() turns this call's arguments into the request payload,
    # so no new local variables may be introduced before this line.
    params = clean_locals(locals())
    method = '%s%s' % (self.URI, 'listCurrentOrders')
    (response, elapsed_time) = self.request(method, params, session)
    return self.process_response(response, resources.CurrentOrders, elapsed_time, lightweight)
|
def __get_verb(counts):
    """Let's fetch a VERB

    :param counts:
    """
    cursor = CONN.cursor()
    check_query = "select verb_id from surverbs"
    cursor.execute(check_query)
    # Collect the ids that actually exist in the table.
    id_list = [row[0] for row in cursor.fetchall()]
    # Rejection-sample random ids until we hit one present in the table.
    rand = random.randint(1, counts['max_verb'])
    while rand not in id_list:
        rand = random.randint(1, counts['max_verb'])
    query = "select * from surverbs where verb_id = {0}".format(rand)
    cursor.execute(query)
    result = cursor.fetchone()
    # cursor.close()
    return result[1]
|
def make_alf_dirs_(self):
    """DEPRECATED"""
    # One working directory per class: tmpdir/class1, tmpdir/class2, ...
    self.alf_dirs = {
        k + 1: errors.directorymake(fileIO.join_path(self.tmpdir, 'class{0:0>1}'.format(k + 1)))
        for k in range(self.num_classes)
    }
|
def MAX(values, *others):
    """DECISIVE MAX

    :param values:
    :param others:
    :return:
    """
    if others:
        # Legacy calling convention MAX(a, b, c): warn, then fold into a list.
        from mo_logs import Log
        Log.warning("Calling wrong")
        return MAX([values] + list(others))
    output = Null
    for candidate in values:
        # `== None` (not `is None`) is deliberate: it also matches Null.
        if candidate == None:
            continue
        if output == None or candidate > output:
            output = candidate
    return output
|
def refresh_authorization(self):
    """Refreshes the authorization tokens.

    :return: Dictionary containing auth tokens, expiration info, and response status.
    :rtype: ``dict``
    """
    payload = {
        'grant_type': 'refresh_token',
        'client_id': self.auth.client_id,
        'client_secret': self.auth.client_secret,
        'refresh_token': self.auth.refresh_token,
    }
    response = self.request_handler.post(endpoint='oauth/token', omit_api_version=True, data=payload)
    data = response.json()
    # Convert the relative 'expires_in' (seconds) into an absolute UTC time.
    expires_at = datetime.datetime.utcnow() + datetime.timedelta(seconds=data['expires_in'])
    self.auth.update(token_expiration=expires_at, access_token=data['access_token'], refresh_token=data['refresh_token'])
    return {
        'access_token': self.auth.access_token,
        'refresh_token': self.auth.refresh_token,
        'token_expiration': format_event_time(self.auth.token_expiration),
    }
|
def GET_name_info(self, path_info, name):
    """Look up a name's zonefile, address, and last TXID.

    Reply status, zonefile, zonefile hash, address, and last TXID.
    'status' can be 'available', 'registered', 'revoked', or 'pending'
    """
    # Reject anything that is neither a valid on-chain name nor a subdomain.
    if not check_name(name) and not check_subdomain(name):
        return self._reply_json({'error': 'Invalid name or subdomain'}, status_code=400)
    blockstackd_url = get_blockstackd_url()
    name_rec = None
    try:
        name_rec = blockstackd_client.get_name_record(name, include_history=False, hostport=blockstackd_url)
    except ValueError:
        return self._reply_json({'error': 'Invalid argument: not a well-formed name or subdomain'}, status_code=400)
    if 'error' in name_rec:
        # Map daemon error strings onto HTTP replies.
        if 'not found' in name_rec['error'].lower():
            return self._reply_json({'status': 'available'}, status_code=404)
        elif 'failed to load subdomain' in name_rec['error'].lower():
            # try to redirect to resolver, if given
            _, _, domain_name = blockstackd_scripts.is_address_subdomain(name)
            domain_rec = blockstackd_client.get_name_record(domain_name, include_history=False, hostport=blockstackd_url)
            if 'error' in domain_rec:
                # no resolver known for on-chain name
                return self._reply_json({'status': 'available', 'more': 'failed to look up parent domain'}, status_code=404)
            resolver_target = domain_rec.get('resolver', None)
            if resolver_target is None:
                # no _resolver
                return self._reply_json({'status': 'available', 'more': 'failed to find parent domain\'s resolver'}, status_code=404)
            redirect_location = resolver_target + '/v1/names/' + name
            log.debug("Redirect lookup on {} to {}".format(name, redirect_location))
            self._send_headers(status_code=301, more_headers={'Location': redirect_location})
            return self.wfile.write(json.dumps({'status': 'redirect'}))
        elif 'expired' in name_rec['error'].lower():
            return self._reply_json({'error': name_rec['error']}, status_code=404)
        else:
            return self._reply_json({'error': 'Blockstack daemon error: {}'.format(name_rec['error'])}, status_code=name_rec.get('http_status', 502))
    zonefile_txt = None
    if 'zonefile' in name_rec:
        zonefile_txt = base64.b64decode(name_rec['zonefile'])
    ret = {}
    if blockstackd_scripts.is_subdomain(name):
        # subdomain
        address = name_rec['address']
        if address:
            address = virtualchain.address_reencode(str(address))
        # NOTE(review): the re-encoded `address` local is unused below; the raw
        # name_rec['address'] is returned instead -- confirm this is intended.
        log.debug("{} is registered_subdomain".format(name))
        ret = {'status': 'registered_subdomain', 'zonefile': zonefile_txt, 'zonefile_hash': name_rec['value_hash'], 'address': name_rec['address'], 'blockchain': 'bitcoin', 'last_txid': name_rec['txid'], 'did': name_rec.get('did', {'error': 'Not supported for this name'})}
    else:
        # On-chain name: include expiry/renewal fields as well.
        status = 'revoked' if name_rec['revoked'] else 'registered'
        address = name_rec['address']
        if address:
            address = virtualchain.address_reencode(str(address))
        log.debug("{} is {}".format(name, status))
        ret = {'status': status, 'zonefile': zonefile_txt, 'zonefile_hash': name_rec['value_hash'], 'address': address, 'last_txid': name_rec['txid'], 'blockchain': 'bitcoin', 'expire_block': name_rec['expire_block'],  # expire_block is what blockstack.js expects
               'renewal_deadline': name_rec['renewal_deadline'], 'grace_period': name_rec.get('grace_period', False), 'resolver': name_rec.get('resolver', None), 'did': name_rec.get('did', {'error': 'Not supported for this name'})}
    return self._reply_json(ret)
|
def gdate_to_jdn(date):
    """Compute Julian day from Gregorian day, month and year.

    Algorithm from wikipedia's julian_day article.

    Return: The julian day number
    """
    # Shift the year to start in March so the leap day falls at year end.
    adjust = (14 - date.month) // 12
    shifted_year = date.year + 4800 - adjust
    shifted_month = date.month + 12 * adjust - 3
    return (date.day
            + (153 * shifted_month + 2) // 5
            + 365 * shifted_year
            + shifted_year // 4 - shifted_year // 100 + shifted_year // 400
            - 32045)
|
def from_dict(cls, d):
    """As in :Class:`pymatgen.core.Molecule` except
    restoring graphs using `from_dict_of_dicts`
    from NetworkX to restore graph information.
    """
    molecule = Molecule.from_dict(d['molecule'])
    return cls(molecule, d['graphs'])
|
def _compile_new_relic_stats(self, stats_this_second, stats_next_second):
    """From instance 'stats_this_second' and instance 'stats_next_second', compute some per
    second stats metrics and other aggregated metrics.

    :param dict stats_this_second:
    :param dict stats_next_second:
    :return: compiled instance stats dict with keys
        'opcounters_per_node_per_second', 'server_statistics_per_second',
        'aggregate_server_statistics', 'replication_lag',
        'aggregate_database_statistics'
    """
    server_statistics_per_second = {}
    for subdoc in ["opcounters", "network"]:
        first_doc = stats_this_second['aggregate_server_statistics'][subdoc]
        second_doc = stats_next_second['aggregate_server_statistics'][subdoc]
        all_keys = set(first_doc.keys()) | set(second_doc.keys())
        # Per-second rate = delta between the two one-second-apart samples.
        server_statistics_per_second[subdoc] = {
            key: int(second_doc[key]) - int(first_doc[key])
            for key in all_keys if isinstance(first_doc[key], int)
        }
    opcounters_per_node_per_second = []
    paired = zip(stats_this_second['opcounters_per_node'], stats_next_second['opcounters_per_node'])
    for node1, node2 in paired:
        node_rates = {}
        for repl, members in node2.items():
            node_rates[repl] = {
                member: {op: count - node1[repl][member][op] for op, count in ops.items()}
                for member, ops in members.items()
            }
        opcounters_per_node_per_second.append(node_rates)
    return {
        'opcounters_per_node_per_second': opcounters_per_node_per_second,
        'server_statistics_per_second': server_statistics_per_second,
        'aggregate_server_statistics': stats_next_second.get('aggregate_server_statistics'),
        'replication_lag': stats_next_second.get('replication_lag'),
        'aggregate_database_statistics': self.get_aggregate_database_stats(),
    }
|
def imap_unordered(self, jobs, timeout=0.5):
    """A iterator over a set of jobs.

    :param jobs: the items to pass through our function
    :param timeout: timeout between polling queues

    Results are yielded as soon as they are available in the output
    queue (up to the discretisation provided by timeout). Since the
    queues can be specified to have a maximum length, the consumption
    of both the input jobs iterable and memory use in the output
    queues are controlled.
    """
    # Clamp the polling interval to at least half a second.
    timeout = max(timeout, 0.5)
    jobs_iter = iter(jobs)
    # NOTE(review): `out_jobs` is never read or updated below -- appears unused.
    out_jobs = 0
    job = None
    while True:
        if not self.closed and job is None:
            # Get a job
            try:
                # Python 2 iterator protocol (`.next()` rather than `next()`).
                job = jobs_iter.next()
            except StopIteration:
                # Input exhausted: stop accepting and mark the pool closed.
                job = None
                self.close()
        if job is not None:
            # Put any job
            try:
                self.put(job, True, timeout)
            except Queue.Full:
                pass
                # we'll try again next time around
            else:
                # Successfully enqueued; clear so the next iteration fetches.
                job = None
        # Drain whatever results are currently available.
        for result in self.get_finished():
            yield result
        # Input and yielded everything?
        if self.closed and self._items == 0:
            break
        sleep(timeout)
|
def generate_fpn_proposals(multilevel_pred_boxes, multilevel_label_logits, image_shape2d):
    """Merge per-level RPN outputs into one set of proposal boxes.

    Args:
        multilevel_pred_boxes: #lvl HxWxAx4 boxes
        multilevel_label_logits: #lvl tensors of shape HxWxA
        image_shape2d: (h, w), used by generate_rpn_proposals to clip boxes.

    Returns:
        boxes: kx4 float
        scores: k logits
    """
    num_lvl = len(cfg.FPN.ANCHOR_STRIDES)
    assert len(multilevel_pred_boxes) == num_lvl
    assert len(multilevel_label_logits) == num_lvl
    training = get_current_tower_context().is_training
    all_boxes = []
    all_scores = []
    if cfg.FPN.PROPOSAL_MODE == 'Level':
        # Run proposal generation (with NMS) independently per FPN level,
        # then keep the global top-k across levels.
        fpn_nms_topk = cfg.RPN.TRAIN_PER_LEVEL_NMS_TOPK if training else cfg.RPN.TEST_PER_LEVEL_NMS_TOPK
        for lvl in range(num_lvl):
            with tf.name_scope('Lvl{}'.format(lvl + 2)):
                pred_boxes_decoded = multilevel_pred_boxes[lvl]
                proposal_boxes, proposal_scores = generate_rpn_proposals(tf.reshape(pred_boxes_decoded, [-1, 4]), tf.reshape(multilevel_label_logits[lvl], [-1]), image_shape2d, fpn_nms_topk)
                all_boxes.append(proposal_boxes)
                all_scores.append(proposal_scores)
        proposal_boxes = tf.concat(all_boxes, axis=0)
        # nx4
        proposal_scores = tf.concat(all_scores, axis=0)
        # Here we are different from Detectron.
        # Detectron picks top-k within the batch, rather than within an image. However we do not have a batch.
        proposal_topk = tf.minimum(tf.size(proposal_scores), fpn_nms_topk)
        proposal_scores, topk_indices = tf.nn.top_k(proposal_scores, k=proposal_topk, sorted=False)
        proposal_boxes = tf.gather(proposal_boxes, topk_indices)
    else:
        # Joint mode: pool boxes/logits from all levels and run one NMS pass.
        for lvl in range(num_lvl):
            with tf.name_scope('Lvl{}'.format(lvl + 2)):
                pred_boxes_decoded = multilevel_pred_boxes[lvl]
                all_boxes.append(tf.reshape(pred_boxes_decoded, [-1, 4]))
                all_scores.append(tf.reshape(multilevel_label_logits[lvl], [-1]))
        all_boxes = tf.concat(all_boxes, axis=0)
        all_scores = tf.concat(all_scores, axis=0)
        proposal_boxes, proposal_scores = generate_rpn_proposals(all_boxes, all_scores, image_shape2d, cfg.RPN.TRAIN_PRE_NMS_TOPK if training else cfg.RPN.TEST_PRE_NMS_TOPK, cfg.RPN.TRAIN_POST_NMS_TOPK if training else cfg.RPN.TEST_POST_NMS_TOPK)
    # Named sigmoid node kept in the graph so probabilities can be fetched.
    tf.sigmoid(proposal_scores, name='probs')
    # for visualization
    return tf.stop_gradient(proposal_boxes, name='boxes'), tf.stop_gradient(proposal_scores, name='scores')
|
def get_environment_from_batch_command(env_cmd, initial=None):
    """Take a command (either a single command or list of arguments)
    and return the environment created after running that command.

    Note that the command must be a batch file or .cmd file, or the
    changes to the environment will not be captured.

    If initial is supplied, it is used as the initial environment passed
    to the child process.
    """
    if not isinstance(env_cmd, (list, tuple)):
        env_cmd = [env_cmd]
    if not os.path.exists(env_cmd[0]):
        raise RuntimeError('Error: %s does not exist' % (env_cmd[0],))
    # construct the command that will alter the environment
    env_cmd = subprocess.list2cmdline(env_cmd)
    # create a tag so we can tell in the output when the proc is done
    tag = 'Done running command'
    # construct a cmd.exe command to do accomplish this
    # NOTE: .format(**vars()) substitutes the locals `env_cmd` and `tag`.
    cmd = 'cmd.exe /s /c "{env_cmd} && echo "{tag}" && set"'.format(**vars())
    # launch the process
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=initial)
    # parse the output sent to stdout
    lines = proc.stdout
    # consume whatever output occurs until the tag is reached
    for line in lines:
        line = line.decode('utf-8')
        if 'The specified configuration type is missing.' in line:
            raise AssertionError('Error executing %s. View http://blog.ionelmc.ro/2014/12/21/compiling-python-extensions-on-windows/ for details.' % (env_cmd))
        if tag in line:
            break
    if sys.version_info[0] > 2:
        # define a way to handle each KEY=VALUE line
        handle_line = lambda l: l.decode('utf-8').rstrip().split('=', 1)
    else:
        # define a way to handle each KEY=VALUE line
        handle_line = lambda l: l.rstrip().split('=', 1)
    # parse key/values into pairs
    pairs = map(handle_line, lines)
    # make sure the pairs are valid
    valid_pairs = filter(validate_pair, pairs)
    # construct a dictionary of the pairs
    result = dict(valid_pairs)
    # let the process finish
    proc.communicate()
    return result
|
def middle_end(self, index):
    """Set the index (+1) where MIDDLE ends.

    :param int index: the new index for MIDDLE end
    :raises ValueError: if index is negative or exceeds all_length
    """
    # Valid range is [0, all_length] inclusive.
    if (index < 0) or (index > self.all_length):
        raise ValueError(u"The given index is not valid")
    self.__middle_end = index
|
def main(search, query):
    """main function that does the search"""
    result_url = search.search(query)
    print(result_url)
    search.open_page(result_url)
|
def match_field(self, field, value, required=True, new_group=False):
    """Add a ``field:value`` term to the query.

    Matches will have the ``value`` in the ``field``.

    Arguments:
        field (str): The field to check for the value.
            The field must be namespaced according to Elasticsearch rules
            using the dot syntax.
            For example, ``"mdf.source_name"`` is the ``source_name`` field
            of the ``mdf`` dictionary.
        value (str): The value to match.
        required (bool): If ``True``, will add term with ``AND``.
            If ``False``, will use ``OR``. **Default:** ``True``.
        new_group (bool): If ``True``, will separate the term into a new parenthetical group.
            If ``False``, will not.
            **Default:** ``False``.

    Returns:
        SearchHelper: Self
    """
    # A joining operator is only needed after the first term.
    if self.initialized:
        join = self._and_join if required else self._or_join
        join(new_group)
    self._field(field, value)
    return self
|
def probabilities(self):
    '''Trains a model and predicts recommendations.

    If the query feature collection could not be found or if there
    is insufficient training data, an empty list is returned.

    Otherwise, a list of content objects (tuples of content
    id and feature collection) and probabilities is returned.
    The probability is generated from the model, and reflects
    confidence of the model that the corresponding content object
    is related to the query based on the ground truth data.

    On a large database, random samples are used for training, so
    this function is not deterministic.

    :rtype: ``list`` of
      ((``content_id``, :class:`dossier.fc.FeatureCollection`),
      probability)
    '''
    self.query_fc = self.store.get(self.query_content_id)
    if self.query_fc is None:
        logger.warning('Could not find FC for %s', self.query_content_id)
        return []
    # Try the canopy query before training, because if the canopy query
    # gives us nothing, then there's no point in the additional work.
    # Possible optimization: If the canopy query yields fewer than N
    # results, then can we just return all of them? ---AG
    # N.B Doing the canopy query first will cause things to be slower
    # when there is insufficient training data.
    candidates = self.canopy(limit=self.canopy_limit)
    if len(candidates) == 0:
        logger.info('Could not find any candidates in a canopy query by ' 'scanning the following indexes: %s', ', '.join(self.store.index_names()))
        return []
    # Get labels from the database and translate them to the form
    # `[{-1, 1}, i, j]` where `i, j` are indices into the list
    # `content_objs`, which has type `[(content_id, FeatureCollection)]`.
    logger.info('Fetching labels...')
    labels = list(self.labels_from_query(limit=self.label_limit))
    logger.info('Fetching FCs from labels...')
    content_objs = self.content_objs_from_labels(labels)
    indexed_labels = labels_to_indexed_coref_values(content_objs, labels)
    logger.info('Training...')
    model = self.train(content_objs, indexed_labels)
    if model is None:
        # NOTE(review): the docstring promises an empty list on insufficient
        # training data, but this raises instead -- confirm which is intended.
        logger.info('Could not train model: insufficient training data. ' '(query content id: %s)', self.query_content_id)
        raise InsufficientTrainingData
    feature_names, classifier, transformer = model
    # Pair each candidate with the model's predicted probability.
    return zip(candidates, self.classify(feature_names, classifier, transformer, candidates))
|
def find(*args, **kwargs):
    """Find the first matching element in a list and return it.

    Usage::

        find(element, list_)
        find(of=element, in_=list_)
        find(where=predicate, in_=list_)

    :param element, of: Element to search for (by equality comparison)
    :param where: Predicate defining an element to search for.
                  This should be a callable taking a single argument
                  and returning a boolean result.
    :param list_, in_: List to search in

    :return: First matching element
    :raise IndexError: If no matching elements were found

    .. versionadded:: 0.0.4
    """
    # _index scans forward (start=0, step=1), so the first match wins.
    list_, idx = _index(*args, start=0, step=1, **kwargs)
    if idx < 0:
        # _index signals "not found" with a negative index.
        raise IndexError("element not found")
    return list_[idx]
def fixture_to_tables(fixture):
    """Convert a fixture into *behave* example tables.

    :param fixture: a dictionary in the following form::

        {
            "test1name": {
                "test1property1": ...,
                "test1property2": ...,
            },
            "test2name": {
                "test2property1": ...,
                "test2property2": ...,
            },
        }

    :return: a list in which each item represents a table ``(caption, rows)``;
        each item in ``rows`` is a tuple ``(col1, col2, ...)``.  The first row
        holds the property names sorted alphabetically (header row), the
        second row holds the corresponding values.
    """
    tables = []
    # .items() instead of the Python-2-only .iteritems() keeps this function
    # working on both Python 2 and 3.
    for title, content in fixture.items():
        # Header row: property names in a stable (sorted) order.
        keys = tuple(sorted(content.keys()))
        # Value row: values aligned with the header columns.
        values = tuple(content[key] for key in keys)
        tables.append((title, (keys, values)))
    return tables
def ParseFileObject(self, parser_mediator, file_object):
    """Parses a NTFS $MFT metadata file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
    """
    mft_metadata_file = pyfsntfs.mft_metadata_file()
    try:
        mft_metadata_file.open_file_object(file_object)
    except IOError as exception:
        parser_mediator.ProduceExtractionWarning(
            'unable to open file with error: {0!s}'.format(exception))
        # Bug fix: without this return the code fell through and iterated
        # over the entries of a file that was never opened.
        return
    try:
        for entry_index in range(0, mft_metadata_file.number_of_file_entries):
            try:
                mft_entry = mft_metadata_file.get_file_entry(entry_index)
                self._ParseMFTEntry(parser_mediator, mft_entry)
            except IOError as exception:
                # A single unreadable entry should not abort the whole scan;
                # record a warning and continue with the next entry.
                parser_mediator.ProduceExtractionWarning(
                    'unable to parse MFT entry: {0:d} with error: {1!s}'.format(
                        entry_index, exception))
    finally:
        # Always release the file handle, even if entry parsing raises.
        mft_metadata_file.close()
def export(self, storage_client, overwrite=True):
    '''a method to export all the records in collection to another platform

    :param storage_client: class object with storage client methods
    :param overwrite: [optional] boolean passed through to the destination
        client's ``_import`` method to control overwriting existing records
    :return: string with exit message'''
    title = '%s.export' % self.__class__.__name__
    # validate storage client
    # Duck-type check: the destination must expose the full storage-client
    # interface before any records are copied.
    method_list = ['save', 'load', 'list', 'export', 'delete', 'remove', '_import', 'collection_name']
    for method in method_list:
        if not getattr(storage_client, method, None):
            from labpack.parsing.grammar import join_words
            raise ValueError('%s(storage_client=...) must be a client object with %s methods.' % (title, join_words(method_list)))
    # define copy record function
    def _copy_record(_record, _storage_client):
        # Read record body + metadata from s3 and feed them to the destination
        # client's _import; a falsy outcome is counted as a skipped record
        # (presumably skipped to avoid an overwrite -- see exit message).
        record_key = _record['key']
        record_data, record_metadata = self.s3.read_record(self.bucket_name, record_key)
        encryption = ''
        if 'encryption' in record_metadata['metadata'].keys():
            encryption = record_metadata['metadata']['encryption']
        last_modified = 0.0
        if 'last_modified' in record_metadata['metadata'].keys():
            try:
                last_modified = float(record_metadata['metadata']['last_modified'])
            except:
                # NOTE(review): a malformed timestamp silently falls back to
                # 0.0 -- looks like deliberate best-effort behavior.
                pass
        outcome = _storage_client._import(record_key, record_data, overwrite=overwrite, encryption=encryption, last_modified=last_modified)
        return outcome
    # retrieve list of records in bucket
    count = 0
    skipped = 0
    record_list, next_key = self.s3.list_records(self.bucket_name)
    for record in record_list:
        outcome = _copy_record(record, storage_client)
        if outcome:
            count += 1
        else:
            skipped += 1
    # continue through bucket
    # Page through the rest of the bucket using the continuation key
    # returned by list_records until it is exhausted.
    if next_key:
        while next_key:
            record_list, next_key = self.s3.list_records(self.bucket_name, starting_key=next_key)
            for record in record_list:
                outcome = _copy_record(record, storage_client)
                if outcome:
                    count += 1
                else:
                    skipped += 1
    # report outcome
    from os import path
    plural = ''
    skip_insert = ''
    new_root, new_folder = path.split(storage_client.collection_folder)
    if count != 1:
        plural = 's'
    if skipped > 0:
        skip_plural = ''
        if skipped > 1:
            skip_plural = 's'
        skip_insert = ' %s record%s skipped to avoid overwrite.' % (str(skipped), skip_plural)
    exit_msg = '%s record%s exported to %s.%s' % (str(count), plural, new_folder, skip_insert)
    return exit_msg
def upload_file(self, fax_file, **kwargs):  # noqa: E501
    """Upload a file.  # noqa: E501

    Before sending a fax you need to upload your files using this API.
    In order to upload your fax file, you have to send a
    `multipart/form-data` request with your file.  If the upload was
    successful you would receive a `file_path` which you can use to send
    your fax.  # noqa: E501

    This method makes a synchronous HTTP request by default.  To make an
    asynchronous HTTP request, please pass async=True:

    >>> thread = api.upload_file(fax_file, async=True)
    >>> result = thread.get()

    :param async bool
    :param file fax_file: (required)
    :param str format: can be 'pdf' or 'tiff'
    :return: File
        If the method is called asynchronously, returns the request thread.
    """
    # Always unwrap the HTTP response down to the payload.
    kwargs['_return_http_data_only'] = True
    # upload_file_with_http_info itself honors the 'async' flag: it returns
    # a request thread when async is set and the response data otherwise,
    # so the result can be handed back unchanged in both cases.
    outcome = self.upload_file_with_http_info(fax_file, **kwargs)  # noqa: E501
    return outcome
def command(self, *args, **kwargs):
    """A shortcut decorator for declaring and attaching a command to
    the group.

    Accepts the same arguments as :func:`command`, but the command it
    creates is immediately registered on this group by calling
    :meth:`add_command`.
    """
    def register(func):
        # Build the command from the decorated function, then attach it
        # to this group so it is dispatchable right away.
        new_cmd = command(*args, **kwargs)(func)
        self.add_command(new_cmd)
        return new_cmd
    return register
def __embed_branch(dfs_data):
    """Build the combinatorial embedding of the graph.

    Returns True when the graph is planar, False otherwise.
    """
    root = dfs_data['ordering'][0]
    node_count = dfs_data['graph'].num_nodes()

    # Seed the left and right face lists with the outer face (0, n).
    dfs_data['LF'] = [(0, node_count)]
    dfs_data['RF'] = [(0, node_count)]

    # Face-group bookkeeping: one initial group holding the left/right
    # representations of the outer face, plus the m/l/r counters.
    dfs_data['FG'] = {
        0: [{'u': 0, 'v': node_count}, {'x': 0, 'y': node_count}],
        'm': 0,
        'l': 0,
        'r': 0,
    }

    # The recursive embedder reports nonplanarity; invert for planarity.
    nonplanar = __embed_branch_recursive(root, dfs_data)
    return not nonplanar
def execute(node):
    """Uses ohai to get virtualization information which is then saved to the
    node file"""
    # Query ohai on the remote host for its virtualization attributes.
    with hide('everything'):
        virt = json.loads(sudo('ohai virtualization'))
    if not len(virt) or virt[0][1] != "host":
        # It may work for virtualization solutions other than Xen
        print("This node is not a Xen host, doing nothing")
        return
    node['virtualization'] = {'role': 'host', 'system': 'xen', 'vms': [], }
    # VMs
    # Parse the "xm list" table; the first two output lines (header and,
    # presumably, Domain-0) are skipped.
    with hide('everything'):
        vm_list = sudo("xm list")
    for vm in vm_list.split("\n")[2:]:
        data = vm.split()
        if len(data) != 6:
            # NOTE(review): assumes every VM row has exactly 6 columns and a
            # short row ends the scan early -- confirm this is intended.
            break
        node['virtualization']['vms'].append({'fqdn': data[0], 'RAM': data[2], 'cpus': data[3]})
    print("Found {0} VMs for this Xen host".format(len(node['virtualization']['vms'])))
    # Save node file and remove the returned temp file
    del node['name']
    os.remove(chef.save_config(node, True))
def lmom(self, *args, nmom=5, **kwds):
    """Compute the distribution's L-moments, e.g. l1, l2, l3, l4, ...

    :param args: Distribution parameters in order of shape(s), loc, scale
    :type args: float
    :param nmom: Number of moments to calculate
    :type nmom: int
    :param kwds: Distribution parameters as named arguments.  See
        :attr:`rv_continous.shapes` for names of shape parameters
    :type kwds: float
    :returns: List of L-moments
    :rtype: list
    """
    ratios = self.lmom_ratios(*args, nmom=nmom, **kwds)
    # The first two entries are l1 and l2 themselves; every later entry is
    # an L-moment *ratio* (t3 = l3/l2, ...), so multiply by l2 to recover
    # the corresponding L-moment.
    scale = ratios[1]
    result = [ratios[0], scale]
    for ratio in ratios[2:]:
        result.append(ratio * scale)
    return result
def write(grp, out_path):
    """Write a GRP to a text file.

    Args:
        grp (list): GRP object to write to new-line delimited text file
        out_path (string): output path

    Returns:
        None
    """
    # One entry per line; every element is stringified before writing.
    payload = "".join("{0}\n".format(entry) for entry in grp)
    with open(out_path, "w") as handle:
        handle.write(payload)
def deploy_project():
    """Deploy to the project directory in the virtualenv"""
    # Remote destination: <deployment_root>/env/<project_fullname>/project
    project_root = '/'.join([deployment_root(), 'env', env.project_fullname, 'project'])
    local_dir = os.getcwd()
    if env.verbosity:
        print env.host, "DEPLOYING project", env.project_fullname
    # Exclude a few things that we don't want deployed as part of the project folder
    rsync_exclude = ['local_settings*', '*.pyc', '*.log', '.*', '/build', '/dist', '/media*', '/static*', '/www', '/public', '/template*']
    # make site local settings if they don't already exist
    _make_local_sitesettings()
    created = deploy_files(local_dir, project_root, rsync_exclude=rsync_exclude)
    if not env.patch:
        # hook the project into sys.path
        # Derive the remote interpreter's major.minor version ("Python x.x.x")
        # to locate its site-packages directory.
        pyvers = run('python -V').split(' ')[1].split('.')[0:2]
        sitepackages = ''.join(['lib/python', pyvers[0], '.', pyvers[1], '/site-packages'])
        link_name = '/'.join([deployment_root(), 'env', env.project_fullname, sitepackages, env.project_package_name])
        target = '/'.join([project_root, env.project_package_name])
        # Symlink the deployed package into site-packages so imports resolve.
        run(' '.join(['ln -s', target, link_name]))
        # make sure manage.py has exec permissions
        # NOTE(review): this section uses `target`, which is only defined in
        # the `not env.patch` branch, so it must live inside it -- confirm
        # against the upstream source.
        managepy = '/'.join([target, 'sitesettings', 'manage.py'])
        if exists(managepy):
            sudo('chmod ugo+x %s' % managepy)
    return created
def PyplotLineStyles():
    """Linestyles

    This example showcases different linestyles copying those of Tikz/PGF.
    """
    import numpy as np
    import matplotlib.pyplot as plt
    from collections import OrderedDict
    from matplotlib.transforms import blended_transform_factory

    # Name -> (offset, on/off dash pattern), mirroring the Tikz/PGF styles.
    linestyles = OrderedDict([
        ('solid', (0, ())),
        ('loosely dotted', (0, (1, 10))),
        ('dotted', (0, (1, 5))),
        ('densely dotted', (0, (1, 1))),
        ('loosely dashed', (0, (5, 10))),
        ('dashed', (0, (5, 5))),
        ('densely dashed', (0, (5, 1))),
        ('loosely dashdotted', (0, (3, 10, 1, 10))),
        ('dashdotted', (0, (3, 5, 1, 5))),
        ('densely dashdotted', (0, (3, 1, 1, 1))),
        ('loosely dashdotdotted', (0, (3, 10, 1, 10, 1, 10))),
        ('dashdotdotted', (0, (3, 5, 1, 5, 1, 5))),
        ('densely dashdotdotted', (0, (3, 1, 1, 1, 1, 1))),
    ])

    plt.figure(figsize=(10, 6))
    axes = plt.subplot(1, 1, 1)

    # Draw one horizontal line per style, stacked vertically.
    xs = np.linspace(0, 100, 10)
    ys = np.zeros(10)
    for row, (_, style) in enumerate(linestyles.items()):
        axes.plot(xs, ys + row, linestyle=style, linewidth=1.5, color='black')

    axes.set_ylim(-0.5, len(linestyles) - 0.5)
    plt.yticks(np.arange(len(linestyles)), linestyles.keys())
    plt.xticks([])

    # For each line style, add a text annotation with a small offset from
    # the reference point (0 in Axes coords, y tick value in Data coords).
    ref_transform = blended_transform_factory(axes.transAxes, axes.transData)
    for row, (_, style) in enumerate(linestyles.items()):
        axes.annotate(str(style), xy=(0.0, row), xycoords=ref_transform,
                      xytext=(-6, -12), textcoords='offset points',
                      color="blue", fontsize=8, ha="right",
                      family="monospace")

    plt.tight_layout()
    return plt.gcf()
def create(cls, name, cluster_virtual, network_value, macaddress, interface_id, nodes, vlan_id=None, cluster_mode='balancing', backup_mgt=None, primary_heartbeat=None, log_server_ref=None, domain_server_address=None, location_ref=None, zone_ref=None, default_nat=False, enable_antivirus=False, enable_gti=False, comment=None, snmp=None, **kw):
    """Create a layer 3 firewall cluster with management interface and any number
    of nodes. If providing keyword arguments to create additional interfaces,
    use the same constructor arguments and pass an `interfaces` keyword argument.
    The constructor defined interface will be assigned as the primary
    management interface by default. Otherwise the engine will be created with a
    single interface and interfaces can be added after.

    .. versionchanged:: 0.6.1
        Changed `cluster_nic` to `interface_id`, and `cluster_mask` to `network_value`

    :param str name: name of firewall engine
    :param str cluster_virtual: ip of cluster CVI
    :param str network_value: ip netmask of cluster CVI
    :param str macaddress: macaddress for packet dispatch clustering
    :param str interface_id: nic id to use for primary interface
    :param list nodes: address/network_value/nodeid combination for cluster nodes
    :param str vlan_id: optional VLAN id for the management interface, i.e. '15'.
    :param str cluster_mode: 'balancing' or 'standby' mode (default: balancing)
    :param str,int primary_heartbeat: optionally set the primary_heartbeat. This is
        automatically set to the management interface but can be overridden to use
        another interface if defining additional interfaces using `interfaces`.
    :param str,int backup_mgt: optionally set the backup management interface. This
        is unset unless you define additional interfaces using `interfaces`.
    :param str log_server_ref: (optional) href to log_server instance
    :param list domain_server_address: (optional) DNS server addresses
    :param str location_ref: location href or not for engine if needed to contact SMC
        behind NAT (created if not found)
    :param str zone_ref: zone name, str href or Zone for management interface
        (created if not found)
    :param bool enable_antivirus: (optional) Enable antivirus (required DNS)
    :param bool enable_gti: (optional) Enable GTI
    :param list interfaces: optional keyword to supply additional interfaces
    :param dict snmp: SNMP dict should have keys `snmp_agent` str defining name of
        SNMPAgent, `snmp_interface` which is a list of interface IDs, and optionally
        `snmp_location` which is a string with the SNMP location name.
    :raises CreateEngineFailed: Failure to create with reason
    :return: :py:class:`smc.core.engine.Engine`

    Example nodes parameter input::

        [{'address': '5.5.5.2', 'network_value': '5.5.5.0/24', 'nodeid': 1},
         {'address': '5.5.5.3', 'network_value': '5.5.5.0/24', 'nodeid': 2},
         {'address': '5.5.5.4', 'network_value': '5.5.5.0/24', 'nodeid': 3}]

    You can also create additional CVI+NDI, or NDI only interfaces by providing
    the keyword argument `interfaces` using the same keyword values from the
    constructor::

        interfaces = [
            {'interface_id': 1,
             'macaddress': '02:02:02:02:02:03',
             'interfaces': [{'cluster_virtual': '2.2.2.1',
                             'network_value': '2.2.2.0/24',
                             'nodes': [{'address': '2.2.2.2', 'network_value': '2.2.2.0/24', 'nodeid': 1},
                                       {'address': '2.2.2.3', 'network_value': '2.2.2.0/24', 'nodeid': 2}]}]},
            {'interface_id': 2,
             'interfaces': [{'nodes': [{'address': '3.3.3.2', 'network_value': '3.3.3.0/24', 'nodeid': 1},
                                       {'address': '3.3.3.3', 'network_value': '3.3.3.0/24', 'nodeid': 2}]}]}]

    It is also possible to define VLAN interfaces by providing the `vlan_id` keyword.
    Example VLAN with NDI only interfaces. If nesting the zone_ref within the
    interfaces list, the zone will be applied to the VLAN versus the top level
    interface::

        interfaces = [
            {'interface_id': 2,
             'interfaces': [{'nodes': [{'address': '3.3.3.2', 'network_value': '3.3.3.0/24', 'nodeid': 1},
                                       {'address': '3.3.3.3', 'network_value': '3.3.3.0/24', 'nodeid': 2}],
                             'vlan_id': 22,
                             'zone_ref': 'private-network'},
                            {'nodes': [{'address': '4.4.4.1', 'network_value': '4.4.4.0/24', 'nodeid': 1},
                                       {'address': '4.4.4.2', 'network_value': '4.4.4.0/24', 'nodeid': 2}],
                             'vlan_id': 23,
                             'zone_ref': 'other_vlan'}]}]

    Tunnel interfaces can also be created. As all interfaces defined are assumed to be
    a physical interface type, you must specify the `type` parameter to indicate the
    interface is a tunnel interface. Tunnel interfaces do not have a macaddress or
    VLANs. They can be configured with NDI interfaces by omitting the
    `cluster_virtual` and `network_value` top level attributes::

        interfaces = [
            {'interface_id': 1000,
             'interfaces': [{'cluster_virtual': '100.100.100.1',
                             'network_value': '100.100.100.0/24',
                             'nodes': [{'address': '100.100.100.2', 'network_value': '100.100.100.0/24', 'nodeid': 1},
                                       {'address': '100.100.100.3', 'network_value': '100.100.100.0/24', 'nodeid': 2}]}],
             'zone_ref': 'AWStunnel',
             'type': 'tunnel_interface'}]

    If setting primary_heartbeat or backup_mgt to a specific interface (the primary
    interface configured in the constructor will have these roles by default), you
    must define the interfaces in the `interfaces` keyword argument list.

    .. note:: If creating additional interfaces, you must at minimum provide the
        `interface_id` and `nodes` to create an NDI only interface.
    """
    interfaces = kw.pop('interfaces', [])
    # Add the primary interface to the interface list
    interface = {'cluster_virtual': cluster_virtual, 'network_value': network_value, 'nodes': nodes}
    if vlan_id:
        interface.update(vlan_id=vlan_id)
    interfaces.append(dict(interface_id=interface_id, macaddress=macaddress, zone_ref=zone_ref, interfaces=[interface]))
    # When the management interface is a VLAN, the primary management id is
    # expressed as "<interface_id>.<vlan_id>".
    primary_mgt = interface_id if not vlan_id else '{}.{}'.format(interface_id, vlan_id)
    # Delegate the heavy lifting (engine + interface creation) to create_bulk.
    return FirewallCluster.create_bulk(name, interfaces=interfaces, nodes=len(nodes), cluster_mode=cluster_mode, primary_mgt=primary_mgt, backup_mgt=backup_mgt, primary_heartbeat=primary_heartbeat, log_server_ref=log_server_ref, domain_server_address=domain_server_address, location_ref=location_ref, default_nat=default_nat, enable_antivirus=enable_antivirus, enable_gti=enable_gti, comment=comment, snmp=snmp, **kw)
def list(request, content_type, id):
    """Wrapper exposing comment's render_comment_list tag as a view."""
    # The content_type URL component is encoded as "<app_label>-<model>".
    app_label, model = content_type.split('-')
    ctype = ContentType.objects.get(app_label=app_label, model=model)
    target = ctype.get_object_for_this_type(id=id)

    # Delegate the actual rendering to the comments template tag, with the
    # resolved object exposed as "object" in the template context.
    template = Template("{% load comments %}{% render_comment_list for object %}")
    ctx = RequestContext(request)
    ctx.update({'object': target})
    return HttpResponse(template.render(ctx))
def request(self, path, api='public', method='GET', params=None, headers=None, body=None):
    """Exchange.request is the entry point for all generated methods.

    :param path: endpoint path for the request
    :param api: API section to use ('public' by default)
    :param method: HTTP verb ('GET' by default)
    :param params: optional dict of request parameters (passed to fetch2)
    :param headers: optional dict of extra HTTP headers
    :param body: optional raw request body
    :returns: whatever ``self.fetch2`` returns for the assembled request
    """
    # Bug fix: the original used a mutable default (params={}), which is
    # shared across calls; default to None and create a fresh dict instead.
    if params is None:
        params = {}
    return self.fetch2(path, api, method, params, headers, body)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.