signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
---|---|
def _create_non_null_wrapper ( name , t ) :
'creates type wrapper for non - null of given type'
|
def __new__ ( cls , json_data , selection_list = None ) :
if json_data is None :
raise ValueError ( name + ' received null value' )
return t ( json_data , selection_list )
def __to_graphql_input__ ( value , indent = 0 , indent_string = ' ' ) :
return t . __to_graphql_input__ ( value , indent , indent_string )
return type ( name , ( t , ) , { '__new__' : __new__ , '_%s__auto_register' % name : False , '__to_graphql_input__' : __to_graphql_input__ , } )
|
def add(self, name, extends, **kwargs):
    """Manually register an extension, e.g. one built by hand that is not
    attached to any function.

    :param name: registration name
    :param extends: the callable/class being registered
    :param kwargs: extra fields forwarded verbatim to ``_push``
    :return: None
    """
    target = extends
    # Both class_ and method_ intentionally record the same identifier.
    self._push(
        name=name,
        module_=target.__module__,
        class_=target.__name__,
        method_=target.__name__,
        is_class_=False,
        **kwargs
    )
|
def logstream_policy():
    """Policy needed for logspout -> kinesis log streaming."""
    stream_actions = [
        kinesis.CreateStream,
        kinesis.DescribeStream,
        Action(kinesis.prefix, "AddTagsToStream"),
        Action(kinesis.prefix, "PutRecords"),
    ]
    statement = Statement(Effect=Allow, Resource=["*"], Action=stream_actions)
    return Policy(Statement=[statement])
|
def request(self, item, timeout=5000):
    """Request data for ``item`` from the connected DDE service.

    :param item: item name to request (converted to a DDE string handle).
    :param timeout: transaction timeout in milliseconds; pass
        ``TIMEOUT_ASYNC`` for an asynchronous transaction.
    :return: pointer to the transaction data, or ``None`` for an
        asynchronous request.
    :raises DDEError: if the transaction fails or the data cannot be accessed.
    """
    hszItem = DDE.CreateStringHandle(self._idInst, item, CP_WINUNICODE)
    # hDdeData = DDE.ClientTransaction(LPBYTE(), 0, self._hConv, hszItem, CF_TEXT, XTYP_REQUEST, timeout, LPDWORD())
    pdwResult = DWORD(0)
    hDdeData = DDE.ClientTransaction(LPBYTE(), 0, self._hConv, hszItem, CF_TEXT, XTYP_REQUEST, timeout, byref(pdwResult))
    # The string handle is no longer needed once the transaction has run.
    DDE.FreeStringHandle(self._idInst, hszItem)
    if not hDdeData:
        raise DDEError("Unable to request item", self._idInst)
    if timeout != TIMEOUT_ASYNC:
        pdwSize = DWORD(0)
        pData = DDE.AccessData(hDdeData, byref(pdwSize))
        if not pData:
            DDE.FreeDataHandle(hDdeData)
            raise DDEError("Unable to access data in request function", self._idInst)
        # NOTE(review): pData is returned after UnaccessData/FreeDataHandle
        # below; presumably the caller copies the buffer before the server
        # reuses it -- confirm against callers.
        DDE.UnaccessData(hDdeData)
    else:
        pData = None
    DDE.FreeDataHandle(hDdeData)
    return pData
|
def module_name(self):
    """Return the human-readable name for this module's type.

    :return: str -- the name from ``velbus.MODULE_DIRECTORY``, or
        ``"Unknown"`` when the module type is not registered.
    """
    # dict.get replaces the redundant `in d.keys()` membership test
    # followed by a second lookup.
    return velbus.MODULE_DIRECTORY.get(self.module_type, "Unknown")
|
def OnTextSize(self, event):
    """Text size combo text event handler.

    Parses the entered text as an integer point size; falls back to the
    default font's point size when the text is not a valid integer.
    """
    try:
        size = int(event.GetString())
    except ValueError:
        # Narrowed from `except Exception`: only a non-numeric entry should
        # trigger the fallback; unrelated failures must not be swallowed.
        size = get_default_font().GetPointSize()
    post_command_event(self, self.FontSizeMsg, size=size)
|
def SetWeight(self, path_segment_index, weight):
    """Sets a weight for a specific path segment index.

    Args:
      path_segment_index: an integer containing the path segment index.
      weight: an integer containing the weight.

    Raises:
      ValueError: if the path segment weights do not contain
          the path segment index.
    """
    if path_segment_index not in self._weight_per_index:
        raise ValueError('Path segment index not set.')
    self._weight_per_index[path_segment_index] = weight
    # Maintain the reverse mapping from weight to path segment indexes.
    self._indexes_per_weight.setdefault(weight, []).append(path_segment_index)
|
def plot_triaxial(height, width, tools):
    '''Plot pandas dataframe containing an x, y, and z column.

    :param height: plot height in pixels.
    :param width: plot width in pixels.
    :param tools: bokeh tools specification for the figure.
    :return: tuple of (figure, list of line renderers, list of scatter renderers).

    NOTE(review): this relies on a module-level ``source`` (presumably a
    bokeh ColumnDataSource with 'x', 'y', 'z' and 'dt' columns) and on
    ``BoxZoomTool`` being imported at module level -- confirm.
    '''
    import bokeh.plotting
    p = bokeh.plotting.figure(x_axis_type='datetime', plot_height=height, plot_width=width, title=' ', toolbar_sticky=False, tools=tools, active_drag=BoxZoomTool(), output_backend='webgl')
    p.yaxis.axis_label = 'Acceleration (count)'
    p.xaxis.axis_label = 'Time (timezone as programmed)'
    # Plot accelerometry data as lines and scatter (for BoxSelectTool)
    colors = ['#1b9e77', '#d95f02', '#7570b3']
    axes = ['x', 'y', 'z']
    lines = [None, ] * 3
    scats = [None, ] * 3
    for i, (ax, c) in enumerate(zip(axes, colors)):
        lines[i] = p.line(y=ax, x='dt', color=c, legend=False, source=source)
        scats[i] = p.scatter(y=ax, x='dt', color=c, legend=False, size=1, source=source)
    return p, lines, scats
|
def download_software_file(filename=None, synch=False):
    '''Download software packages by filename.

    Args:
        filename (str): The filename of the PANOS file to download.
        synch (bool): If true then the file will synch to the peer unit.

    CLI Example:

    .. code-block:: bash

        salt '*' panos.download_software_file PanOS_5000-8.0.0
        salt '*' panos.download_software_file PanOS_5000-8.0.0 True
    '''
    if not filename:
        raise CommandExecutionError("Filename option must not be none.")
    if not isinstance(synch, bool):
        raise CommandExecutionError("Synch option must be boolean..")
    # BUG FIX: the two branches were inverted -- synch=True previously built
    # the request WITHOUT <sync-to-peer> and synch=False built it WITH it.
    if synch is True:
        query = {'type': 'op', 'cmd': '<request><system><software><download><sync-to-peer>yes</sync-to-peer>' '<file>{0}</file></download></software></system></request>'.format(filename)}
    else:
        query = {'type': 'op', 'cmd': '<request><system><software><download>' '<file>{0}</file></download></software></system></request>'.format(filename)}
    return _get_job_results(query)
|
def mergeGenomeLogFiles(outPrefix, nbSet):
    """Merge genome and log files together.

    :param outPrefix: the prefix of the output files.
    :param nbSet: The number of set of files to merge together.

    :type outPrefix: str
    :type nbSet: int

    :returns: the name of the output file (the ``genome`` file).

    :raises ProgramError: if an input/output file cannot be read, written
        or deleted.

    After merging, the files are deleted to save space.
    """
    outputFile = None
    try:
        outputFile = open(outPrefix + ".genome", "w")
        outputLog = open(outPrefix + ".log", "w")
    except IOError:
        msg = "%s or %s: can't write file" % (outPrefix + ".genome", outPrefix + ".log")
        raise ProgramError(msg)
    # Iterate over every pair (i, j) with i <= j, matching the naming scheme
    # used when the sub-files were produced.
    for i in xrange(1, nbSet + 1):
        for j in xrange(i, nbSet + 1):
            fileName = outPrefix + "_output.sub.%(i)d.%(j)d.genome" % locals()
            printHeader = False
            if (i == 1) and (j == 1):  # This is the first file we open
                printHeader = True
            # Read file here
            try:
                with open(fileName, 'r') as inputFile:
                    for nbLine, line in enumerate(inputFile):
                        if nbLine == 0:
                            # Header line: copied only from the very first file
                            # so the merged output has a single header.
                            if printHeader:
                                outputFile.write(line)
                        else:
                            outputFile.write(line)
            except IOError:
                msg = "%(fileName)s: no such file" % locals()
                raise ProgramError(msg)
            # Deleting the file
            # NOTE(review): os.remove raises OSError, not IOError, on
            # Python 2 -- these handlers may not catch removal failures;
            # confirm intended behavior.
            try:
                os.remove(fileName)
            except IOError:
                msg = "%(fileName)s: can't delete the file" % locals()
                raise ProgramError(msg)
            # Read file here
            fileName = outPrefix + "_output.sub.%(i)d.%(j)d.log" % locals()
            try:
                with open(fileName, 'r') as inputFile:
                    for line in inputFile:
                        outputLog.write(line)
            except IOError:
                msg = "%(fileName)s: no such file" % locals()
                raise ProgramError(msg)
            # Deleting the file
            try:
                os.remove(fileName)
            except IOError:
                msg = "%(fileName)s: can't delete the file" % locals()
                raise ProgramError(msg)
    # Removing the tmp.list* files
    try:
        for fileName in glob.glob(outPrefix + "_tmp.list*"):
            os.remove(fileName)
    except IOError:
        msg = "can't delete the tmp.list* files"
        raise ProgramError(msg)
    # Removing the output.sub.*
    try:
        for fileName in glob.glob(outPrefix + "_output.sub.*"):
            os.remove(fileName)
    except IOError:
        msg = "can't delete the output.sub.* files"
        raise ProgramError(msg)
    # Closing the output files
    outputFile.close()
    outputLog.close()
    return outPrefix + ".genome"
|
def set_sql_mode(self, sql_mode):
    """Set the connection sql_mode. See MySQL documentation for
    legal values."""
    # sql_mode support first appeared in MySQL 4.1.
    if self._server_version < (4, 1):
        raise NotSupportedError("server is too old to set sql_mode")
    statement = "SET SESSION sql_mode='%s'" % sql_mode
    self.query(statement)
    self.store_result()
|
def read(self, length=None):
    """Read the given amount of bytes from the underlying reader.

    With ``length=None`` the remainder of the stream is returned.

    :raises BufferError: when fewer than ``length`` bytes are available.
    """
    if length is None:
        return self.reader.read()
    chunk = self.reader.read(length)
    if len(chunk) != length:
        raise BufferError('No more data left to read (need {}, got {}: {}); last read {}'.format(length, len(chunk), repr(chunk), repr(self._last)))
    # Remember the last successful read to aid error diagnostics.
    self._last = chunk
    return chunk
|
def _ParseLogLine(self, parser_mediator, structure):
    """Parses a log line.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      structure (pyparsing.ParseResults): structure of tokens derived from
          a line of a text file.
    """
    # Without a year (determined elsewhere), the full date cannot be
    # reconstructed, so the line is skipped.
    if not self._xchat_year:
        return
    time_elements_tuple = self._GetTimeElementsTuple(structure)
    try:
        date_time = dfdatetime_time_elements.TimeElements(time_elements_tuple=time_elements_tuple)
        date_time.is_local_time = True
    except ValueError:
        parser_mediator.ProduceExtractionWarning('invalid date time value: {0!s}'.format(structure.date_time))
        return
    # Remember the month so later lines can detect a year roll-over.
    self._last_month = time_elements_tuple[1]
    event_data = XChatLogEventData()
    event_data.nickname = structure.nickname
    # The text string contains multiple unnecessary whitespaces that need to
    # be removed, thus the split and re-join.
    event_data.text = ' '.join(structure.text.split())
    event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_ADDED, time_zone=parser_mediator.timezone)
    parser_mediator.ProduceEventWithEventData(event, event_data)
|
def jp_compose(s, base=None):
    """Append/encode a string to a JSON pointer (RFC 6901).

    :param s: a segment string or a list of segment strings; ``None``
        returns ``base`` unchanged.
    :param base: optional JSON pointer to prefix the result with.
    :return: the composed JSON pointer string.
    """
    # Fixed `s == None`: identity comparison is the correct idiom for None.
    if s is None:
        return base
    ss = [s] if isinstance(s, six.string_types) else s
    # Escape per RFC 6901: '~' -> '~0' must run before '/' -> '~1'.
    ss = [seg.replace('~', '~0').replace('/', '~1') for seg in ss]
    if base:
        ss.insert(0, base)
    return '/'.join(ss)
|
def make_file_read_only(file_path):
    """Removes the write permissions for the given file for owner, groups and others.

    :param file_path: The file whose privileges are revoked.
    :raise FileNotFoundError: If the given file does not exist.
    """
    current_mode = os.stat(file_path).st_mode
    # Clear every write bit while leaving all other mode bits intact.
    os.chmod(file_path, current_mode & ~WRITE_PERMISSIONS)
|
def _adjacent_tri ( self , edge , i ) :
"""Given a triangle formed by edge and i , return the triangle that shares
edge . * i * may be either a point or the entire triangle ."""
|
if not np . isscalar ( i ) :
i = [ x for x in i if x not in edge ] [ 0 ]
try :
pt1 = self . _edges_lookup [ edge ]
pt2 = self . _edges_lookup [ ( edge [ 1 ] , edge [ 0 ] ) ]
except KeyError :
return None
if pt1 == i :
return ( edge [ 1 ] , edge [ 0 ] , pt2 )
elif pt2 == i :
return ( edge [ 1 ] , edge [ 0 ] , pt1 )
else :
raise RuntimeError ( "Edge %s and point %d do not form a triangle " "in this mesh." % ( edge , i ) )
|
def render_content(self):
    """Render the panel's content."""
    if not self.has_content:
        return ""
    template = self.template
    # A string template name must first be resolved through the jinja2
    # environment; a template object is used directly.
    if isinstance(template, str):
        template = self.app.ps.jinja2.env.get_template(template)
    context = self.render_vars()
    return template.render(app=self.app, request=self.request, **context)
|
def delete(self, redis):
    '''Deletes this field's value from the database. Should be implemented
    in special cases.'''
    current = getattr(self.obj, self.name)
    # Remove this object's id from the set indexed by the field's value.
    redis.srem(self.key() + ':' + current, self.obj.id)
|
def build_response_signature(self, saml_response, relay_state, sign_algorithm=OneLogin_Saml2_Constants.RSA_SHA1):
    """Builds the Signature of the SAML Response.

    :param saml_response: The SAML Response
    :type saml_response: string
    :param relay_state: The target URL the user should be redirected to
    :type relay_state: string
    :param sign_algorithm: Signature algorithm method
    :type sign_algorithm: string
    """
    # Delegate to the shared signature builder, tagging the payload type.
    return self.__build_signature(saml_response, relay_state, 'SAMLResponse', sign_algorithm)
|
def one_use_option(*args, **kwargs):
    """Wrapper of the click.option decorator that replaces any instances of
    the Option class with the custom OneUseOption class."""
    # A multiple or count option can never be restricted to single use.
    if "multiple" in kwargs or "count" in kwargs:
        raise ValueError("Internal error, one_use_option cannot be used " "with multiple or count.")
    # Nor can a non-Option parameter (argument) become a OneUseOption.
    if kwargs.get("cls"):
        raise TypeError("Internal error, one_use_option cannot overwrite " "cls {}.".format(kwargs.get("cls")))
    # Substitute our OneUseOption class for the normal Option.
    kwargs["cls"] = OneUseOption
    if kwargs.get("is_flag"):
        # Flags become counting options (count is mutually exclusive with
        # is_flag); OneUseOption later asserts the count is <= 1 and casts
        # it to a bool.
        kwargs["is_flag"] = False
        kwargs["count"] = True
    else:
        # Non-flags take argument(s): switch to a multiple option whose
        # single element is treated as the value.
        kwargs["multiple"] = True

    def decorator(f):
        # Apply click.option with the adjusted kwargs.
        return click.option(*args, **kwargs)(f)

    return decorator
|
def _sorted_by_priority(self, fmtos, changed=None):
    """Sort the formatoption objects by their priority and dependency.

    Parameters
    ----------
    fmtos: list
        list of :class:`Formatoption` instances
    changed: list
        the list of formatoption keys that have changed

    Yields
    ------
    Formatoption
        The next formatoption as it comes by the sorting

    Warnings
    --------
    The list `fmtos` is cleared by this method!
    """
    def pop_fmto(key):
        # Remove and return the pending formatoption for `key`, keeping the
        # parallel `fmtos_keys` list in sync with `fmtos`.
        idx = fmtos_keys.index(key)
        del fmtos_keys[idx]
        return fmtos.pop(idx)
    def get_children(fmto, parents_keys):
        # Depth-first: yield pending children/dependencies of `fmto` before
        # the child itself, unless one of its parents is still pending.
        all_fmtos = fmtos_keys + parents_keys
        for key in fmto.children + fmto.dependencies:
            if key not in fmtos_keys:
                continue
            child_fmto = pop_fmto(key)
            for childs_child in get_children(child_fmto, parents_keys + [child_fmto.key]):
                yield childs_child
            # filter out if parent is in update list
            if (any(key in all_fmtos for key in child_fmto.parents) or fmto.key in child_fmto.parents):
                continue
            yield child_fmto
    # Highest priority first; fmtos_keys mirrors fmtos throughout.
    fmtos.sort(key=lambda fmto: fmto.priority, reverse=True)
    fmtos_keys = [fmto.key for fmto in fmtos]
    self._last_update = changed or fmtos_keys[:]
    self.logger.debug("Update the formatoptions %s", fmtos_keys)
    while fmtos:
        del fmtos_keys[0]
        fmto = fmtos.pop(0)
        # first update children
        for child_fmto in get_children(fmto, [fmto.key]):
            yield child_fmto
        # filter out if parent is in update list
        if any(key in fmtos_keys for key in fmto.parents):
            continue
        yield fmto
|
def G(self):
    r"""Inverse cross-spectral density.

    .. math:: \mathbf{G}(f) = \mathbf{A}(f) \mathbf{C}^{-1} \mathbf{A}'(f)
    """
    if self.c is None:
        raise RuntimeError('Inverse cross spectral density requires ' 'invertible noise covariance matrix c.')
    A = self.A()
    # TODO: can we do that more efficiently?
    # Left product: conj(A)^T C^{-1}, then multiply by A on the right.
    left = np.einsum('ji..., jk... ->ik...', A.conj(), self.Cinv())
    return np.einsum('ij..., jk... ->ik...', left, A)
|
def get_value(value):
    """Parse a value token: quoted-string / attribute.

    Returns a (Value, remainder) tuple where `remainder` is the unparsed
    rest of the input string.

    Raises:
      errors.HeaderParseError: if the input is empty or contains only CFWS.
    """
    v = Value()
    if not value:
        raise errors.HeaderParseError("Expected value but found end of string")
    leader = None
    # Collect any leading comments/folding whitespace (CFWS) first.
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
        if not value:
            raise errors.HeaderParseError("Expected value but found " "only {}".format(leader))
    if value[0] == '"':
        token, value = get_quoted_string(value)
    else:
        token, value = get_extended_attribute(value)
    if leader is not None:
        # Re-attach the leading CFWS to the front of the parsed token.
        token[:0] = [leader]
    v.append(token)
    return v, value
|
def modelItemChanged(self, cti):
    """Called when a Config Tree Item (CTI) in the model has changed.

    If the changed CTI is not the one belonging to this editor, the editor
    is closed. This can happen when the user checks a checkbox: Qt does not
    close the other editors in the view in that case, so we do it here. If
    the changed CTI is this editor's own, nothing is done -- the user may
    want to continue editing.
    """
    if cti is self.cti:
        logger.debug("Cti of this editor has changed: {}".format(cti))
    else:
        logger.debug("Another config tree item has changed: {}. Closing editor for {}".format(cti, self.cti))
        # Note: this closes self.
        self.delegate.closeEditor.emit(self, QtWidgets.QAbstractItemDelegate.NoHint)
|
def _get_data_from_bigquery ( self , queries ) :
"""Get data from bigquery table or query ."""
|
all_df = [ ]
for query in queries :
all_df . append ( query . execute ( ) . result ( ) . to_dataframe ( ) )
df = pd . concat ( all_df , ignore_index = True )
return df
|
def str_args(args):
    """Formats a list of function arguments prettily, not as code.

    (kwargs are tuples (argname, argvalue))
    """
    formatted = []
    for item in args:
        if isinstance(item, tuple) and len(item) == 2:
            key, value = item
            # str_arg is deliberately called twice: the first call acts as a
            # truthiness filter, exactly matching the original behavior.
            if value and str_arg(value):
                formatted.append("%s=%s" % (key, str_arg(value)))
        else:
            formatted.append(str_arg(item))
    return ', '.join(formatted)
|
def parse(cls, string):
    """Parse a string and create a metric.

    :param string: one line of the form "<name> <value> <timestamp>"
        (optionally newline-terminated).
    :return: a ``Metric`` built from the parsed fields.
    :raises DiamondException: if the string cannot be parsed.
    """
    # Raw strings avoid invalid escape sequences in the regex fragments.
    match = re.match(r'^(?P<name>[A-Za-z0-9\.\-_]+)\s+'
                     r'(?P<value>[0-9\.]+)\s+'
                     r'(?P<timestamp>[0-9\.]+)(\n?)$', string)
    try:
        groups = match.groupdict()
        # TODO: get precision from value string
        return Metric(groups['name'], groups['value'], float(groups['timestamp']))
    except (AttributeError, ValueError):
        # AttributeError: match is None (no regex match); ValueError: the
        # timestamp is not a valid float (e.g. multiple dots). A bare
        # `except:` previously also swallowed KeyboardInterrupt/SystemExit.
        raise DiamondException("Metric could not be parsed from string: %s." % string)
|
async def _get_update(self, rr_builder: Callable, fro: int, to: int, delta: bool) -> (str, int):
    """Get rev reg delta/state json, and its timestamp on the distributed
    ledger, from the cached rev reg delta/state frames list or the ledger,
    updating the cache as necessary.

    Issuer anchors cannot revoke retroactively, so for any request against
    the asked-for interval (fro, to) the lookup proceeds as follows:

    * if a cached frame f satisfies f.timestamp <= to <= f.to, return its
      rev reg delta/state;
    * otherwise, if there is a maximum frame f with fro <= f.to and
      f.timestamp <= to, return its rev reg delta/state;
    * otherwise, if a cached frame f has f.timestamp < to, check the ledger
      for an update since f.timestamp: if there is one, bake it through
      'to' into a new frame, cache it and return it; if not, extend f.to to
      'to' and return f;
    * otherwise (no cached frame has f.timestamp < to), build a new frame
      from the ledger, add it to the cache and return it.

    The query time of any pre-existing returned frame is refreshed first.

    Raise BadRevStateTime if the caller asks for a delta/state in the
    future. Raise ClosedPool if an update requires the ledger but the node
    pool is closed.

    :param rr_builder: callback to build rev reg delta/state if need be
        (specify holder-prover anchor's _build_rr_delta_json() or verifier
        anchor's _build_rr_state_json() as needed)
    :param fro: least time (epoch seconds) of interest; lower-bounds 'to'
        on frame housing return data
    :param to: greatest time (epoch seconds) of interest; upper-bounds
        returned revocation delta/state timestamp
    :param delta: True to operate on rev reg deltas, False for states
    :return: rev reg delta/state json and ledger timestamp (epoch seconds)
    """
    LOGGER.debug('RevoCacheEntry.get_update >>> rr_builder: %s, fro: %s, to: %s, delta: %s', rr_builder.__name__, fro, to, delta)
    # Normalize a reversed interval rather than rejecting it.
    if fro > to:
        (fro, to) = (to, fro)
    now = int(time())
    if to > now:
        LOGGER.debug('RevoCacheEntry._get_update <!< Cannot query a rev reg %s in the future (%s > %s)', 'delta' if delta else 'state', to, now)
        raise BadRevStateTime('Cannot query a rev reg {} in the future ({} > {})'.format('delta' if delta else 'state', to, now))
    cache_frame = None
    rr_update_json = None
    rr_frames = self.rr_delta_frames if delta else self.rr_state_frames
    # Case 1: a frame's [timestamp, to] span covers the asked-for 'to'.
    frames = [frame for frame in rr_frames if frame.timestamp <= to <= frame.to]
    if frames:
        cache_frame = max(frames, key=lambda f: f.timestamp)
        # should be unique in any case
        # do not update frame.to, it's already past asked-for 'to'
    else:
        # Case 2: latest frame overlapping the asked-for interval.
        frames = [frame for frame in rr_frames if (fro <= frame.to and frame.timestamp <= to)]
        if frames:
            cache_frame = max(frames, key=lambda f: f.timestamp)
            # do not update frame.to - another update might occur, but we don't care; fro < frame.to, good enough
    if not frames:
        # Cases 3 and 4: the cache cannot answer; consult the ledger.
        frames = [frame for frame in rr_frames if frame.timestamp < to]
        # frame.to < to since not frames coming in
        if frames:
            latest_cached = max(frames, key=lambda frame: frame.timestamp)
            if delta:
                # Deltas can be built incrementally from the latest cached one.
                (rr_update_json, timestamp) = await rr_builder(self.rev_reg_def['id'], to=to, fro=latest_cached.timestamp, fro_delta=latest_cached.rr_update)
            else:
                (rr_update_json, timestamp) = await rr_builder(self.rev_reg_def['id'], to)
            if timestamp == latest_cached.timestamp:
                latest_cached.to = to
                # this timestamp now known good through more recent 'to'
                cache_frame = latest_cached
        else:
            (rr_update_json, timestamp) = await rr_builder(self.rev_reg_def['id'], to)
    if cache_frame is None:
        # A new frame was built from the ledger; cache it and cull old frames.
        cache_frame = RevRegUpdateFrame(to, timestamp, json.loads(rr_update_json))
        # sets qtime to now
        rr_frames.append(cache_frame)
        self.cull(delta)
    else:
        # Refresh the query time on the re-used frame.
        cache_frame.qtime = int(time())
    rv = (json.dumps(cache_frame.rr_update), cache_frame.timestamp)
    LOGGER.debug('RevoCacheEntry._get_update <<< %s', rv)
    return rv
|
def dispatch(self, *args, **kwargs):
    """This decorator sets this view to have restricted permissions."""
    # Delegates straight to the parent class; the permission restriction is
    # applied by the decorator(s) wrapping this method.
    return super(BreedingCreate, self).dispatch(*args, **kwargs)
|
def normalizeCountry(country_str, target="iso3c", title_case=False):
    """Return a normalized name/code for country in ``country_str``.

    The input can be a code or name; ``target`` determines the output value.
    The 3 character ISO code is the default (iso3c); 'country_name' and
    'iso2c' are common also. See ``countrycode.countrycode`` for details and
    other options. Raises ``ValueError`` if the country is unrecognized.
    """
    raw = "country_name"
    if country_str is None:
        return ""
    # A 2- or 3-character input is first tried as an upper-cased ISO code;
    # anything else (or a failed code lookup) falls back to a name lookup.
    origin = {2: "iso2c", 3: "iso3c"}.get(len(country_str))
    if origin:
        cc = countrycode(country_str.upper(), origin=origin, target=target)
        if not cc:
            cc = countrycode(country_str, origin=raw, target=target)
    else:
        cc = countrycode(country_str, origin=raw, target=target)
    # Still need to validate because origin=raw will return whatever is
    # input if no match is found.
    cc = countrycode(cc, origin=target, target=target) if cc else None
    if not cc:
        raise ValueError("Country not found: %s" % (country_str))
    return cc.title() if title_case else cc
|
def get_string(self, prompt, default_str=None) -> str:
    """Return a string value that the user enters. Raises exception for cancel.

    Blocks the calling thread until the UI thread reports accept or reject,
    then restores the message column to the cancel row.
    """
    accept_event = threading.Event()
    value_ref = [None]
    def perform():
        def accepted(text):
            # Capture the entered text before unblocking the caller.
            value_ref[0] = text
            accept_event.set()
        def rejected():
            # Leave value_ref[0] as None so the caller raises "Cancel".
            accept_event.set()
        self.__message_column.remove_all()
        pose_get_string_message_box(self.ui, self.__message_column, prompt, str(default_str), accepted, rejected)
        # self.__message_column.add(self.__make_cancel_row())
    with self.__lock:
        self.__q.append(perform)
    self.document_controller.add_task("ui_" + str(id(self)), self.__handle_output_and_q)
    # Block until one of the UI callbacks (accepted/rejected) fires.
    accept_event.wait()
    def update_message_column():
        self.__message_column.remove_all()
        self.__message_column.add(self.__make_cancel_row())
    self.document_controller.add_task("ui_" + str(id(self)), update_message_column)
    if value_ref[0] is None:
        raise Exception("Cancel")
    return value_ref[0]
|
def intervallookup(table, start='start', stop='stop', value=None, include_stop=False):
    """Construct an interval lookup for the given table. E.g.::

        >>> import petl as etl
        >>> table = [['start', 'stop', 'value'],
        ...          [1, 4, 'foo'],
        ...          [3, 7, 'bar'],
        ...          [4, 9, 'baz']]
        >>> lkp = etl.intervallookup(table, 'start', 'stop')
        >>> lkp.search(0, 1)
        >>> lkp.search(1, 2)
        [(1, 4, 'foo')]
        >>> lkp.search(2, 4)
        [(1, 4, 'foo'), (3, 7, 'bar')]
        >>> lkp.search(2, 5)
        [(1, 4, 'foo'), (3, 7, 'bar'), (4, 9, 'baz')]
        >>> lkp.search(9, 14)
        >>> lkp.search(19, 140)
        >>> lkp.search(0)
        >>> lkp.search(1)
        [(1, 4, 'foo')]
        >>> lkp.search(2)
        [(1, 4, 'foo')]
        >>> lkp.search(4)
        [(3, 7, 'bar'), (4, 9, 'baz')]
        >>> lkp.search(5)
        [(3, 7, 'bar'), (4, 9, 'baz')]

    Note start coordinates are included and stop coordinates are excluded
    from the interval. Use the `include_stop` keyword argument to include
    the upper bound of the interval when finding overlaps.

    Some examples using the `include_stop` and `value` keyword arguments::

        >>> import petl as etl
        >>> table = [['start', 'stop', 'value'],
        ...          [1, 4, 'foo'],
        ...          [3, 7, 'bar'],
        ...          [4, 9, 'baz']]
        >>> lkp = etl.intervallookup(table, 'start', 'stop', include_stop=True,
        ...                          value='value')
        >>> lkp.search(0, 1)
        ['foo']
        >>> lkp.search(1, 2)
        ['foo']
        >>> lkp.search(2, 4)
        ['foo', 'bar', 'baz']
        >>> lkp.search(2, 5)
        ['foo', 'bar', 'baz']
        >>> lkp.search(9, 14)
        ['baz']
        >>> lkp.search(19, 140)
        >>> lkp.search(0)
        >>> lkp.search(1)
        ['foo']
        >>> lkp.search(2)
        ['foo']
        >>> lkp.search(4)
        ['foo', 'bar', 'baz']
        >>> lkp.search(5)
        ['bar', 'baz']

    """
    # Build the interval tree once; lookups then query it repeatedly.
    tree = tupletree(table, start=start, stop=stop, value=value)
    return IntervalTreeLookup(tree, include_stop=include_stop)
|
def tdb_minus_tt(jd_tdb):
    """Computes how far TDB is in advance of TT, given TDB.

    Given that the two time scales never diverge by more than 2ms, TT
    can also be given as the argument to perform the conversion in the
    other direction.

    :param jd_tdb: Julian date in the TDB time scale.
    """
    # Julian centuries since the epoch T0 (J2000.0).
    t = (jd_tdb - T0) / 36525.0
    # USNO Circular 179, eq. 2.6.
    # NOTE: the term order is kept as-is; rearranging would change the
    # floating-point summation order and perturb the result bits.
    return (0.001657 * sin(628.3076 * t + 6.2401) + 0.000022 * sin(575.3385 * t + 4.2970) + 0.000014 * sin(1256.6152 * t + 6.1969) + 0.000005 * sin(606.9777 * t + 4.0212) + 0.000005 * sin(52.9691 * t + 0.4444) + 0.000002 * sin(21.3299 * t + 5.5431) + 0.000010 * t * sin(628.3076 * t + 4.2490))
|
def track_locations(locations):
    """Return an iterator of tweets from users in these locations.

    See https://dev.twitter.com/streaming/overview/request-parameters#locations

    Params:
      locations ... list of bounding box locations of the form:
        southwest_longitude, southwest_latitude, northeast_longitude, northeast_latitude, ...

    Raises:
      ValueError: if the list length is not a multiple of four.
    """
    if len(locations) % 4 != 0:
        # ValueError is the precise type for a malformed argument; raising a
        # bare Exception forced callers into overly broad handlers.
        raise ValueError('length of bounding box list should be a multiple of four')
    results = twapi.request('statuses/filter', {'locations': ','.join('%f' % l for l in locations)})
    return results.get_iterator()
|
def get_type_plural(self):
    """:return: a string (even if get_type is an object) indicating the plural of the type name"""
    t = self.get_type()
    # Guard clauses replace the nested conditionals; behavior is unchanged.
    if not t:
        return unicode(type(self)._meta.verbose_name_plural)
    if hasattr(t, 'get_plural'):
        return t.get_plural()
    if t == type(self)._meta.verbose_name:
        return unicode(type(self)._meta.verbose_name_plural)
    return u"{0}s".format(t)
|
def export_model(model, model_type, export_dir, model_column_fn):
    """Export to SavedModel format.

    Args:
      model: Estimator object
      model_type: string indicating model type. "wide", "deep" or "wide_deep"
      export_dir: directory to export the model.
      model_column_fn: Function to generate model feature columns.
    """
    wide_columns, deep_columns = model_column_fn()
    if model_type == 'wide':
        selected_columns = wide_columns
    elif model_type == 'deep':
        selected_columns = deep_columns
    else:
        # "wide_deep" (or any other value) exports with both column sets.
        selected_columns = wide_columns + deep_columns
    # Serve parsed tf.Example protos matching the selected feature columns.
    feature_spec = tf.feature_column.make_parse_example_spec(selected_columns)
    serving_input_fn = tf.estimator.export.build_parsing_serving_input_receiver_fn(feature_spec)
    model.export_savedmodel(export_dir, serving_input_fn, strip_default_attrs=True)
|
async def recv_message(self):
    """Coroutine to receive incoming message from the server.

    If server sends UNARY response, then you can call this coroutine only
    once. If server sends STREAM response, then you should call this
    coroutine several times, until it returns None. To simplify your code in
    this case, :py:class:`Stream` implements the async iteration protocol,
    so you can use it like this:

    .. code-block:: python3

        async for message in stream:
            do_smth_with(message)

    or even like this:

    .. code-block:: python3

        messages = [msg async for msg in stream]

    HTTP/2 has a flow control mechanism, so the client will acknowledge
    received DATA frames as a message only after the user consumes this
    coroutine.

    :returns: message
    """
    # TODO: check that messages were sent for non-stream-stream requests
    if not self._recv_initial_metadata_done:
        # Response headers must be consumed before any DATA frames.
        await self.recv_initial_metadata()
    with self._wrapper:
        message = await recv_message(self._stream, self._codec, self._recv_type)
        self._recv_message_count += 1
        # Let the dispatch hook post-process (and possibly replace) the message.
        message, = await self._dispatch.recv_message(message)
        return message
|
def compute(self, pairs, x=None, x_link=None):
    """Return continuous random values for each record pair.

    Parameters
    ----------
    pairs : pandas.MultiIndex
        A pandas MultiIndex with the record pairs to compare. The indices
        in the MultiIndex are indices of the DataFrame(s) to link.
    x : pandas.DataFrame
        The DataFrame to link. If `x_link` is given, the comparing is a
        linking problem; otherwise it is a deduplication problem.
    x_link : pandas.DataFrame, optional
        The second DataFrame.

    Returns
    -------
    pandas.Series, pandas.DataFrame, numpy.ndarray
        The result of comparing record pairs (the features).
    """
    # The random comparator ignores the actual data frames; only the
    # pair index matters, so an empty frame carrying the index suffices.
    frame = pd.DataFrame(index=pairs)
    return self._compute((frame,), (frame,))
|
def find(name, arg=None):
    """Find process by name or by argument in command line.

    Args:
        name (str): Process name to search for (case-insensitive substring).
        arg (str): Command line argument for a process to search for.

    Returns:
        tea.process.base.IProcess: Process object if found, else None.
    """
    needle = name.lower()
    for proc in get_processes():
        if needle not in proc.name.lower():
            continue
        if arg is None:
            # Name match is sufficient when no argument filter is given.
            return proc
        for candidate in proc.cmdline or []:
            if arg.lower() in candidate.lower():
                return proc
    return None
|
def get_user_roles(self, user, url_prefix, auth, session, send_opts):
    """Get roles associated with the given user.

    Args:
        user (string): User name.
        url_prefix (string): Protocol + host such as https://api.theboss.io
        auth (string): Token to send in the request header.
        session (requests.Session): HTTP session to use for request.
        send_opts (dictionary): Additional arguments to pass to session.send().

    Returns:
        (list): List of roles that user has.

    Raises:
        requests.HTTPError on failure.
    """
    req = self.get_user_role_request(
        'GET', 'application/json', url_prefix, auth, user)
    prepared = session.prepare_request(req)
    resp = session.send(prepared, **send_opts)
    if resp.status_code != 200:
        msg = ('Failed getting roles for user: {}, got HTTP response: ({}) - {}'
               .format(user, resp.status_code, resp.text))
        raise HTTPError(msg, request=req, response=resp)
    return resp.json()
|
def update(self, *args, **kwargs):
    """Call ``update`` on every system in ``self.systems``.

    Each system receives this object followed by any extra positional
    and keyword arguments.
    """
    for subsystem in self.systems:
        subsystem.update(self, *args, **kwargs)
|
def sub_slots(x, match_fn, path=(), arr=None, match=False, recurse_into_matches=True):
    """Explore the ATTRS of a BaseX tree (handling VARLEN attributes) and
    return a list of tree-paths (tuples) for children matching match_fn.
    The root element itself is never reported as a match.
    """
    # todo: rename match to topmatch for clarity
    # todo: profiling suggests this getattr-heavy recursive process is the next bottleneck
    if arr is None:
        arr = []
    if match and match_fn(x):
        arr.append(path)
        if not recurse_into_matches:
            return arr
    if isinstance(x, PathTree):
        for attr in x.ATTRS:
            value = getattr(x, attr)
            if attr in x.VARLEN:
                # VARLEN attributes are sequences; index becomes part of the path.
                for idx, child in enumerate(value or ()):
                    sub_slots(child, match_fn, path + ((attr, idx),), arr,
                              True, recurse_into_matches)
            else:
                sub_slots(value, match_fn, path + (attr,), arr,
                          True, recurse_into_matches)
    return arr
|
def clear_thumbnails(self):
    '''clear all thumbnails from the map

    Removes every SlipThumbnail from every layer, but keeps SlipIcons
    (which subclass SlipThumbnail).
    '''
    state = self.state
    for l in state.layers:
        # Copy the keys before mutating the dict. list() works on both
        # Python 2 and 3; the previous dict.keys()[:] idiom raises
        # TypeError on Python 3 because dict views are not sliceable.
        keys = list(state.layers[l].keys())
        for key in keys:
            if (isinstance(state.layers[l][key], SlipThumbnail)
                    and not isinstance(state.layers[l][key], SlipIcon)):
                state.layers[l].pop(key)
|
def assoc_indexed(cls, ops, kwargs):
    r"""Flatten nested indexed structures while pulling out possible prefactors

    For example, for an :class:`.IndexedSum`:

    .. math::

        \sum_j \left( a \sum_i \dots \right) = a \sum_{j,i} \dots
    """
    from qnet.algebra.core.abstract_quantum_algebra import (
        ScalarTimesQuantumExpression)
    term, *ranges = ops
    if isinstance(term, cls):
        # directly nested indexed expression, no scalar prefactor
        coeff = 1
    elif isinstance(term, ScalarTimesQuantumExpression):
        # a scalar coefficient times an inner expression
        coeff = term.coeff
        term = term.term
        if not isinstance(term, cls):
            # scalar times something that is not a nested indexed
            # structure: nothing to flatten
            return ops, kwargs
    else:
        # term is not a nested indexed structure: leave ops unchanged
        return ops, kwargs
    # rename inner indices that clash with the outer ranges before merging
    term = term.make_disjunct_indices(*ranges)
    combined_ranges = tuple(ranges) + term.ranges
    if coeff == 1:
        return cls.create(term.term, *combined_ranges)
    else:
        bound_symbols = set([r.index_symbol for r in combined_ranges])
        if len(coeff.free_symbols.intersection(bound_symbols)) == 0:
            # prefactor independent of all summation indices: pull it out
            return coeff * cls.create(term.term, *combined_ranges)
        else:
            # prefactor depends on a bound index: keep it inside the sum
            return cls.create(coeff * term.term, *combined_ranges)
|
def get(self, status_item):
    """Query the triplestore and report whether the item is loaded.

    args:
        status_item: the name of the item to check
    """
    lg = logging.getLogger("%s.%s" % (self.ln, inspect.stack()[0][3]))
    lg.setLevel(self.log_level)
    sparql = '''
SELECT ?loaded
WHERE {{
kdr:{0} kds:{1} ?loaded .
}}'''
    result = self.conn.query(sparql=sparql.format(self.group, status_item))
    # Treat a missing binding or a falsy value as "not loaded".
    if len(result) > 0 and cbool(result[0].get('loaded', {}).get("value", False)):
        return True
    return False
|
def execute(connection: connection, statement: str) -> Optional[List[Tuple[str, ...]]]:
    """Execute PGSQL statement and fetch the statement response.

    Parameters
    ----------
    connection : psycopg2.extensions.connection
        Active connection to a PostGreSQL database.
    statement : str
        PGSQL statement to run against the database.

    Returns
    -------
    response : list or None
        List of tuples, each roughly one column-formatted response line
        from the database engine. None when the engine gave no response.
    """
    response = list()  # type: List
    # Both `with` blocks are required; see
    # http://initd.org/psycopg/docs/usage.html#with-statement
    # The with-statement also completes the transaction per statement,
    # which keeps this safe with higher-level libraries (e.g. SQLAlchemy)
    # that don't respect PostGreSQL's autocommit isolation level.
    with connection:
        with connection.cursor(cursor_factory=Psycopg2Cursor) as cursor:
            cursor.execute(statement)
            connection.commit()
            # Get response
            try:
                response = cursor.fetchall()
                if not response:
                    # Empty response list
                    log('<No Response>', logger_name=_LOGGER_NAME)
                    return None
            except ProgrammingError as e:
                if e.args and e.args[0] == 'no results to fetch':
                    # No response available (i.e. no response given)
                    log('<No Response>', logger_name=_LOGGER_NAME)
                    return None
                # Some other programming error; re-raise
                raise e
    log('Response', logger_name=_LOGGER_NAME)
    log('--------', logger_name=_LOGGER_NAME)
    for line in response:
        log(str(line), logger_name=_LOGGER_NAME)
    return response
|
def get_access_flags_string(self):
    """Return the access flags string of the field

    The string is computed lazily on first access and cached on the
    instance; when no symbolic name matches, the raw flags value is
    rendered in hexadecimal.

    :rtype: string
    """
    # 'is None' is the correct identity check (the previous '== None'
    # invokes __eq__ and is non-idiomatic).
    if self.access_flags_string is None:
        # Delegate to the module-level helper of the same name.
        self.access_flags_string = get_access_flags_string(
            self.get_access_flags())
        if self.access_flags_string == "":
            self.access_flags_string = "0x%x" % self.get_access_flags()
    return self.access_flags_string
|
def _ordereddict2dict(input_ordered_dict):
    '''Convert ordered dictionary to a dictionary'''
    # Round-trip through JSON to strip the OrderedDict type recursively.
    serialized = salt.utils.json.dumps(input_ordered_dict)
    return salt.utils.json.loads(serialized)
|
def _get_channel_bindings_application_data(response):
    """https://tools.ietf.org/html/rfc5929 4. The 'tls-server-end-point' Channel Binding Type

    Gets the application_data value for the 'tls-server-end-point' CBT type:
    the hash of the HTTPS endpoint's certificate prefixed with
    b'tls-server-end-point:'. Returns None (so Kerberos auth falls back to
    GSS_C_NO_CHANNEL_BINDINGS) when the socket is not SSL or the raw HTTP
    object is not a urllib3 HTTPResponse.

    :param response: The original 401 response from the server
    :return: byte string used on the application_data.value field on the CBT struct
    """
    raw = response.raw
    if not isinstance(raw, HTTPResponse):
        warnings.warn(
            "Requests is running with a non urllib3 backend, cannot retrieve server certificate for CBT",
            NoCertificateRetrievedWarning)
        return None
    try:
        # Reach into urllib3 internals to get the underlying socket;
        # the attribute path differs between Python 2 and 3.
        if sys.version_info > (3, 0):
            sock = raw._fp.fp.raw._sock
        else:
            sock = raw._fp.fp._sock
    except AttributeError:
        warnings.warn("Failed to get raw socket for CBT; has urllib3 impl changed",
                      NoCertificateRetrievedWarning)
        return None
    try:
        server_certificate = sock.getpeercert(True)
    except AttributeError:
        # Not an SSL socket; no certificate available.
        return None
    certificate_hash = _get_certificate_hash(server_certificate)
    return b'tls-server-end-point:' + certificate_hash
|
def fermionic_constraints(a):
    """Return a set of constraints that define fermionic ladder operators.

    :param a: The non-Hermitian variables.
    :type a: list of :class:`sympy.physics.quantum.operator.Operator`.

    :returns: a dict of substitutions.
    """
    substitutions = {}
    for i, op in enumerate(a):
        # Squares of ladder operators vanish (Pauli exclusion).
        substitutions[op ** 2] = 0
        substitutions[Dagger(op) ** 2] = 0
        # Canonical anticommutation relation on the same mode.
        substitutions[op * Dagger(op)] = 1.0 - Dagger(op) * op
        for other in a[i + 1:]:
            # Operators on distinct modes anticommute.
            substitutions[op * Dagger(other)] = -Dagger(other) * op
            substitutions[Dagger(op) * other] = -other * Dagger(op)
            substitutions[op * other] = -other * op
            substitutions[Dagger(op) * Dagger(other)] = -Dagger(other) * Dagger(op)
    return substitutions
|
def _partialParseWeekday(self, s, sourceTime):
    """test if giving C{s} matched CRE_WEEKDAY, used by L{parse()}

    @type  s:          string
    @param s:          date/time text to evaluate
    @type  sourceTime: struct_time
    @param sourceTime: C{struct_time} value to use as the base

    @rtype:  tuple
    @return: tuple of remained date/time text, datetime object and
             an boolean value to describ if matched or not
    """
    parseStr = None
    chunk1 = chunk2 = ''
    ctx = self.currentContext
    log.debug('eval %s with context - %s, %s', s, ctx.hasDate, ctx.hasTime)
    # Weekday
    m = self.ptc.CRE_WEEKDAY.search(s)
    if m is not None:
        gv = m.group()
        # Skip strings that are day-offset phrases in their entirety;
        # those are handled by a different parser (NOTE(review): inferred
        # from the dayOffsets membership test -- confirm against parse()).
        if s not in self.ptc.dayOffsets:
            if (gv != s):
                # capture remaining string: text before and after the
                # matched weekday is rejoined for further parsing
                parseStr = gv
                chunk1 = s[:m.start()]
                chunk2 = s[m.end():]
                s = '%s %s' % (chunk1, chunk2)
            else:
                # entire string was the weekday; nothing remains
                parseStr = s
                s = ''
    # Only evaluate the weekday when no date has been found yet.
    if parseStr and not ctx.hasDate:
        debug and log.debug('found (weekday) [%s][%s][%s]', parseStr, chunk1, chunk2)
        sourceTime = self._evalWeekday(parseStr, sourceTime)
    return s, sourceTime, bool(parseStr)
|
def removeItem(self, item):
    """Overloads the default QGraphicsScene method to handle cleanup and
    additional removal options for nodes.

    :param item <QGraphicsItem>
    :return <bool>
    """
    # Nodes and connections get a chance to veto removal / clean up first.
    if isinstance(item, (XNode, XNodeConnection)):
        if not item.prepareToRemove():
            return False
    # Drop the item from the scene cache if present.
    try:
        self._cache.remove(item)
    except KeyError:
        pass
    # mark the scene as modified
    self.setModified(True)
    super(XNodeScene, self).removeItem(item)
    if not self.signalsBlocked():
        self.itemsRemoved.emit()
    return True
|
def matrix_width(self, zoom):
    """Tile matrix width (number of columns) at zoom level.

    - zoom: zoom level
    """
    validate_zoom(zoom)
    columns = int(math.ceil(self.grid.shape.width * 2 ** zoom / self.metatiling))
    # Never report fewer than one column.
    return max(columns, 1)
|
def hrscan(self, name, key_start, key_end, limit=10):
    """Return a dict mapping key/value in the top ``limit`` keys between
    ``key_start`` and ``key_end`` within hash ``name`` in descending order.

    .. note:: The range is (``key_start``, ``key_end``]. ``key_start`` is
       excluded from the range; ``key_end`` is included.

    :param string name: the hash name
    :param string key_start: The upper bound (not included) of keys to be
        returned, empty string ``''`` means +inf
    :param string key_end: The lower bound (included) of keys to be
        returned, empty string ``''`` means -inf
    :param int limit: number of elements will be returned.
    :return: a dict mapping key/value in descending order
    :rtype: OrderedDict
    """
    # Validate limit before handing the command to the server.
    limit = get_positive_integer('limit', limit)
    return self.execute_command('hrscan', name, key_start, key_end, limit)
|
def has_active_subscription(self, plan=None):
    """Checks to see if this customer has an active subscription to the given plan.

    :param plan: The plan for which to check for an active subscription.
        If plan is None and exactly one active subscription exists, that
        subscription is checked. Calling with no plan while multiple valid
        subscriptions exist raises an exception.
    :type plan: Plan or string (plan ID)
    :returns: True if there exists an active subscription, False otherwise.
    :throws: TypeError if ``plan`` is None and more than one active
        subscription exists for this customer.
    """
    if plan is None:
        valid_subscriptions = self._get_valid_subscriptions()
        count = len(valid_subscriptions)
        if count == 0:
            return False
        if count == 1:
            return True
        raise TypeError(
            "plan cannot be None if more than one valid subscription exists for this customer.")
    # Convert Plan to id
    if isinstance(plan, StripeModel):
        plan = plan.id
    return any(subscription.is_valid()
               for subscription in self.subscriptions.filter(plan__id=plan))
|
def get_go2nt(self, goea_results):
    """Return go2nt with added formatted string versions of the P-values."""
    go2obj = self.objaartall.grprdflt.gosubdag.go2obj
    # Add string version of P-values
    nts = MgrNtGOEAs(goea_results).get_nts_strpval()
    return {go2obj[nt.GO].id: nt for nt in nts if nt.GO in go2obj}
|
def board_links_to_ids(self):
    """Convert board links to ids"""
    params = urllib.urlencode({
        "key": self.key,
        "token": self.token,
        "fields": "shortLink",
    })
    url = "{0}/members/{1}/boards?{2}".format(self.stats.url, self.username, params)
    boards = json.loads(self.stats.session.open(url).read())
    # An empty board_links filter ([""]) means "all boards".
    return [board['id'] for board in boards
            if self.board_links == [""] or board['shortLink'] in self.board_links]
|
def _evaluate ( self , R , z , phi = 0. , t = 0. , _forceFloatEval = False ) :
"""NAME :
_ evaluate
PURPOSE :
evaluate the potential at R , z
INPUT :
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT :
Phi ( R , z )
HISTORY :
2010-07-09 - Started - Bovy ( NYU )"""
|
if not _forceFloatEval and not self . integerSelf == None :
return self . integerSelf . _evaluate ( R , z , phi = phi , t = t )
elif self . beta == 3. :
r = numpy . sqrt ( R ** 2. + z ** 2. )
return ( 1. / self . a ) * ( r - self . a * ( r / self . a ) ** ( 3. - self . alpha ) / ( 3. - self . alpha ) * special . hyp2f1 ( 3. - self . alpha , 2. - self . alpha , 4. - self . alpha , - r / self . a ) ) / ( self . alpha - 2. ) / r
else :
r = numpy . sqrt ( R ** 2. + z ** 2. )
return special . gamma ( self . beta - 3. ) * ( ( r / self . a ) ** ( 3. - self . beta ) / special . gamma ( self . beta - 1. ) * special . hyp2f1 ( self . beta - 3. , self . beta - self . alpha , self . beta - 1. , - self . a / r ) - special . gamma ( 3. - self . alpha ) / special . gamma ( self . beta - self . alpha ) ) / r
|
def is_connected(self, other):
    """Returns ``True`` if there exists a (possibly empty) range which is
    enclosed by both this range and other.

    Examples:

    * [1, 3] and [5, 7] are not connected
    * [5, 7] and [1, 3] are not connected
    * [2, 4) and [3, 5) are connected, because both enclose [3, 4)
    * [1, 3) and [3, 5) are connected, because both enclose the empty
      range [3, 3)
    * [1, 3) and (3, 5) are not connected
    """
    # Interiors strictly overlap.
    if self.upper > other.lower and other.upper > self.lower:
        return True
    # Touch at self.upper == other.lower, with at least one closed side.
    if self.upper == other.lower and (self.upper_inc or other.lower_inc):
        return True
    # Touch at self.lower == other.upper, with at least one closed side.
    return self.lower == other.upper and (self.lower_inc or other.upper_inc)
|
def __getVariables(self):
    """Parses the P4 env vars using 'set p4'"""
    try:
        startupinfo = None
        if os.name == 'nt':
            # Keep the console window hidden on Windows.
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        output = subprocess.check_output(['p4', 'set'], startupinfo=startupinfo)
        if six.PY3:
            output = str(output, 'utf8')
    except subprocess.CalledProcessError as err:
        LOGGER.error(err)
        return
    p4vars = {}
    for line in output.splitlines():
        if not line:
            continue
        try:
            key, value = line.split('=', 1)
        except ValueError:
            # Line without '=' carries no variable assignment.
            continue
        key = key.strip()
        # Strip the trailing " (source)" annotation p4 appends.
        p4vars[key] = value.strip().split(' (')[0]
        if p4vars[key].startswith('(config'):
            # Value was only a config annotation; discard it.
            del p4vars[key]
    # Environment variables take precedence over 'p4 set' output.
    self._port = self._port or os.getenv('P4PORT', p4vars.get('P4PORT'))
    self._user = self._user or os.getenv('P4USER', p4vars.get('P4USER'))
    self._client = self._client or os.getenv('P4CLIENT', p4vars.get('P4CLIENT'))
|
def format_response(self, response_data):
    """Format an RPC response.

    Unpacks the raw response buffer into a status code and a payload of
    at most 20 bytes, validating the buffer length against
    ``response_info()``.
    """
    _addr, expected_length = self.response_info()
    if len(response_data) != expected_length:
        raise HardwareError("Invalid response read length, should be the same as what response_info() returns",
                            expected=expected_length, actual=len(response_data))
    status, flags, received_length, payload = struct.unpack("<HxBL4x20s", response_data)
    # Only the low byte of the status word is meaningful.
    status &= 0xFF
    if flags & (1 << 3):
        raise HardwareError("Could not grab external gate")
    if received_length > 20:
        raise HardwareError("Invalid received payload length > 20 bytes",
                            received_length=received_length)
    return {'status': status, 'payload': payload[:received_length]}
|
def query(self, value=None):
    """Return or set the query string

    :param string value: the new query string to use
    :returns: string or new :class:`URL` instance
    """
    if value is None:
        # Getter: return the current query component.
        return self._tuple.query
    # Setter: URLs are immutable, so mutate into a new instance.
    return URL._mutate(self, query=value)
|
def broken_chains(samples, chains):
    """Find the broken chains.

    Args:
        samples (array_like):
            nS x nV array_like of samples (nS samples over nV variables),
            with values all 0/1 or -1/+1.
        chains (list[array_like]):
            List of nC chains; each chain is an array_like collection of
            column indices into samples.

    Returns:
        :obj:`numpy.ndarray`: nS x nC boolean array; entry (i, j) is True
        when chain j is broken in sample i.

    Examples:
        >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8)
        >>> chains = [[0, 1], [2, 3]]
        >>> dwave.embedding.broken_chains(samples, chains)
        array([[ True,  True],
               [False, False]])
    """
    samples = np.asarray(samples)
    if samples.ndim != 2:
        raise ValueError("expected samples to be a numpy 2D array")
    num_samples = samples.shape[0]
    broken = np.zeros((num_samples, len(chains)), dtype=bool, order='F')
    for cidx, chain in enumerate(chains):
        # Sets are not directly indexable; materialize them first.
        indices = np.asarray(list(chain) if isinstance(chain, set) else chain)
        if indices.ndim > 1:
            raise ValueError("chains should be 1D array_like objects")
        # chains of length 1, or 0 cannot be broken
        if len(indices) <= 1:
            continue
        hits = samples[:, indices] == 1
        # Broken iff some, but not all, chain variables are +1 (any XOR all).
        broken[:, cidx] = hits.any(axis=1) != hits.all(axis=1)
    return broken
|
def setup(api=None):
    """Sets up and fills test directory for serving.

    Uses different filetypes to see how they are dealt with.
    The temporary directory will clean itself up.
    """
    global tmp_dir_object
    tmp_dir_object = tempfile.TemporaryDirectory()
    root = tmp_dir_object.name
    dir_a = os.path.join(root, "a")
    dir_b = os.path.join(root, "b")
    os.mkdir(dir_a)
    os.mkdir(dir_b)
    # populate directory a with text files
    text_files = [
        ["hi.txt", """Hi World!"""],
        ["hi.html", """<strong>Hi World!</strong>"""],
        ["hello.html", """
<img src='/static/b/smile.png'</img>
pop-up
<script src='/static/a/hi.js'></script>"""],
        ["hi.js", """alert('Hi World')"""],
    ]
    for fname, contents in text_files:
        with open(os.path.join(dir_a, fname), mode="wt") as fo:
            fo.write(contents)
    # populate directory b with binary file
    image = b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\n\x00\x00\x00\n\x08\x02\x00\x00\x00\x02PX\xea\x00\x00\x006IDAT\x18\xd3c\xfc\xff\xff?\x03n\xc0\xc4\x80\x170100022222\xc2\x85\x90\xb9\x04t3\x92`7\xb2\x15D\xeb\xc6\xe34\xa8n4c\xe1F\x120\x1c\x00\xc6z\x12\x1c\x8cT\xf2\x1e\x00\x00\x00\x00IEND\xaeB`\x82'
    with open(os.path.join(dir_b, "smile.png"), mode="wb") as fo:
        fo.write(image)
|
def add_colors(*args):
    '''Do some *args magic to return a tuple, which has the sums of all tuples in *args'''
    # Adapted from an answer here:
    # http://stackoverflow.com/questions/14180866/sum-each-value-in-a-list-of-tuples
    summed = [sum(channel) for channel in zip(*args)]
    # Clamp each channel into the valid 8-bit color range.
    return numpy.clip(summed, 0, 255)
|
def find_crs(op, element):
    """Traverses the supplied object looking for coordinate reference
    systems (crs). If multiple clashing reference systems are found
    it will throw an error.
    """
    found = [crs for crs in element.traverse(lambda x: x.crs, [_Element])
             if crs is not None]
    if not found:
        return {}
    first = found[0]
    # All elements must agree on a single reference system.
    if any(first != other for other in found[1:]):
        raise ValueError('Cannot %s Elements in different '
                         'coordinate reference systems.' % type(op).__name__)
    return {'crs': first}
|
def literal_to_dict(value):
    """Transform an object value into a dict readable value

    :param value: Object of a triple which is not a BNode
    :type value: Literal or URIRef
    :return: dict or str or list
    """
    if value is None:
        return None
    if isinstance(value, Literal):
        if value.language is not None:
            # Language-tagged literal: keep the tag in JSON-LD form.
            return {"@value": str(value), "@language": value.language}
        return value.toPython()
    if isinstance(value, URIRef):
        return {"@id": str(value)}
    return str(value)
|
def ServiceAccountCredentialsFromFile(filename, scopes, user_agent=None):
    """Use the credentials in filename to create a token for scopes."""
    filename = os.path.expanduser(filename)
    # Two code paths depending on the installed oauth2client version.
    if oauth2client.__version__ > '1.5.2':
        # oauth2client >= 2.0.0
        credentials = (
            service_account.ServiceAccountCredentials.from_json_keyfile_name(
                filename, scopes=scopes))
        if credentials is not None:
            if user_agent is not None:
                credentials.user_agent = user_agent
        return credentials
    else:
        # oauth2client < 2.0.0
        with open(filename) as keyfile:
            service_account_info = json.load(keyfile)
        if service_account_info.get('type') != oauth2client.client.SERVICE_ACCOUNT:
            raise exceptions.CredentialsError(
                'Invalid service account credentials: %s' % (filename,))
        # pylint: disable=protected-access
        credentials = service_account._ServiceAccountCredentials(
            service_account_id=service_account_info['client_id'],
            service_account_email=service_account_info['client_email'],
            private_key_id=service_account_info['private_key_id'],
            private_key_pkcs8_text=service_account_info['private_key'],
            scopes=scopes,
            user_agent=user_agent)
        # pylint: enable=protected-access
        return credentials
|
def max_rain(self):
    """Returns a tuple containing the max value in the rain
    series preceeded by its timestamp

    :returns: a tuple
    :raises: ValueError when the measurement series is empty
    """
    samples = self._purge_none_samples(self.rain_series())
    # max() raises ValueError on an empty series, as documented.
    return max(samples, key=lambda sample: sample[1])
|
def _validate_region ( self ) :
"""Validate region was passed in and is a valid GCE zone ."""
|
if not self . region :
raise GCECloudException ( 'Zone is required for GCE cloud framework: ' 'Example: us-west1-a' )
try :
zone = self . compute_driver . ex_get_zone ( self . region )
except Exception :
zone = None
if not zone :
raise GCECloudException ( '{region} is not a valid GCE zone. ' 'Example: us-west1-a' . format ( region = self . region ) )
|
def _post_json(target_url, payload):
    """POST *payload* as JSON to *target_url*, wrapping urllib2 errors as
    ValueError so callers see a uniform error type."""
    try:
        req = urllib2.Request(target_url)
        req.add_header('Content-Type', 'application/json')
        urllib2.urlopen(req, json.dumps(payload))
    except urllib2.URLError as ex:
        raise ValueError(str(ex))


def create(url, filename, options, properties):
    """Create new image group at given SCO-API by uploading local file.

    Expects a tar-archive containing the images in the image group. Allows
    updating options and properties of the created resource.

    Parameters
    ----------
    url : string
        Url to POST image group create request
    filename : string
        Path to tar-archive on local disk
    options : Dictionary, optional
        Values for image group options. Argument may be None.
    properties : Dictionary
        Set of additional properties for image group (may be None)

    Returns
    -------
    string
        Url of created image group resource

    Raises
    ------
    ValueError
        If the filename suffix is invalid, the upload is rejected, or the
        option/property sets are malformed.
    """
    # Ensure that the file has valid suffix
    if not has_tar_suffix(filename):
        raise ValueError('invalid file suffix: ' + filename)
    # Upload file to create image group. A non-201 response means the
    # uploaded file is not a valid tar file. Use a context manager so the
    # file handle is closed even on error (previously leaked).
    with open(filename, 'rb') as fh:
        response = requests.post(url, files={'file': fh})
    if response.status_code != 201:
        raise ValueError('invalid file: ' + filename)
    # Get image group HATEOAS references from successful response
    links = references_to_dict(response.json()['links'])
    resource_url = links[REF_SELF]
    # Update image group options if given ('is not None' replaces the
    # non-idiomatic 'not ... is None').
    if options is not None:
        obj_ops = []
        # Catch TypeErrors if options is not a mapping.
        try:
            for opt in options:
                obj_ops.append({'name': opt, 'value': options[opt]})
        except TypeError:
            raise ValueError('invalid option set')
        _post_json(links[REF_UPDATE_OPTIONS], {'options': obj_ops})
    # Update image group properties if given
    if properties is not None:
        obj_props = []
        # Catch TypeErrors if properties is not a mapping.
        try:
            for key in properties:
                obj_props.append({'key': key, 'value': properties[key]})
        except TypeError:
            raise ValueError('invalid property set')
        _post_json(links[REF_UPSERT_PROPERTIES], {'properties': obj_props})
    return resource_url
|
def publish(self, topic, payload=None, qos=0, retain=False):
    """Publish a message on a topic.

    This causes a message to be sent to the broker and subsequently from
    the broker to any clients subscribing to matching topics.

    topic: The topic that the message should be published on.
    payload: The actual message to send. If not given, or set to None a
    zero length message will be used. Passing an int or float will result
    in the payload being converted to a string representing that number.
    If you wish to send a true int/float, use struct.pack() to create the
    payload you require.
    qos: The quality of service level to use.
    retain: If set to true, the message will be set as the "last known
    good"/retained message for the topic.

    Returns a tuple (result, mid), where result is MQTT_ERR_SUCCESS to
    indicate success or MQTT_ERR_NO_CONN if the client is not currently
    connected. mid is the message ID for the publish request. The mid
    value can be used to track the publish request by checking against the
    mid argument in the on_publish() callback if it is defined.

    A ValueError will be raised if topic is None, has zero length or is
    invalid (contains a wildcard), if qos is not one of 0, 1 or 2, or if
    the length of the payload is greater than 268435455 bytes.
    """
    # Validate topic and QoS up front so callers fail fast.
    if topic is None or len(topic) == 0:
        raise ValueError('Invalid topic.')
    if qos < 0 or qos > 2:
        raise ValueError('Invalid QoS level.')
    # Normalise the payload: strings/bytearrays pass through, numbers are
    # stringified, None means a zero-length message.
    if isinstance(payload, str) or isinstance(payload, bytearray):
        local_payload = payload
    elif sys.version_info[0] < 3 and isinstance(payload, unicode):
        # Python 2 only: unicode payloads are also accepted as-is.
        local_payload = payload
    elif isinstance(payload, int) or isinstance(payload, float):
        local_payload = str(payload)
    elif payload is None:
        local_payload = None
    else:
        raise TypeError('payload must be a string, bytearray, int, float or None.')
    # 268435455 is the MQTT maximum remaining-length value.
    if local_payload is not None and len(local_payload) > 268435455:
        raise ValueError('Payload too large.')
    if self._topic_wildcard_len_check(topic) != MQTT_ERR_SUCCESS:
        raise ValueError('Publish topic cannot contain wildcards.')
    local_mid = self._mid_generate()
    if qos == 0:
        # QoS 0 needs no acknowledgement bookkeeping: send immediately.
        rc = self._send_publish(local_mid, topic, local_payload, qos, retain, False)
        return (rc, local_mid)
    else:
        # QoS 1/2: queue the message and track its acknowledgement state
        # under the outgoing-message mutex.
        message = MQTTMessage()
        message.timestamp = time.time()
        message.mid = local_mid
        message.topic = topic
        if local_payload is None or len(local_payload) == 0:
            message.payload = None
        else:
            message.payload = local_payload
        message.qos = qos
        message.retain = retain
        message.dup = False
        self._out_message_mutex.acquire()
        self._out_messages.append(message)
        if self._max_inflight_messages == 0 or self._inflight_messages < self._max_inflight_messages:
            # Inflight window has room: send now and mark the state that
            # the expected acknowledgement (PUBACK/PUBREC) will clear.
            self._inflight_messages = self._inflight_messages + 1
            if qos == 1:
                message.state = mqtt_ms_wait_for_puback
            elif qos == 2:
                message.state = mqtt_ms_wait_for_pubrec
            self._out_message_mutex.release()
            rc = self._send_publish(message.mid, message.topic, message.payload, message.qos, message.retain, message.dup)
            # remove from inflight messages so it will be send after a connection is made
            if rc is MQTT_ERR_NO_CONN:
                with self._out_message_mutex:
                    self._inflight_messages -= 1
                    message.state = mqtt_ms_publish
            return (rc, local_mid)
        else:
            # Window full: leave the message queued for later delivery.
            message.state = mqtt_ms_queued;
            self._out_message_mutex.release()
            return (MQTT_ERR_SUCCESS, local_mid)
|
def create_token_indices(self, tokens):
    """If `apply_encoding_options` is inadequate, one can retrieve tokens from
    `self.token_counts`, filter with a desired strategy and regenerate
    `token_index` using this method. The token index is subsequently used when
    `encode_texts` or `decode_texts` methods are called.
    """
    # Special tokens are prepended so they occupy the first indices.
    full_vocab = self.special_token + list(tokens)
    self._token2idx = {token: idx for idx, token in enumerate(full_vocab)}
    self._idx2token = {idx: token for idx, token in enumerate(full_vocab)}
|
def read_json(self):
    """Read metadata via the parent class, with ancillary files made available.

    :returns: The read metadata.
    :rtype: dict
    """
    with reading_ancillary_files(self):
        return super(GenericLayerMetadata, self).read_json()
|
def _get_fields_info(self, cols, model_schema, filter_rel_fields, **kwargs):
    """Returns a dict with fields detail from a marshmallow schema.

    :param cols: list of columns to show info for
    :param model_schema: Marshmallow model schema
    :param filter_rel_fields: expects add_query_rel_fields or
        edit_query_rel_fields
    :param kwargs: Receives all rison arguments for pagination
    :return: dict with all fields details
    """
    field_details = list()
    for column in cols:
        # Per-column pagination arguments may arrive via rison kwargs.
        pagination = kwargs.get(column, {})
        page_index = pagination.get(API_PAGE_INDEX_RIS_KEY, None) if pagination else None
        page_length = pagination.get(API_PAGE_SIZE_RIS_KEY, None) if pagination else None
        field_details.append(
            self._get_field_info(
                model_schema.fields[column],
                filter_rel_fields.get(column, []),
                page=page_index,
                page_size=page_length,
            )
        )
    return field_details
|
def create_app(*, debug=False, threads=1, bigchaindb_factory=None):
    """Return an instance of the Flask application.

    Args:
        debug (bool): a flag to activate the debug mode for the app
            (default: False).
        threads (int): number of threads to use
        bigchaindb_factory: factory used to build backend instances for the
            connection pool; defaults to ``BigchainDB``.
    Return:
        an instance of the Flask application.
    """
    factory = bigchaindb_factory or BigchainDB
    app = Flask(__name__)
    # Strip unwanted Content-Type headers before requests reach the app.
    app.wsgi_app = StripContentTypeMiddleware(app.wsgi_app)
    CORS(app)
    app.debug = debug
    app.config['bigchain_pool'] = utils.pool(factory, size=threads)
    add_routes(app)
    return app
|
def call_agent_side(self, method, *args, **kwargs):
    """Call ``method``, wrap it in a Deferred and bind an error handler.

    The Deferred is tracked in ``self._agent_jobs`` until it fires, and the
    chain is triggered via ``time.call_next`` rather than inline.

    :param method: agent-side callable to invoke.
    :param args: positional arguments forwarded to ``method``.
    :param kwargs: keyword arguments forwarded to ``method``.  The special key
        ``ensure_state`` is popped first; when given, ``method`` only runs if
        ``self._ensure_state(ensure_state)`` is truthy.
    :returns: Deferred firing with the result of ``method``.
    """
    # Calling into agent code after finalize() is a programming error.
    assert not self._finalize_called, ("Attempt to call agent side code "
                                       "after finalize() method has been "
                                       "called. Method: %r" % (method,))
    ensure_state = kwargs.pop('ensure_state', None)
    d = defer.Deferred(canceller=self._cancel_agent_side_call)
    self._agent_jobs.append(d)
    if ensure_state:
        # Call the method only if the state check passes.
        d.addCallback(lambda _: (self._ensure_state(ensure_state) and method(*args, **kwargs)))
    else:
        d.addCallback(defer.drop_param, method, *args, **kwargs)
    d.addErrback(self._error_handler, method)
    # Always drop the job from the tracking list, on success or failure.
    d.addBoth(defer.bridge_param, self._remove_agent_job, d)
    # NOTE(review): time.call_next appears to schedule the callback for a later
    # loop iteration so callers can attach callbacks first - confirm against the
    # project's time helper.
    time.call_next(d.callback, None)
    return d
|
def whois(ip_address):
    """Whois client for Python.

    Queries whois.ripe.net (port 43) for the given address, strips all
    ``remarks:`` lines, trailing blank lines and the first three header lines.

    :param ip_address: IP address or hostname to look up.
    :returns: the filtered whois answer as bytes.
    """
    whois_ip = str(ip_address)
    try:
        query = socket.gethostbyname(whois_ip)
    except Exception:
        # Resolution failure: query the raw string as given.
        query = whois_ip
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect(("whois.ripe.net", 43))
        s.send(query.encode("utf8") + b"\r\n")
        answer = b""
        while True:
            d = s.recv(4096)
            answer += d
            if not d:
                break
    finally:
        # BUGFIX: the socket previously leaked when connect/send/recv raised.
        s.close()
    ignore_tag = b"remarks:"
    # Ignore all lines starting with the ignore_tag (empty lines never match).
    lines = [line for line in answer.split(b"\n") if not line.startswith(ignore_tag)]
    # BUGFIX: remove ALL empty lines at the bottom; the old del-while-indexing
    # loop skipped every other trailing blank line after a deletion.
    while lines and not lines[-1].strip():
        lines.pop()
    return b"\n".join(lines[3:])
|
def add_distinguished_name(list_name, item_name):
    '''Adds a distinguished name to a distinguished name list.

    list_name(str): The name of the specific policy distinguished name list to append to.
    item_name(str): The distinguished name to append.

    CLI Example:

    .. code-block:: bash

        salt '*' bluecoat_sslv.add_distinguished_name MyDistinguishedList cn=foo.bar.com
    '''
    payload = {
        "jsonrpc": "2.0",
        "id": "ID0",
        "method": "add_policy_distinguished_names",
        "params": [list_name, {"item_name": item_name}],
    }
    response = __proxy__['bluecoat_sslv.call'](payload, True)
    return _validate_change_result(response)
|
def _CheckMacOSPaths(self, filename, artifact_definition, source, paths):
    """Checks if the paths are valid MacOS paths.

    On MacOS several top-level directories (those in _MACOS_PRIVATE_SUB_PATHS)
    are symbolic links into /private; a definition referencing one side of
    such a pair should also reference the other, and this check warns when the
    counterpart is missing.

    Args:
        filename (str): name of the artifacts definition file.
        artifact_definition (ArtifactDefinition): artifact definition.
        source (SourceType): source definition.
        paths (list[str]): paths to validate.

    Returns:
        bool: True if the MacOS paths are valid.
    """
    result = True
    paths_with_private = []
    paths_with_symbolic_link_to_private = []
    for path in paths:
        path_lower = path.lower()
        path_segments = path_lower.split(source.separator)
        if not path_segments:
            logging.warning((
                'Empty path defined by artifact definition: {0:s} in file: '
                '{1:s}').format(artifact_definition.name, filename))
            result = False
        elif len(path_segments) == 1:
            continue
        elif path_segments[1] in self._MACOS_PRIVATE_SUB_PATHS:
            paths_with_symbolic_link_to_private.append(path)
        # BUGFIX: require at least 3 segments before indexing path_segments[2];
        # the previous '>= 2' check raised IndexError for the path '/private'.
        elif path_segments[1] == 'private' and len(path_segments) >= 3:
            if path_segments[2] in self._MACOS_PRIVATE_SUB_PATHS:
                paths_with_private.append(path)
            else:
                logging.warning((
                    'Unsupported private path: {0:s} defined by artifact definition: '
                    '{1:s} in file: {2:s}').format(path, artifact_definition.name, filename))
                result = False
    # Every /private/<sub> path needs its /<sub> symbolic-link counterpart.
    for private_path in paths_with_private:
        if private_path[8:] not in paths_with_symbolic_link_to_private:
            logging.warning((
                'Missing symbolic link: {0:s} for path: {1:s} defined by artifact '
                'definition: {2:s} in file: {3:s}').format(
                    private_path[8:], private_path, artifact_definition.name, filename))
            result = False
    # And every /<sub> symbolic link needs its /private/<sub> counterpart.
    for path in paths_with_symbolic_link_to_private:
        private_path = '/private{0:s}'.format(path)
        if private_path not in paths_with_private:
            logging.warning((
                'Missing path: {0:s} for symbolic link: {1:s} defined by artifact '
                'definition: {2:s} in file: {3:s}').format(
                    private_path, path, artifact_definition.name, filename))
            result = False
    return result
|
def ls(bank):
    '''Return an iterable object containing all entries stored in the specified
    bank.
    '''
    _init_client()
    # NOTE: bank is interpolated directly into the SQL; it is expected to be an
    # internally generated cache bank name, not untrusted input.
    query = "SELECT etcd_key FROM {0} WHERE bank='{1}'".format(_table_name, bank)
    cur, _ = run_query(client, query)
    rows = cur.fetchall()
    cur.close()
    return [row[0] for row in rows]
|
def _parse(self):
    """Parse the ADF outputs. There are two files: one is 'logfile', the other
    is the ADF output file. The final energy and structures are parsed from
    the 'logfile'. Frequencies and normal modes are parsed from the ADF
    output file.
    """
    log_path = os.path.join(os.path.dirname(self.filename), "logfile")
    if not os.path.isfile(log_path):
        raise IOError("The ADF logfile can not be accessed!")
    # Reset all result fields before parsing.
    self.is_failed = False
    self.is_internal_crash = False
    self.error = None
    self.final_energy = None
    self.final_structure = None
    self.energies = []
    self.structures = []
    self.frequencies = []
    self.normal_modes = None
    self.freq_type = None
    self.run_type = None
    self._parse_logfile(log_path)
    # The main output only matters for successful, non-single-point runs.
    if self.run_type != "SinglePoint" and not self.is_failed:
        self._parse_adf_output()
|
def is_internet_available(ips=CONNECTION_IPS, timeout=1.0):
    """Returns if an internet connection is available.

    :param ips: Address ips to check against.
    :type ips: list
    :param timeout: Timeout in seconds.
    :type timeout: int
    :return: Is internet available.
    :rtype: bool
    """
    # BUGFIX: the previous implementation consumed the list with ips.pop(0),
    # permanently emptying the shared CONNECTION_IPS default so every later
    # call returned False.  Iterate without mutating the argument instead.
    for ip in ips:
        try:
            urllib2.urlopen("http://{0}".format(ip), timeout=timeout)
            return True
        except (urllib2.URLError, socket.error):
            continue
    return False
|
def send(self):
    """Sends the email to the recipient.

    Recipients depend on ``self.content_type``: a node uses the owner's user
    email; a layer uses the layer email plus every mantainer's email; anything
    else uses ``self.to.email``.  On success ``self.status`` is set to 1; on
    ``socket.error`` the failure is logged (per the project's django-logging
    configuration) and ``self.status`` is set to -1.
    """
    if self.content_type.name == 'node':
        to = [self.to.user.email]
    elif self.content_type.name == 'layer':
        to = [self.to.email]
        # layer case is slightly special, mantainers need to be notified as well
        # TODO: consider making the mantainers able to switch off notifications
        for mantainer in self.to.mantainers.all().only('email'):
            to += [mantainer.email]
    else:
        to = [self.to.email]
    # Template context for the rendered message body and the subject line.
    context = {
        'sender_name': self.from_name,
        'sender_email': self.from_email,
        'message': self.message,
        'site': settings.SITE_NAME,
        'object_type': self.content_type.name,
        'object_name': str(self.to)
    }
    message = render_to_string('mailing/inward_message.txt', context)
    email = EmailMessage(
        # subject
        _('Contact request from %(sender_name)s - %(site)s') % context,
        # message
        message,
        # from
        settings.DEFAULT_FROM_EMAIL,
        # to
        to,
        # reply-to header
        headers={'Reply-To': self.from_email})
    import socket
    # try sending email
    try:
        email.send()
        self.status = 1
    # if error
    except socket.error as e:
        # log the error
        import logging
        log = logging.getLogger(__name__)
        error_msg = 'nodeshot.community.mailing.models.inward.send(): %s' % e
        log.error(error_msg)
        # set status of the instance as "error"
        self.status = -1
|
def pcpool(name, cvals):
    """This entry point provides toolkit programmers a method for
    programmatically inserting character data into the kernel pool.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pcpool_c.html

    :param name: The kernel pool name to associate with cvals.
    :type name: str
    :param cvals: An array of strings to insert into the kernel pool.
    :type cvals: Array of str
    """
    name_p = stypes.stringToCharP(name)
    # Row width must accommodate the longest string plus its NUL terminator.
    width = ctypes.c_int(len(max(cvals, key=len)) + 1)
    count = ctypes.c_int(len(cvals))
    values = stypes.listToCharArray(cvals, width, count)
    libspice.pcpool_c(name_p, count, width, values)
|
def _as_parameter_(self):
    """Compatibility with ctypes.

    Allows passing transparently a Point object to an API call.
    """
    placement = WINDOWPLACEMENT()
    placement.length = sizeof(placement)
    placement.flags = self.flags
    placement.showCmd = self.showCmd
    # Copy the min/max position points field by field.
    for src, dst in ((self.ptMinPosition, placement.ptMinPosition),
                     (self.ptMaxPosition, placement.ptMaxPosition)):
        dst.x = src.x
        dst.y = src.y
    src_rect = self.rcNormalPosition
    dst_rect = placement.rcNormalPosition
    dst_rect.left = src_rect.left
    dst_rect.top = src_rect.top
    dst_rect.right = src_rect.right
    dst_rect.bottom = src_rect.bottom
    return placement
|
def seed(self):
    """Reset the number from which the next generated sequence start.

    If you seed at 100, next seed will be 101.
    Reads 'prefix' and 'seed' from the request form and returns a short
    status message describing the outcome.
    """
    form = self.request.form
    prefix = form.get('prefix', None)
    if prefix is None:
        return 'No prefix provided'
    raw_seed = form.get('seed', None)
    if raw_seed is None:
        return 'No seed provided'
    if not raw_seed.isdigit():
        return 'Seed must be a digit'
    number = int(raw_seed)
    if number < 0:
        return 'Seed cannot be negative'
    new_seq = self.set_seed(prefix, number)
    return 'IDServerView: "%s" seeded to %s' % (prefix, new_seq)
|
def clear(self):
    """Deletes all activations in the agenda."""
    ret = lib.EnvDeleteActivation(self._env, ffi.NULL)
    if ret != 1:
        raise CLIPSError(self._env)
|
def updateNetwork(self, dhcp='dhcp', ipaddress=None, netmask=None, gateway=None, dns=None):
    """Change the current network settings."""
    network_settings = {
        'dhcp': dhcp,
        'ipaddress': ipaddress,
        'netmask': netmask,
        'gateway': gateway,
        # The API expects the DNS entry JSON-encoded.
        'dns': json.dumps(dns),
    }
    return self.__post('/api/updateNetwork', data=network_settings)
|
def _onShortcutMoveLine(self, down):
    """Move line up or down.

    Actually, not the selected text, but the next or previous block is moved
    to the other side of the selection, which has the same visible effect.

    :param down: True moves the selection down, False moves it up.
    TODO keep bookmarks when moving
    """
    startBlock, endBlock = self._selectedBlocks()
    startBlockNumber = startBlock.blockNumber()
    endBlockNumber = endBlock.blockNumber()

    def _moveBlock(block, newNumber):
        # Delete the block's text and re-insert it at newNumber inside a
        # single undoable operation ('with self:').
        text = block.text()
        with self:
            del self.lines[block.blockNumber()]
            self.lines.insert(newNumber, text)

    if down:  # move next block up
        blockToMove = endBlock.next()
        if not blockToMove.isValid():
            return
        # if operation is UnDone, marks are located incorrectly
        markMargin = self.getMargin("mark_area")
        if markMargin:
            markMargin.clearBookmarks(startBlock, endBlock.next())
        _moveBlock(blockToMove, startBlockNumber)
        self._selectLines(startBlockNumber + 1, endBlockNumber + 1)
    else:  # move previous block down
        blockToMove = startBlock.previous()
        if not blockToMove.isValid():
            return
        # if operation is UnDone, marks are located incorrectly
        markMargin = self.getMargin("mark_area")
        if markMargin:
            markMargin.clearBookmarks(startBlock, endBlock)
        _moveBlock(blockToMove, endBlockNumber)
        self._selectLines(startBlockNumber - 1, endBlockNumber - 1)
    # Repaint the margin so bookmarks show at their new positions.
    if markMargin:
        markMargin.update()
|
def wait_until(predicate, success_description, timeout=10):
    """Wait up to `timeout` seconds (10 by default) for predicate to be true.

    E.g.:

        wait_until(lambda: client.primary == ('a', 1),
                   'connect to the primary')

    If the lambda-expression isn't true after `timeout` seconds, we raise
    AssertionError("Didn't ever connect to the primary").

    Returns the predicate's first true value.
    """
    deadline = time.time() + timeout
    while True:
        value = predicate()
        if value:
            return value
        if time.time() > deadline:
            raise AssertionError("Didn't ever %s" % success_description)
        # Poll every 100 ms until the deadline passes.
        time.sleep(0.1)
|
def get_media_urls(tweet):
    """Gets the https links to each media entity in the tweet.

    Args:
        tweet (Tweet or dict): tweet
    Returns:
        list: the "media_url_https" value of every media entity in the tweet,
        e.g. ['https://pbs.twimg.com/media/something.jpg', ...].  Will be an
        empty list if there are no media entities present.
    """
    media_entities = get_media_entities(tweet)
    if not media_entities:
        return []
    return [entity.get("media_url_https") for entity in media_entities]
|
def to_float(b: Collection[Tensor]) -> Collection[Tensor]:
    "Recursively map lists of tensors in `b` to full precision (FP32); integer tensors keep their dtype."
    # BUGFIX (docs): the old docstring claimed FP16, but Tensor.float() converts
    # to torch.float32.
    if is_listy(b):
        return [to_float(o) for o in b]
    # Integer tensors (e.g. target labels) must not be converted.
    return b.float() if b.dtype not in [torch.int64, torch.int32, torch.int16] else b
|
def consume(self, limit=None):
    """Returns an iterator that waits for one message at a time.

    :param limit: optional maximum number of messages to wait for; None (or 0)
        means iterate until the channel closes.
    :yields: True after each message wait completes.
    """
    for total_message_count in count():
        # BUGFIX (PEP 479): raising StopIteration inside a generator is a
        # RuntimeError on Python 3.7+; 'return' ends the generator cleanly.
        if limit and total_message_count >= limit:
            return
        if not self.channel.is_open:
            return
        self.channel.wait()
        yield True
|
def deserializeG2(x, compressed=True):
    """Deserializes an array of bytes, @x, into a G2 element.

    :param x: bytes previously produced by the matching serialization routine.
    :param compressed: whether @x uses the compressed point encoding; must
        match the flag used when the element was serialized.
    :returns: the deserialized G2Element.
    """
    return _deserialize(x, G2Element, compressed, librelic.g2_read_bin_abi)
|
def remember(empowered, powerupClass, interface):
    """Adds a powerup to ``empowered`` that will instantiate ``powerupClass``
    with the empowered's store when adapted to the given interface.

    :param empowered: The Empowered (Store or Item) to be powered up.
    :type empowered: ``axiom.item.Empowered``
    :param powerupClass: The class that will be powered up to.
    :type powerupClass: class
    :param interface: The interface of the powerup.
    :type interface: ``zope.interface.Interface``
    :returns: ``None``
    """
    # Record the class by fully qualified name so it can be re-imported later.
    qualified = fullyQualifiedName(powerupClass)
    stub = _StoredByName(store=empowered.store, className=qualified)
    empowered.powerUp(stub, interface)
|
def bounds(ctx, tile):
    """Print Tile bounds."""
    pyramid = TilePyramid(
        ctx.obj['grid'],
        tile_size=ctx.obj['tile_size'],
        metatiling=ctx.obj['metatiling'],
    )
    tile_bounds = pyramid.tile(*tile).bounds(pixelbuffer=ctx.obj['pixelbuffer'])
    click.echo('%s %s %s %s' % tile_bounds)
|
def clean_cell(self, cell, cell_type):
    """Uses the type of field (from the mapping) to determine how to clean
    and format the cell.

    :param cell: raw cell value (expected to be a string).
    :param cell_type: 'D' (date, %Y%m%d), 'I' (int), 'N' (decimal); anything
        else is treated as text (upper-cased, truncated to 50 characters).
    :returns: the cleaned value, or None when the cell is empty, a null term,
        or cannot be parsed.
    """
    try:
        # Get rid of non-ASCII characters.
        cell = cell.encode('ascii', 'ignore').decode()
        if cell_type == 'D':
            cell = datetime.strptime(cell, '%Y%m%d')
        elif cell_type == 'I':
            cell = int(cell)
        elif cell_type == 'N':
            cell = Decimal(cell)
        else:
            cell = cell.upper()
            if len(cell) > 50:
                cell = cell[0:50]
        if not cell or cell in NULL_TERMS:
            cell = None
    except Exception:
        # BUGFIX: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt.  Any parse failure still maps to None.
        cell = None
    return cell
|
def twofilter_smoothing(self, t, info, phi, loggamma, linear_cost=False, return_ess=False, modif_forward=None, modif_info=None):
    """Two-filter smoothing.

    Parameters
    ----------
    t: time, in range 0 <= t < T-1
    info: SMC object
        the information filter
    phi: function
        test function, a function of (X_t, X_{t+1})
    loggamma: function
        a function of (X_{t+1})
    linear_cost: bool
        if True, use the O(N) variant (basic version is O(N^2))
    return_ess: bool
        passed through to the O(N) variant only
    modif_forward, modif_info:
        optional modifications, passed through to the O(N) variant only

    Returns
    -------
    Two-filter estimate of the smoothing expectation of phi(X_t, x_{t+1})
    """
    ti = self.T - 2 - t
    # t+1 in reverse: index of time t+1 within the (backward) info filter history.
    if t < 0 or t >= self.T - 1:
        raise ValueError('two-filter smoothing: t must be in range 0,...,T-2')
    # Information-filter log-weights, corrected by the artificial distribution
    # log-gamma evaluated at the corresponding particles.
    lwinfo = info.hist.wgt[ti].lw - loggamma(info.hist.X[ti])
    if linear_cost:
        return self._twofilter_smoothing_ON(t, ti, info, phi, lwinfo, return_ess, modif_forward, modif_info)
    else:
        return self._twofilter_smoothing_ON2(t, ti, info, phi, lwinfo)
|
def prune_tour(self, tour, cpus):
    """Test deleting each contig and check the delta_score; tour here must
    be an array of ints.

    Repeatedly scores every single-contig deletion in parallel, removes the
    contigs whose delta score falls below the outlier cutoff, and iterates
    until no contig is removed.  Updates ``self.active`` and ``self.tour`` as
    side effects and returns the pruned tour.

    :param tour: array('i') of contig indices.
    :param cpus: number of worker processes used for the parallel scoring.
    :returns: the pruned tour (array('i') of re-mapped contig indices).
    """
    while True:
        tour_score, = self.evaluate_tour_M(tour)
        logging.debug("Starting score: {}".format(tour_score))
        active_sizes = self.active_sizes
        M = self.M
        args = []
        for i, t in enumerate(tour):
            # Tour with contig i removed; worker measures the score delta.
            stour = tour[:i] + tour[i + 1:]
            args.append((t, stour, tour_score, active_sizes, M))
        # Parallel run
        p = Pool(processes=cpus)
        results = list(p.imap(prune_tour_worker, args))
        assert len(tour) == len(results), "Array size mismatch, tour({}) != results({})".format(len(tour), len(results))
        # Identify outliers
        active_contigs = self.active_contigs
        idx, log10deltas = zip(*results)
        lb, ub = outlier_cutoff(log10deltas)
        logging.debug("Log10(delta_score) ~ [{}, {}]".format(lb, ub))
        # Contigs whose deletion delta falls below the lower cutoff get dropped.
        remove = set(active_contigs[x] for (x, d) in results if d < lb)
        self.active -= remove
        self.report_active()
        # Rebuild the tour from the surviving contigs, re-indexed against the
        # (now smaller) active set.
        tig_to_idx = self.tig_to_idx
        tour = [active_contigs[x] for x in tour]
        tour = array.array('i', [tig_to_idx[x] for x in tour if x not in remove])
        if not remove:
            break
    self.tour = tour
    self.flip_all(tour)
    return tour
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.