signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
---|---|
def ProcessMessage(self, message):
    """Run the foreman on the client."""
    # Drop anything that has not been authenticated.
    if message.auth_state != rdf_flows.GrrMessage.AuthorizationState.AUTHENTICATED:
        return

    now = time.time()

    with self.lock:
        # Refresh the cached foreman object when it is missing or stale.
        cache_is_stale = (
            self.foreman_cache is None
            or now > self.foreman_cache.age + self.cache_refresh_time)
        if cache_is_stale:
            self.foreman_cache = aff4.FACTORY.Open(
                "aff4:/foreman", mode="rw", token=self.token)
            self.foreman_cache.age = now
        if message.source:
            self.foreman_cache.AssignTasksToClient(message.source.Basename())
|
def _generate_barcode_ids ( info_iter ) :
"""Create unique barcode IDs assigned to sequences"""
|
bc_type = "SampleSheet"
barcodes = list ( set ( [ x [ - 1 ] for x in info_iter ] ) )
barcodes . sort ( )
barcode_ids = { }
for i , bc in enumerate ( barcodes ) :
barcode_ids [ bc ] = ( bc_type , i + 1 )
return barcode_ids
|
def _store(self, uid, content, data=None):
    """Store the given dict of content at uid. Nothing returned.

    Args:
        uid: unique identifier stored alongside the document.
        content: dict of fields merged into the stored document.
        data: optional raw payload; when truthy it is written to GridFS and
            its GridFS id is recorded on the document as ``data_id``.
    """
    doc = dict(uid=uid)
    if data:
        gfs = gridfs.GridFS(self.db)
        # Renamed from ``id`` to avoid shadowing the builtin.
        data_id = gfs.put(data, encoding='utf-8')
        doc.update(data_id=data_id)
    # NOTE(review): keys in ``content`` can overwrite ``uid``/``data_id``;
    # presumably callers never pass those keys — confirm.
    doc.update(content)
    self.db.pastes.insert_one(doc)
|
def all_segs_matching_fts(self, fts):
    """Return segments matching a feature mask, both as (value, feature)
    tuples (sorted in reverse order by length).

    Args:
        fts (list): feature mask as (value, feature) tuples.

    Returns:
        list: segments matching `fts`, sorted in reverse order by length.
    """
    # Build the mask set once instead of once per segment.
    mask = set(fts)
    # A segment matches when its feature pairs are a superset of the mask.
    matching_segs = [seg for seg, pairs in self.segments if mask <= set(pairs)]
    # ``key=len`` is the idiomatic spelling of ``lambda x: len(x)``.
    return sorted(matching_segs, key=len, reverse=True)
|
def isdir(path, **kwargs):
    """Check if *path* is a directory."""
    # Import locally so the module has no import-time dependency here.
    from os import path as _ospath
    return _ospath.isdir(path, **kwargs)
|
def parse_result_to_dsl(tokens):
    """Convert a ParseResult to a PyBEL DSL object.

    :type tokens: dict or pyparsing.ParseResults
    :rtype: BaseEntity
    """
    # Dispatch order matters: a modifier wraps its target, which must be
    # unwrapped recursively before any of the other checks are applied.
    if MODIFIER in tokens:
        return parse_result_to_dsl(tokens[TARGET])
    elif REACTION == tokens[FUNCTION]:
        return _reaction_po_to_dict(tokens)
    elif VARIANTS in tokens:
        return _variant_po_to_dict(tokens)
    elif MEMBERS in tokens:
        return _list_po_to_dict(tokens)
    elif FUSION in tokens:
        return _fusion_to_dsl(tokens)
    # Fall through: a simple (unmodified, non-list, non-fusion) entity.
    return _simple_po_to_dict(tokens)
|
def spharm(lmax, theta, phi, normalization='4pi', kind='real', csphase=1, packed=False, degrees=True):
    """Compute all the spherical harmonic functions up to a maximum degree.

    Parameters
    ----------
    lmax : integer
        Maximum spherical harmonic degree to compute.
    theta : float
        Colatitude (degrees by default).
    phi : float
        Longitude (degrees by default).
    normalization : str, optional, default = '4pi'
        '4pi', 'ortho', 'schmidt', or 'unnorm' for geodesy 4pi normalized,
        orthonormalized, Schmidt semi-normalized, or unnormalized spherical
        harmonic functions, respectively.
    kind : str, optional, default = 'real'
        'real' or 'complex' spherical harmonic coefficients.
    csphase : integer, optional, default = 1
        If 1 (default), the Condon-Shortley phase is excluded. If -1, the
        Condon-Shortley phase of (-1)^m is appended.
    packed : bool, optional, default = False
        If True, return a 2-dimensional packed array whose second index
        corresponds to l * (l + 1) / 2 + m.
    degrees : bool, optional, default = True
        If True, theta and phi are expressed in degrees.

    Returns
    -------
    ylm : float or complex ndarray
        Dimension (2, lmax + 1, lmax + 1), or
        (2, (lmax + 1) * (lmax + 2) / 2) when packed is True. ylm[0] holds
        the positive orders and ylm[1] the negative orders.

    Raises
    ------
    ValueError
        If lmax is negative, or normalization/kind/csphase is invalid.

    Notes
    -----
    Uses the standard three-term recursion with the scaling approach of
    Holmes and Featherstone (2002, doi:10.1007/s00190-002-0216-2) to
    prevent overflows; accurate to about degree 2800. See Wieczorek and
    Meschede (2018, doi:10.1029/2018GC007529) for exact definitions.
    """
    if lmax < 0:
        raise ValueError("lmax must be greater or equal to 0. Input value was {:s}.".format(repr(lmax)))
    if normalization.lower() not in ('4pi', 'ortho', 'schmidt', 'unnorm'):
        raise ValueError("The normalization must be '4pi', 'ortho', 'schmidt', " + "or 'unnorm'. Input value was {:s}.".format(repr(normalization)))
    if kind.lower() not in ('real', 'complex'):
        raise ValueError("kind must be 'real' or 'complex'. " + "Input value was {:s}.".format(repr(kind)))
    if csphase != 1 and csphase != -1:
        raise ValueError("csphase must be either 1 or -1. Input value was {:s}.".format(repr(csphase)))
    # Unnormalized functions overflow beyond degree ~85, so clamp with a warning.
    if normalization.lower() == 'unnorm' and lmax > 85:
        _warnings.warn("Calculations using unnormalized coefficients " + "are stable only for degrees less than or equal " + "to 85. lmax for the coefficients will be set to " + "85. Input value was {:d}.".format(lmax), category=RuntimeWarning)
        lmax = 85
    if degrees is True:
        theta = _np.deg2rad(theta)
        phi = _np.deg2rad(phi)
    # cnorm selects real (0) vs complex (1) normalization of the Legendre functions.
    if kind.lower() == 'real':
        p = _legendre(lmax, _np.cos(theta), normalization=normalization, csphase=csphase, cnorm=0, packed=packed)
    else:
        p = _legendre(lmax, _np.cos(theta), normalization=normalization, csphase=csphase, cnorm=1, packed=packed)
    # NOTE(review): _np.float_ and _np.complex_ were removed in NumPy 2.0;
    # consider _np.float64 / _np.complex128 — confirm before changing.
    if packed is False:
        if kind.lower() == 'real':
            ylm = _np.zeros((2, lmax + 1, lmax + 1), dtype=_np.float_)
            ylm[0, :, :] = p[:, :]
            ylm[1, :, :] = p[:, :]
            # Multiply each order m by cos(m*phi) (positive) / sin(m*phi) (negative).
            for m in range(lmax + 1):
                ylm[0, m:lmax + 1, m] *= _np.cos(m * phi)
                ylm[1, m:lmax + 1, m] *= _np.sin(m * phi)
        else:
            ylm = _np.zeros((2, lmax + 1, lmax + 1), dtype=_np.complex_)
            ylm[0, :, :] = p[:, :]
            # Positive orders get exp(i*m*phi); negative orders are the
            # conjugate with a (-1)^m sign flip for odd m.
            for m in range(lmax + 1):
                ylm[0, m:lmax + 1, m] *= (_np.cos(m * phi) + 1j * _np.sin(m * phi))
                ylm[1, m:lmax + 1, m] = ylm[0, m:lmax + 1, m].conj()
                if _np.mod(m, 2) == 1:
                    ylm[1, m:lmax + 1, m] = -ylm[1, m:lmax + 1, m]
    else:
        if kind.lower() == 'real':
            ylm = _np.zeros((2, (lmax + 1) * (lmax + 2) // 2), dtype=_np.float_)
            ylm[0, :] = p[:]
            ylm[1, :] = p[:]
            for m in range(lmax + 1):
                cos = _np.cos(m * phi)
                sin = _np.sin(m * phi)
                for l in range(m, lmax + 1):
                    # Packed index of degree l, order m.
                    ind = l * (l + 1) // 2 + m
                    ylm[0, ind] *= cos
                    ylm[1, ind] *= sin
        else:
            ylm = _np.zeros((2, (lmax + 1) * (lmax + 2) // 2), dtype=_np.complex_)
            ylm[0, :] = p[:]
            ylm[1, :] = p[:]
            for m in range(lmax + 1):
                eimphi = (_np.cos(m * phi) + 1j * _np.sin(m * phi))
                for l in range(m, lmax + 1):
                    ind = l * (l + 1) // 2 + m
                    ylm[0, ind] *= eimphi
                    ylm[1, ind] = ylm[0, ind].conj()
                    if _np.mod(m, 2) == 1:
                        ylm[1, ind] = -ylm[1, ind]
    return ylm
|
def clean_surface(surface, span):
    """Remove spurious characters from a quantity's surface.

    Args:
        surface (str): raw surface text of a quantity.
        span (tuple): (start, end) character offsets of ``surface`` in the
            original text; kept in sync with every trim performed here.

    Returns:
        tuple: (cleaned surface, adjusted span), or (None, None) when
        nothing remains after cleaning.
    """
    surface = surface.replace('-', ' ')
    no_start = ['and', ' ']
    no_end = [' and', ' ']
    # Repeatedly strip unwanted prefixes/suffixes until a full pass removes
    # nothing, adjusting the span offsets for each removal.
    found = True
    while found:
        found = False
        for word in no_start:
            if surface.lower().startswith(word):
                surface = surface[len(word):]
                span = (span[0] + len(word), span[1])
                found = True
        for word in no_end:
            if surface.lower().endswith(word):
                surface = surface[:-len(word)]
                span = (span[0], span[1] - len(word))
                found = True
    if not surface:
        return None, None
    split = surface.lower().split()
    # Drop a leading "one"/"a"/"an" when followed by a number word
    # (e.g. "a hundred"), shifting the span past it and the space.
    if split[0] in ['one', 'a', 'an'] and len(split) > 1 and split[1] in r.UNITS + r.TENS:
        span = (span[0] + len(surface.split()[0]) + 1, span[1])
        surface = ' '.join(surface.split()[1:])
    return surface, span
|
def parse_job_files(self):
    """Check for job definitions in known zuul files."""
    repo_jobs = []
    for rel_path, info in self.job_files.items():
        LOGGER.debug("Checking for job definitions in %s", rel_path)
        found = self.parse_job_definitions(rel_path, info)
        LOGGER.debug("Found %d job definitions in %s", len(found), rel_path)
        repo_jobs.extend(found)
    # Summarize the result for the whole repository.
    if repo_jobs:
        LOGGER.info("Found %d job definitions in repo '%s'", len(repo_jobs), self.repo)
    else:
        LOGGER.info("No job definitions found in repo '%s'", self.repo)
    return repo_jobs
|
def fit_curvefit(p0, datax, datay, function, **kwargs):
    """Fits the data to a function using scipy.optimize.curve_fit.

    Parameters
    ----------
    p0 : array_like
        Initial parameters to use for fitting.
    datax : array_like
        x data to use for fitting.
    datay : array_like
        y data to use for fitting.
    function : callable
        Function to be fit to the data.
    kwargs
        Keyword arguments passed through to scipy.optimize.curve_fit.

    Returns
    -------
    pfit_curvefit : array
        Optimal values for the parameters so that the sum of the squared
        residuals of ydata is minimized.
    perr_curvefit : array
        One standard deviation errors in the optimal values for the
        parameters (NaN where the covariance is unusable).
    """
    pfit, pcov = _curve_fit(function, datax, datay, p0=p0, epsfcn=0.0001, **kwargs)
    errors = []
    for i in range(len(pfit)):
        try:
            errors.append(_np.absolute(pcov[i][i]) ** 0.5)
        except (TypeError, IndexError):
            # A failed fit can leave pcov unusable; report NaN for that
            # term instead of swallowing arbitrary exceptions (the old
            # bare ``except:`` hid real bugs).
            errors.append(_np.nan)  # _np.NaN was removed in NumPy 2.0
    pfit_curvefit = pfit
    perr_curvefit = _np.array(errors)
    return pfit_curvefit, perr_curvefit
|
def to_array(self):
    """Serializes this InputTextMessageContent to a dictionary.

    :return: dictionary representation of this object.
    :rtype: dict
    """
    # Start from the parent class representation, then add our fields.
    array = super(InputTextMessageContent, self).to_array()
    # ``u()`` coerces to text: py2 ``unicode``, py3 ``str``.
    array['message_text'] = u(self.message_text)
    if self.parse_mode is not None:
        array['parse_mode'] = u(self.parse_mode)  # py2: unicode, py3: str
    if self.disable_web_page_preview is not None:
        array['disable_web_page_preview'] = bool(self.disable_web_page_preview)  # type bool
    return array
|
def get_port_profile_status_input_request_type_getnext_request_last_received_port_profile_info_profile_mac ( self , ** kwargs ) :
"""Auto Generated Code"""
|
config = ET . Element ( "config" )
get_port_profile_status = ET . Element ( "get_port_profile_status" )
config = get_port_profile_status
input = ET . SubElement ( get_port_profile_status , "input" )
request_type = ET . SubElement ( input , "request-type" )
getnext_request = ET . SubElement ( request_type , "getnext-request" )
last_received_port_profile_info = ET . SubElement ( getnext_request , "last-received-port-profile-info" )
profile_mac = ET . SubElement ( last_received_port_profile_info , "profile-mac" )
profile_mac . text = kwargs . pop ( 'profile_mac' )
callback = kwargs . pop ( 'callback' , self . _callback )
return callback ( config )
|
def manager_view(request, managerTitle):
    '''View the details of a manager position.

    Parameters:
        request is an HTTP request
        managerTitle is the URL title of the manager.'''
    # 404 when no manager matches the URL title.
    targetManager = get_object_or_404(Manager, url_title=managerTitle)
    if not targetManager.active:
        # Inactive managers are not viewable: flash an error and bounce
        # back to the manager list.
        messages.add_message(request, messages.ERROR, MESSAGES['INACTIVE_MANAGER'].format(managerTitle=targetManager.title))
        return HttpResponseRedirect(reverse('managers:list_managers'))
    else:
        return render_to_response('view_manager.html', {'page_name': "View Manager", 'targetManager': targetManager, }, context_instance=RequestContext(request))
|
def download(self, uuid, output_format='gzip'):
    """Download pre-prepared data by UUID.

    :type uuid: str
    :param uuid: Data UUID
    :type output_format: str
    :param output_format: Output format of the data, either "gzip" or "text"
    :rtype: str
    :return: The downloaded content
    :raises Exception: if ``output_format`` is not "gzip" or "text".
    """
    if output_format.lower() not in ('gzip', 'text'):
        # Bug fix: the old message listed "file, text" while the accepted
        # values are actually "gzip" and "text".
        raise Exception("output_format must be one of gzip, text")
    data = {'format': output_format, 'uuid': uuid}
    return self.get('download', get_params=data, is_json=False)
|
def load(self):
    """Load each path in order. Remember paths already loaded and only load new ones."""
    data = self.dict_class()
    for path in self.paths:
        if path in self.paths_loaded:
            continue
        try:
            with open(path, 'r') as file:
                # SECURITY: yaml.load without an explicit Loader can run
                # arbitrary Python from untrusted files; prefer
                # yaml.safe_load if no custom tags are required.
                path_data = yaml.load(file.read())
            data = dict_merge(data, path_data)
            self.paths_loaded.add(path)
        except IOError:
            # TODO: Log this correctly once logging is implemented
            # ``.local.yml`` overlays are optional, so stay quiet for them.
            if not path.endswith('.local.yml'):
                # Bug fix: the old Python-2 print statement is a syntax
                # error on Python 3; the call form works on both.
                print('CONFIG NOT FOUND: %s' % (path))
    self.data = data
|
def addBorder(self, width, color=None):
    """Add a border to the current :py:class:`Layer`.

    :param width: The width of the border (scaled down by the configured
        downsampling factor).
    :param color: The :py:class:`Color` of the border, current
        :py:class:`Color` is the default value.
    :rtype: Nothing.
    """
    width = int(width / config.DOWNSAMPLING)
    if color is None:  # ``is None`` instead of ``== None``
        color = self.color
    layer = self.image.getActiveLayer().data
    colorRGBA = color.get_0_255()
    print('adding border' + str(colorRGBA) + str(width) + str(layer.shape))
    height = layer.shape[0]
    breadth = layer.shape[1]
    # The four border strips. End slices are spelled out explicitly:
    # a ``-width:`` slice with width == 0 would select the whole axis.
    strips = (
        (slice(0, width), slice(None)),                   # top
        (slice(None), slice(0, width)),                   # left
        (slice(height - width, height), slice(None)),     # bottom
        (slice(None), slice(breadth - width, breadth)),   # right
    )
    # Collapses the original 16 copy-pasted assignments into one loop.
    for rows, cols in strips:
        for channel in range(4):
            layer[rows, cols, channel] = colorRGBA[channel]
|
def load(self, filename=None):
    """Method was overriden to set spectrum.filename as well"""
    # Delegate the actual loading to the base class, then mirror the
    # filename onto the wrapped spectrum object.
    DataFile.load(self, filename)
    self.spectrum.filename = filename
|
def _show_stat_wrapper_Progress(count, last_count, start_time, max_count, speed_calc_cycles, width, q, last_speed, prepend, show_stat_function, add_args, i, lock):
    """Calculate progress statistics and forward them to the display callback.

    Computes current/maximum counts, speed, total elapsed time (tet) and
    time-to-go (ttg) via ``Progress._calc`` and passes them on to
    ``show_stat_function`` together with the presentation arguments.
    """
    count_value, max_count_value, speed, tet, ttg, = Progress._calc(count, last_count, start_time, max_count, speed_calc_cycles, q, last_speed, lock)
    return show_stat_function(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **add_args)
|
def wait_with_ioloop(self, ioloop, timeout=None):
    """Do blocking wait until the event is set.

    Parameters
    ----------
    ioloop : tornado.ioloop.IOLoop instance
        MUST be the same ioloop that set()/clear() is called from.
    timeout : float, int or None
        If not None, only wait up to `timeout` seconds for event to be set.

    Return Value
    ------------
    flag : True if event was set within timeout, otherwise False.

    Notes
    -----
    This will deadlock if called in the ioloop!
    """
    f = Future()
    def cb():
        # Chain the event's ``until_set`` future into ``f`` so the
        # concurrent future resolves once the event is set.
        return gen.chain_future(self.until_set(), f)
    ioloop.add_callback(cb)
    try:
        f.result(timeout)
        return True
    except TimeoutError:
        # Timed out: report the current flag state (the event may have
        # been set between the timeout firing and this check).
        return self._flag
|
def instance_attr(self, name, context=None):
    """Get the list of nodes associated to the given attribute name.

    Assignments are looked for in both this class and in parents.

    :returns: The list of assignments to the given name.
    :rtype: list(NodeNG)

    :raises AttributeInferenceError: If no attribute with this name
        can be found in this class or parent classes.
    """
    # Work on a copy so self.instance_attrs is never mutated, which
    # could lead to an infinite loop.
    assignments = list(self.instance_attrs.get(name, []))
    # Collect assignments made in ancestor classes as well.
    for ancestor in self.instance_attr_ancestors(name, context):
        assignments += ancestor.instance_attrs[name]
    # Deleted attributes do not count as assignments.
    live = [node for node in assignments if not isinstance(node, node_classes.DelAttr)]
    if not live:
        raise exceptions.AttributeInferenceError(target=self, attribute=name, context=context)
    return live
|
def response_hook(self, response, **kwargs) -> HTMLResponse:
    """Change response encoding and replace it by a HTMLResponse."""
    # Fill in a default encoding only when the server supplied none.
    if not response.encoding:
        response.encoding = DEFAULT_ENCODING
    return HTMLResponse._from_response(response, self)
|
def get_nested_relation_kwargs(field_name, relation_info):
    """Create default kwargs for a nested serializer field.

    Strips relation-only kwargs (``queryset``, ``required``) that do not
    apply to nested serializers and forces the field to be read-only.
    """
    kwargs = get_relation_kwargs(field_name, relation_info)
    # Use pop's default so an absent key does not raise KeyError.
    kwargs.pop('queryset', None)
    kwargs.pop('required', None)
    kwargs['read_only'] = True
    return kwargs
|
def look_for_books(citation_elements, kbs):
    """Look for books in our kb.

    Create book tags by using the authors and the title to find books
    in our knowledge base.
    """
    # The first quoted element, if any, is our title candidate.
    title = next((el for el in citation_elements if el['type'] == 'QUOTED'), None)
    if title is not None:
        normalized_title = title['title'].upper()
        if normalized_title in kbs['books']:
            line = kbs['books'][normalized_title]
            # Replace the quoted element with a fully-tagged BOOK element.
            citation_elements.append({
                'type': 'BOOK',
                'misc_txt': '',
                'authors': line[0],
                'title': line[1],
                'year': line[2].strip(';'),
            })
            citation_elements.remove(title)
    return citation_elements
|
def _set_process_list(self, v, load=False):
    """Setter method for process_list, mapped from YANG variable /cpu_state/process_list (container)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_process_list is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_process_list() directly.

    YANG Description: CPU utilization summary and list of all the process
    """
    # Some transports hand over a wrapped value; unwrap it first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce the value into the generated container type; raises on
        # any incompatible input.
        t = YANGDynClass(v, base=process_list.process_list, is_container='container', presence=False, yang_name="process-list", rest_name="process-list", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'RAS-process-cpu', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """process_list must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=process_list.process_list, is_container='container', presence=False, yang_name="process-list", rest_name="process-list", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'RAS-process-cpu', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='container', is_config=False)""", })
    self.__process_list = t
    # Notify the parent object, if it supports change notification.
    if hasattr(self, '_set'):
        self._set()
|
def unfreeze_extensions(self):
    """Remove a previously frozen list of extensions."""
    frozen_path = os.path.join(_registry_folder(), 'frozen_extensions.json')
    if not os.path.isfile(frozen_path):
        raise ExternalError("There is no frozen extension list")
    os.remove(frozen_path)
    # Drop the cached class-level copy as well.
    ComponentRegistry._frozen_extensions = None
|
def view_run():
    """Page for viewing before/after for a specific test run."""
    build = g.build
    # GET renders the page; POST carries an approve/disapprove action.
    if request.method == 'POST':
        form = forms.RunForm(request.form)
    else:
        form = forms.RunForm(request.args)
    form.validate()
    ops = operations.BuildOps(build.id)
    run, next_run, previous_run, approval_log = ops.get_run(form.name.data, form.number.data, form.test.data)
    if not run:
        abort(404)
    file_type = form.type.data
    image_file, log_file, config_file, sha1sum = (_get_artifact_context(run, file_type))
    if request.method == 'POST':
        # Toggle the diff approval state; any other POST is a bad request.
        if form.approve.data and run.status == models.Run.DIFF_FOUND:
            run.status = models.Run.DIFF_APPROVED
            auth.save_admin_log(build, run_approved=True, run=run)
        elif form.disapprove.data and run.status == models.Run.DIFF_APPROVED:
            run.status = models.Run.DIFF_FOUND
            auth.save_admin_log(build, run_rejected=True, run=run)
        else:
            abort(400)
        db.session.add(run)
        db.session.commit()
        # Invalidate cached run data before redirecting.
        ops.evict()
        # Redirect-after-POST so a refresh does not repeat the action.
        return redirect(url_for(request.endpoint, id=build.id, name=run.release.name, number=run.release.number, test=run.name, type=file_type))
    # Update form values for rendering
    form.approve.data = True
    form.disapprove.data = True
    context = dict(build=build, release=run.release, run=run, run_form=form, previous_run=previous_run, next_run=next_run, file_type=file_type, image_file=image_file, log_file=log_file, config_file=config_file, sha1sum=sha1sum, approval_log=approval_log)
    # Artifact requests get the single-artifact template.
    if file_type:
        template_name = 'view_artifact.html'
    else:
        template_name = 'view_run.html'
    response = flask.Response(render_template(template_name, **context))
    return response
|
def launch_coroutine(self, cor, *args, **kwargs):
    """Start a coroutine task and return a blockable/awaitable object.

    If this method is called from inside the event loop, it will return an
    awaitable object. If it is called from outside the event loop it will
    return a concurrent Future object that can block the calling thread
    until the operation is finished.

    Args:
        cor (coroutine): The coroutine that we wish to run in the
            background and wait until it finishes.

    Returns:
        Future or asyncio.Task: A future representing the coroutine.
            If this method is called from within the background loop
            then an awaitable asyncio.Task is returned. Otherwise,
            a concurrent Future object is returned that you can call
            ``result()`` on to block the calling thread.

    Raises:
        LoopStoppingError: If the loop is already shutting down.
    """
    if self.stopping:
        raise LoopStoppingError("Could not launch coroutine because loop is shutting down: %s" % cor)
    # Ensure the loop exists and is started
    self.start()
    cor = _instaniate_coroutine(cor, args, kwargs)
    if self.inside_loop():
        # Same thread as the loop: schedule directly as a Task.
        return asyncio.ensure_future(cor, loop=self.loop)
    # Called from another thread: hand back a concurrent Future.
    return asyncio.run_coroutine_threadsafe(cor, loop=self.loop)
|
def managed_process(process):
    """Wrapper for subprocess.Popen to work across various Python versions, when using the with syntax."""
    try:
        yield process
    finally:
        # Close whichever standard streams exist, then reap the process.
        streams = (process.stdout, process.stdin, process.stderr)
        for stream in streams:
            if stream:
                stream.close()
        process.wait()
|
def current_gvim_edit(op='e', fpath=''):
    r"""Open *fpath* in the currently running gvim via synthesized keystrokes.

    CommandLine:
        python -m utool.util_ubuntu XCtrl.current_gvim_edit sp ~/.bashrc
    """
    import utool as ut
    fpath = ut.unexpanduser(ut.truepath(fpath))
    # The path is also placed on the clipboard for manual pasting.
    ut.copy_text_to_clipboard(fpath)
    # Focus gvim, leave insert mode, then type ";<op> <path>" and confirm.
    # NOTE(review): ';' is presumably mapped to ':' in the user's vimrc —
    # confirm.
    doscript = [('focus', 'gvim'), ('key', 'Escape'), ('type2', ';' + op + ' ' + fpath), ('key', 'KP_Enter'), ]
    XCtrl.do(*doscript, verbose=0, sleeptime=.001)
|
def console_init_root(w: int, h: int, title: Optional[str] = None, fullscreen: bool = False, renderer: Optional[int] = None, order: str = "C",) -> tcod.console.Console:
    """Set up the primary display and return the root console.

    `w` and `h` are the columns and rows of the new window (in tiles.)
    `title` is an optional string for the window title bar; it defaults to
    the running script's filename.
    `fullscreen` determines if the window starts in fullscreen. Fullscreen
    mode is unreliable unless the renderer is `tcod.RENDERER_SDL2` or
    `tcod.RENDERER_OPENGL2`.
    `renderer` is the libtcod rendering back-end; `tcod.RENDERER_SDL2` is
    recommended.  The SDL, OPENGL, and GLSL renderers are deprecated.
    `order` affects how the array attributes of the returned root console
    are indexed: `order='C'` is the default, `order='F'` is recommended.

    .. versionchanged:: 4.3
        Added `order` parameter.  `title` parameter is now optional.

    .. versionchanged:: 8.0
        The default `renderer` is now automatic instead of always being
        `RENDERER_SDL`.
    """
    if title is None:
        # Use the scripts filename as the title.
        title = os.path.basename(sys.argv[0])
    if renderer is None:
        warnings.warn("A renderer should be given, see the online documentation.", DeprecationWarning, stacklevel=2, )
        renderer = tcod.constants.RENDERER_SDL
    elif renderer in (tcod.constants.RENDERER_SDL, tcod.constants.RENDERER_OPENGL, tcod.constants.RENDERER_GLSL, ):
        warnings.warn("The SDL, OPENGL, and GLSL renderers are deprecated.", DeprecationWarning, stacklevel=2, )
    lib.TCOD_console_init_root(w, h, _bytes(title), fullscreen, renderer)
    root = tcod.console.Console._get_root(order)
    root.clear()
    return root
|
def add_issue(self, data):
    """This method include new issues to the ArticleMeta.

    data: legacy SciELO Documents JSON Type 3.
    """
    # The dispatcher returns a JSON string; decode before returning.
    raw = self.dispatcher('add_issue', data, self._admintoken)
    return json.loads(raw)
|
def startLoop(self, useDriverLoop):
    '''Called by the engine to start an event loop.'''
    if not useDriverLoop:
        # Manual mode: pull events one at a time through an iterator.
        self._iterator = self._driver.iterate()
    else:
        self._driver.startLoop()
|
def GroupsSensorsPost(self, group_id, sensors):
    """Share a number of sensors within a group.

    @param group_id (int) - Id of the group to share sensors with
    @param sensors (dictionary) - Dictionary containing the sensors to share within the groups
    @return (bool) - Boolean indicating whether the GroupsSensorsPost call was successful
    """
    url = "/groups/{0}/sensors.json".format(group_id)
    # Record the failure reason before reporting it to the caller.
    if not self.__SenseApiCall__(url, "POST", parameters=sensors):
        self.__error__ = "api call unsuccessful"
        return False
    return True
|
def get_form_kwargs(self):
    """Return the form kwargs.

    This method injects the context variable, defined in
    :meth:`get_agnocomplete_context`. Override this method to adjust it to
    your needs.
    """
    data = super(UserContextFormViewMixin, self).get_form_kwargs()
    # Expose the agnocomplete context to the form under the 'user' key.
    data.update({'user': self.get_agnocomplete_context(), })
    return data
|
def RepackTemplates(self, repack_configs, templates, output_dir, config=None, sign=False, signed_template=False):
    """Call repacker in a subprocess.

    Repacks every template once per repack config, in parallel, then
    optionally bulk-signs the produced installers.

    Args:
        repack_configs: iterable of secondary config paths; one repack
            pass is run per config.
        templates: iterable of client template paths to repack.
        output_dir: base output directory; a per-config subdirectory is
            chosen via self.GetOutputDir.
        config: optional primary config path forwarded to the repacker.
        sign: if True, sign the produced installers.
        signed_template: if True, pass --signed_template for exe repacks.
    """
    pool = multiprocessing.Pool(processes=10)
    results = []
    bulk_sign_installers = False
    for repack_config in repack_configs:
        for template in templates:
            repack_args = ["grr_client_build"]
            if config:
                repack_args.extend(["--config", config])
            repack_args.extend(["--secondary_configs", repack_config, "repack", "--template", template, "--output_dir", self.GetOutputDir(output_dir, repack_config)])
            # We only sign exes and rpms at the moment. The others will
            # raise if we try to ask for signing.
            passwd = None
            if sign:
                if template.endswith(".exe.zip"):
                    # This is for osslsigncode only.
                    if platform.system() != "Windows":
                        passwd = self.GetWindowsPassphrase()
                        repack_args.append("--sign")
                    else:
                        bulk_sign_installers = True
                    if signed_template:
                        repack_args.append("--signed_template")
                elif template.endswith(".rpm.zip"):
                    bulk_sign_installers = True
            print("Calling %s" % " ".join(repack_args))
            results.append(pool.apply_async(SpawnProcess, (repack_args, ), dict(passwd=passwd)))
            # Also build debug if it's windows.
            if template.endswith(".exe.zip"):
                debug_args = []
                debug_args.extend(repack_args)
                debug_args.append("--debug_build")
                print("Calling %s" % " ".join(debug_args))
                results.append(pool.apply_async(SpawnProcess, (debug_args, ), dict(passwd=passwd)))
    try:
        pool.close()
        # Workaround to handle keyboard kills
        # http://stackoverflow.com/questions/1408356/keyboard-interrupts-with-pythons-multiprocessing-pool
        # get will raise if the child raises.
        for result_obj in results:
            result_obj.get(9999)
        pool.join()
    except KeyboardInterrupt:
        print("parent received control-c")
        pool.terminate()
    except ErrorDuringRepacking:
        pool.terminate()
        raise
    if bulk_sign_installers:
        # Collect every produced .exe/.rpm under output_dir and sign them
        # per-platform in one batch.
        to_sign = {}
        for root, _, files in os.walk(output_dir):
            for f in files:
                if f.endswith(".exe"):
                    to_sign.setdefault("windows", []).append(os.path.join(root, f))
                elif f.endswith(".rpm"):
                    to_sign.setdefault("rpm", []).append(os.path.join(root, f))
        if to_sign.get("windows"):
            signer = repacking.TemplateRepacker().GetSigner(["ClientBuilder Context", "Platform:%s" % platform.system(), "Target:Windows"])
            signer.SignFiles(to_sign.get("windows"))
        if to_sign.get("rpm"):
            signer = repacking.TemplateRepacker().GetSigner(["ClientBuilder Context", "Platform:%s" % platform.system(), "Target:Linux", "Target:LinuxRpm"])
            signer.AddSignatureToRPMs(to_sign.get("rpm"))
|
def _parse_multifile(self, desired_type: Type[Union[Dict, List, Set, Tuple]], obj: PersistedObject,
                     parsing_plan_for_children: Dict[str, ParsingPlan], logger: Logger,
                     options: Dict[str, Dict[str, Any]]) -> Union[Dict, List, Set, Tuple]:
    """
    Parse a multifile object into a collection of the desired type.

    Options may contain a section with id 'MultifileCollectionParser' containing the following options:
    * lazy_parsing: if True, the method will return immediately without parsing all the contents. Instead, the
    returned collection will perform the parsing the first time an item is required.
    * background_parsing: if True, the method will return immediately while a thread parses all the contents in
    the background. Note that users cannot set both lazy_parsing and background_parsing to True at the same time

    :param desired_type: the collection type to build (dict, list, set or tuple)
    :param obj: the persisted multifile object being parsed
    :param parsing_plan_for_children: one ParsingPlan per child, keyed by child name
    :param logger: logger used for parse progress messages
    :param options: per-parser option sections
    :return: the parsed collection
    """
    # first get the options and check them
    lazy_parsing = False
    background_parsing = False

    opts = self._get_applicable_options(options)
    for opt_key, opt_val in opts.items():
        # BUGFIX: the original code compared the strings with 'is' (object
        # identity), which only works by accident of CPython string interning.
        # Use '==' (equality) for a reliable comparison.
        if opt_key == 'lazy_parsing':
            lazy_parsing = opt_val
        elif opt_key == 'background_parsing':
            background_parsing = opt_val
        else:
            raise Exception('Invalid option in MultiFileCollectionParser : ' + opt_key)

    check_var(lazy_parsing, var_types=bool, var_name='lazy_parsing')
    check_var(background_parsing, var_types=bool, var_name='background_parsing')

    if lazy_parsing and background_parsing:
        raise ValueError('lazy_parsing and background_parsing cannot be set to true at the same time')

    if lazy_parsing:
        # build a lazy dictionary: each child is parsed the first time it is accessed
        results = LazyDictionary(sorted(list(parsing_plan_for_children.keys())),
                                 loading_method=lambda x: parsing_plan_for_children[x].execute(logger, options))
        logger.debug('(P) {loc} : lazy parsing ON, children will be parsed only if/when used'.format(
            loc=obj.get_pretty_location(blank_parent_part=(not GLOBAL_CONFIG.full_paths_in_logs),
                                        compact_file_ext=True)))
    elif background_parsing:
        # TODO create a thread to perform the parsing in the background
        raise ValueError('Background parsing is not yet supported')
    else:
        # Parse right now.
        results = OrderedDict()
        # parse all children according to their plan
        # -- use key-based sorting on children to lead to reproducible results
        # (in case of multiple errors, the same error will show up first everytime)
        for child_name, child_plan in sorted(parsing_plan_for_children.items()):
            results[child_name] = child_plan.execute(logger, options)

    # Wrap the results dict in a facade matching the desired collection type.
    if issubclass(desired_type, list):
        return KeySortedListFacadeForDict(results)
    elif issubclass(desired_type, tuple):
        return KeySortedTupleFacadeForDict(results)
    elif issubclass(desired_type, set):
        return SetFacadeForDict(results)
    elif issubclass(desired_type, dict):
        return results
    else:
        raise TypeError('Cannot build the desired collection out of the multifile children: desired type is not '
                        'supported: ' + get_pretty_type_str(desired_type))
|
def columnCount(self, parent=QtCore.QModelIndex()):
    """Return the number of columns the view should draw.

    Required by view, see :qtdoc:`subclassing <qabstractitemmodel.subclassing>`
    """
    # Delegate to the underlying stimulus model, passing the parent row
    # only when a valid parent index was supplied.
    if not parent.isValid():
        return self._stim.columnCount()
    return self._stim.columnCount(parent.row())
|
def filter_actions(self, block_addr=None, block_stmt=None, insn_addr=None, read_from=None, write_to=None):
    """
    Filter self.actions based on some common parameters.

    :param block_addr: Only return actions generated in blocks starting at this address.
    :param block_stmt: Only return actions generated in the nth statement of each block.
    :param insn_addr: Only return actions generated in the assembly instruction at this address.
    :param read_from: Only return actions that perform a read from the specified location.
    :param write_to: Only return actions that perform a write to the specified location.

    Notes:
        If IR optimization is turned on, reads and writes may not occur in the instruction
        they originally came from. Most commonly, If a register is read from twice in the same
        block, the second read will not happen, instead reusing the temp the value is already
        stored in.

        Valid values for read_from and write_to are the string literals 'reg' or 'mem' (matching
        any read or write to registers or memory, respectively), any string (representing a read
        or write to the named register), and any integer (representing a read or write to the
        memory at this address).
    """
    if read_from is not None:
        # read_from and write_to are mutually exclusive filters.
        if write_to is not None:
            raise ValueError("Can't handle read_from and write_to at the same time!")
        if read_from in ('reg', 'mem'):
            # Match any read of that kind; no specific offset/address.
            read_type = read_from
            read_offset = None
        elif isinstance(read_from, str):
            # A register name: resolve to its architecture register offset.
            read_type = 'reg'
            read_offset = self.state.project.arch.registers[read_from][0]
        else:
            # An integer: a concrete memory address.
            read_type = 'mem'
            read_offset = read_from
    if write_to is not None:
        if write_to in ('reg', 'mem'):
            # Match any write of that kind; no specific offset/address.
            write_type = write_to
            write_offset = None
        elif isinstance(write_to, str):
            # A register name: resolve to its architecture register offset.
            write_type = 'reg'
            write_offset = self.state.project.arch.registers[write_to][0]
        else:
            # An integer: a concrete memory address.
            write_type = 'mem'
            write_offset = write_to

    #def addr_of_stmt(bbl_addr, stmt_idx):
    #    if stmt_idx is None:
    #        return None
    #    stmts = self.state.project.factory.block(bbl_addr).vex.statements
    #    if stmt_idx >= len(stmts):
    #        return None
    #    for i in reversed(range(stmt_idx + 1)):
    #        if stmts[i].tag == 'Ist_IMark':
    #            return stmts[i].addr + stmts[i].delta
    #    return None

    def action_reads(action):
        # True when `action` is a read matching read_type/read_offset.
        if action.type != read_type:
            return False
        if action.action != 'read':
            return False
        if read_offset is None:
            return True
        addr = action.addr
        if isinstance(addr, SimActionObject):
            addr = addr.ast
        if isinstance(addr, claripy.ast.Base):
            if addr.symbolic:
                # A symbolic address cannot be compared against a concrete offset.
                return False
            addr = self.state.solver.eval(addr)
        if addr != read_offset:
            return False
        return True

    def action_writes(action):
        # True when `action` is a write matching write_type/write_offset.
        if action.type != write_type:
            return False
        if action.action != 'write':
            return False
        if write_offset is None:
            return True
        addr = action.addr
        if isinstance(addr, SimActionObject):
            addr = addr.ast
        if isinstance(addr, claripy.ast.Base):
            if addr.symbolic:
                # A symbolic address cannot be compared against a concrete offset.
                return False
            addr = self.state.solver.eval(addr)
        if addr != write_offset:
            return False
        return True

    # Apply all requested filters; actions are returned most-recent-first
    # (reversed). The read/write predicates are only consulted when the
    # corresponding filter argument was supplied, so read_type/write_type
    # are guaranteed to be defined whenever they are used.
    return [x for x in reversed(self.actions)
            if (block_addr is None or x.bbl_addr == block_addr)
            and (block_stmt is None or x.stmt_idx == block_stmt)
            and (read_from is None or action_reads(x))
            and (write_to is None or action_writes(x))
            and (insn_addr is None or (x.sim_procedure is None and x.ins_addr == insn_addr))
            # (insn_addr is None or (x.sim_procedure is None and addr_of_stmt(x.bbl_addr, x.stmt_idx) == insn_addr))
            ]
|
def has_option(section, name):
    """Wrapper around ConfigParser's ``has_option`` method."""
    # Seed the parser with fallback defaults, then load every known
    # config location before testing for the option.
    defaults = {"working_dir": "/tmp", "debug": "0"}
    parser = ConfigParser.SafeConfigParser(defaults)
    parser.read(CONFIG_LOCATIONS)
    return parser.has_option(section, name)
|
def extra(request, provider):
    """Handle registration of new user with extra data for profile"""
    identity = request.session.get('identity', None)
    if not identity:
        # No pending social identity in the session: nothing to register.
        raise Http404

    form_class = str_to_class(settings.EXTRA_FORM)
    if request.method == "POST":
        form = form_class(request.POST)
        if form.is_valid():
            user = form.save(request, identity, provider)
            del request.session['identity']
            if settings.ACTIVATION_REQUIRED:
                # Account must be activated before login.
                messages.warning(request, lang.ACTIVATION_REQUIRED_TEXT)
                return redirect(settings.ACTIVATION_REDIRECT_URL)
            user = auth.authenticate(identity=identity, provider=provider)
            if user:
                auth.login(request, user)
                return redirect(request.session.pop('next_url', settings.LOGIN_REDIRECT_URL))
    else:
        form = form_class(initial=request.session['extra'])

    # Invalid POST or initial GET: (re)render the extra-data form.
    return render_to_response('netauth/extra.html', {'form': form}, context_instance=RequestContext(request))
|
def visitShapeDefinition(self, ctx: ShExDocParser.ShapeDefinitionContext):
    """shapeDefinition: qualifier* '{' oneOfShape? '}' annotation* semanticActions"""
    # Visit each qualifier in declaration order.
    qualifiers = ctx.qualifier()
    if qualifiers:
        for qualifier in qualifiers:
            self.visit(qualifier)

    # Delegate the optional oneOfShape body to its dedicated sub-parser.
    oneof_ctx = ctx.oneOfShape()
    if oneof_ctx:
        subparser = ShexOneOfShapeParser(self.context)
        subparser.visit(oneof_ctx)
        self.shape.expression = subparser.expression

    # Annotations and semantic actions share one sub-parser; only attach
    # the collected results when something was actually produced.
    if ctx.annotation() or ctx.semanticActions():
        ansem = ShexAnnotationAndSemactsParser(self.context)
        for annotation_ctx in ctx.annotation():
            ansem.visit(annotation_ctx)
        ansem.visit(ctx.semanticActions())
        if ansem.semacts:
            self.shape.semActs = ansem.semacts
        if ansem.annotations:
            self.shape.annotations = ansem.annotations
|
def _add_environment_lib():
    """Adds the chef_solo_envs cookbook, which provides a library that adds
    environment attribute compatibility for chef-solo v10

    NOTE: Chef 10 only
    """
    # Make sure the extra cookbook's libraries/ directory exists on the node.
    target_dir = os.path.join(env.node_work_path, cookbook_paths[0], 'chef_solo_envs', 'libraries')
    with hide('running', 'stdout'):
        sudo('mkdir -p {0}'.format(target_dir))
    # Upload the environment patch into the node's cookbooks.
    source_file = os.path.join(basedir, 'environment.rb')
    put(source_file, os.path.join(target_dir, 'environment.rb'), use_sudo=True)
|
def _query ( self , action , qobj ) :
"""Form query to enumerate category"""
|
title = self . params . get ( 'title' )
pageid = self . params . get ( 'pageid' )
if action == 'random' :
return qobj . random ( namespace = 14 )
elif action == 'category' :
return qobj . category ( title , pageid , self . _continue_params ( ) )
|
def get_frame(self):
    """Get a dataframe of metrics from this storage."""
    cursor = self.db.metrics.find({'run_name': self.model_config.run_name}).sort('epoch_idx')
    records = list(cursor)
    if not records:
        # No metrics recorded yet for this run.
        return pd.DataFrame(columns=['run_name'])
    frame = pd.DataFrame(records)
    # Drop Mongo bookkeeping columns and index the rows by epoch.
    return frame.drop(['_id', 'model_name'], axis=1).set_index('epoch_idx')
|
def get_user_permission_from_email(self, email):
    """Returns a user's permissions object when given the user email."""
    # Resolve the email to an internal user id, then look up its permissions.
    return self.get_user_permission(self.get_user_id_from_email(email))
|
def _FormatSubjectOrProcessToken ( self , token_data ) :
"""Formats a subject or process token as a dictionary of values .
Args :
token _ data ( bsm _ token _ data _ subject32 | bsm _ token _ data _ subject64 ) :
AUT _ SUBJECT32 , AUT _ PROCESS32 , AUT _ SUBJECT64 or AUT _ PROCESS64 token
data .
Returns :
dict [ str , str ] : token values ."""
|
ip_address = self . _FormatPackedIPv4Address ( token_data . ip_address )
return { 'aid' : token_data . audit_user_identifier , 'euid' : token_data . effective_user_identifier , 'egid' : token_data . effective_group_identifier , 'uid' : token_data . real_user_identifier , 'gid' : token_data . real_group_identifier , 'pid' : token_data . process_identifier , 'session_id' : token_data . session_identifier , 'terminal_port' : token_data . terminal_port , 'terminal_ip' : ip_address }
|
def make_path(*args):
    """
    >>> _hack_make_path_doctest_output(make_path("/a", "b"))
    '/a/b'
    >>> _hack_make_path_doctest_output(make_path(["/a", "b"]))
    '/a/b'
    >>> _hack_make_path_doctest_output(make_path(*["/a", "b"]))
    '/a/b'
    >>> _hack_make_path_doctest_output(make_path("/a"))
    >>> _hack_make_path_doctest_output(make_path(["/a"]))
    >>> _hack_make_path_doctest_output(make_path(*["/a"]))
    """
    # Flatten the arguments, drop any None fragments, then join and absolutize.
    fragments = [fragment for fragment in unpack_args(*args) if fragment is not None]
    return os.path.abspath(os.path.join(*fragments))
|
def parseline(line, format):
    """Given a line (a string actually) and a short string telling
    how to format it, return a list of python objects that result.

    The format string maps words (as split by line.split()) into
    python code:

    x   ->    Nothing; skip this word
    s   ->    Return this word as a string
    i   ->    Return this word as an int
    d   ->    Return this word as an int
    f   ->    Return this word as a float

    Basic parsing of strings:
    >>> parseline('Hello, World','ss')
    ['Hello,', 'World']

    You can use 'x' to skip a record; you also don't have to parse
    every record:
    >>> parseline('1 2 3 4','xdd')
    [2, 3]

    >>> parseline('C1 0.0 0.0 0.0','sfff')
    ['C1', 0.0, 0.0, 0.0]

    Should this return an empty list?
    >>> parseline('This line wont be parsed','xx')
    """
    converters = {'x': None, 's': str, 'f': float, 'd': int, 'i': int}
    words = line.split()
    parsed = []
    for position, code in enumerate(format):
        convert = converters.get(code, None)
        if convert:
            parsed.append(convert(words[position]))
    # Historical convention: no parsed fields -> None, one field -> bare scalar.
    if not parsed:
        return None
    if len(parsed) == 1:
        return parsed[0]
    return parsed
|
def _to_point(dims):
    """Convert (width, height) or size -> point.Point."""
    assert dims
    if not isinstance(dims, (tuple, list)):
        # A single scalar means a square region: size x size.
        edge = int(dims)
        if edge <= 0:
            raise ValueError("Must specify a +ve value for size, got {}.".format(dims))
        return point.Point(edge, edge)
    if len(dims) != 2:
        raise ValueError("A two element tuple or list is expected here, got {}.".format(dims))
    width, height = int(dims[0]), int(dims[1])
    if width <= 0 or height <= 0:
        raise ValueError("Must specify +ve dims, got {}.".format(dims))
    return point.Point(width, height)
|
def extent(self):
    """Return the source range (the range of text) occupied by the entity
    pointed at by the cursor."""
    # Compute once, then cache the result on the instance.
    if hasattr(self, '_extent'):
        return self._extent
    self._extent = conf.lib.clang_getCursorExtent(self)
    return self._extent
|
def list_absent(name, acl_type, acl_names=None, recurse=False):
    '''
    Ensure a Linux ACL list does not exist

    Takes a list of acl names and remove them from the given path

    name
        The acl path

    acl_type
        The type of the acl is used for, it can be 'user' or 'group'

    acl_names
        The list of users or groups

    perms
        Remove the permissions eg.: rwx

    recurse
        Set the permissions recursive in the path
    '''
    if acl_names is None:
        acl_names = []

    # Standard salt state return structure.
    ret = {'name': name, 'result': True, 'changes': {}, 'comment': ''}

    if not os.path.exists(name):
        ret['comment'] = '{0} does not exist'.format(name)
        ret['result'] = False
        return ret

    __current_perms = __salt__['acl.getfacl'](name)

    # 'd:'/'default:' prefixes select a directory's *default* ACL entries.
    if acl_type.startswith(('d:', 'default:')):
        # Strip the leading 'd'/'default' component to get the bare acl type.
        _acl_type = ':'.join(acl_type.split(':')[1:])
        _current_perms = __current_perms[name].get('defaults', {})
        _default = True
    else:
        _acl_type = acl_type
        _current_perms = __current_perms[name]
        _default = False

    # The getfacl execution module lists default with empty names as being
    # applied to the user/group that owns the file, e.g.,
    # default:group::rwx would be listed as default:group:root:rwx
    # In this case, if acl_name is empty, we really want to search for root
    # but still uses '' for other
    # We search through the dictionary getfacl returns for the owner of the
    # file if acl_name is empty.
    if not acl_names:
        # NOTE(review): set() over the comment string yields a set of single
        # characters, not names -- presumably intentional for owner lookup,
        # but confirm against the acl.getfacl output format.
        _search_names = set(__current_perms[name].get('comment').get(_acl_type, ''))
    else:
        _search_names = set(acl_names)

    if _current_perms.get(_acl_type, None) or _default:
        try:
            # Collect the current entries whose name matches a requested name.
            users = {}
            for i in _current_perms[_acl_type]:
                if i and next(six.iterkeys(i)) in _search_names:
                    users.update(i)
        except (AttributeError, KeyError):
            users = None

        if users:
            ret['comment'] = 'Removing permissions'

            if __opts__['test']:
                # Test mode: report the pending change without applying it.
                ret['result'] = None
                return ret

            for acl_name in acl_names:
                __salt__['acl.delfacl'](acl_type, acl_name, name, recursive=recurse)
        else:
            ret['comment'] = 'Permissions are in the desired state'
    else:
        ret['comment'] = 'ACL Type does not exist'
        ret['result'] = False

    return ret
|
def get_temp_url(self, obj, seconds, method="GET", key=None, cached=True):
    """Given a storage object in this container, returns a URL that can be
    used to access that object. The URL will expire after `seconds` seconds.

    The only methods supported are GET and PUT. Anything else will raise an
    `InvalidTemporaryURLMethod` exception.

    If you have your Temporary URL key, you can pass it in directly and
    potentially save an API call to retrieve it. If you don't pass in the
    key, and don't wish to use any cached value, pass `cached=False`.
    """
    # Pure delegation: the container manager performs the actual API work.
    manager = self.manager
    return manager.get_temp_url(self, obj, seconds, method=method, key=key, cached=cached)
|
def forward(self, Q_, p_, G_, h_, A_, b_):
    """Solve a batch of QPs.

    This function solves a batch of QPs, each optimizing over
    `nz` variables and having `nineq` inequality constraints
    and `neq` equality constraints.
    The optimization problem for each instance in the batch
    (dropping indexing from the notation) is of the form

        \hat z =   argmin_z 1/2 z^T Q z + p^T z
                 subject to Gz <= h
                            Az  = b

    where Q \in S^{nz,nz},
          S^{nz,nz} is the set of all positive semi-definite matrices,
          p \in R^{nz}
          G \in R^{nineq,nz}
          h \in R^{nineq}
          A \in R^{neq,nz}
          b \in R^{neq}

    These parameters should all be passed to this function as
    Variable- or Parameter-wrapped Tensors.
    (See torch.autograd.Variable and torch.nn.parameter.Parameter)

    If you want to solve a batch of QPs where `nz`, `nineq` and `neq`
    are the same, but some of the contents differ across the
    minibatch, you can pass in tensors in the standard way
    where the first dimension indicates the batch example.
    This can be done with some or all of the coefficients.

    You do not need to add an extra dimension to coefficients
    that will not change across all of the minibatch examples.
    This function is able to infer such cases.

    If you don't want to use any equality or inequality constraints,
    you can set the appropriate values to:

        e = Variable(torch.Tensor())

    Parameters:
      Q:  A (nBatch, nz, nz) or (nz, nz) Tensor.
      p:  A (nBatch, nz) or (nz) Tensor.
      G:  A (nBatch, nineq, nz) or (nineq, nz) Tensor.
      h:  A (nBatch, nineq) or (nineq) Tensor.
      A:  A (nBatch, neq, nz) or (neq, nz) Tensor.
      b:  A (nBatch, neq) or (neq) Tensor.

    Returns: \hat z: a (nBatch, nz) Tensor.
    """
    # Infer the batch size, then broadcast any un-batched coefficient
    # tensors up to (nBatch, ...) so both solvers see uniform shapes.
    nBatch = extract_nBatch(Q_, p_, G_, h_, A_, b_)
    Q, _ = expandParam(Q_, nBatch, 3)
    p, _ = expandParam(p_, nBatch, 2)
    G, _ = expandParam(G_, nBatch, 3)
    h, _ = expandParam(h_, nBatch, 2)
    A, _ = expandParam(A_, nBatch, 3)
    b, _ = expandParam(b_, nBatch, 2)

    if self.check_Q_spd:
        # Validate each Q by checking that all eigenvalues are positive.
        for i in range(nBatch):
            e, _ = torch.eig(Q[i])
            if not torch.all(e[:, 0] > 0):
                raise RuntimeError('Q is not SPD.')

    _, nineq, nz = G.size()
    neq = A.size(1) if A.nelement() > 0 else 0
    # At least one constraint (equality or inequality) is required.
    assert(neq > 0 or nineq > 0)
    self.neq, self.nineq, self.nz = neq, nineq, nz

    if self.solver == QPSolvers.PDIPM_BATCHED:
        # Batched primal-dual interior point method: pre-factor the KKT
        # system once, then solve the whole batch in one call.
        self.Q_LU, self.S_LU, self.R = pdipm_b.pre_factor_kkt(Q, G, A)
        zhats, self.nus, self.lams, self.slacks = pdipm_b.forward(
            Q, p, G, h, A, b, self.Q_LU, self.S_LU, self.R,
            self.eps, self.verbose, self.notImprovedLim, self.maxIter)
    elif self.solver == QPSolvers.CVXPY:
        # Fallback: solve each QP in the batch individually on the CPU via
        # CVXPY, then copy the results back into batched tensors.
        vals = torch.Tensor(nBatch).type_as(Q)
        zhats = torch.Tensor(nBatch, self.nz).type_as(Q)
        lams = torch.Tensor(nBatch, self.nineq).type_as(Q)
        nus = torch.Tensor(nBatch, self.neq).type_as(Q) if self.neq > 0 else torch.Tensor()
        slacks = torch.Tensor(nBatch, self.nineq).type_as(Q)
        for i in range(nBatch):
            Ai, bi = (A[i], b[i]) if neq > 0 else (None, None)
            vals[i], zhati, nui, lami, si = solvers.cvxpy.forward_single_np(
                *[x.cpu().numpy() if x is not None else None
                  for x in (Q[i], p[i], G[i], h[i], Ai, bi)])
            # if zhati[0] is None:
            #     import IPython, sys; IPython.embed(); sys.exit(-1)
            zhats[i] = torch.Tensor(zhati)
            lams[i] = torch.Tensor(lami)
            slacks[i] = torch.Tensor(si)
            if neq > 0:
                nus[i] = torch.Tensor(nui)

        self.vals = vals
        self.lams = lams
        self.nus = nus
        self.slacks = slacks
    else:
        assert False

    # Stash everything the backward pass needs to compute gradients.
    self.save_for_backward(zhats, Q_, p_, G_, h_, A_, b_)
    return zhats
|
def get_item(self, table, hash_key, range_key=None, attributes_to_get=None, consistent_read=False, item_class=Item):
    """Retrieve an existing item from the table.

    :type table: :class:`boto.dynamodb.table.Table`
    :param table: The Table object from which the item is retrieved.

    :type hash_key: int|long|float|str|unicode
    :param hash_key: The HashKey of the requested item. The type of the
        value must match the type defined in the schema for the table.

    :type range_key: int|long|float|str|unicode
    :param range_key: The optional RangeKey of the requested item. The
        type of the value must match the type defined in the schema for
        the table.

    :type attributes_to_get: list
    :param attributes_to_get: A list of attribute names. If supplied, only
        the specified attribute names will be returned. Otherwise, all
        attributes will be returned.

    :type consistent_read: bool
    :param consistent_read: If True, a consistent read request is issued.
        Otherwise, an eventually consistent request is issued.

    :type item_class: Class
    :param item_class: Allows you to override the class used to generate
        the items. This should be a subclass of
        :class:`boto.dynamodb.item.Item`
    """
    # Build the wire-format key from the table's schema, then issue the
    # low-level GetItem call.
    request_key = self.build_key_from_values(table.schema, hash_key, range_key)
    result = self.layer1.get_item(table.name, request_key, attributes_to_get,
                                  consistent_read, object_hook=item_object_hook)
    fetched = item_class(table, hash_key, range_key, result['Item'])
    if 'ConsumedCapacityUnits' in result:
        # Surface the request's capacity cost on the returned item.
        fetched.consumed_units = result['ConsumedCapacityUnits']
    return fetched
|
def _find_file ( self , needle , candidates ) :
"""Find the first directory containing a given candidate file ."""
|
for candidate in candidates :
fullpath = os . path . join ( candidate , needle )
if os . path . isfile ( fullpath ) :
return fullpath
raise PathError ( "Unable to locate file %s; tried %s" % ( needle , candidates ) )
|
def get_tower_results(iterator, optimizer, dropout_rates):
    r'''
    With this preliminary step out of the way, we can for each GPU introduce a
    tower for which's batch we calculate and return the optimization gradients
    and the average loss across towers.
    '''
    # To calculate the mean of the losses
    tower_avg_losses = []

    # Tower gradients to return
    tower_gradients = []

    with tf.variable_scope(tf.get_variable_scope()):
        # Loop over available_devices
        for i in range(len(Config.available_devices)):
            # Execute operations of tower i on device i
            device = Config.available_devices[i]
            with tf.device(device):
                # Create a scope for all operations of tower i
                with tf.name_scope('tower_%d' % i):
                    # Calculate the avg_loss and mean_edit_distance and retrieve the decoded
                    # batch along with the original batch's labels (Y) of this tower.
                    # reuse=i > 0: only the first tower creates variables.
                    avg_loss = calculate_mean_edit_distance_and_loss(iterator, dropout_rates, reuse=i > 0)

                    # Allow for variables to be re-used by the next tower
                    tf.get_variable_scope().reuse_variables()

                    # Retain tower's avg losses
                    tower_avg_losses.append(avg_loss)

                    # Compute gradients for model parameters using tower's mini-batch
                    gradients = optimizer.compute_gradients(avg_loss)

                    # Retain tower's gradients
                    tower_gradients.append(gradients)

    # Average the per-tower losses into a single scalar for logging.
    avg_loss_across_towers = tf.reduce_mean(tower_avg_losses, 0)

    tf.summary.scalar(name='step_loss', tensor=avg_loss_across_towers, collections=['step_summaries'])

    # Return gradients and the average loss
    return tower_gradients, avg_loss_across_towers
|
def forward(self, is_train, req, in_data, out_data, aux):
    """Implements forward computation.

    is_train : bool, whether forwarding for training or testing.
    req : list of {'null', 'write', 'inplace', 'add'}, how to assign to out_data. 'null' means skip assignment, etc.
    in_data : list of NDArray, input data.
    out_data : list of NDArray, pre-allocated output buffers.
    aux : list of NDArray, mutable auxiliary states. Usually not used.
    """
    data, label = in_data[0], in_data[1]
    softmax_pred = mx.nd.SoftmaxOutput(data, label)
    # Write the prediction into the first output buffer per the request mode.
    self.assign(out_data[0], req[0], softmax_pred)
|
def _get_timeframe_bounds(self, timeframe, bucket_width):
    """
    Get a `bucket_width` aligned `start_time` and `end_time` from a
    `timeframe` dict.

    Returns a (start, end) pair of datetime objects. `bucket_width` is in
    seconds; internally it is also converted to kronos time units for the
    'recent' mode arithmetic.
    """
    if bucket_width:
        # Keep both representations: seconds (for 'range' mode datetime
        # arithmetic) and kronos time units (for 'recent' mode).
        bucket_width_seconds = bucket_width
        bucket_width = epoch_time_to_kronos_time(bucket_width)

    # TODO(derek): Potential optimization by setting the end_time equal to the
    # untrusted_time if end_time > untrusted_time and the results are not being
    # output to the user (only for caching)
    if timeframe['mode']['value'] == 'recent':
        # Set end_time equal to now and align to bucket width
        end_time = kronos_time_now()
        original_end_time = end_time
        duration = get_seconds(timeframe['value'], timeframe['scale']['name'])
        duration = epoch_time_to_kronos_time(duration)
        start_time = original_end_time - duration

        if bucket_width:
            # Align values to the bucket width
            # TODO(derek): Warn the user that the timeframe has been altered to fit
            # the bucket width
            # Round end up and start down so the window fully covers the
            # requested duration.
            if (end_time % bucket_width) != 0:
                end_time += bucket_width - (end_time % bucket_width)
            if (start_time % bucket_width) != 0:
                start_time -= (start_time % bucket_width)

        start = kronos_time_to_datetime(start_time)
        end = kronos_time_to_datetime(end_time)
    elif timeframe['mode']['value'] == 'range':
        end = datetime.datetime.strptime(timeframe['to'], DT_FORMAT)
        end_seconds = datetime_to_epoch_time(end)

        start = datetime.datetime.strptime(timeframe['from'], DT_FORMAT)
        start_seconds = datetime_to_epoch_time(start)

        if bucket_width:
            # Align values to the bucket width
            # TODO(derek): Warn the user that the timeframe has been altered to fit
            # the bucket width
            start_bump = start_seconds % bucket_width_seconds
            start -= datetime.timedelta(seconds=start_bump)
            if (end_seconds % bucket_width_seconds) != 0:
                end_bump = bucket_width_seconds - (end_seconds % bucket_width_seconds)
                end += datetime.timedelta(seconds=end_bump)
    else:
        raise ValueError("Timeframe mode must be 'recent' or 'range'")

    return start, end
|
def controller(self):
    """Show current linked controllers."""
    if not hasattr(self, 'controllers'):
        raise AttributeError("There is no controller assigned.")
    linked = self.controllers
    if len(linked) > 1:
        # in the future, we should support more controllers
        raise TypeError("Only one controller per account.")
    return linked[0]
|
def get_exchanges(self, vhost=None):
    """:returns: A list of dicts
    :param string vhost: A vhost to query for exchanges, or None (default),
        which triggers a query for all exchanges in all vhosts.
    """
    if vhost:
        # URL-encode the vhost name; safe='' so '/' is encoded too.
        path = Client.urls['exchanges_by_vhost'] % quote(vhost, '')
    else:
        path = Client.urls['all_exchanges']
    return self._call(path, 'GET')
|
def happy_edges(row, prefix=None):
    """Convert a row in HAPPY file and yield edges.

    Yields ((a, b, oa, ob), is_uncertain) per adjacent scaffold pair, where
    oa/ob mark orientation ('<' for a '-'-prefixed, i.e. reversed, scaffold;
    '>' otherwise).
    """
    # Map bracket characters to spaces so they act as separators.
    # NOTE(review): maketrans requires both strings to be the same length,
    # so the replacement should be six spaces -- confirm the literal was not
    # mangled in transit.
    trans = maketrans("[](){}", "      ")
    row = row.strip().strip("+")
    row = row.translate(trans)
    # Scaffolds are colon-separated; trim '+' markers from each.
    scfs = [x.strip("+") for x in row.split(":")]
    for a, b in pairwise(scfs):
        oa = '<' if a.strip()[0] == '-' else '>'
        ob = '<' if b.strip()[0] == '-' else '>'

        # Residual whitespace (left by a stripped bracket) marks the join
        # between this pair as uncertain.
        is_uncertain = a[-1] == ' ' or b[0] == ' '
        a = a.strip().strip('-')
        b = b.strip().strip('-')

        if prefix:
            a = prefix + a
            b = prefix + b

        yield (a, b, oa, ob), is_uncertain
|
def evaluate(loop_hparams, planner_hparams, policy_dir, model_dir, eval_metrics_dir, agent_type, eval_mode, eval_with_learner, log_every_steps, debug_video_path, num_debug_videos=1, random_starts_step_limit=None, report_fn=None, report_metric=None):
    """Evaluate.

    Runs either an agent evaluation (eval_mode 'agent_real'/'agent_simulated')
    or a world-model evaluation, writes the metrics as TF summaries to
    eval_metrics_dir, optionally records debug videos, and reports a chosen
    metric through report_fn.

    Returns:
      dict: the computed evaluation metrics.
    """
    if eval_with_learner:
        # Learner-based evaluation is only implemented for policy agents.
        assert agent_type == "policy"

    if report_fn:
        assert report_metric is not None

    eval_metrics_writer = tf.summary.FileWriter(eval_metrics_dir)
    video_writers = ()
    kwargs = {}
    if eval_mode in ["agent_real", "agent_simulated"]:
        if not eval_with_learner:
            if debug_video_path:
                tf.gfile.MakeDirs(debug_video_path)
                video_writers = [common_video.WholeVideoWriter(  # pylint: disable=g-complex-comprehension
                    fps=10,
                    output_path=os.path.join(debug_video_path, "{}.avi".format(i)),
                    file_format="avi",
                ) for i in range(num_debug_videos)]
            kwargs["eval_fn"] = make_eval_fn_with_agent(
                agent_type, eval_mode, planner_hparams, model_dir,
                log_every_steps=log_every_steps,
                video_writers=video_writers,
                random_starts_step_limit=random_starts_step_limit)
        eval_metrics = rl_utils.evaluate_all_configs(loop_hparams, policy_dir, **kwargs)
    else:
        eval_metrics = evaluate_world_model(
            agent_type, loop_hparams, planner_hparams, model_dir, policy_dir,
            random_starts_step_limit, debug_video_path, log_every_steps)
    rl_utils.summarize_metrics(eval_metrics_writer, eval_metrics, 0)

    # Flush any recorded debug videos to disk.
    for video_writer in video_writers:
        video_writer.finish_to_disk()

    # Report metrics
    if report_fn:
        if report_metric == "mean_reward":
            metric_name = rl_utils.get_metric_name(
                sampling_temp=loop_hparams.eval_sampling_temps[0],
                max_num_noops=loop_hparams.eval_max_num_noops,
                clipped=False)
            report_fn(eval_metrics[metric_name], 0)
        else:
            report_fn(eval_metrics[report_metric], 0)
    return eval_metrics
|
def reset(cls):
    """Resets the static state. Should only be called by tests."""
    cls.stats = StatContainer()
    cls.parentMap = {}
    cls.containerMap = {}
    cls.subId = 0
    # Clear aggregators on every live Stat instance so no state carries over.
    for candidate in gc.get_objects():
        if isinstance(candidate, Stat):
            candidate._aggregators = {}
|
def hms(self, msg, tic=None, prt=sys.stdout):
    """Print elapsed time and message; return the current timer value."""
    # Fall back to the instance's stored start time when no tic is given.
    start = self.tic if tic is None else tic
    now = timeit.default_timer()
    elapsed = str(datetime.timedelta(seconds=(now - start)))
    prt.write('{HMS}: {MSG}\n'.format(HMS=elapsed, MSG=msg))
    return now
|
def etree(self):
    """Returns a lxml object of the response's content that can be selected by xpath"""
    # Parse once and cache the result on the instance.
    if hasattr(self, '_elements'):
        return self._elements
    try:
        html_parser = lxml.html.HTMLParser(encoding=self.encoding)
        elements = lxml.html.fromstring(self.content, parser=html_parser)
    except LookupError:
        # lxml would raise LookupError when encoding not supported
        # try fromstring without encoding instead.
        # on windows, unicode is not availabe as encoding for lxml
        elements = lxml.html.fromstring(self.content)
    if isinstance(elements, lxml.etree._ElementTree):
        elements = elements.getroot()
    self._elements = elements
    return self._elements
|
def add_node(self, node_id, name, labels):
    """Delegate node creation to the underlying Neo4j wrapper.

    Args:
        node_id: unique node identifier, e.g. 'www.evil4u.com'.
        name: display name of the node, e.g. 'evil4u'.
        labels: list of label strings, e.g. ['domain', 'evil'].

    Returns:
        Nothing
    """
    self.neo_db.add_node(node_id, name, labels)
|
def write_concrete_dag(self):
    """Write all the nodes in the DAG to the DAG file.

    Raises:
        CondorDAGError: if no DAG file path has been set or the file
            cannot be opened for writing.
    """
    # Fixes: Python 2 ``raise X, "msg"`` syntax (a SyntaxError on
    # Python 3), a bare ``except:``, and the file handle leaking when a
    # node write raises.
    if not self.__dag_file_path:
        raise CondorDAGError("No path for DAG file")
    try:
        dagfile = open(self.__dag_file_path, 'w')
    except (IOError, OSError):
        raise CondorDAGError("Cannot open file " + self.__dag_file_path)
    try:
        # First pass: emit each node's job definition and attributes.
        for node in self.__nodes:
            node.write_job(dagfile)
            node.write_vars(dagfile)
            if node.get_category():
                node.write_category(dagfile)
            if node.get_priority():
                node.write_priority(dagfile)
            node.write_pre_script(dagfile)
            node.write_post_script(dagfile)
            node.write_input_files(dagfile)
            node.write_output_files(dagfile)
        # Second pass: PARENT/CHILD relations follow all job definitions.
        for node in self.__nodes:
            node.write_parents(dagfile)
        for category in self.__maxjobs_categories:
            self.write_maxjobs(dagfile, category)
    finally:
        # Release the file handle even if a node write fails.
        dagfile.close()
|
def build_search(self):
    """Assemble and return the complete ``Search`` object."""
    search = self.query(self.search(), self._query)
    search = self.filter(search)
    # Highlighting only applies when specific fields were requested.
    if self.fields:
        search = self.highlight(search)
    search = self.sort(search)
    # Aggregations are attached in place; ``aggregate`` returns nothing.
    self.aggregate(search)
    return search
|
def main_plates(sdat):
    """Plot several plates information.

    Walks the selected snapshots of *sdat*, detects trenches and ridges,
    writes plate time series to the 'plates' output files and saves
    viscosity/stress/velocity figures per snapshot.

    NOTE(review): assumes *sdat* is a StagyyData-like object exposing
    ``rprof``, ``walk``, ``par``, ``filename`` and ``scale`` -- confirm
    against the caller.
    """
    # calculating averaged horizontal surface velocity
    # needed for redimensionalisation
    ilast = sdat.rprof.index.levels[0][-1]
    rlast = sdat.rprof.loc[ilast]
    nprof = 0
    uprof_averaged = rlast.loc[:, 'vhrms'] * 0
    for step in sdat.walk.filter(rprof=True):
        uprof_averaged += step.rprof['vhrms']
        nprof += 1
    uprof_averaged /= nprof
    radius = rlast['r'].values
    if sdat.par['boundaries']['air_layer']:
        dsa = sdat.par['boundaries']['air_thickness']
        # surface index just below the sticky-air layer
        isurf = np.argmin(abs(radius - radius[-1] + dsa))
        vrms_surface = uprof_averaged.iloc[isurf]
        isurf = np.argmin(abs((1 - dsa) - radius))
        isurf -= 4
        # why different isurf for the rest?
    else:
        isurf = -1
        vrms_surface = uprof_averaged.iloc[isurf]
    with misc.InchoateFiles(8, 'plates') as fids:
        fids.fnames = ['plate_velocity', 'distance_subd', 'continents', 'flux', 'topography', 'age', 'velderiv', 'velocity']
        # headers of the two time-series files
        fids[0].write('# it time ph_trench vel_trench age_trench\n')
        fids[1].write('# it time time [My] distance ' 'ph_trench ph_cont age_trench [My]\n')
        istart, iend = None, None
        for step in sdat.walk.filter(fields=['T']):  # could check other fields too
            timestep = step.isnap
            istart = timestep if istart is None else istart
            iend = timestep
            print('Treating snapshot', timestep)
            rcmb = step.geom.rcmb
            # topography
            fname = sdat.filename('sc', timestep=timestep, suffix='.dat')
            topo = np.genfromtxt(str(fname))
            # rescaling topography!
            if sdat.par['boundaries']['air_layer']:
                topo[:, 1] = topo[:, 1] / (1. - dsa)
            # dimensional time in My
            time = step.geom.ti_ad * vrms_surface * conf.scaling.ttransit / conf.scaling.yearins / 1.e6
            trenches, ridges, agetrenches, _, _ = detect_plates(step, vrms_surface, fids, time)
            plot_plates(step, time, vrms_surface, trenches, ridges, agetrenches, topo, fids)
            # prepare for continent plotting
            concfld = step.fields['c'][0, :, :, 0]
            continentsfld = np.ma.masked_where(concfld < 3, concfld)
            # plotting continents, to-do
            continentsfld = continentsfld / continentsfld
            temp = step.fields['T'][0, :, :, 0]
            # surface temperature gradient, used as a heat-flux proxy below
            tgrad = (temp[:, isurf - 1] - temp[:, isurf]) / (step.geom.r_coord[isurf] - step.geom.r_coord[isurf - 1])
            io_surface(timestep, time, fids[2], concfld[:-1, isurf])
            io_surface(timestep, time, fids[3], tgrad)
            io_surface(timestep, time, fids[4], topo[:, 1])
            if 'age' in conf.plates.plot:
                io_surface(timestep, time, fids[5], step.fields['age'][0, :, isurf, 0])
            # plot viscosity field with position of trenches and ridges
            etamin, _ = sdat.scale(1e-2, 'Pa')
            etamax, _ = sdat.scale(sdat.par['viscosity']['eta_max'], 'Pa')
            fig, axis, _, _ = field.plot_scalar(step, 'eta', vmin=etamin, vmax=etamax)
            # plotting continents
            field.plot_scalar(step, 'c', continentsfld, axis, False, cmap='cool_r', vmin=0, vmax=0)
            cmap2 = plt.cm.ocean
            cmap2.set_over('m')
            # plotting velocity vectors
            field.plot_vec(axis, step, 'v')
            # Annotation with time and step
            axis.text(1., 0.9, str(round(time, 0)) + ' My', transform=axis.transAxes)
            axis.text(1., 0.1, str(timestep), transform=axis.transAxes)
            # Put arrow where ridges and trenches are
            plot_plate_limits_field(axis, rcmb, ridges, trenches)
            misc.saveplot(fig, 'eta', timestep, close=False)
            # Zoom
            if conf.plates.zoom is not None:
                if not 0 <= conf.plates.zoom <= 360:
                    raise error.InvalidZoomError(conf.plates.zoom)
                # margins chosen from the quadrant of the zoom angle
                if 45 < conf.plates.zoom <= 135:
                    ladd, radd, uadd, dadd = 0.8, 0.8, 0.05, 0.1
                elif 135 < conf.plates.zoom <= 225:
                    ladd, radd, uadd, dadd = 0.05, 0.1, 0.8, 0.8
                elif 225 < conf.plates.zoom <= 315:
                    ladd, radd, uadd, dadd = 0.8, 0.8, 0.1, 0.05
                else:  # > 315 or <= 45
                    ladd, radd, uadd, dadd = 0.1, 0.05, 0.8, 0.8
                xzoom = (rcmb + 1) * np.cos(np.radians(conf.plates.zoom))
                yzoom = (rcmb + 1) * np.sin(np.radians(conf.plates.zoom))
                axis.set_xlim(xzoom - ladd, xzoom + radd)
                axis.set_ylim(yzoom - dadd, yzoom + uadd)
                misc.saveplot(fig, 'etazoom', timestep, close=False)
            plt.close(fig)
            # plot stress field with position of trenches and ridges
            if 'str' in conf.plates.plot:
                fig, axis, _, _ = field.plot_scalar(step, 'sII', vmin=0, vmax=300)
                # Annotation with time and step
                axis.text(1., 0.9, str(round(time, 0)) + ' My', transform=axis.transAxes)
                axis.text(1., 0.1, str(timestep), transform=axis.transAxes)
                # Put arrow where ridges and trenches are
                plot_plate_limits_field(axis, rcmb, ridges, trenches)
                misc.saveplot(fig, 's', timestep, close=False)
                # Zoom
                if conf.plates.zoom is not None:
                    # reuses xzoom/yzoom and the margins computed above
                    axis.set_xlim(xzoom - ladd, xzoom + radd)
                    axis.set_ylim(yzoom - dadd, yzoom + uadd)
                    misc.saveplot(fig, 'szoom', timestep, close=False)
                plt.close(fig)
                # calculate stresses in the lithosphere
                lithospheric_stress(step, trenches, ridges, time)
            # plotting the principal deviatoric stress field
            if 'sx' in conf.plates.plot:
                fig, axis, _, _ = field.plot_scalar(step, 'sII', alpha=0.1)
                # plotting continents
                field.plot_scalar(step, 'c', continentsfld, axis, False, cmap='cool_r', vmin=0, vmax=0)
                cmap2 = plt.cm.ocean
                cmap2.set_over('m')
                # plotting principal deviatoric stress
                field.plot_vec(axis, step, 'sx')
                # Annotation with time and step
                axis.text(1., 0.9, str(round(time, 0)) + ' My', transform=axis.transAxes)
                axis.text(1., 0.1, str(timestep), transform=axis.transAxes)
                # Put arrow where ridges and trenches are
                plot_plate_limits_field(axis, rcmb, ridges, trenches)
                misc.saveplot(fig, 'sx', timestep)
        # determine names of files
        ptn = misc.out_name('{}_{}_{}')
        stem = ptn.format(fids.fnames[0], istart, iend)
        idx = 0
        fmt = '{}.dat'
        # bump a numeric suffix until an unused set of filenames is found
        while pathlib.Path(fmt.format(stem, idx)).is_file():
            fmt = '{}_{}.dat'
            idx += 1
        fids.fnames = [fmt.format(ptn.format(fname, istart, iend), idx) for fname in fids.fnames]
|
def set_post_evaluation_transform(self, value):
    r"""Set the transform applied to the raw tree-ensemble prediction.

    Parameters
    ----------
    value: str
        Name of the transform to apply. One of:

        - "NoTransform" (default): leave the prediction unchanged.
        - "Classification_SoftMax": softmax-normalise the outcome into
          non-negative scores summing to 1; dimension ``i`` becomes

          .. math::
              \frac{e^{x_i}}{\sum_j e^{x_j}}

          (the output transform applied by XGBoost for multiclass
          classification).
        - "Regression_Logistic": logistic-transform the prediction,

          .. math::
              (1 + e^{-v})^{-1}

          (the transform used in binary classification).
    """
    transform_enum = _TreeEnsemble_pb2.TreeEnsemblePostEvaluationTransform
    self.tree_spec.postEvaluationTransform = transform_enum.Value(value)
|
def reply_inform(cls, req_msg, *args):
    """Build an inform message in reply to a request.

    The inform reuses the request's name and message identifier.

    Parameters
    ----------
    req_msg : katcp.core.Message instance
        The request message this inform is in reply to.
    args : list of strings
        The message arguments except name.
    """
    return cls(cls.INFORM, req_msg.name, args, req_msg.mid)
|
def publish(self, exchange, routing_key, body, properties=None):
    """Publish a message to RabbitMQ.

    If the connection is not established or is blocked, the returned
    future resolves with an error instead of sending.

    :param str exchange: The exchange to publish the message to.
    :param str routing_key: The routing key to publish the message with.
    :param bytes body: The message body to send.
    :param dict properties: An optional dict of additional properties
        to append.
    :rtype: tornado.concurrent.Future
    :raises: :exc:`sprockets.mixins.amqp.NotReadyError`
    :raises: :exc:`sprockets.mixins.amqp.PublishingError`
    """
    future = concurrent.Future()
    props = properties or {}
    # Fill in the standard properties unless the caller supplied them.
    props.setdefault('app_id', self.default_app_id)
    props.setdefault('message_id', str(uuid.uuid4()))
    props.setdefault('timestamp', int(time.time()))
    if not self.ready:
        future.set_exception(
            NotReadyError(self.state_description, props['message_id']))
        return future
    if self.publisher_confirmations:
        # Track the future until the broker confirms this delivery.
        self.message_number += 1
        self.messages[self.message_number] = future
    else:
        future.set_result(None)
    try:
        self.channel.basic_publish(
            exchange, routing_key, body,
            pika.BasicProperties(**props), True)
    except exceptions.AMQPError as error:
        future.set_exception(
            PublishingFailure(props['message_id'], exchange, routing_key,
                              error.__class__.__name__))
    return future
|
def _raise_error_if_column_exists ( dataset , column_name = 'dataset' , dataset_variable_name = 'dataset' , column_name_error_message_name = 'column_name' ) :
"""Check if a column exists in an SFrame with error message ."""
|
err_msg = 'The SFrame {0} must contain the column {1}.' . format ( dataset_variable_name , column_name_error_message_name )
if column_name not in dataset . column_names ( ) :
raise ToolkitError ( str ( err_msg ) )
|
def emit_children(self, node):
    """Concatenate the emitted output of every child of *node*."""
    return "".join(self.emit_node(child) for child in node.children)
|
def ms_rotate(self, viewer, event, data_x, data_y, msg=True):
    """Rotate the image by dragging the cursor left or right."""
    if not self.canrotate:
        return True
    show_msg = self.settings.get('msg_rotate', msg)
    win_x, win_y = self.get_win_xy(viewer)
    if event.state == 'move':
        self._rotate_xy(viewer, win_x, win_y)
    elif event.state == 'down':
        if show_msg:
            viewer.onscreen_message("Rotate (drag around center)", delay=1.0)
        # Remember where the drag started and the rotation at that point.
        self._start_x, self._start_y = win_x, win_y
        self._start_rot = viewer.get_rotation()
    else:
        # Drag finished: clear any onscreen message.
        viewer.onscreen_message(None)
    return True
|
def dumpDictHdf5(RV, o):
    """Dump a dictionary where each page is a list or an array."""
    for key, value in RV.items():
        o.create_dataset(name=key, data=SP.array(value), chunks=True,
                         compression='gzip')
|
def isCountRate(self):
    """Return True when the science data is in count-rate units.

    The decision uses the primary header's UNITCORR keyword and, when
    present, whether the 'sci' extension's BUNIT value contains a '/'
    (e.g. counts/s). Assumes the keyword 'BUNIT' lives in the header of
    the input FITS file.
    """
    sci_header = self._image['sci', 1].header
    has_bunit = 'BUNIT' in sci_header
    if self._image[0].header['UNITCORR'].strip() == 'PERFORM':
        return True
    return has_bunit and sci_header['bunit'].find('/') != -1
|
def apply_inheritance(self):
    """Apply template inheritance over every templatable object list.

    Templates can be used in the following objects: hosts, contacts,
    services, servicedependencies, hostdependencies, timeperiods,
    hostsextinfo, servicesextinfo, serviceescalations, hostescalations
    and escalations.

    :return: None
    """
    # Resolve inherited properties, templatable list by templatable list,
    # in the same order the original hand-written sequence used.
    for attr_name in ('hosts', 'contacts', 'services',
                      'servicedependencies', 'hostdependencies',
                      'timeperiods', 'hostsextinfo', 'servicesextinfo',
                      'serviceescalations', 'hostescalations',
                      'escalations'):
        getattr(self, attr_name).apply_inheritance()
|
def set_state(profile, state, store='local'):
    '''Configure the firewall state.

    .. versionadded:: 2018.3.4
    .. versionadded:: 2019.2.0

    Args:
        profile (str): Firewall profile to configure; one of ``domain``,
            ``public`` or ``private``.
        state (str): Desired firewall state; one of ``on``, ``off`` or
            ``notconfigured``.

            .. note::
                ``notconfigured`` can only be used with the ``lgpo``
                store.

        store (str): Which policy store to modify: ``local`` (the local
            firewall policy, the default) or ``lgpo`` (the policy defined
            by local group policy).

    Returns:
        bool: ``True`` if successful

    Raises:
        CommandExecutionError: If an error occurs
        ValueError: If the parameters are incorrect

    CLI Example:

    .. code-block:: bash

        # Turn the firewall off when the domain profile is active
        salt * firewall.set_state domain off

        # Turn the firewall on when the public profile is active and set
        # that in the local group policy
        salt * firewall.set_state public on lgpo
    '''
    return salt.utils.win_lgpo_netsh.set_state(profile=profile,
                                               state=state,
                                               store=store)
|
def from_mediaid(cls, context: InstaloaderContext, mediaid: int):
    """Create a post object from a given mediaid."""
    shortcode = Post.mediaid_to_shortcode(mediaid)
    return cls.from_shortcode(context, shortcode)
|
def epoch_to_log_line_timestamp(epoch_time, time_zone=None):
    """Converts an epoch timestamp in ms to log line timestamp format,
    which is readable for humans.

    Args:
        epoch_time: integer, an epoch timestamp in ms.
        time_zone: instance of tzinfo, time zone information.
            Using pytz rather than python 3.2 time_zone implementation
            for python 2 compatibility reasons.

    Returns:
        A string that is the corresponding timestamp in log line
        timestamp format.
    """
    s, ms = divmod(epoch_time, 1000)
    d = datetime.datetime.fromtimestamp(s, tz=time_zone)
    # Zero-pad the milliseconds: str(ms) would render 5 ms as ".5",
    # which is ambiguous and breaks lexicographic sorting of log lines.
    return d.strftime('%m-%d %H:%M:%S.') + '{:03d}'.format(ms)
|
def get_composition_query_session_for_repository(self, repository_id):
    """Gets a composition query session for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.CompositionQuerySession) - a
            ``CompositionQuerySession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - ``unable to complete request``
    raise:  Unimplemented - ``supports_composition_query()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_composition_query()`` and
    ``supports_visible_federation()`` are ``true``.*
    """
    if not self.supports_composition_query():
        raise errors.Unimplemented()
    # Also include check to see if the catalog Id is found otherwise
    # raise errors.NotFound
    # pylint: disable=no-member
    return sessions.CompositionQuerySession(repository_id, runtime=self._runtime)
|
def lin_sim_calc(goid1, goid2, sim_r, termcnts):
    '''Computes Lin's similarity measure using pre-calculated Resnik's similarities.'''
    if sim_r is None:
        # No Resnik similarity available: nothing to normalise.
        return None
    denom = get_info_content(goid1, termcnts) + get_info_content(goid2, termcnts)
    if denom == 0:
        # Avoid dividing by zero information content.
        return None
    return (2 * sim_r) / denom
|
def p_enum_list(self, p):
    '''enum_list : enum_list COMMA ENUM_VAL
                 | ENUM_VAL
                 | empty'''
    # NOTE: the docstring above is the PLY grammar rule; do not edit it.
    if p[1] is None:
        # 'empty' production: start with a fresh list.
        p[0] = []
        return
    if len(p) == 4:
        # enum_list COMMA ENUM_VAL: extend the accumulated list.
        p[1].append(p[3])
        p[0] = p[1]
    elif len(p) == 2:
        # A single ENUM_VAL starts the list.
        p[0] = [p[1]]
|
def _json_default_encoder ( func ) :
"""Monkey - Patch the core json encoder library .
This isn ' t as bad as it sounds .
We override the default method so that if an object
falls through and can ' t be encoded normally , we see if it is
a Future object and return the result to be encoded .
I set a special attribute on the Future object so I can tell
that ' s what it is , and can grab the result .
If that doesn ' t work , I fall back to the earlier behavior .
The nice thing about patching the library this way is that it
won ' t inerfere with existing code and it can itself be wrapped
by other methods .
So it ' s very extensible .
: param func : the JSONEncoder . default method .
: return : an object that can be json serialized ."""
|
@ wraps ( func )
def inner ( self , o ) :
try :
return o . _redpipe_future_result
# noqa
except AttributeError :
pass
return func ( self , o )
return inner
|
def to_metric(self, desc, tag_values, agg_data):
    """Translate an OpenCensus view/aggregation into a Prometheus metric.

    :type desc: dict
    :param desc: The map that describes the view definition.
    :type tag_values: tuple of :class:`~opencensus.tags.tag_value.TagValue`
    :param tag_values: TagValue objects used as label values.
    :type agg_data: object of
        :class:`~opencensus.stats.aggregation_data.AggregationData`
    :param agg_data: Aggregated data to convert into Prometheus samples.
    :rtype: :class:`~prometheus_client.core.CounterMetricFamily` or
        :class:`~prometheus_client.core.HistogramMetricFamily` or
        :class:`~prometheus_client.core.UnknownMetricFamily` or
        :class:`~prometheus_client.core.GaugeMetricFamily`
    :returns: A Prometheus metric object.
    """
    metric_name = desc['name']
    metric_doc = desc['documentation']
    label_keys = desc['labels']
    assert len(tag_values) == len(label_keys)
    # Prometheus requires all label values to be strings; None becomes "".
    # https://github.com/census-instrumentation/opencensus-python/issues/480
    label_values = [tv if tv else "" for tv in tag_values]

    if isinstance(agg_data, aggregation_data_module.CountAggregationData):
        metric = CounterMetricFamily(name=metric_name,
                                     documentation=metric_doc,
                                     labels=label_keys)
        metric.add_metric(labels=label_values, value=agg_data.count_data)
        return metric

    if isinstance(agg_data,
                  aggregation_data_module.DistributionAggregationData):
        assert agg_data.bounds == sorted(agg_data.bounds)
        # Prometheus buckets are cumulative and must end with +Inf,
        # which OpenCensus bounds do not carry.
        buckets = []
        running_total = 0
        for idx, bound in enumerate(agg_data.bounds):
            running_total += agg_data.counts_per_bucket[idx]
            buckets.append([str(bound), running_total])
        buckets.append(["+Inf", agg_data.count_data])
        metric = HistogramMetricFamily(name=metric_name,
                                       documentation=metric_doc,
                                       labels=label_keys)
        metric.add_metric(labels=label_values, buckets=buckets,
                          sum_value=agg_data.sum, )
        return metric

    if isinstance(agg_data, aggregation_data_module.SumAggregationDataFloat):
        metric = UnknownMetricFamily(name=metric_name,
                                     documentation=metric_doc,
                                     labels=label_keys)
        metric.add_metric(labels=label_values, value=agg_data.sum_data)
        return metric

    if isinstance(agg_data, aggregation_data_module.LastValueAggregationData):
        metric = GaugeMetricFamily(name=metric_name,
                                   documentation=metric_doc,
                                   labels=label_keys)
        metric.add_metric(labels=label_values, value=agg_data.value)
        return metric

    raise ValueError("unsupported aggregation type %s" % type(agg_data))
|
def sendJabber(sender, password, receivers, body, senderDomain=NOTIFY_IM_DOMAIN_SENDER, receiverDomain=NOTIFY_IM_DOMAIN_RECEIVER):
    """Sends an instant message to the inputted receivers from the
    given user over XMPP/Jabber. The senderDomain is an override to be
    used when no domain is supplied, same for the receiverDomain.

    :param sender: <str>
    :param password: <str>
    :param receivers: <list> [ <str>, .. ]
    :param body: <str>
    :param senderDomain: <str>
    :param receiverDomain: <str>

    :return: <bool> success -- True when at least one message was sent
    """
    import xmpp

    # make sure there is a proper domain as part of the sender
    if '@' not in sender:
        sender += '@' + senderDomain

    # create a jabber user connection
    user = xmpp.protocol.JID(sender)

    # create a connection to an xmpp client
    client = xmpp.Client(user.getDomain(), debug=[])
    connection = client.connect(secure=0, use_srv=False)
    if not connection:
        text = 'Could not create a connection to xmpp (%s)' % sender
        err = errors.NotifyError(text)
        logger.error(err)
        return False

    # authenticate the session
    auth = client.auth(user.getNode(), password, user.getResource())
    if not auth:
        # NOTE(review): this error text embeds the plaintext password and
        # is logged -- consider redacting before it reaches log storage.
        text = 'Jabber not authenticated: (%s, %s)' % (sender, password)
        err = errors.NotifyError(text)
        logger.error(err)
        return False

    count = 0

    # send the message to the inputted receivers
    for receiver in receivers:
        if '@' not in receiver:
            receiver += '@' + receiverDomain

        # create the message
        msg = xmpp.protocol.Message(receiver, body)

        # create the html message
        html_http = {'xmlns': 'http://jabber.org/protocol/xhtml-im'}
        html_node = xmpp.Node('html', html_http)
        enc_msg = body.encode('utf-8')
        xml = '<body xmlns="http://www.w3.org/1999/xhtml">%s</body>' % enc_msg
        html_node.addChild(node=xmpp.simplexml.XML2Node(xml))
        msg.addChild(node=html_node)

        client.send(msg)
        count += 1

    # success means we delivered to at least one receiver
    return count > 0
|
def extend(self, patterns):
    """Extend a :class:`PatternSet` with additional *patterns*.

    *patterns* can either be a single :class:`Pattern`, another
    :class:`PatternSet`, or a list of :class:`Pattern` instances.
    """
    assert patterns is not None
    if isinstance(patterns, Pattern):
        # A lone pattern is simply appended.
        self.append(patterns)
        return
    if isinstance(patterns, PatternSet):
        # Unwrap a PatternSet into its underlying pattern list.
        patterns = patterns.patterns
    assert all(isinstance(candidate, Pattern) for candidate in patterns)
    self.patterns.extend(patterns)
    # Drop the cached file list so it is recomputed on next access.
    self._all_files = None
|
def handle(self, *args, **options):
    """Collect every trigger that needs handling and process each one
    in a thread pool."""
    from django.db import connection
    # Close the inherited connection so each worker gets a fresh one.
    connection.close()
    failed_tries = settings.DJANGO_TH.get('failed_tries', 10)
    services = TriggerService.objects.filter(
        Q(provider_failed__lte=failed_tries) |
        Q(consumer_failed__lte=failed_tries),
        status=True,
        user__is_active=True,
        provider__name__status=True,
        consumer__name__status=True,
    ).select_related('consumer__name', 'provider__name')
    with ThreadPoolExecutor(max_workers=settings.DJANGO_TH.get('processes')) as executor:
        reader = Read()
        for service in services:
            executor.submit(reader.reading, service)
|
def get_work_items(self, ids, project=None, fields=None, as_of=None,
                   expand=None, error_policy=None):
    """GetWorkItems.

    [Preview API] Returns a list of work items (Maximum 200).

    :param [int] ids: Requested work item ids (maximum 200 ids allowed);
        sent as a comma-separated list.
    :param str project: Project ID or project name.
    :param [str] fields: Requested fields; sent as a comma-separated list.
    :param datetime as_of: AsOf UTC date time string.
    :param str expand: Expand options for work item attributes; one of
        {None, Relations, Fields, Links, All}.
    :param str error_policy: Error policy for the bulk get; one of
        {Fail, Omit}.
    :rtype: [WorkItem]
    """
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    query_parameters = {}
    if ids is not None:
        joined_ids = ",".join(map(str, ids))
        query_parameters['ids'] = self._serialize.query('ids', joined_ids, 'str')
    if fields is not None:
        joined_fields = ",".join(fields)
        query_parameters['fields'] = self._serialize.query('fields', joined_fields, 'str')
    if as_of is not None:
        query_parameters['asOf'] = self._serialize.query('as_of', as_of, 'iso-8601')
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
    if error_policy is not None:
        query_parameters['errorPolicy'] = self._serialize.query('error_policy', error_policy, 'str')
    response = self._send(http_method='GET',
                          location_id='72c7ddf8-2cdc-4f60-90cd-ab71c14a399b',
                          version='5.1-preview.3',
                          route_values=route_values,
                          query_parameters=query_parameters)
    return self._deserialize('[WorkItem]', self._unwrap_collection(response))
|
def boot_priority(self, boot_priority):
    """Sets the boot priority for this QEMU VM.

    :param boot_priority: QEMU boot priority
    """
    self._boot_priority = boot_priority
    # Record the change in the controller log.
    log.info('QEMU VM "{name}" [{id}] has set the boot priority to {boot_priority}'.format(
        name=self._name, id=self._id, boot_priority=self._boot_priority))
|
def _build_http_client(cls, session: AppSession):
    '''Create the HTTP client.

    Returns:
        Client: An instance of :class:`.http.Client`.
    '''
    # TODO:
    # recorder = self._build_recorder()
    stream_factory = functools.partial(
        HTTPStream,
        ignore_length=session.args.ignore_length,
        keep_alive=session.args.http_keep_alive)
    return session.factory.new(
        'HTTPClient',
        connection_pool=session.factory['ConnectionPool'],
        stream_factory=stream_factory)
|
def get_s3_region_from_endpoint(endpoint):
    """Extract and return the AWS S3 region from an endpoint of the form
    `s3-ap-southeast-1.amazonaws.com`; return None when no region is found.

    :param endpoint: Endpoint to extract the region from.
    """
    match = _EXTRACT_REGION_REGEX.search(endpoint)
    if not match:
        # No region component present in the endpoint.
        return None
    region = match.group(1)
    if region == 'external-1':
        # Special alias used by the legacy us-east-1 endpoint.
        return 'us-east-1'
    if region.startswith('dualstack'):
        # Dualstack endpoints embed the region after the 'dualstack' label.
        return region.split('.')[1]
    return region
|
def define_residues_for_plotting_traj(self, analysis_cutoff):
    """Select the residues that contact the ligand often enough to plot.

    Plotting every residue that ever touched the ligand is rarely
    feasible for long simulations. For each trajectory the fraction of
    time every residue spends near the ligand is computed; residues
    whose summed fraction across trajectories exceeds
    ``len(trajectory) * analysis_cutoff`` are kept for the final image.

    Takes:
        *analysis_cutoff* - fraction of time a residue has to spend in
        the vicinity of the ligand for a single trajectory.

    Output:
        *self.frequency* - frequency per residue per trajectory.
        *topol_data.dict_of_plotted_res* - residues to plot in the final
        image, with the frequency for each trajectory.
    """
    self.residue_counts_fraction = {}
    # Convert raw contact counts into fractions of each trajectory's frames.
    for traj, counts in self.residue_counts.items():
        frame_total = len(self.contacts_per_timeframe[traj])
        self.residue_counts_fraction[traj] = {
            residue: float(count) / frame_total
            for residue, count in counts.items()}
    # Accumulate the per-trajectory fractions per residue.
    for traj_fractions in self.residue_counts_fraction.values():
        for residue, fraction in traj_fractions.items():
            self.frequency[residue].append(fraction)
    threshold = int(len(self.trajectory)) * analysis_cutoff
    self.topology_data.dict_of_plotted_res = {
        residue: fractions
        for residue, fractions in self.frequency.items()
        if sum(fractions) > threshold}
    assert len(self.topology_data.dict_of_plotted_res) != 0, "Nothing to draw for this ligand:(residue number: " + str(self.topology_data.universe.ligand.resids[0]) + " on the chain " + str(self.topology_data.universe.ligand.segids[0]) + ") - try reducing the analysis cutoff."
|
def visit_root(self, _, children):
    """The main node holding all the query.

    Arguments
    ---------
    _ (node): parsimonious.nodes.Node.
    children : list
        - 0: for ``WS`` (whitespace): ``None``.
        - 1: for ``NAMED_RESOURCE``: an instance of a subclass of
          ``.resources.Resource``.
        - 2: for ``WS`` (whitespace): ``None``.

    Returns
    -------
    .resources.Resource
        The resource parsed at position 1, with ``is_root`` set to
        ``True``.
    """
    root_resource = children[1]
    root_resource.is_root = True
    return root_resource
|
def _reverse_to_source ( self , target , group1 ) :
"""Args :
target ( dict ) : A table containing the reverse transitions for each state
group1 ( list ) : A group of states
Return :
Set : A set of states for which there is a transition with the states of the group"""
|
new_group = [ ]
for dst in group1 :
new_group += target [ dst ]
return set ( new_group )
|
def mark_stages(self, start_time, length, stage_name):
    """Mark stages, only add the new ones.

    Parameters
    ----------
    start_time : int
        start time in s of the epoch being scored.
    length : int
        duration in s of the epoch being scored.
    stage_name : str
        one of the stages defined in global stages; unknown names fall
        back to the 'Unknown' entry.
    """
    stage = STAGES.get(stage_name, STAGES['Unknown'])
    y_pos = BARS['stage']['pos0']

    # Probe the middle of the epoch; the -1 is really important,
    # otherwise we stay on the edge of the rect.
    probe_y = y_pos + stage['pos0'] + stage['pos1'] - 1
    existing = self.scene.itemAt(start_time + length / 2, probe_y,
                                 self.transform())

    # Score rectangles are drawn with NoPen; check we are not removing
    # the black border.
    if existing is not None and existing.pen() == NoPen:
        lg.debug('Removing old score at {}'.format(start_time))
        self.scene.removeItem(existing)
        self.idx_annot.remove(existing)

    score = QGraphicsRectItem(start_time, y_pos + stage['pos0'],
                              length, stage['pos1'])
    score.setPen(NoPen)
    score.setBrush(stage['color'])
    self.scene.addItem(score)
    self.idx_annot.append(score)
|
def push_supply(self, tokens):
    """Adds OPF and CPF data to a Generator.

    Parameters
    ----------
    tokens : mapping
        Must contain "bus_no" (1-based bus index) and the polynomial
        cost coefficients "p_fixed", "p_proportional", "p_quadratic".

    Logs an error and returns early when no generator sits at the bus;
    when several do, the first one is used (with a warning).
    """
    logger.debug("Pushing supply data: %s" % tokens)
    bus = self.case.buses[tokens["bus_no"] - 1]
    # Build the filtered list once (it was previously rebuilt up to
    # three times).
    generators = [g for g in self.case.generators if g.bus == bus]
    if not generators:
        logger.error("No generator at bus [%s] for matching supply" % bus)
        return
    if len(generators) > 1:
        # Fixed copy-paste: this message said "demand" in the supply
        # handler.
        logger.warning("More than one generator at bus [%s] for supply. "
                       "Using the first one [%s]." % (bus, generators[0]))
    g = generators[0]
    g.pcost_model = "poly"
    g.poly_coeffs = (tokens["p_fixed"],
                     tokens["p_proportional"],
                     tokens["p_quadratic"])
|
def plot_distributions(y_true, scores, save_to, xlim=None, nbins=100, ymax=3., dpi=150):
    """Scores distributions

    This function will create (and overwrite) the following files:
    - {save_to}.scores.png
    - {save_to}.scores.eps

    Parameters
    ----------
    y_true : (n_samples, ) array-like
        Boolean reference.
    scores : (n_samples, ) array-like
        Predicted score.
    save_to : str
        Files path prefix.
    xlim : (float, float), optional
        Histogram range; defaults to (min(scores), max(scores)).
    nbins : int, optional
        Number of histogram bins.
    ymax : float, optional
        Upper limit of the y axis.
    dpi : int, optional
        Resolution of the PNG file.

    Returns
    -------
    bool
        Always True.
    """
    # Accept any array-like as documented: boolean indexing and `~`
    # require ndarrays, not plain lists.
    y_true = np.asarray(y_true, dtype=bool)
    scores = np.asarray(scores)

    plt.figure(figsize=(12, 12))
    if xlim is None:
        xlim = (np.min(scores), np.max(scores))
    bins = np.linspace(xlim[0], xlim[1], nbins)
    # `normed` was deprecated and then removed from matplotlib (3.1+);
    # `density=True` is the equivalent replacement.
    plt.hist(scores[y_true], bins=bins, color='g', alpha=0.5, density=True)
    plt.hist(scores[~y_true], bins=bins, color='r', alpha=0.5, density=True)
    # TODO heuristic to estimate ymax from nbins and xlim
    plt.ylim(0, ymax)
    plt.tight_layout()
    plt.savefig(save_to + '.scores.png', dpi=dpi)
    plt.savefig(save_to + '.scores.eps')
    plt.close()
    return True
|
def readdatacommdct(idfname, iddfile='Energy+.idd', commdct=None):
    """Read the idf file.

    Parameters
    ----------
    idfname : str
        Path to the idf file.
    iddfile : str or object
        Path to the idd file when `commdct` is None; otherwise it is
        used directly as the parsed idd object — TODO confirm callers
        pass an `eplusdata.Idd` in that case.
    commdct : list, optional
        Previously extracted idd comment structure; when given, the idd
        file is not re-parsed.

    Returns
    -------
    tuple
        (data, commdct, idd_index); `idd_index` is None when a
        precomputed `commdct` was supplied, since it is only produced
        while parsing the idd.
    """
    # Bug fix: `idd_index` was only bound in the `not commdct` branch,
    # so passing a precomputed `commdct` raised NameError at the return.
    idd_index = None
    if not commdct:
        block, commlst, commdct, idd_index = parse_idd.extractidddata(iddfile)
        theidd = eplusdata.Idd(block, 2)
    else:
        theidd = iddfile
    data = eplusdata.Eplusdata(theidd, idfname)
    return data, commdct, idd_index
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.