signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
---|---|
def get_port_profile_for_intf_input_rbridge_id ( self , ** kwargs ) :
"""Auto Generated Code"""
|
config = ET . Element ( "config" )
get_port_profile_for_intf = ET . Element ( "get_port_profile_for_intf" )
config = get_port_profile_for_intf
input = ET . SubElement ( get_port_profile_for_intf , "input" )
rbridge_id = ET . SubElement ( input , "rbridge-id" )
rbridge_id . text = kwargs . pop ( 'rbridge_id' )
callback = kwargs . pop ( 'callback' , self . _callback )
return callback ( config )
|
def table_spec_path(cls, project, location, dataset, table_spec):
    """Return a fully-qualified table_spec resource path string."""
    template = (
        "projects/{project}/locations/{location}"
        "/datasets/{dataset}/tableSpecs/{table_spec}"
    )
    return google.api_core.path_template.expand(
        template,
        project=project,
        location=location,
        dataset=dataset,
        table_spec=table_spec,
    )
def cli(ctx, email, first_name, last_name, password, role="user", metadata=None):
    """Create a new user

    :param metadata: optional dict of extra user metadata; defaults to an
        empty dict.  (Was a mutable ``{}`` default argument, which is
        shared across calls in Python -- replaced by a None sentinel.)

    Output:
        an empty dictionary
    """
    if metadata is None:
        metadata = {}
    return ctx.gi.users.create_user(
        email, first_name, last_name, password, role=role, metadata=metadata)
def nearest_vertices(self, lon, lat, k=1, max_distance=2.0):
    """Query the cKDtree for the nearest neighbours and Euclidean
    distance from x, y points.

    Returns 0, 0 if a cKDtree has not been constructed
    (switch tree=True if you need this routine)

    Parameters
    ----------
    lon : 1D array of longitudinal coordinates in radians
    lat : 1D array of latitudinal coordinates in radians
    k : number of nearest neighbours to return (default: 1)
    max_distance : maximum Euclidean distance to search
        for neighbours (default: 2.0)

    Returns
    -------
    d : Euclidean distance between each point and their
        nearest neighbour(s)
    vert : vertices of the nearest neighbour(s)

    NOTE(review): the first value actually returned is the angular
    separation (arccos of the dot product), not the raw Euclidean
    distance ``dxyz`` from the tree query -- the docstring above
    predates that; confirm which is intended.
    """
    # Bail out when no tree was built; NOTE(review): `== False` / `== None`
    # would idiomatically be `is`-comparisons -- kept byte-identical here.
    if self.tree == False or self.tree == None:
        return 0, 0

    # Column vectors so they pass cleanly through lonlat2xyz.
    lons = np.array(lon).reshape(-1, 1)
    lats = np.array(lat).reshape(-1, 1)

    # Convert spherical (lon, lat) to 3D Cartesian points for the kd-tree.
    xyz = np.empty((lons.shape[0], 3))
    x, y, z = lonlat2xyz(lons, lats)
    xyz[:, 0] = x[:].reshape(-1)
    xyz[:, 1] = y[:].reshape(-1)
    xyz[:, 2] = z[:].reshape(-1)

    # Chordal (3D Euclidean) nearest-neighbour query.
    dxyz, vertices = self._cKDtree.query(xyz, k=k, distance_upper_bound=max_distance)

    if k == 1:  # force this to be a 2D array
        vertices = np.reshape(vertices, (-1, 1))

    # Now find the angular separation / great circle distance: dlatlon
    vertxyz = self.points[vertices].transpose(0, 2, 1)
    extxyz = np.repeat(xyz, k, axis=1).reshape(vertxyz.shape)
    # arccos of the dot product; assumes self.points are unit vectors --
    # TODO confirm (round-off slightly outside [-1, 1] would yield nan).
    angles = np.arccos((extxyz * vertxyz).sum(axis=1))

    return angles, vertices
async def reset(self, von_wallet: Wallet, seed: str = None) -> Wallet:
    """Close and delete (open) VON anchor wallet and then create, open, and return
    replacement on current link secret.

    Note that this operation effectively destroys private keys for keyed data
    structures such as credential offers or credential definitions.

    Raise WalletState if the wallet is closed.

    :param von_wallet: open wallet
    :param seed: seed to use for new wallet (default random)
    :return: replacement wallet
    """
    LOGGER.debug('WalletManager.reset >>> von_wallet %s', von_wallet)

    # A falsy handle means the wallet is not open -- refuse to reset it.
    if not von_wallet.handle:
        LOGGER.debug('WalletManager.reset <!< Wallet %s is closed', von_wallet.name)
        raise WalletState('Wallet {} is closed'.format(von_wallet.name))

    w_config = von_wallet.config  # wallet under reset, no need to make copy
    w_config['did'] = von_wallet.did
    w_config['seed'] = seed
    w_config['auto_create'] = von_wallet.auto_create
    # in case both auto_remove + auto_create set (create every open)
    w_config['auto_remove'] = von_wallet.auto_remove

    # Carry the link secret label forward so the replacement wallet is
    # created on the current link secret.
    label = await von_wallet.get_link_secret_label()
    if label:
        w_config['link_secret_label'] = label

    await von_wallet.close()
    # auto_remove wallets delete themselves on close; otherwise remove
    # the old wallet explicitly before creating its replacement.
    if not von_wallet.auto_remove:
        await self.remove(von_wallet)
    rv = await self.create(w_config, von_wallet.access)
    await rv.open()

    LOGGER.debug('WalletManager.reset <<< %s', rv)
    return rv
def insert_slash(string, every=2):
    """Return *string* with a '/' inserted between every *every* characters.

    The original called ``os.path.join`` on a single generator argument,
    which raises TypeError (and used the Python-2-only ``xrange``); a
    plain ``'/'.join`` of the fixed-size chunks is the documented intent
    ("insert / every 2 char").

    :param string: input string to chunk.
    :param every: chunk width (default 2).
    :return: the slash-separated string, e.g. ``insert_slash('abcdef')``
        gives ``'ab/cd/ef'``.
    """
    return "/".join(string[i:i + every] for i in range(0, len(string), every))
def _set_access_log(self, config, level):
    """Configure where and how CherryPy access logs are emitted.

    The handler type is read from the ``log.access_handler`` global
    config key: 'syslog' (default), 'stdout', 'file' (keep CherryPy's
    default file handler), or 'none' (discard everything).
    """
    access_handler = self._get_param(
        'global', 'log.access_handler', config, 'syslog',
    )

    if access_handler == 'syslog':
        # Replace the access log handler by a syslog handler.
        syslog_formatter = logging.Formatter(
            "ldapcherry[%(process)d]: %(message)s")
        cherrypy.log.access_log.handlers = []
        handler = logging.handlers.SysLogHandler(
            address='/dev/log', facility='user',)
        handler.setFormatter(syslog_formatter)
        cherrypy.log.access_log.addHandler(handler)
    elif access_handler == 'stdout':
        # Log accesses on stdout instead.
        cherrypy.log.access_log.handlers = []
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(logging.Formatter(
            'ldapcherry.access - %(levelname)s - %(message)s'))
        cherrypy.log.access_log.addHandler(handler)
    elif access_handler == 'file':
        # Keep CherryPy's default file handler untouched.
        pass
    elif access_handler == 'none':
        # Discard access log records entirely.
        cherrypy.log.access_log.handlers = []
        cherrypy.log.access_log.addHandler(logging.NullHandler())

    # Apply the requested verbosity in every case.
    cherrypy.log.access_log.setLevel(level)
def prepare_migration_target(self, uuid, nics=None, port=None, tags=None):
    """Prepare this node to receive a migrated kvm domain.

    :param uuid: uuid of the machine to be migrated on the old node
    :param port: dict of host_port:container_port pairs, e.g.
        ``port={8080: 80, 7000: 7000}`` (only supported on the default
        network)
    :param nics: list of nic dicts to attach, each with a 'type'
        ('default', 'bridge', 'vlan', or 'vxlan' -- vlan/vxlan need ovs)
        and an 'id' whose meaning depends on the type
    :param tags: tags forwarded to the client sync call
    :return: None
    """
    args = {
        'nics': [] if nics is None else nics,
        'port': port,
        'uuid': uuid,
    }
    # Validate the argument shape before dispatching to the node client.
    self._migrate_network_chk.check(args)
    self._client.sync('kvm.prepare_migration_target', args, tags=tags)
def compute_Pi_JinsDJ_given_D ( self , CDR3_seq , Pi_J_given_D , max_J_align ) :
"""Compute Pi _ JinsDJ conditioned on D .
This function returns the Pi array from the model factors of the J genomic
contributions , P ( D , J ) * P ( delJ | J ) , and the DJ ( N2 ) insertions ,
first _ nt _ bias _ insDJ ( n _ 1 ) PinsDJ ( \ ell _ { DJ } ) \ prod _ { i = 2 } ^ { \ ell _ { DJ } } Rdj ( n _ i | n _ { i - 1 } )
conditioned on D identity . This corresponds to { N ^ { x _ 3 } } _ { x _ 4 } J ( D ) ^ { x _ 4 } .
For clarity in parsing the algorithm implementation , we include which
instance attributes are used in the method as ' parameters . '
Parameters
CDR3 _ seq : str
CDR3 sequence composed of ' amino acids ' ( single character symbols
each corresponding to a collection of codons as given by codons _ dict ) .
Pi _ J _ given _ D : ndarray
List of ( 4 , 3L ) ndarrays corresponding to J ( D ) ^ { x _ 4 } .
max _ J _ align : int
Maximum alignment of the CDR3 _ seq to any genomic J allele allowed by
J _ usage _ mask .
self . PinsDJ : ndarray
Probability distribution of the DJ ( N2 ) insertion sequence length
self . first _ nt _ bias _ insDJ : ndarray
(4 , ) array of the probability distribution of the indentity of the
first nucleotide insertion for the DJ junction .
self . zero _ nt _ bias _ insDJ : ndarray
(4 , ) array of the probability distribution of the indentity of the
the nucleotide BEFORE the DJ insertion . Note , as the Markov model
at the DJ junction goes 3 ' to 5 ' this is the position AFTER the
insertions reading left to right .
self . Tdj : dict
Dictionary of full codon transfer matrices ( ( 4 , 4 ) ndarrays ) by
' amino acid ' .
self . Sdj : dict
Dictionary of transfer matrices ( ( 4 , 4 ) ndarrays ) by ' amino acid ' for
the DJ insertion ending in the first position .
self . Ddj : dict
Dictionary of transfer matrices ( ( 4 , 4 ) ndarrays ) by ' amino acid ' for
the VD insertion ending in the second position .
self . rTdj : dict
Dictionary of transfer matrices ( ( 4 , 4 ) ndarrays ) by ' amino acid ' for
the DJ insertion starting in the first position .
self . rDdj : dict
Dictionary of transfer matrices ( ( 4 , 4 ) ndarrays ) by ' amino acid ' for
DJ insertion starting in the first position and ending in the second
position of the same codon .
Returns
Pi _ JinsDJ _ given _ D : list
List of ( 4 , 3L ) ndarrays corresponding to { N ^ { x _ 3 } } _ { x _ 4 } J ( D ) ^ { x _ 4 } ."""
|
# max _ insertions = 30 # len ( PinsVD ) - 1 should zeropad the last few spots
max_insertions = len ( self . PinsDJ ) - 1
Pi_JinsDJ_given_D = [ np . zeros ( ( 4 , len ( CDR3_seq ) * 3 ) ) for i in range ( len ( Pi_J_given_D ) ) ]
for D_in in range ( len ( Pi_J_given_D ) ) : # start position is first nt in a codon
for init_pos in range ( - 1 , - ( max_J_align + 1 ) , - 3 ) : # Zero insertions
Pi_JinsDJ_given_D [ D_in ] [ : , init_pos ] += self . PinsDJ [ 0 ] * Pi_J_given_D [ D_in ] [ : , init_pos ]
# One insertion
Pi_JinsDJ_given_D [ D_in ] [ : , init_pos - 1 ] += self . PinsDJ [ 1 ] * np . dot ( self . rDdj [ CDR3_seq [ init_pos / 3 ] ] , Pi_J_given_D [ D_in ] [ : , init_pos ] )
# Two insertions and compute the base nt vec for the standard loop
current_base_nt_vec = np . dot ( self . rTdj [ CDR3_seq [ init_pos / 3 ] ] , Pi_J_given_D [ D_in ] [ : , init_pos ] )
Pi_JinsDJ_given_D [ D_in ] [ 0 , init_pos - 2 ] += self . PinsDJ [ 2 ] * np . sum ( current_base_nt_vec )
base_ins = 2
# Loop over all other insertions using base _ nt _ vec
for aa in CDR3_seq [ init_pos / 3 - 1 : init_pos / 3 - max_insertions / 3 : - 1 ] :
Pi_JinsDJ_given_D [ D_in ] [ : , init_pos - base_ins - 1 ] += self . PinsDJ [ base_ins + 1 ] * np . dot ( self . Sdj [ aa ] , current_base_nt_vec )
Pi_JinsDJ_given_D [ D_in ] [ : , init_pos - base_ins - 2 ] += self . PinsDJ [ base_ins + 2 ] * np . dot ( self . Ddj [ aa ] , current_base_nt_vec )
current_base_nt_vec = np . dot ( self . Tdj [ aa ] , current_base_nt_vec )
Pi_JinsDJ_given_D [ D_in ] [ 0 , init_pos - base_ins - 3 ] += self . PinsDJ [ base_ins + 3 ] * np . sum ( current_base_nt_vec )
base_ins += 3
# start position is second nt in a codon
for init_pos in range ( - 2 , - ( max_J_align + 1 ) , - 3 ) : # Zero insertions
Pi_JinsDJ_given_D [ D_in ] [ : , init_pos ] += self . PinsDJ [ 0 ] * Pi_J_given_D [ D_in ] [ : , init_pos ]
# One insertion - - - we first compute our p vec by pairwise mult with the ss distr
current_base_nt_vec = np . multiply ( Pi_J_given_D [ D_in ] [ : , init_pos ] , self . first_nt_bias_insDJ )
Pi_JinsDJ_given_D [ D_in ] [ 0 , init_pos - 1 ] += self . PinsDJ [ 1 ] * np . sum ( current_base_nt_vec )
base_ins = 1
# Loop over all other insertions using base _ nt _ vec
for aa in CDR3_seq [ init_pos / 3 - 1 : init_pos / 3 - max_insertions / 3 : - 1 ] :
Pi_JinsDJ_given_D [ D_in ] [ : , init_pos - base_ins - 1 ] += self . PinsDJ [ base_ins + 1 ] * np . dot ( self . Sdj [ aa ] , current_base_nt_vec )
Pi_JinsDJ_given_D [ D_in ] [ : , init_pos - base_ins - 2 ] += self . PinsDJ [ base_ins + 2 ] * np . dot ( self . Ddj [ aa ] , current_base_nt_vec )
current_base_nt_vec = np . dot ( self . Tdj [ aa ] , current_base_nt_vec )
Pi_JinsDJ_given_D [ D_in ] [ 0 , init_pos - base_ins - 3 ] += self . PinsDJ [ base_ins + 3 ] * np . sum ( current_base_nt_vec )
base_ins += 3
# start position is last nt in a codon
for init_pos in range ( - 3 , - ( max_J_align + 1 ) , - 3 ) : # Zero insertions
Pi_JinsDJ_given_D [ D_in ] [ 0 , init_pos ] += self . PinsDJ [ 0 ] * Pi_J_given_D [ D_in ] [ 0 , init_pos ]
# current _ base _ nt _ vec = first _ nt _ bias _ insDJ * Pi _ J _ given _ D [ D _ in ] [ 0 , init _ pos ] # Okay for steady state
current_base_nt_vec = self . zero_nt_bias_insDJ * Pi_J_given_D [ D_in ] [ 0 , init_pos ]
base_ins = 0
# Loop over all other insertions using base _ nt _ vec
for aa in CDR3_seq [ init_pos / 3 - 1 : init_pos / 3 - max_insertions / 3 : - 1 ] :
Pi_JinsDJ_given_D [ D_in ] [ : , init_pos - base_ins - 1 ] += self . PinsDJ [ base_ins + 1 ] * np . dot ( self . Sdj [ aa ] , current_base_nt_vec )
Pi_JinsDJ_given_D [ D_in ] [ : , init_pos - base_ins - 2 ] += self . PinsDJ [ base_ins + 2 ] * np . dot ( self . Ddj [ aa ] , current_base_nt_vec )
current_base_nt_vec = np . dot ( self . Tdj [ aa ] , current_base_nt_vec )
Pi_JinsDJ_given_D [ D_in ] [ 0 , init_pos - base_ins - 3 ] += self . PinsDJ [ base_ins + 3 ] * np . sum ( current_base_nt_vec )
base_ins += 3
return Pi_JinsDJ_given_D
|
def get_filter_solvers(self, filter_):
    """Return the filter-solver instances able to solve the given filter.

    Arguments
    ---------
    filter_ : dataql.resources.BaseFilter
        Instance of a ``BaseFilter`` subclass to find solvers for.

    Returns
    -------
    list
        Filter-solver instances (one per matching class, memoized on the
        registry) that can solve the given filter.

    Raises
    ------
    dataql.solvers.exceptions.SolverNotFound
        When no registered solver class can solve the given filter.
    """
    matching = [cls for cls in self.filter_solver_classes if cls.can_solve(filter_)]
    if not matching:
        raise SolverNotFound(self, filter_)

    result = []
    for solver_class in matching:
        # Instantiate lazily and keep exactly one solver per class.
        if solver_class not in self._filter_solvers_cache:
            self._filter_solvers_cache[solver_class] = solver_class(self)
        result.append(self._filter_solvers_cache[solver_class])
    return result
def resource_to_url(resource, request=None, quote=False):
    """Converts the given resource to a URL.

    :param request: Request object (required for the host name part of the
        URL). If this is not given, the current request is used.
    :param bool quote: If set, the URL returned will be quoted.
    """
    req = get_current_request() if request is None else request
    # Look up the URL converter adapter on the current registry.
    registry = get_current_registry()
    converter = registry.getAdapter(req, IResourceUrlConverter)
    return converter.resource_to_url(resource, quote=quote)
def page_templates_loading_check(app_configs, **kwargs):
    """Check if any page template can't be loaded.

    Returns a list of ``checks.Warning`` (id ``pages.W001``), one per
    configured page template that the template loader cannot find.
    """
    errors = []
    for tpl in settings.get_page_templates():
        try:
            loader.get_template(tpl[0])
        except template.TemplateDoesNotExist:
            warning = checks.Warning(
                'Django cannot find template %s' % tpl[0],
                obj=tpl,
                id='pages.W001',
            )
            errors.append(warning)
    return errors
def _set_Cpu(self, v, load=False):
    """Setter method for Cpu, mapped from YANG variable /rbridge_id/threshold_monitor/Cpu (container)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_Cpu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_Cpu() directly.
    """
    # Let the incoming value coerce itself through its YANG user type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Wrap the value in a YANGDynClass bound to this node's YANG
        # metadata (auto-generated arguments; do not hand-edit).
        t = YANGDynClass(v, base=Cpu.Cpu, is_container='container', presence=False, yang_name="Cpu", rest_name="Cpu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure settings for component:CPU', u'cli-compact-syntax': None, u'callpoint': u'CpuMonitor', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-threshold-monitor', defining_module='brocade-threshold-monitor', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Surface a uniform error payload describing the expected type.
        raise ValueError({'error-string': """Cpu must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=Cpu.Cpu, is_container='container', presence=False, yang_name="Cpu", rest_name="Cpu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure settings for component:CPU', u'cli-compact-syntax': None, u'callpoint': u'CpuMonitor', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-threshold-monitor', defining_module='brocade-threshold-monitor', yang_type='container', is_config=True)""",})
    self.__Cpu = t
    # Notify the framework of the change, if a change hook is installed.
    if hasattr(self, '_set'):
        self._set()
def aldb_device_handled(self, addr):
    """Remove a device from the ALDB device list.

    ``addr`` may be an ``Address`` instance (its ``id`` is used) or a
    plain address key. A missing device is logged, not an error.
    """
    remove_addr = addr.id if isinstance(addr, Address) else addr
    try:
        self._aldb_devices.pop(remove_addr)
    except KeyError:
        _LOGGER.debug('Device %s not in ALDB device list', remove_addr)
    else:
        _LOGGER.debug('Removed ALDB device %s', remove_addr)
    _LOGGER.debug('ALDB device count: %d', len(self._aldb_devices))
def parse(celf, s):
    "generates an Introspection tree from the given XML string description."

    def from_string_elts(celf, attrs, tree):
        # Recursively build an instance of element class `celf` from the
        # already-parsed attributes `attrs` plus the children of XML
        # element `tree`.
        elts = dict((k, attrs[k]) for k in attrs)
        # Map tag name -> element class for every recognized child tag;
        # any element may also carry <annotation> children.
        child_tags = dict((childclass.tag_name, childclass) for childclass in tuple(celf.tag_elts.values()) + (Introspection.Annotation,))
        children = []
        for child in tree:
            if child.tag not in child_tags:
                raise KeyError("unrecognized tag %s" % child.tag)
            # end if
            childclass = child_tags[child.tag]
            childattrs = {}
            for attrname in childclass.tag_attrs:
                if hasattr(childclass, "tag_attrs_optional") and attrname in childclass.tag_attrs_optional:
                    # optional attribute: default to None when absent
                    childattrs[attrname] = child.attrib.get(attrname, None)
                else:
                    if attrname not in child.attrib:
                        raise ValueError("missing %s attribute for %s tag" % (attrname, child.tag))
                    # end if
                    childattrs[attrname] = child.attrib[attrname]
                # end if
            # end for
            if hasattr(childclass, "attr_convert"):
                # apply per-attribute conversion callables to raw strings
                for attr in childclass.attr_convert:
                    if attr in childattrs:
                        childattrs[attr] = childclass.attr_convert[attr](childattrs[attr])
                    # end if
                # end for
            # end if
            children.append(from_string_elts(childclass, childattrs, child))
        # end for
        # Group constructed children into constructor keyword lists by tag.
        # The trailing bool-indexed tuple appends the ("annotations", ...)
        # entry for every tag EXCEPT <annotation> itself (annotations
        # cannot nest); False selects (), True selects the 1-entry tuple.
        for child_tag, childclass in tuple(celf.tag_elts.items()) + ((), (("annotations", Introspection.Annotation),))[tree.tag != "annotation"]:
            for child in children:
                if isinstance(child, childclass):
                    if child_tag not in elts:
                        elts[child_tag] = []
                    # end if
                    elts[child_tag].append(child)
                # end if
            # end for
        # end for
        return celf(**elts)
    # end from_string_elts

    # begin parse
    tree = XMLElementTree.fromstring(s)
    assert tree.tag == "node", "root of introspection tree must be <node> tag"
    return from_string_elts(Introspection, {}, tree)
def get_gradebook_column_summary(self, gradebook_column_id):
    """Gets the ``GradebookColumnSummary`` for summary results.

    arg:    gradebook_column_id (osid.id.Id): ``Id`` of the
            ``GradebookColumn``
    return: (osid.grading.GradebookColumnSummary) - the gradebook
            column summary
    raise:  NotFound - ``gradebook_column_id`` is not found
    raise:  NullArgument - ``gradebook_column_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    raise:  Unimplemented - ``has_summary()`` is ``false``
    *compliance: mandatory -- This method must be implemented.*
    """
    column = self.get_gradebook_column(gradebook_column_id)
    # Reuse the column's underlying map, tagging it with the source
    # column's id so the summary can reference it.
    summary_map = column._my_map
    summary_map['gradebookColumnId'] = str(column.ident)
    return GradebookColumnSummary(
        osid_object_map=summary_map,
        runtime=self._runtime,
        proxy=self._proxy,
    )
def build(dburl, sitedir, mode):
    """Build a site.

    Runs ``nikola build`` as a subprocess inside *sitedir*, streaming
    progress milestones into the current RQ job's meta dict, and returns
    the subprocess's exit code.
    """
    # 'force' maps to nikola's -a flag (rebuild everything).
    if mode == 'force':
        amode = ['-a']
    else:
        amode = []
    oldcwd = os.getcwd()
    os.chdir(sitedir)
    db = StrictRedis.from_url(dburl)
    job = get_current_job(db)
    job.meta.update({'out': '', 'milestone': 0, 'total': 1, 'return': None, 'status': None})
    job.save()
    p = subprocess.Popen([executable, '-m', 'nikola', 'build'] + amode, stderr=subprocess.PIPE)
    # Marker strings scanned for in nikola's stderr to report progress.
    milestones = {'done!': 0, 'render_posts': 0, 'render_pages': 0, 'generate_rss': 0, 'render_indexes': 0, 'sitemap': 0}
    out = []
    while p.poll() is None:
        nl = p.stderr.readline().decode('utf-8')
        for k in milestones:
            if k in nl:
                milestones[k] = 1
        out.append(nl)
        job.meta.update({'milestone': sum(milestones.values()), 'total': len(milestones), 'out': ''.join(out), 'return': None, 'status': None})
        job.save()
    # NOTE(review): readlines() yields bytes while `out` holds decoded
    # str -- presumably written for Python 2; under Python 3 the join
    # below would fail on mixed types. TODO confirm/port.
    out += p.stderr.readlines()
    out = ''.join(out)
    job.meta.update({'milestone': len(milestones), 'total': len(milestones), 'out': ''.join(out), 'return': p.returncode, 'status': p.returncode == 0})
    job.save()
    os.chdir(oldcwd)
    return p.returncode
def check_abundance(number):
    """Determine whether *number* is abundant.

    A number is abundant when the sum of its proper divisors exceeds the
    number itself.

    Examples:
        check_abundance(12) -> True
        check_abundance(13) -> False
        check_abundance(9)  -> False

    :param number: The number to check for abundance.
    :return: True if the number is abundant, otherwise False.
    """
    total = 0
    for candidate in range(1, number):
        if number % candidate == 0:
            total += candidate
    return total > number
def from_array(array):
    """Deserialize a new CallbackQuery from a given dictionary.

    :return: new CallbackQuery instance.
    :rtype: CallbackQuery
    """
    if array is None or not array:
        return None
    # end if
    assert_type_or_raise(array, dict, parameter_name="array")
    from ..receivable.peer import User

    data = {
        'id': u(array.get('id')),
        'from_peer': User.from_array(array.get('from')),
        'chat_instance': u(array.get('chat_instance')),
    }
    # Optional fields: wrap only when present, otherwise keep None.
    message = array.get('message')
    data['message'] = Message.from_array(message) if message is not None else None
    inline_message_id = array.get('inline_message_id')
    data['inline_message_id'] = u(inline_message_id) if inline_message_id is not None else None
    payload = array.get('data')
    data['data'] = u(payload) if payload is not None else None
    game_short_name = array.get('game_short_name')
    data['game_short_name'] = u(game_short_name) if game_short_name is not None else None
    data['_raw'] = array
    return CallbackQuery(**data)
def paste(**kwargs):
    """Returns system clipboard contents.

    :return: the CLIPBOARD selection text.
    :raises TclError: if the clipboard is empty or unavailable.
    """
    window = Tk()
    window.withdraw()
    try:
        return window.selection_get(selection='CLIPBOARD')
    finally:
        # The original leaked the hidden root window; tear it down so
        # repeated calls do not accumulate Tk interpreters.
        window.destroy()
def _get_importer(path_name):
    """Python version of PyImport_GetImporter C API function

    Looks up (and memoizes in sys.path_importer_cache) the importer
    object responsible for *path_name*, returning None when the path is
    handled by the standard import machinery.
    """
    cache = sys.path_importer_cache
    try:
        importer = cache[path_name]
    except KeyError:
        # Not yet cached. Flag as using the
        # standard machinery until we finish
        # checking the hooks
        cache[path_name] = None
        for hook in sys.path_hooks:
            try:
                importer = hook(path_name)
                break
            except ImportError:
                pass
        else:
            # (for/else: runs only when no hook accepted the path.)
            # The following check looks a bit odd. The trick is that
            # NullImporter throws ImportError if the supplied path is a
            # *valid* directory entry (and hence able to be handled
            # by the standard import machinery)
            # NOTE(review): the `imp` module is deprecated and removed in
            # Python 3.12 -- this code path is Python-2/early-3 only.
            try:
                importer = imp.NullImporter(path_name)
            except ImportError:
                # Valid directory entry: leave the None sentinel cached
                # and report "no importer".
                return None
        cache[path_name] = importer
    return importer
def is_obsoletes_pid(pid):
    """Return True if ``pid`` is referenced in the obsoletes field of any object.

    This will return True even if the PID is in the obsoletes field of an
    object that does not exist on the local MN, such as a replica that is
    in an incomplete chain.
    """
    matching = d1_gmn.app.models.ScienceObject.objects.filter(obsoletes__did=pid)
    return matching.exists()
def root_namespace(self, value):
    """Setter for **self.__root_namespace** attribute.

    :param value: Attribute value.
    :type value: unicode
    """
    if value is not None:
        # NOTE(review): Python-2 style exact-type check against `unicode`;
        # also, `assert` is stripped under -O, so this is a debug-time
        # guard rather than real input validation.
        assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format("root_namespace", value)
    self.__root_namespace = value
def updateIncomeProcess(self):
    '''An alternative method for constructing the income process in the infinite horizon model.

    Parameters
    ----------
    none

    Returns
    -------
    none
    '''
    if self.cycles == 0:  # infinite-horizon case
        # Flat labor tax rate that finances unemployment benefits:
        # IncUnemp paid with probability UnempPrb, funded by taxing the
        # employed (who work IndL hours).
        tax_rate = (self.IncUnemp * self.UnempPrb) / ((1.0 - self.UnempPrb) * self.IndL)
        TranShkDstn = deepcopy(approxMeanOneLognormal(self.TranShkCount, sigma=self.TranShkStd[0], tail_N=0))
        # Prepend the unemployment state: index 0 holds probability
        # UnempPrb / income IncUnemp; employed outcomes are rescaled to
        # after-tax labor income.
        TranShkDstn[0] = np.insert(TranShkDstn[0] * (1.0 - self.UnempPrb), 0, self.UnempPrb)
        TranShkDstn[1] = np.insert(TranShkDstn[1] * (1.0 - tax_rate) * self.IndL, 0, self.IncUnemp)
        PermShkDstn = approxMeanOneLognormal(self.PermShkCount, sigma=self.PermShkStd[0], tail_N=0)
        # Permanent and transitory shocks are independent.
        self.IncomeDstn = [combineIndepDstns(PermShkDstn, TranShkDstn)]
        self.TranShkDstn = TranShkDstn
        self.PermShkDstn = PermShkDstn
        self.addToTimeVary('IncomeDstn')
    else:
        # Do the usual method if this is the lifecycle model
        EstimationAgentClass.updateIncomeProcess(self)
def subtract(lhs, rhs):
    """Return the element-wise difference of the input arrays with broadcasting.

    Equivalent to ``lhs - rhs``. When both operands are NDArrays of the
    exact same shape this dispatches to ``elemwise_sub``; otherwise it
    uses ``broadcast_sub`` (dimensions broadcast when equal or of size 1).
    Scalar operands are routed to ``_minus_scalar`` by the helper.

    Parameters
    ----------
    lhs : scalar or mxnet.ndarray.sparse.array
        First array to be subtracted.
    rhs : scalar or mxnet.ndarray.sparse.array
        Second array to be subtracted; must be broadcastable to a common
        shape with ``lhs`` when shapes differ.

    Returns
    -------
    NDArray
        The element-wise difference of the input arrays.
    """
    # pylint: disable=no-member, protected-access
    shapes_match = (
        isinstance(lhs, NDArray)
        and isinstance(rhs, NDArray)
        and lhs.shape == rhs.shape
    )
    fn = op.elemwise_sub if shapes_match else op.broadcast_sub
    return _ufunc_helper(lhs, rhs, fn, operator.sub, _internal._minus_scalar, None)
def __get_update_uri(self, account_id, **kwargs):
    """Call documentation: `/account/get_update_uri
    <https://www.wepay.com/developer/reference/account#update_uri>`_, plus extra
    keyword parameters:

    :keyword str access_token: used instead of the instance's
        ``access_token``; with ``batch_mode=True`` it is set as the
        `authorization` param.
    :keyword bool batch_mode: turn on/off batch mode, see
        :class:`wepay.api.WePay`
    :keyword str batch_reference_id: `reference_id` param for a batch
        call, see :class:`wepay.api.WePay`
    :keyword str api_version: WePay API version, see
        :class:`wepay.api.WePay`
    """
    return self.make_call(
        self.__get_update_uri,
        {'account_id': account_id},
        kwargs,
    )
def get_files(*bases):
    """List all files in a data directory.

    Each base is a dotted package path; yields file paths rooted at the
    base's top-level package directory.
    """
    for dotted in bases:
        top, _ = dotted.split(".", 1)
        root_path = os.path.join(os.path.dirname(__file__), *dotted.split("."))
        # Length of the "<parent-dir>/<top>/" prefix to strip from results.
        strip = len(os.path.dirname(root_path)) + len(top) + 2
        for folder, _dirs, filenames in os.walk(root_path):
            for filename in filenames:
                yield os.path.join(top, folder, filename)[strip:]
def sign_execute_deposit(deposit_params, private_key, infura_url):
    """Function to execute the deposit request by signing the transaction generated from the create deposit function.

    Execution of this function is as follows::

        sign_execute_deposit(deposit_params=create_deposit, private_key=eth_private_key)

    The expected return result for this function is as follows::

        'transaction_hash': '0xcf3ea5d1821544e1686fbcb1f49d423b9ea9f42772ff9ecdaf615616d780fa75'

    :param deposit_params: The parameters generated by the create function that now requires a signature.
    :type deposit_params: dict
    :param private_key: The Ethereum private key to sign the deposit parameters.
    :type private_key: str
    :param infura_url: The URL used to broadcast the deposit transaction to the Ethereum network.
    :type infura_url: str
    :return: Dictionary of the signed transaction to initiate the deposit of ETH via the Switcheo API.
    """
    create_deposit_upper = deposit_params.copy()
    # NOTE(review): .copy() is shallow -- the nested 'transaction' dict is
    # still shared with (and mutated on) the caller's deposit_params.
    # Normalize addresses to EIP-55 checksum form before signing.
    create_deposit_upper['transaction']['from'] = to_checksum_address(create_deposit_upper['transaction']['from'])
    create_deposit_upper['transaction']['to'] = to_checksum_address(create_deposit_upper['transaction']['to'])
    # 'sha256' is API metadata, not a valid Ethereum transaction field.
    create_deposit_upper['transaction'].pop('sha256')
    signed_create_txn = Account.signTransaction(create_deposit_upper['transaction'], private_key=private_key)
    execute_signed_txn = binascii.hexlify(signed_create_txn['hash']).decode()
    # Broadcast transaction to Ethereum Network.
    Web3(HTTPProvider(infura_url)).eth.sendRawTransaction(signed_create_txn.rawTransaction)
    return {'transaction_hash': '0x' + execute_signed_txn}
def appendData(self, content):
    """Append characters to this element's pcdata (or start it if empty)."""
    self.pcdata = content if self.pcdata is None else self.pcdata + content
def __prepare_body(self, search_value, search_type='url'):
    """Build the HTTP request body for a Safe Browsing API lookup.

    :param search_value: value to search for
    :type search_value: str
    :param search_type: 'url' or 'ip'
    :type search_type: str
    :returns: http body as dict
    :rtype: dict
    :raises SearchTypeNotSupportedError: for any other search_type
    """
    body = {
        'client': {
            'clientId': self.client_id,
            'clientVersion': self.client_version,
        }
    }
    if search_type == 'url':
        threat_info = {
            'threatTypes': ['MALWARE', 'SOCIAL_ENGINEERING', 'UNWANTED_SOFTWARE', 'POTENTIALLY_HARMFUL_APPLICATION'],
            'platformTypes': ['ANY_PLATFORM', 'ALL_PLATFORMS', 'WINDOWS', 'LINUX', 'OSX', 'ANDROID', 'IOS'],
            'threatEntryTypes': ['URL'],
        }
    elif search_type == 'ip':
        threat_info = {
            'threatTypes': ['MALWARE'],
            'platformTypes': ['WINDOWS', 'LINUX', 'OSX'],
            'threatEntryTypes': ['IP_RANGE'],
        }
    else:
        raise SearchTypeNotSupportedError(
            'Currently supported search types are \'url\' and \'ip\'.')
    # TODO: Only found threatEntry 'url' in the docs. What to use for ip_range?
    threat_info['threatEntries'] = [{'url': search_value}]
    body['threatInfo'] = threat_info
    return body
def shift_fn(self, i, pre_dl=None, post_dl=None):
    """Tap the Shift+Fn1~12 key combination once.

    :param i: index into the function-key table (Fn1..Fn12)
    :param pre_dl: optional delay before pressing
    :param post_dl: optional delay after releasing
    """
    self.delay(pre_dl)
    kb = self.k
    kb.press_key(kb.shift_key)
    kb.tap_key(kb.function_keys[i])
    kb.release_key(kb.shift_key)
    self.delay(post_dl)
|
def rewrite_elife_title_prefix_json(json_content, doi):
    """Rewrite an eLife title prefix to its canonical capitalization.

    If ``doi`` has a known canonical prefix and ``json_content`` matches it
    case-insensitively, the canonical form is returned; otherwise the input
    is returned unchanged.
    """
    if not json_content:
        return json_content
    # Canonical title prefixes keyed by article DOI.
    title_prefix_values = {
        "10.7554/eLife.00452": "Point of View",
        "10.7554/eLife.00615": "Point of View",
        "10.7554/eLife.00639": "Point of View",
        "10.7554/eLife.00642": "Point of View",
        "10.7554/eLife.00856": "Point of View",
        "10.7554/eLife.01061": "Point of View",
        "10.7554/eLife.01138": "Point of View",
        "10.7554/eLife.01139": "Point of View",
        "10.7554/eLife.01820": "Animal Models of Disease",
        "10.7554/eLife.02576": "Point of View",
        "10.7554/eLife.04902": "Point of View",
        "10.7554/eLife.05614": "Point of View",
        "10.7554/eLife.05635": "The Natural History of Model Organisms",
        "10.7554/eLife.05826": "The Natural History of Model Organisms",
        "10.7554/eLife.05835": "The Natural History of Model Organisms",
        "10.7554/eLife.05849": "The Natural History of Model Organisms",
        "10.7554/eLife.05861": "The Natural History of Model Organisms",
        "10.7554/eLife.05959": "The Natural History of Model Organisms",
        "10.7554/eLife.06024": "The Natural History of Model Organisms",
        "10.7554/eLife.06100": "The Natural History of Model Organisms",
        "10.7554/eLife.06793": "The Natural History of Model Organisms",
        "10.7554/eLife.06813": "The Natural History of Model Organisms",
        "10.7554/eLife.06956": "The Natural History of Model Organisms",
        "10.7554/eLife.09305": "Point of View",
        "10.7554/eLife.10825": "Point of View",
        "10.7554/eLife.11628": "Living Science",
        "10.7554/eLife.12708": "Point of View",
        "10.7554/eLife.12844": "Point of View",
        "10.7554/eLife.13035": "Point of View",
        "10.7554/eLife.14258": "Cutting Edge",
        "10.7554/eLife.14424": "Point of View",
        "10.7554/eLife.14511": "Cell Proliferation",
        "10.7554/eLife.14721": "Intracellular Bacteria",
        "10.7554/eLife.14790": "Decision Making",
        "10.7554/eLife.14830": "Progenitor Cells",
        "10.7554/eLife.14953": "Gene Expression",
        "10.7554/eLife.14973": "Breast Cancer",
        "10.7554/eLife.15352": "Autoimmune Disorders",
        "10.7554/eLife.15438": "Motor Circuits",
        "10.7554/eLife.15591": "Protein Tagging",
        "10.7554/eLife.15928": "Point of View",
        "10.7554/eLife.15938": "Cancer Metabolism",
        "10.7554/eLife.15957": "Stem Cells",
        "10.7554/eLife.15963": "Prediction Error",
        "10.7554/eLife.16019": "Social Networks",
        "10.7554/eLife.16076": "mRNA Decay",
        "10.7554/eLife.16207": "Cardiac Development",
        "10.7554/eLife.16209": "Neural Coding",
        "10.7554/eLife.16393": "Neural Circuits",
        "10.7554/eLife.16598": "RNA Localization",
        "10.7554/eLife.16758": "Adaptive Evolution",
        "10.7554/eLife.16800": "Point of View",
        "10.7554/eLife.16846": "Living Science",
        "10.7554/eLife.16931": "Point of View",
        "10.7554/eLife.16964": "Ion Channels",
        "10.7554/eLife.17224": "Host-virus Interactions",
        "10.7554/eLife.17293": "Ion Channels",
        "10.7554/eLife.17393": "Point of View",
        "10.7554/eLife.17394": "p53 Family Proteins",
        "10.7554/eLife.18203": "Antibody Engineering",
        "10.7554/eLife.18243": "Host-virus Interactions",
        "10.7554/eLife.18365": "DNA Repair",
        "10.7554/eLife.18431": "Unfolded Protein Response",
        "10.7554/eLife.18435": "Long Distance Transport",
        "10.7554/eLife.18721": "Decision Making",
        "10.7554/eLife.18753": "Resource Competition",
        "10.7554/eLife.18871": "Mathematical Modeling",
        "10.7554/eLife.18887": "Sensorimotor Transformation",
        "10.7554/eLife.19285": "Genetic Screen",
        "10.7554/eLife.19351": "Motor Control",
        "10.7554/eLife.19405": "Membrane Structures",
        "10.7554/eLife.19733": "Focal Adhesions",
        "10.7554/eLife.20043": "Amyloid-beta Peptides",
        "10.7554/eLife.20314": "Plant Reproduction",
        "10.7554/eLife.20468": "Endoplasmic Reticulum",
        "10.7554/eLife.20516": "Innate Like Lymphocytes",
        "10.7554/eLife.21070": "Scientific Publishing",
        "10.7554/eLife.21236": "Developmental Neuroscience",
        "10.7554/eLife.21522": "Developmental Neuroscience",
        "10.7554/eLife.21723": "Living Science",
        "10.7554/eLife.21863": "Genetic Screening",
        "10.7554/eLife.21864": "Evolutionary Biology",
        "10.7554/eLife.22073": "Unfolded Protein Response",
        "10.7554/eLife.22186": "Point of View",
        "10.7554/eLife.22215": "Neural Wiring",
        "10.7554/eLife.22256": "Molecular Communication",
        "10.7554/eLife.22471": "Point of View",
        "10.7554/eLife.22661": "Reproducibility in Cancer Biology",
        "10.7554/eLife.22662": "Reproducibility in Cancer Biology",
        "10.7554/eLife.22735": "Motor Networks",
        "10.7554/eLife.22850": "Heat Shock Response",
        "10.7554/eLife.22915": "Reproducibility in Cancer Biology",
        "10.7554/eLife.22926": "Skeletal Stem Cells",
        "10.7554/eLife.23375": "Social Evolution",
        "10.7554/eLife.23383": "Reproducibility in Cancer Biology",
        "10.7554/eLife.23447": "Genetic Rearrangement",
        "10.7554/eLife.23693": "Reproducibility in Cancer Biology",
        "10.7554/eLife.23804": "Point of View",
        "10.7554/eLife.24038": "Cell Division",
        "10.7554/eLife.24052": "DNA Replication",
        "10.7554/eLife.24106": "Germ Granules",
        "10.7554/eLife.24238": "Tumor Angiogenesis",
        "10.7554/eLife.24276": "Stem Cells",
        "10.7554/eLife.24611": "Point of View",
        "10.7554/eLife.24896": "Visual Behavior",
        "10.7554/eLife.25000": "Chromatin Mapping",
        "10.7554/eLife.25001": "Cell Cycle",
        "10.7554/eLife.25159": "Ion Channels",
        "10.7554/eLife.25358": "Cell Division",
        "10.7554/eLife.25375": "Membrane Phase Separation",
        "10.7554/eLife.25408": "Plain-language Summaries of Research",
        "10.7554/eLife.25410": "Plain-language Summaries of Research",
        "10.7554/eLife.25411": "Plain-language Summaries of Research",
        "10.7554/eLife.25412": "Plain-language Summaries of Research",
        "10.7554/eLife.25431": "Genetic Diversity",
        "10.7554/eLife.25654": "Systems Biology",
        "10.7554/eLife.25669": "Paternal Effects",
        "10.7554/eLife.25700": "TOR Signaling",
        "10.7554/eLife.25835": "Cutting Edge",
        "10.7554/eLife.25858": "Developmental Biology",
        "10.7554/eLife.25956": "Point of View",
        "10.7554/eLife.25996": "Cancer Therapeutics",
        "10.7554/eLife.26295": "Point of View",
        "10.7554/eLife.26401": "Object Recognition",
        "10.7554/eLife.26775": "Human Evolution",
        "10.7554/eLife.26787": "Cutting Edge",
        "10.7554/eLife.26942": "Alzheimer’s Disease",
        "10.7554/eLife.27085": "Translational Control",
        "10.7554/eLife.27198": "Cell Signaling",
        "10.7554/eLife.27438": "Point of View",
        "10.7554/eLife.27467": "Evolutionary Developmental Biology",
        "10.7554/eLife.27605": "Population Genetics",
        "10.7554/eLife.27933": "Ion Channels",
        "10.7554/eLife.27982": "Living Science",
        "10.7554/eLife.28339": "Oncogene Regulation",
        "10.7554/eLife.28514": "Maternal Behavior",
        "10.7554/eLife.28699": "Point of View",
        "10.7554/eLife.28757": "Mitochondrial Homeostasis",
        "10.7554/eLife.29056": "Gene Variation",
        "10.7554/eLife.29104": "Cardiac Hypertrophy",
        "10.7554/eLife.29502": "Meiotic Recombination",
        "10.7554/eLife.29586": "Virus Evolution",
        "10.7554/eLife.29942": "Post-translational Modifications",
        "10.7554/eLife.30076": "Scientific Publishing",
        "10.7554/eLife.30183": "Point of View",
        "10.7554/eLife.30194": "Organ Development",
        "10.7554/eLife.30249": "Tissue Regeneration",
        "10.7554/eLife.30280": "Adverse Drug Reactions",
        "10.7554/eLife.30599": "Living Science",
        "10.7554/eLife.30865": "Stone Tool Use",
        "10.7554/eLife.31106": "Sensory Neurons",
        "10.7554/eLife.31328": "Drought Stress",
        "10.7554/eLife.31697": "Scientific Publishing",
        "10.7554/eLife.31808": "Tissue Engineering",
        "10.7554/eLife.31816": "Sound Processing",
        "10.7554/eLife.32011": "Peer Review",
        "10.7554/eLife.32012": "Peer Review",
        "10.7554/eLife.32014": "Peer Review",
        "10.7554/eLife.32015": "Peer Review",
        "10.7554/eLife.32016": "Peer Review",
        "10.7554/eLife.32715": "Point of View",
    }
    canonical = title_prefix_values.get(doi)
    # Sanity check: only replace when the lowercase forms agree, so a value
    # that has been changed to something else entirely is left untouched.
    if canonical is not None and json_content.lower() == canonical.lower():
        json_content = canonical
    return json_content
|
def _raise_error_if_disconnected(self) -> None:
    """Raise ``SMTPServerDisconnected`` if the connection has gone away.

    Also closes the client to release any half-open resources.
    """
    still_connected = (
        self.transport is not None
        and self.protocol is not None
        and not self.transport.is_closing()
    )
    if still_connected:
        return
    self.close()
    raise SMTPServerDisconnected("Disconnected from SMTP server")
|
def msetnx(self, *args, **kwargs):
    """Set key/values from a mapping only if none of the keys already exist.

    The mapping may be given as a single dict positional argument or as
    keyword arguments.

    :returns: True if all keys were set, False if any key already existed.
    :raises RedisError: if positional arguments are not a single dict.
    :raises ResponseError: if the mapping is empty.
    """
    if args:
        if len(args) != 1 or not isinstance(args[0], dict):
            raise RedisError('MSETNX requires **kwargs or a single dict arg')
        mapping = args[0]
    else:
        mapping = kwargs
    if not mapping:
        raise ResponseError("wrong number of arguments for 'msetnx' command")
    # All-or-nothing: refuse the whole operation if any key exists.
    if any(self._encode(key) in self.redis for key in mapping):
        return False
    for key, value in mapping.items():
        self.set(key, value)
    return True
|
def recoverTransaction(self, serialized_transaction):
    '''Get the address of the account that signed this transaction.

    :param serialized_transaction: the complete signed transaction
    :type serialized_transaction: hex str, bytes or int
    :returns: address of signer, hex-encoded & checksummed
    :rtype: str

    .. code-block:: python

        >>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'  # noqa: E501
        >>> Account.recoverTransaction(raw_transaction)
        '0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
    '''
    txn = Transaction.from_bytes(HexBytes(serialized_transaction))
    # Recover the signer from the hash of the signed payload plus the
    # (v, r, s) signature components embedded in the transaction.
    return self.recoverHash(hash_of_signed_transaction(txn), vrs=vrs_from(txn))
|
def get_family(self):
    """Gets the ``Family`` associated with this session.

    return: (osid.relationship.Family) - the family
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    lookup_session = FamilyLookupSession(proxy=self._proxy, runtime=self._runtime)
    return lookup_session.get_family(self._family_id)
|
def recode(inlist, listmap, cols=None):
    """Changes the values in a list to a new set of values (useful when
    you need to recode data from (e.g.) strings to numbers).  cols defaults
    to None (meaning all columns are recoded).

    Usage:   recode(inlist, listmap, cols=None)  cols=recode cols, listmap=2D list
    Returns: a deep copy of inlist with the appropriate values replaced with
             new ones; the input is never modified.
    """
    lst = copy.deepcopy(inlist)
    # The first column of listmap holds the old values; hoist it out of the
    # loops so it is computed once instead of per cell.
    old_values = colex(listmap, 0)
    if cols is not None:
        # isinstance replaces the Python-2-era ListType/TupleType check.
        if not isinstance(cols, (list, tuple)):
            cols = [cols]
        for col in cols:
            for row in range(len(lst)):
                try:
                    idx = old_values.index(lst[row][col])
                    lst[row][col] = listmap[idx][1]
                except ValueError:
                    # Value not present in the map: leave the cell unchanged.
                    pass
    else:
        for row in range(len(lst)):
            # BUG FIX: iterate over the columns of THIS row; the original
            # used range(len(lst)) (the number of rows), which recoded the
            # wrong cells for non-square data.
            for col in range(len(lst[row])):
                try:
                    idx = old_values.index(lst[row][col])
                    lst[row][col] = listmap[idx][1]
                except ValueError:
                    pass
    return lst
|
def flush(self):
    """Flush the message queue if there's an active connection running."""
    self._pending_flush = False
    handler = self.handler
    if handler is None or self.send_queue.is_empty():
        return
    # Frame the queued payload as a SockJS-style 'a[...]' array message.
    handler.send_pack('a[%s]' % self.send_queue.get())
    self.send_queue.clear()
|
def unmapped(sam, mates):
    """Yield unmapped reads from an iterable of SAM lines.

    A read is yielded when both its own reference (field 3) and its mate's
    reference (field 7) are '*'.  With ``mates`` True, reads where either
    reference is '*' are also yielded, as is any read carrying the Bowtie2
    'YT:Z:UP' (unpaired) tag.
    """
    for line in sam:
        # Skip SAM header lines.
        if line.startswith('@'):
            continue
        fields = line.strip().split()
        if fields[2] == '*' and fields[6] == '*':
            yield fields
        elif mates is True:
            if fields[2] == '*' or fields[6] == '*':
                yield fields
            # One yield per occurrence of the tag, matching the original
            # per-token scan.
            for _ in range(fields.count('YT:Z:UP')):
                yield fields
|
def cross_product_matrix(vec):
    """Return the 3x3 skew-symmetric cross-product matrix of a 3-vector.

    The returned matrix M satisfies M @ w == cross(vec, w) for any 3-vector w.
    """
    x, y, z = vec[0], vec[1], vec[2]
    return np.array([
        [0, -z, y],
        [z, 0, -x],
        [-y, x, 0],
    ])
|
def _tree_line(self, no_type: bool = False) -> str:
    """Return the receiver's contribution to the tree diagram.

    Appends '!' to the superclass line when ``self.presence`` is truthy.
    NOTE(review): ``no_type`` is accepted but not forwarded to the
    superclass call — confirm this is intentional.
    """
    marker = "!" if self.presence else ""
    return super()._tree_line() + marker
|
def update(self, unique_name=values.unset, callback_method=values.unset, callback_url=values.unset, friendly_name=values.unset, rate_plan=values.unset, status=values.unset, commands_callback_method=values.unset, commands_callback_url=values.unset, sms_fallback_method=values.unset, sms_fallback_url=values.unset, sms_method=values.unset, sms_url=values.unset, voice_fallback_method=values.unset, voice_fallback_url=values.unset, voice_method=values.unset, voice_url=values.unset, reset_status=values.unset):
    """Update the SimInstance.

    Each parameter maps to the Twilio Wireless Sim resource field of the
    same name; unset parameters are omitted from the request.

    :param unicode unique_name: Unique alternative identifier for the Sim.
    :param unicode callback_method: HTTP method for the callback URL.
    :param unicode callback_url: URL requested when the Sim finishes updating.
    :param unicode friendly_name: Human-readable name for the Sim.
    :param unicode rate_plan: Sid or UniqueName of the RatePlan to use.
    :param SimInstance.Status status: Status of the Sim.
    :param unicode commands_callback_method: HTTP method for CommandsCallbackUrl.
    :param unicode commands_callback_url: Webhook URL for originated Commands.
    :param unicode sms_fallback_method: HTTP method for sms_fallback_url.
    :param unicode sms_fallback_url: URL requested on SMS TwiML errors.
    :param unicode sms_method: HTTP method for sms_url.
    :param unicode sms_url: URL requested for non-Command SMS messages.
    :param unicode voice_fallback_method: HTTP method for voice_fallback_url.
    :param unicode voice_fallback_url: URL requested on voice TwiML errors.
    :param unicode voice_method: HTTP method for voice_url.
    :param unicode voice_url: URL requested when the device makes a call.
    :param SimInstance.ResetStatus reset_status: Initiate a connectivity reset.
    :returns: Updated SimInstance
    :rtype: twilio.rest.wireless.v1.sim.SimInstance
    """
    data = values.of({
        'UniqueName': unique_name,
        'CallbackMethod': callback_method,
        'CallbackUrl': callback_url,
        'FriendlyName': friendly_name,
        'RatePlan': rate_plan,
        'Status': status,
        'CommandsCallbackMethod': commands_callback_method,
        'CommandsCallbackUrl': commands_callback_url,
        'SmsFallbackMethod': sms_fallback_method,
        'SmsFallbackUrl': sms_fallback_url,
        'SmsMethod': sms_method,
        'SmsUrl': sms_url,
        'VoiceFallbackMethod': voice_fallback_method,
        'VoiceFallbackUrl': voice_fallback_url,
        'VoiceMethod': voice_method,
        'VoiceUrl': voice_url,
        'ResetStatus': reset_status,
    })
    payload = self._version.update('POST', self._uri, data=data)
    return SimInstance(self._version, payload, sid=self._solution['sid'])
|
def generate_PJdelJ_nt_pos_vecs(self, generative_model, genomic_data):
    """Process P(J)*P(delJ|J) into Pi arrays.

    Sets the attributes PJdelJ_nt_pos_vec and PJdelJ_2nd_nt_pos_per_aa_vec.

    Parameters
    ----------
    generative_model : GenerativeModelVDJ
        VDJ generative model class containing the model parameters.
    genomic_data : GenomicDataVDJ
        VDJ genomic data class containing the V, D, and J germline
        sequences and info.
    """
    cutJ_genomic_CDR3_segs = genomic_data.cutJ_genomic_CDR3_segs
    # Map nucleotide characters to row indices of the 4 x L position arrays.
    nt2num = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
    num_del_pos = generative_model.PdelJ_given_J.shape[0]
    num_D_genes, num_J_genes = generative_model.PDJ.shape
    # Marginal P(J): sum the joint P(D, J) over the D axis.
    PJ = np.sum(generative_model.PDJ, axis=0)
    # NOTE: [[]] * n aliases one empty list across all slots, but every slot
    # is re-assigned below, so the sharing is harmless here.
    PJdelJ_nt_pos_vec = [[]] * num_J_genes
    PJdelJ_2nd_nt_pos_per_aa_vec = [[]] * num_J_genes
    for J_in, pj in enumerate(PJ):  # We include the marginal PJ here
        # One 4 x (segment length) probability array per J gene.
        current_PJdelJ_nt_pos_vec = np.zeros((4, len(cutJ_genomic_CDR3_segs[J_in])))
        # Per-amino-acid arrays for the mid-codon (2nd nucleotide) case.
        current_PJdelJ_2nd_nt_pos_per_aa_vec = {}
        for aa in self.codons_dict.keys():
            current_PJdelJ_2nd_nt_pos_per_aa_vec[aa] = np.zeros((4, len(cutJ_genomic_CDR3_segs[J_in])))
        for pos, nt in enumerate(cutJ_genomic_CDR3_segs[J_in]):
            # Positions beyond the deletion profile carry zero probability.
            if pos >= num_del_pos:
                continue
            # Codon phase is counted from the 3' end of the J segment.
            if (len(cutJ_genomic_CDR3_segs[J_in]) - pos) % 3 == 1:  # Start of a codon
                current_PJdelJ_nt_pos_vec[nt2num[nt], pos] = pj * generative_model.PdelJ_given_J[pos, J_in]
            elif (len(cutJ_genomic_CDR3_segs[J_in]) - pos) % 3 == 2:  # Mid codon position
                for ins_nt in 'ACGT':  # We need to find what possible codons are allowed for any aa (or motif)
                    for aa in self.codons_dict.keys():
                        # ins_nt completes the codon started by an insertion;
                        # record it only if the resulting codon encodes aa.
                        if ins_nt + cutJ_genomic_CDR3_segs[J_in][pos:pos + 2] in self.codons_dict[aa]:
                            current_PJdelJ_2nd_nt_pos_per_aa_vec[aa][nt2num[ins_nt], pos] = pj * generative_model.PdelJ_given_J[pos, J_in]
            elif (len(cutJ_genomic_CDR3_segs[J_in]) - pos) % 3 == 0:  # End of codon
                # Row 0 is used as the single slot for full-codon positions.
                current_PJdelJ_nt_pos_vec[0, pos] = pj * generative_model.PdelJ_given_J[pos, J_in]
        PJdelJ_nt_pos_vec[J_in] = current_PJdelJ_nt_pos_vec
        PJdelJ_2nd_nt_pos_per_aa_vec[J_in] = current_PJdelJ_2nd_nt_pos_per_aa_vec
    self.PJdelJ_nt_pos_vec = PJdelJ_nt_pos_vec
    self.PJdelJ_2nd_nt_pos_per_aa_vec = PJdelJ_2nd_nt_pos_per_aa_vec
|
def run_selection(self):
    """Run selected text or current line in console.

    If some text is selected, execute that text in the console.  Otherwise
    execute the current line (unless it is empty) and advance the cursor to
    the next line, appending a new blank line first when the cursor sits on
    a non-empty last line.
    """
    selection = self.get_current_editor().get_selection_as_executable_code()
    if selection:
        self.exec_in_extconsole.emit(selection.rstrip(), self.focus_to_editor)
        return
    editor = self.get_current_editor()
    text = editor.get_current_line().lstrip()
    if text:
        self.exec_in_extconsole.emit(text, self.focus_to_editor)
    # On a non-empty last line, add a blank line so the cursor can advance.
    if editor.is_cursor_on_last_line() and text:
        editor.append(editor.get_line_separator())
    editor.move_cursor_to_next('line', 'down')
|
async def grant(self, acl='login'):
    """Set the access level of this user on the controller.

    :param str acl: Access control ('login', 'add-model', or 'superuser')
    """
    granted = await self.controller.grant(self.username, acl)
    # Only mirror the new level locally once the controller accepts it.
    if granted:
        self._user_info.access = acl
|
def update_notes(self, **kwargs):
    """Update the notes on the subscription without generating a change.

    This endpoint also allows you to update custom fields:

        sub.custom_fields[0].value = 'A new value'
        sub.update_notes()
    """
    notes_url = urljoin(self._url, '/notes')
    for attr, value in iteritems(kwargs):
        setattr(self, attr, value)
    self.put(notes_url)
|
def MakePmf(self, xs, name=''):
    """Make a discrete version of this Pdf, evaluated at xs.

    xs: equally-spaced sequence of values

    Returns: new, normalized Pmf
    """
    pmf = Pmf(name=name)
    for value in xs:
        pmf.Set(value, self.Density(value))
    pmf.Normalize()
    return pmf
|
def _threaded(self, *args, **kwargs):
    """Call every target in order and put each result on the queue."""
    enqueue = self.queue.put
    for target in self.targets:
        enqueue(target(*args, **kwargs))
|
def _optimize_A(self, A):
    """Find optimal transformation matrix A by minimization.

    Parameters
    ----------
    A : ndarray
        Initial guess for the transformation matrix A.

    Returns
    -------
    A : ndarray
        The optimized transformation matrix.

    Raises
    ------
    ValueError
        If the optimizer finishes at an infeasible point (objective -inf).
    """
    # Only the leading n_macrostates right eigenvectors enter the objective.
    right_eigenvectors = self.right_eigenvectors_[:, :self.n_macrostates]
    # flat_map/square_map convert between the square matrix A and the flat
    # parameter vector the optimizers operate on.
    flat_map, square_map = get_maps(A)
    alpha = to_flat(1.0 * A, flat_map)  # 1.0 * A forces a float copy
    def obj(x):
        # Negated because the optimizers minimize but we maximize the objective.
        return -1 * self._objective_function(x, self.transmat_, right_eigenvectors, square_map, self.populations_)
    # Two-stage optimization: global basin hopping, then local simplex refinement.
    alpha = scipy.optimize.basinhopping(obj, alpha, niter_success=1000, )['x']
    alpha = scipy.optimize.fmin(obj, alpha, full_output=True, xtol=1E-4, ftol=1E-4, maxfun=5000, maxiter=100000)[0]
    if np.isneginf(obj(alpha)):
        raise ValueError("Error: minimization has not located a feasible point.")
    A = to_square(alpha, square_map)
    return A
|
def filter(self, table, vg_snapshots, filter_string):
    """Naive case-insensitive substring search over snapshot names."""
    needle = filter_string.lower()
    return [snapshot for snapshot in vg_snapshots if needle in snapshot.name.lower()]
|
def _approxaAInv(self, Or, Op, Oz, ar, ap, az, interp=True):
    """
    NAME:
       _approxaAInv
    PURPOSE:
       return R,vR,... coordinates for a point based on the linear
       approximation around the stream track
    INPUT:
       Or,Op,Oz,ar,ap,az - phase space coordinates in frequency-angle
                           space
       interp= (True), if True, use the interpolated track
    OUTPUT:
       (R,vR,vT,z,vz,phi)
    HISTORY:
       2013-12-22 - Written - Bovy (IAS)
    """
    if isinstance(Or, (int, float, numpy.float32, numpy.float64)):  # Scalar input
        # Promote scalars to length-1 arrays so the loop below is uniform.
        Or = numpy.array([Or])
        Op = numpy.array([Op])
        Oz = numpy.array([Oz])
        ar = numpy.array([ar])
        ap = numpy.array([ap])
        az = numpy.array([az])
    # Calculate apar, angle offset along the stream
    closestIndx = [self._find_closest_trackpointaA(Or[ii], Op[ii], Oz[ii], ar[ii], ap[ii], az[ii], interp=interp) for ii in range(len(Or))]
    out = numpy.empty((6, len(Or)))
    for ii in range(len(Or)):
        # Frequency-angle offset of this point from its closest track point.
        dOa = numpy.empty(6)
        if interp:
            dOa[0] = Or[ii] - self._interpolatedObsTrackAA[closestIndx[ii], 0]
            dOa[1] = Op[ii] - self._interpolatedObsTrackAA[closestIndx[ii], 1]
            dOa[2] = Oz[ii] - self._interpolatedObsTrackAA[closestIndx[ii], 2]
            dOa[3] = ar[ii] - self._interpolatedObsTrackAA[closestIndx[ii], 3]
            dOa[4] = ap[ii] - self._interpolatedObsTrackAA[closestIndx[ii], 4]
            dOa[5] = az[ii] - self._interpolatedObsTrackAA[closestIndx[ii], 5]
            # Jacobians are only stored at the coarse (non-interpolated)
            # track points, so look up the closest coarse point separately.
            jacIndx = self._find_closest_trackpointaA(Or[ii], Op[ii], Oz[ii], ar[ii], ap[ii], az[ii], interp=False)
        else:
            dOa[0] = Or[ii] - self._ObsTrackAA[closestIndx[ii], 0]
            dOa[1] = Op[ii] - self._ObsTrackAA[closestIndx[ii], 1]
            dOa[2] = Oz[ii] - self._ObsTrackAA[closestIndx[ii], 2]
            dOa[3] = ar[ii] - self._ObsTrackAA[closestIndx[ii], 3]
            dOa[4] = ap[ii] - self._ObsTrackAA[closestIndx[ii], 4]
            dOa[5] = az[ii] - self._ObsTrackAA[closestIndx[ii], 5]
            jacIndx = closestIndx[ii]
        # Find 2nd closest Jacobian point for smoothing
        # Consider all 2*pi wraps of the angle offset from the progenitor
        # and keep the one closest to the stream direction.
        da = numpy.stack(numpy.meshgrid(_TWOPIWRAPS + ar[ii] - self._progenitor_angle[0], _TWOPIWRAPS + ap[ii] - self._progenitor_angle[1], _TWOPIWRAPS + az[ii] - self._progenitor_angle[2], indexing='xy')).T.reshape((len(_TWOPIWRAPS) ** 3, 3))
        dapar = self._sigMeanSign * numpy.dot(da[numpy.argmin(numpy.linalg.norm(numpy.cross(da, self._dsigomeanProgDirection), axis=1))], self._dsigomeanProgDirection)
        dmJacIndx = numpy.fabs(dapar - self._thetasTrack[jacIndx])
        if jacIndx == 0:
            jacIndx2 = jacIndx + 1
            dmJacIndx2 = numpy.fabs(dapar - self._thetasTrack[jacIndx + 1])
        elif jacIndx == self._nTrackChunks - 1:
            jacIndx2 = jacIndx - 1
            dmJacIndx2 = numpy.fabs(dapar - self._thetasTrack[jacIndx - 1])
        else:
            # Interior point: pick whichever neighbor is closer in apar.
            dm1 = numpy.fabs(dapar - self._thetasTrack[jacIndx - 1])
            dm2 = numpy.fabs(dapar - self._thetasTrack[jacIndx + 1])
            if dm1 < dm2:
                jacIndx2 = jacIndx - 1
                dmJacIndx2 = dm1
            else:
                jacIndx2 = jacIndx + 1
                dmJacIndx2 = dm2
        # Interpolation weight between the two Jacobian points.
        ampJacIndx = dmJacIndx / (dmJacIndx + dmJacIndx2)
        # Make sure the angles haven't wrapped around
        if dOa[3] > numpy.pi:
            dOa[3] -= 2. * numpy.pi
        elif dOa[3] < -numpy.pi:
            dOa[3] += 2. * numpy.pi
        if dOa[4] > numpy.pi:
            dOa[4] -= 2. * numpy.pi
        elif dOa[4] < -numpy.pi:
            dOa[4] += 2. * numpy.pi
        if dOa[5] > numpy.pi:
            dOa[5] -= 2. * numpy.pi
        elif dOa[5] < -numpy.pi:
            dOa[5] += 2. * numpy.pi
        # Apply closest jacobian
        out[:, ii] = numpy.dot((1. - ampJacIndx) * self._allinvjacsTrack[jacIndx, :, :] + ampJacIndx * self._allinvjacsTrack[jacIndx2, :, :], dOa)
        # Add back the configuration-space position of the track point.
        if interp:
            out[:, ii] += self._interpolatedObsTrack[closestIndx[ii]]
        else:
            out[:, ii] += self._ObsTrack[closestIndx[ii]]
    return out
|
def delete_object_in_seconds(self, obj, seconds, extra_info=None):
    """Schedule the object in this container for deletion after ``seconds``.

    The 'extra_info' parameter is included for backwards compatibility. It
    is no longer used at all, and will not be modified with swiftclient
    info, since swiftclient is not used any more.
    """
    mgr = self.manager
    return mgr.delete_object_in_seconds(self, obj, seconds)
|
def _ite(lexer):
    """Return an ITE expression.

    Grammar:  ITE := IMPL '?' ITE ':' ITE  |  IMPL
    """
    cond = _impl(lexer)
    tok = next(lexer)
    if not isinstance(tok, OP_question):
        # Plain IMPL: push the lookahead token back and return it as-is.
        lexer.unpop_token(tok)
        return cond
    # IMPL '?' ITE ':' ITE
    then_expr = _ite(lexer)
    _expect_token(lexer, {OP_colon})
    else_expr = _ite(lexer)
    return ('ite', cond, then_expr, else_expr)
|
def expandScopesGet(self, *args, **kwargs):
    """Expand Scopes.

    Return an expanded copy of the given scopeset, with scopes implied by
    any roles included.  This call uses the GET method with an HTTP body
    and remains only for backward compatibility.

    This method takes input: ``v1/scopeset.json#``
    This method gives output: ``v1/scopeset.json#``
    This method is ``deprecated``
    """
    endpoint = self.funcinfo["expandScopesGet"]
    return self._makeApiCall(endpoint, *args, **kwargs)
|
def prepare_jochem(ctx, jochem, output, csoutput):
    """Process and filter a Jochem file into dictionary name lists.

    Case-insensitive terms (tagged '@match=ci') go to ``output``; all other
    terms go to ``csoutput``.
    """
    click.echo('chemdataextractor.dict.prepare_jochem')
    for i, line in enumerate(jochem):
        print('JC%s' % i)  # progress marker, one per input line
        if not line.startswith('TM '):
            continue
        # 'TM ' prefix is 3 chars; ' @match=ci\n' suffix is 11 chars.
        if line.endswith(' @match=ci\n'):
            sink, term = output, line[3:-11]
        else:
            sink, term = csoutput, line[3:-1]
        for tokens in _make_tokens(term):
            sink.write(' '.join(tokens))
            sink.write('\n')
|
def create_from_name_and_dictionary(self, name, datas):
    """Return an ObjectParameter populated from the ``datas`` dictionary."""
    parameter = ObjectParameter()
    self.set_common_datas(parameter, name, datas)
    # Optional fields, each with its own converter; absent keys are skipped.
    for key, convert in (("optional", to_boolean), ("type", str), ("generic", to_boolean)):
        if key in datas:
            setattr(parameter, key, convert(datas[key]))
    return parameter
|
def update_annotations_on_build(self, build_id, annotations):
    """Set annotations on a build object.

    :param build_id: str, id of build
    :param annotations: dict, annotations to set
    :return: result of the underlying attribute adjustment call
    """
    metadata_updater = self._update_metadata_things
    return self.adjust_attributes_on_object('builds', build_id, 'annotations',
                                            annotations, metadata_updater)
|
def qos_rcv_queue_multicast_threshold_traffic_class0(self, **kwargs):
    """Build the qos/rcv-queue/multicast/threshold/traffic-class0 config
    subtree and hand the resulting XML tree to the callback.

    :keyword traffic_class0: text value for the leaf element (required)
    :keyword callback: override for ``self._callback`` (optional)
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "qos", xmlns="urn:brocade.com:mgmt:brocade-qos")
    for tag in ("rcv-queue", "multicast", "threshold"):
        node = ET.SubElement(node, tag)
    leaf = ET.SubElement(node, "traffic-class0")
    leaf.text = kwargs.pop('traffic_class0')
    callback = kwargs.pop('callback', self._callback)
    return callback(root)
|
def _fit_position_tsmap(self, name, **kwargs):
    """Localize a source from its TS map.

    Builds a TS map around ``name`` (the source itself excluded from the
    model), finds TS > 4 peaks, and fits an error ellipse at the strongest
    peak that yields a successful, in-bounds fit.  When no peak qualifies,
    falls back to fitting at the global maximum of the TS map.

    :param name: source name as known to the ROI
    :keyword prefix: output name prefix (default '')
    :keyword dtheta_max: half-size of the map in deg (default 0.5)
    :keyword zmin: floor on the delta-log-likelihood cut (default -3.0)
    :keyword skydir: map center (defaults to the source position)
    :return: dict with the ellipse-fit results plus 'xpix', 'ypix',
        'skydir' (ICRS), 'pos_offset' (deg), 'loglike' and 'tsmap'
    """
    prefix = kwargs.get('prefix', '')
    dtheta_max = kwargs.get('dtheta_max', 0.5)
    zmin = kwargs.get('zmin', -3.0)
    # map spans twice the maximum offset; kernel radius from configuration
    kw = {'map_size': 2.0 * dtheta_max,
          'write_fits': kwargs.get('write_fits', False),
          'write_npy': kwargs.get('write_npy', False),
          'use_pylike': kwargs.get('use_pylike', True),
          'max_kernel_radius': self.config['tsmap']['max_kernel_radius'],
          'loglevel': logging.DEBUG}
    src = self.roi.copy_source(name)
    if src['SpatialModel'] in ['RadialDisk', 'RadialGaussian']:
        # extended sources need a kernel at least twice their width
        kw['max_kernel_radius'] = max(kw['max_kernel_radius'], 2.0 * src['SpatialWidth'])
    skydir = kwargs.get('skydir', src.skydir)
    tsmap = self.tsmap(utils.join_strings([prefix, name.lower().replace(' ', '_')]),
                       model=src.data, map_skydir=skydir, exclude=[name],
                       make_plots=False, **kw)
    # Find peaks with TS > 4
    peaks = find_peaks(tsmap['ts'], 4.0, 0.2)
    peak_best = None
    o = {}
    # try peaks from strongest to weakest; keep the first acceptable fit
    for p in sorted(peaks, key=lambda t: t['amp'], reverse=True):
        xy = p['ix'], p['iy']
        ts_value = tsmap['ts'].data[xy[1], xy[0]]
        posfit = fit_error_ellipse(tsmap['ts'], xy=xy, dpix=2,
                                   zmin=max(zmin, -ts_value * 0.5))
        # NOTE(review): `offset` is computed but never used below --
        # confirm whether it was meant to gate the fit acceptance.
        offset = posfit['skydir'].separation(self.roi[name].skydir).deg
        if posfit['fit_success'] and posfit['fit_inbounds']:
            peak_best = p
            break
    if peak_best is None:
        # no acceptable peak: fit at the global maximum of the TS map
        ts_value = np.max(tsmap['ts'].data)
        posfit = fit_error_ellipse(tsmap['ts'], dpix=2,
                                   zmin=max(zmin, -ts_value * 0.5))
    o.update(posfit)
    pix = posfit['skydir'].to_pixel(self.geom.wcs)
    o['xpix'] = float(pix[0])
    o['ypix'] = float(pix[1])
    o['skydir'] = posfit['skydir'].transform_to('icrs')
    o['pos_offset'] = posfit['skydir'].separation(self.roi[name].skydir).deg
    # zoffset is a delta-log-likelihood measure; factor 0.5 converts it
    o['loglike'] = 0.5 * posfit['zoffset']
    o['tsmap'] = tsmap['ts']
    return o
|
def import_oauth2_credentials(filename=STORAGE_FILENAME):
    """Import OAuth 2.0 session credentials from a storage file.

    Parameters
        filename (str)
            Name of the YAML storage file.

    Returns
        credentials (dict)
            All your app credentials and information imported from the
            configuration file.
    """
    with open(filename, 'r') as storage_file:
        storage = safe_load(storage_file)
    credentials = {
        'access_token': storage['access_token'],
        'client_id': storage['client_id'],
        # depending on OAuth 2.0 grant_type, these values may not exist
        'client_secret': storage.get('client_secret'),
        'expires_in_seconds': storage['expires_in_seconds'],
        'grant_type': storage['grant_type'],
        'refresh_token': storage.get('refresh_token'),
        'scopes': storage['scopes'],
    }
    return credentials
|
def transfer_syntax(UID=None, description=None):
    """Transfer Syntax UID <-> Description lookup.

    :param UID: Transfer Syntax UID; when given and known, its description
        is returned.
    :param description: Description of a transfer syntax; its UID is
        returned when it matches a known entry.
    :return: the matching description or UID, or None when nothing matches.
    """
    transfer_syntax = {
        "1.2.840.10008.1.2": "Implicit VR Endian: Default Transfer Syntax for DICOM",
        "1.2.840.10008.1.2.1": "Explicit VR Little Endian",
        "1.2.840.10008.1.2.1.99": "Deflated Explicit VR Big Endian",
        "1.2.840.10008.1.2.2": "Explicit VR Big Endian",
        "1.2.840.10008.1.2.4.50": "JPEG Baseline (Process 1): Default Transfer Syntax for Lossy JPEG 8-bit Image Compression",
        "1.2.840.10008.1.2.4.51": "JPEG Baseline (Processes 2 & 4): Default Transfer Syntax for Lossy JPEG 12-bit Image Compression (Process 4 only)",
        "1.2.840.10008.1.2.4.57": "JPEG Lossless, Nonhierarchical (Processes 14)",
        "1.2.840.10008.1.2.4.70": "JPEG Lossless, Nonhierarchical, First-Order Prediction (Processes 14 [Selection Value 1])",
        "1.2.840.10008.1.2.4.80": "JPEG-LS Lossless Image Compression",
        "1.2.840.10008.1.2.4.81": "JPEG-LS Lossy (Near- Lossless) Image Compression",
        "1.2.840.10008.1.2.4.90": "JPEG 2000 Image Compression (Lossless Only)",
        "1.2.840.10008.1.2.4.91": "JPEG 2000 Image Compression",
        "1.2.840.10008.1.2.4.92": "JPEG 2000 Part 2 Multicomponent Image Compression (Lossless Only)",
        "1.2.840.10008.1.2.4.93": "JPEG 2000 Part 2 Multicomponent Image Compression",
        "1.2.840.10008.1.2.4.94": "JPIP Referenced",
        "1.2.840.10008.1.2.4.95": "JPIP Referenced Deflate",
        "1.2.840.10008.1.2.5": "RLE Lossless",
        "1.2.840.10008.1.2.6.1": "RFC 2557 MIME Encapsulation",
        "1.2.840.10008.1.2.4.100": "MPEG2 Main Profile Main Level",
        "1.2.840.10008.1.2.4.102": "MPEG-4 AVC/H.264 High Profile / Level 4.1",
        "1.2.840.10008.1.2.4.103": "MPEG-4 AVC/H.264 BD-compatible High Profile / Level 4.1",
    }
    # NOTE: assert kept for interface compatibility (callers may rely on
    # AssertionError); it is stripped under `python -O`.
    assert UID or description, "Either Transfer syntax UID or description required"
    if UID in transfer_syntax:
        return transfer_syntax[UID]
    # BUG FIX: dict.iteritems() is Python-2-only and raised AttributeError
    # on Python 3; items() behaves identically here.
    for key, value in transfer_syntax.items():
        if description == value:
            return key
    return None
|
def PrintResponse(batch_job_helper, response_xml):
    """Prints the BatchJobService response.

    Args:
        batch_job_helper: a BatchJobHelper instance.
        response_xml: a string containing a response from the BatchJobService.
    """
    response = batch_job_helper.ParseResponse(response_xml)
    if 'rval' in response['mutateResponse']:
        for data in response['mutateResponse']['rval']:
            if 'errorList' in data:
                # BUG FIX: Python 2 `print` statements were syntax errors on
                # Python 3; single-argument print() calls behave identically
                # on Python 2 and parse on both.
                print('Operation %s - FAILURE:' % data['index'])
                print('\terrorType=%s' % data['errorList']['errors']['ApiError.Type'])
                print('\ttrigger=%s' % data['errorList']['errors']['trigger'])
                print('\terrorString=%s' % data['errorList']['errors']['errorString'])
                print('\tfieldPath=%s' % data['errorList']['errors']['fieldPath'])
                print('\treason=%s' % data['errorList']['errors']['reason'])
            if 'result' in data:
                print('Operation %s - SUCCESS.' % data['index'])
|
def _html_output(self, normal_row, error_row, row_ender, help_text_html, errors_on_separate_row):
    """Extend BaseForm's helper function for outputting HTML.  Used by
    as_table(), as_ul(), as_p().

    Combines the HTML version of the main form's fields with the HTML
    content for any subforms and formsets.
    """
    parts = []
    # main form fields, rendered by the stock Django machinery
    parts.append(super(XmlObjectForm, self)._html_output(normal_row, error_row, row_ender, help_text_html, errors_on_separate_row))

    def _subform_output(subform):
        # render a single subform using the same row templates
        return subform._html_output(normal_row, error_row, row_ender, help_text_html, errors_on_separate_row)

    for name, subform in six.iteritems(self.subforms):
        # use form label if one was set
        if hasattr(subform, 'form_label'):
            name = subform.form_label
        parts.append(self._html_subform_output(subform, name, _subform_output))

    for name, formset in six.iteritems(self.formsets):
        # the hidden management form must be emitted for each formset
        parts.append(u(formset.management_form))
        # use form label if one was set
        # - use declared subform label if any
        if hasattr(formset.forms[0], 'form_label') and formset.forms[0].form_label is not None:
            name = formset.forms[0].form_label
        # fallback to generated label from field name
        elif hasattr(formset, 'form_label'):
            name = formset.form_label
        # collect the html output for all the forms in the formset
        subform_parts = list()
        for subform in formset.forms:
            subform_parts.append(self._html_subform_output(subform, gen_html=_subform_output, suppress_section=True))
        # then wrap all forms in the section container, so formset label appears once
        parts.append(self._html_subform_output(name=name, content=u'\n'.join(subform_parts)))
    return mark_safe(u'\n'.join(parts))
|
def chimera_node_placer_2d(m, n, t, scale=1., center=None, dim=2):
    """Generate a function that converts Chimera indices to x, y coordinates
    for a plot.

    Parameters
        m : int
            Number of rows in the Chimera lattice.
        n : int
            Number of columns in the Chimera lattice.
        t : int
            Size of the shore within each Chimera tile.
        scale : float (default 1.)
            Scale factor.  When scale = 1, all positions fit within [0, 1]
            on the x-axis and [-1, 0] on the y-axis.
        center : None or array (default None)
            Coordinates of the top left corner.
        dim : int (default 2)
            Number of dimensions.  When dim > 2, all extra dimensions are
            set to 0.

    Returns
        xy_coords : function
            Maps a Chimera index (i, j, u, k) in an (m, n, t) lattice to
            x, y coordinates suitable for plotting.
    """
    import numpy as np
    half_shore = t // 2
    # 1 for the middle of the cross, 2 for spacing between tiles
    tile_pitch = t + 3
    # normalize so the entire plot fills [0, 1] when scale == 1
    scale /= max(m, n) * tile_pitch - 3
    tile_offsets = {}
    if center is None:
        center = np.zeros(dim)
    else:
        center = np.asarray(center)
    pad_dims = dim - 2
    if pad_dims < 0:
        raise ValueError("layout must have at least two dimensions")
    if len(center) != dim:
        raise ValueError("length of center coordinates must match dimension of layout")

    def _xy_coords(i, j, u, k):
        # (row, col, shore, shore index) -> position within the tile,
        # skipping the crossing cell in the middle of the shore
        p = k if k < half_shore else k + 1
        if u:
            xy = np.array([half_shore, -1 * p])
        else:
            xy = np.array([p, -1 * half_shore])
        # shift by this tile's grid offset (computed once, then cached)
        if i > 0 or j > 0:
            if (i, j) in tile_offsets:
                xy += tile_offsets[(i, j)]
            else:
                off = np.array([j * tile_pitch, -1 * i * tile_pitch])
                xy += off
                tile_offsets[(i, j)] = off
        # convention for Chimera-lattice pictures is to invert the y-axis
        return np.hstack((xy * scale, np.zeros(pad_dims))) + center

    return _xy_coords
|
def _new_from_rft(self, base_template, rft_file):
    """Append a new-file-from-.rft entry to the journal.

    This instructs Revit to create a new model based on the provided
    .rft template.

    Args:
        base_template (str): new file journal template from rmj.templates
        rft_file (str): full path to .rft template to be used
    """
    rft_name = op.basename(rft_file)
    self._add_entry(base_template)
    self._add_entry(templates.NEW_FROM_RFT.format(rft_file_path=rft_file,
                                                  rft_file_name=rft_name))
|
def set_multi(self, mappings, time=100, compress_level=-1):
    """Set multiple keys with its values on server.

    If a key is a (key, cas) tuple, insert as if cas(key, value, cas) had
    been called.

    :param mappings: A dict with keys/values
    :type mappings: dict
    :param time: Time in seconds that your key will expire.
    :type time: int
    :param compress_level: How much to compress.
        0 = no compression, 1 = fastest, 9 = slowest but best,
        -1 = default compression level.
    :type compress_level: int
    :return: True when every key was stored, False if any quiet command
        reported a failure
    :rtype: bool
    """
    mappings = mappings.items()
    msg = []
    for key, value in mappings:
        if isinstance(key, tuple):
            # (key, cas) form: unpack the compare-and-swap token
            key, cas = key
        else:
            cas = None
        if cas == 0:
            # Like cas(), if the cas value is 0, treat it as compare-and-set against not
            # existing.
            command = 'addq'
        else:
            command = 'setq'
        flags, value = self.serialize(value, compress_level=compress_level)
        # quiet request: header + 8 extras bytes (flags, expiry) + key + value
        m = struct.pack(self.HEADER_STRUCT + self.COMMANDS[command]['struct'] % (len(key), len(value)), self.MAGIC['request'], self.COMMANDS[command]['command'], len(key), 8, 0, 0, len(key) + len(value) + 8, 0, cas or 0, flags, time, str_to_bytes(key), value)
        msg.append(m)
    # trailing noop flushes the quiet commands and terminates the responses
    m = struct.pack(self.HEADER_STRUCT + self.COMMANDS['noop']['struct'], self.MAGIC['request'], self.COMMANDS['noop']['command'], 0, 0, 0, 0, 0, 0, 0)
    msg.append(m)
    if six.PY2:
        msg = ''.join(msg)
    else:
        msg = b''.join(msg)
    self._send(msg)
    opcode = -1
    retval = True
    # quiet commands only answer on failure; read until the noop response
    while opcode != self.COMMANDS['noop']['command']:
        (magic, opcode, keylen, extlen, datatype, status, bodylen, opaque, cas, extra_content) = self._get_response()
        if status != self.STATUS['success']:
            retval = False
            if status == self.STATUS['server_disconnected']:
                break
    return retval
|
def encode_batch(self, inputBatch):
    """Encode a whole batch of input arrays, without learning.

    :param inputBatch: iterable of input samples
    :return: numpy array of the encoded samples, in input order
    """
    encoder = self.encode
    return np.array([encoder(sample) for sample in inputBatch])
|
def block(seed):
    """Return a block of normal random numbers.

    Parameters
        seed : {None, int}
            The seed used to generate the noise.

    Returns
        noise : numpy.ndarray
            Array of random numbers.
    """
    sample_count = SAMPLE_RATE * BLOCK_SIZE
    # RandomState requires a 32-bit seed
    rng = RandomState(seed % 2 ** 32)
    variance = SAMPLE_RATE / 2
    return rng.normal(size=sample_count, scale=variance ** 0.5)
|
def make_worlist_trie(wordlist):
    """Create a nested dictionary representing the trie built from the given
    word list.  End of word is marked with a '__end__' -> '__end__' entry.

    :param wordlist: list of str
    :return: nested dictionary

    >>> make_worlist_trie(['einander', 'einen', 'neben'])
    {'e': {'i': {'n': {'a': {'n': {'d': {'e': {'r': {'__end__': '__end__'}}}}}, 'e': {'n': {'__end__': '__end__'}}}}}, 'n': {'e': {'b': {'e': {'n': {'__end__': '__end__'}}}}}}
    """
    trie = {}
    for word in wordlist:
        node = trie
        for char in word:
            node = node.setdefault(char, {})
        node['__end__'] = '__end__'
    return trie
|
def on_complete(cls, req):
    """Callback called when the request to REST is done.  Handles the errors
    and, if there are none, :class:`.OutputPicker` is shown.

    :param req: finished request object with ``.status`` and ``.text``
    """
    # handle http errors
    if not (req.status == 200 or req.status == 0):
        ViewController.log_view.add(req.text)
        alert(req.text)  # TODO: better handling
        return
    try:
        resp = json.loads(req.text)
    except ValueError:
        resp = None
    if not resp:
        alert("Chyba při konverzi!")  # TODO: better
        # BUG FIX: `resp` is None (or empty) in this branch, so `resp.text`
        # raised AttributeError; log the raw response text from `req`.
        ViewController.log_view.add("Error while generating MARC: %s" % req.text)
        return
    OutputPicker.show(resp)
|
def install(self):  # pragma: no cover
    """Install/download ssh keys from LDAP for consumption by SSH."""
    ldap_keys = self.get_keys_from_ldap()
    for user, ssh_keys in ldap_keys.items():
        user_dir = API.__authorized_keys_path(user)
        if not os.path.isdir(user_dir):
            os.makedirs(user_dir)
        keys_path = os.path.join(user_dir, 'authorized_keys')
        # one decoded key per line
        with open(keys_path, 'w') as FILE:
            print("\n".join([k.decode() for k in ssh_keys]), file=FILE)
|
def getDwordAtRva(self, rva):
    """Returns a C{DWORD} from a given RVA.

    @type rva: int
    @param rva: The RVA to get the C{DWORD} from.

    @rtype: L{DWORD}
    @return: The L{DWORD} obtained at the given RVA.
    """
    raw_bytes = self.getDataAtRva(rva, 4)
    return datatypes.DWORD.parse(utils.ReadData(raw_bytes))
|
def setVarcompExact(self, ldeltamin=-5, ldeltamax=5, num_intervals=100):
    """setVarcompExact(ALMM self, limix::mfloat_t ldeltamin=-5, limix::mfloat_t ldeltamax=5, limix::muint_t num_intervals=100)

    Parameters
        ldeltamin : limix::mfloat_t
        ldeltamax : limix::mfloat_t
        num_intervals : limix::muint_t

    setVarcompExact(ALMM self, limix::mfloat_t ldeltamin=-5, limix::mfloat_t ldeltamax=5)

    Parameters
        ldeltamin : limix::mfloat_t
        ldeltamax : limix::mfloat_t

    setVarcompExact(ALMM self, limix::mfloat_t ldeltamin=-5)

    Parameters
        ldeltamin : limix::mfloat_t

    setVarcompExact(ALMM self)

    Parameters
        self : limix::ALMM *
    """
    # Thin SWIG wrapper: delegate directly to the native implementation.
    return _core.ALMM_setVarcompExact(self, ldeltamin, ldeltamax, num_intervals)
|
def eglGetDisplay(display=EGL_DEFAULT_DISPLAY):
    """Connect to the EGL display server.

    :raises RuntimeError: when no display connection can be established
    """
    handle = _lib.eglGetDisplay(display)
    if not handle or handle == EGL_NO_DISPLAY:
        raise RuntimeError('Could not create display')
    return handle
|
def addSkip(self, test: unittest.case.TestCase, reason: str):
    """Transforms the test in a serializable version of it and sends it to a
    queue for further analysis.

    :param test: the test to save
    :param reason: the reason why the test was skipped
    """
    test.time_taken = time.time() - self.start_time
    # NOTE(review): _outcome is cleared, presumably so the test object can be
    # serialized onto the queue -- confirm.
    test._outcome = None
    self.result_queue.put((TestState.skipped, test, reason))
|
def largest_tuple_diff(tuple_list: list) -> int:
    """Compute the highest absolute difference among pairs within a provided
    list of tuples.

    Args:
        tuple_list: A list of (a, b) tuples.

    Returns:
        An integer denoting the highest absolute difference between pair
        elements in the list.

    Raises:
        ValueError: If ``tuple_list`` is empty (``max()`` of empty sequence).

    Examples:
        >>> largest_tuple_diff([(3, 5), (1, 7), (10, 3), (1, 2)])
        7
        >>> largest_tuple_diff([(4, 6), (2, 17), (9, 13), (11, 12)])
        15
        >>> largest_tuple_diff([(12, 35), (21, 27), (13, 23), (41, 22)])
        23
    """
    # DOC FIX: the first doctest was missing its expected output (7).
    # Generator avoids materializing the intermediate list.
    return max(abs(a - b) for a, b in tuple_list)
|
def _fetch_system_by_machine_id(self):
    '''Get a system by machine ID

    Returns
        dict    system exists in inventory
        False   system does not exist in inventory
        None    error connection or parsing response
    '''
    machine_id = generate_machine_id()
    try:
        url = self.api_url + '/inventory/v1/hosts?insights_id=' + machine_id
        net_logger.info("GET %s", url)
        res = self.session.get(url, timeout=self.config.http_timeout)
    except (requests.ConnectionError, requests.Timeout) as e:
        logger.error(e)
        logger.error('The Insights API could not be reached.')
        return None
    try:
        # handle_fail_rcs returns truthy on a failing HTTP return code
        if (self.handle_fail_rcs(res)):
            return None
        res_json = json.loads(res.content)
    except ValueError as e:
        logger.error(e)
        logger.error('Could not parse response body.')
        return None
    if res_json['total'] == 0:
        logger.debug('No hosts found with machine ID: %s', machine_id)
        return False
    return res_json['results']
|
def set_plugins_params(self, plugins=None, search_dirs=None, autoload=None, required=False):
    """Sets plugin-related parameters.

    :param list|str|unicode|OptionsGroup|list[OptionsGroup] plugins: uWSGI plugins to load
    :param list|str|unicode search_dirs: Directories to search for uWSGI plugins.
    :param bool autoload: Try to automatically load plugins when unknown options are found.
    :param bool required: Load uWSGI plugins and exit on error.
    """
    command = 'need-plugin' if required else 'plugin'
    for plugin in listify(plugins or []):
        if plugin in self._plugins:
            # already registered; avoid duplicate options
            continue
        self._set(command, plugin, multi=True)
        self._plugins.append(plugin)
    self._set('plugins-dir', search_dirs, multi=True, priority=0)
    self._set('autoload', autoload, cast=bool)
    return self
|
def get_date():
    '''Displays the current date

    :return: the system date
    :rtype: str

    CLI Example:

    .. code-block:: bash

        salt '*' timezone.get_date
    '''
    # run the macOS systemsetup tool; parse_return strips the label from
    # its "Key: value" style output
    ret = salt.utils.mac_utils.execute_return_result('systemsetup -getdate')
    return salt.utils.mac_utils.parse_return(ret)
|
def write_branch_data(self, file):
    """Writes branch data to file.

    Branches with a zero off-nominal tap ratio go to the non-transformer
    branch section; all others go to the transformer section.  The column
    counts of each ``vals``/``line3`` list must exactly match the format
    strings below -- keep them in sync when editing.
    """
    # I, J, CKT, R, X, B, RATEA, RATEB, RATEC, GI, BI, GJ, BJ, ST, LEN, O1, F1, ..., O4, F4
    branch_attr = ["r", "x", "b", "rate_a", "rate_b", "rate_c"]
    for branch in self.case.branches:
        if feq(branch.ratio, 0.0):
            vals = [getattr(branch, a) for a in branch_attr]
            if float(vals[1]) < 0.001:
                vals[1] = 0.001
            # small reactance, todo: increase decimal
            vals.insert(0, "1 ")
            vals.insert(0, branch.to_bus._i)
            vals.insert(0, branch.from_bus._i)
            # GI, BI, GJ, BJ all zero
            vals.extend([0., 0., 0., 0.])
            vals.append(branch.online)
            # LEN, O1, F1
            vals.extend([0.0, 1, 1.0, ])
            file.write("%6d,%6d,'%s',%10.3f,%10.3f,%10.3f,%10.3f,%10.3f," "%10.3f,%10.3f,%10.3f,%10.3f,%10.3f,%d,%10.3f,%4d,%6.4f\n" % tuple(vals))
    file.write(" 0 / END OF NON-TRANSFORMER BRANCH DATA, BEGIN TRANSFORMER DATA\n")
    # I, J, K, CKT, CW, CZ, CM, MAG1, MAG2, NMETR, 'NAME', STAT, O1, F1, ..., O4, F4
    # R1-2, X1-2, SBASE1-2
    # WINDV1, NOMV1, ANG1, RATA1, RATB1, RATC1, COD1, CONT1, RMA1, RMI1, VMA1, VMI1, NTP1, TAB1, CR1, CX1
    # WINDV2, NOMV2
    for branch in self.case.branches:
        if not feq(branch.ratio, 0.0):
            vals = []
            vals.append(branch.from_bus._i)
            vals.append(branch.to_bus._i)
            # K, CKT, CW, CZ, CM, MAG1, MAG2, NMETR
            vals.extend([0, "1 ", 1, 1, 1, 0.0, 0.0, 2])
            vals.append(branch.name)
            vals.append(branch.online)
            # O1, F1
            vals.extend([1, 1.0])
            file.write("%6d,%6d,%6d,'%2s',%d,%d,%d,%10.3f,%10.3f,%d," "'%-12s',%d,%4d,%6.4f\n" % tuple(vals))
            file.write("%8.3f,%8.3f,%10.2f\n" % (branch.r, branch.x, self.case.base_mva))
            line3 = []
            line3.append(branch.ratio)
            # Winding-1 RATIO
            line3.append(0.0)
            line3.append(branch.phase_shift)
            line3.append(branch.rate_a)
            line3.append(branch.rate_b)
            line3.append(branch.rate_c)
            # COD1, CONT1, RMA1, RMI1, VMA1, VMI1, NTP1, TAB1, CR1, CX1
            line3.extend([0, 0, 1.1, 0.9, 1.1, 0.9, 33, 0, 0.0, 0.0])
            file.write("%7.5f,%8.3f,%8.3f,%8.2f,%8.2f,%8.2f,%d,%7d,%8.5f," "%8.5f,%8.5f,%8.5f,%4d,%2d,%8.5f,%8.5f\n" % tuple(line3))
            file.write("%7.5f,%8.3f\n" % (1.0, 0.0))
            # Winding-2 RATIO : 1
    # remaining (empty) PSS/E data sections
    file.write(""" 0 / END OF TRANSFORMER DATA, BEGIN AREA INTERCHANGE DATA
 0 / END OF AREA INTERCHANGE DATA, BEGIN TWO-TERMINAL DC DATA
 0 / END OF TWO-TERMINAL DC DATA, BEGIN VSC DC LINE DATA
 0 / END OF VSC DC LINE DATA, BEGIN SWITCHED SHUNT DATA
 0 / END OF SWITCHED SHUNT DATA, BEGIN TRANS. IMP. CORR. TABLE DATA
 0 / END OF TRANS. IMP. CORR. TABLE DATA, BEGIN MULTI-TERMINAL DC LINE DATA
 0 / END OF MULTI-TERMINAL DC LINE DATA, BEGIN MULTI-SECTION LINE DATA
 0 / END OF MULTI-SECTION LINE DATA, BEGIN ZONE DATA
 0 / END OF ZONE DATA, BEGIN INTERAREA TRANSFER DATA
 0 / END OF INTERAREA TRANSFER DATA, BEGIN OWNER DATA
 0 / END OF OWNER DATA, BEGIN FACTS DEVICE DATA
 0 / END OF FACTS DEVICE DATA, END OF CASE DATA
""")
|
def read_moc_json(moc, filename=None, file=None):
    """Read JSON encoded data into a MOC.

    Either a filename, or an open file object can be specified."""
    if file is None:
        with open(filename, 'rb') as f:
            parsed = _read_json(f)
    else:
        parsed = _read_json(file)
    for (order, cells) in parsed.items():
        moc.add(order, cells)
|
def mac_address_table_aging_time_conversational_time_out(self, **kwargs):
    """Build the mac-address-table/aging-time/conversational-time-out config
    subtree and hand the resulting XML tree to the callback.

    :keyword conversational_time_out: text value for the leaf (required)
    :keyword callback: override for ``self._callback`` (optional)
    """
    root = ET.Element("config")
    table = ET.SubElement(root, "mac-address-table",
                          xmlns="urn:brocade.com:mgmt:brocade-mac-address-table")
    aging = ET.SubElement(table, "aging-time")
    timeout_leaf = ET.SubElement(aging, "conversational-time-out")
    timeout_leaf.text = kwargs.pop('conversational_time_out')
    callback = kwargs.pop('callback', self._callback)
    return callback(root)
|
def interleave(args):
    r"""zip followed by flatten

    Args:
        args (tuple): tuple of lists to interleave

    SeeAlso:
        You may actually be better off doing something like this:
            a, b, = args
            ut.flatten(ut.bzip(a, b))
            ut.flatten(ut.bzip([1, 2, 3], ['-']))
            [1, '-', 2, '-', 3, '-']

    Example:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_iter import *  # NOQA
        >>> import utool as ut
        >>> args = ([1, 2, 3, 4, 5], ['A', 'B', 'C', 'D', 'E', 'F', 'G'])
        >>> genresult = interleave(args)
        >>> result = ut.repr4(list(genresult), nl=False)
        >>> print(result)
        [1, 'A', 2, 'B', 3, 'C', 4, 'D', 5, 'E']
    """
    arg_iters = list(map(iter, args))
    try:
        # builtin next() replaces six.next (identical behavior)
        for iter_ in it.cycle(arg_iters):
            yield next(iter_)
    except StopIteration:
        # BUG FIX (PEP 479): letting StopIteration escape a generator body
        # raises RuntimeError on Python 3.7+.  Stop cleanly as soon as any
        # input iterator is exhausted, matching the documented behavior.
        return
|
def get_parameter_value(self, parameter, from_cache=True, timeout=10):
    """Retrieve the current value of the specified parameter.

    :param str parameter: Either a fully-qualified XTCE name or an alias in the
                          format ``NAMESPACE/NAME``.
    :param bool from_cache: If ``False`` this call will block until a
                            fresh value is received on the processor.
                            If ``True`` the server returns the latest
                            value instead (which may be ``None``).
    :param float timeout: The amount of seconds to wait for a fresh value.
                          (ignored if ``from_cache=True``).
    :rtype: .ParameterValue
    """
    # the server expects the timeout in milliseconds
    params = {'fromCache': from_cache, 'timeout': int(timeout * 1000), }
    parameter = adapt_name_for_rest(parameter)
    url = '/processors/{}/{}/parameters{}'.format(self._instance, self._processor, parameter)
    response = self._client.get_proto(url, params=params)
    proto = pvalue_pb2.ParameterValue()
    proto.ParseFromString(response.content)
    # Server returns ParameterValue with only 'id' set if no
    # value existed. Convert this to ``None``.
    if proto.HasField('rawValue') or proto.HasField('engValue'):
        return ParameterValue(proto)
    return None
|
def _combine ( self , x , y ) :
"""Combines two constraints , raising an error if they are not compatible ."""
|
if x is None or y is None :
return x or y
if x != y :
raise ValueError ( 'Incompatible set of constraints provided.' )
return x
|
def simxGetObjectVelocity(clientID, objectHandle, operationMode):
    '''Please have a look at the function description/documentation in the V-REP user manual'''
    # out-parameters filled in by the native call
    linearVel = (ct.c_float * 3)()
    angularVel = (ct.c_float * 3)()
    ret = c_GetObjectVelocity(clientID, objectHandle, linearVel, angularVel, operationMode)
    # convert the ctypes arrays to plain Python lists of floats
    return ret, list(linearVel), list(angularVel)
|
def confd_state_epoll(self, **kwargs):
    """Build the confd-state/epoll config subtree and hand the resulting
    XML tree to the callback.

    :keyword epoll: text value for the epoll leaf (required)
    :keyword callback: override for ``self._callback`` (optional)
    """
    root = ET.Element("config")
    state = ET.SubElement(root, "confd-state",
                          xmlns="http://tail-f.com/yang/confd-monitoring")
    epoll_leaf = ET.SubElement(state, "epoll")
    epoll_leaf.text = kwargs.pop('epoll')
    callback = kwargs.pop('callback', self._callback)
    return callback(root)
|
def is_mainthread(thread=None):
    '''Check if *thread* is the main thread.

    If ``thread`` is not supplied, check the current thread.
    '''
    if thread is None:
        thread = current_thread()
    return isinstance(thread, threading._MainThread)
|
def validar(self, id_vlan):
    """Validates ACL - IPv4 of VLAN from its identifier.

    Assigns 1 to 'acl_valida'.

    :param id_vlan: Identifier of the Vlan. Integer value and greater than zero.

    :return: None

    :raise InvalidParameterError: Vlan identifier is null and invalid.
    :raise VlanNaoExisteError: Vlan not registered.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    if not is_valid_int_param(id_vlan):
        raise InvalidParameterError(u'The identifier of Vlan is invalid or was not informed.')
    url = 'vlan/' + str(id_vlan) + '/validate/' + IP_VERSION.IPv4[0] + '/'
    code, xml = self.submit(None, 'PUT', url)
    # NOTE(review): docstring says ":return: None" but the parsed response
    # is returned here -- confirm which is intended.
    return self.response(code, xml)
|
def getDataMap(self, intype, pos, name, offset=0):
    """Hook defined to look up a name and fetch its slice from a vector.

    Can be overloaded to get it from somewhere else.

    :param intype: "input" or "target", selecting the vector to read
    :param pos: index of the pattern within the vector
    :param name: layer/field name; its ``size`` determines the slice width
    :param offset: starting offset within the pattern (default 0)
    :raises AttributeError: for an unknown ``intype``
    """
    if intype == "input":
        source = self.inputs
    elif intype == "target":
        source = self.targets
    else:
        raise AttributeError("invalid map type '%s'" % intype)
    width = self[name].size
    return source[pos][offset: offset + width]
|
def rotate_texture(texture, rotation, x_offset=0.5, y_offset=0.5):
    """Rotate the given texture by a given angle.

    Args:
        texture (texture): the (x, y) arrays to rotate
        rotation (float): the angle of rotation in degrees
        x_offset (float): the x component of the center of rotation (optional)
        y_offset (float): the y component of the center of rotation (optional)

    Returns:
        texture: the rotated (x, y) arrays.
    """
    xs, ys = texture
    # translate so the rotation center is at the origin (copies keep the
    # caller's arrays untouched)
    xs = xs.copy() - x_offset
    ys = ys.copy() - y_offset
    theta = np.radians(rotation)
    cos_t = np.cos(theta)
    sin_t = np.sin(theta)
    rotated_x = xs * cos_t + ys * sin_t
    rotated_y = ys * cos_t - xs * sin_t
    # translate back to the original center
    return rotated_x + x_offset, rotated_y + y_offset
|
def bucket_lister(manager, bucket_name, prefix=None, marker=None, limit=None):
    """A generator function for listing keys in a bucket.

    Pages through ``manager.list`` results, yielding items one by one and
    advancing the marker until the end of the listing is reached.
    """
    finished = False
    while not finished:
        ret, finished, info = manager.list(bucket_name, prefix=prefix,
                                           limit=limit, marker=marker)
        if ret is None:
            raise QiniuError(info)
        if not finished:
            # continue the next page from where this one stopped
            marker = ret['marker']
        for item in ret['items']:
            yield item
|
def find_all(cls, vid=None, pid=None):
    """Returns all FTDI devices matching our vendor and product IDs.

    :param vid: optional vendor id; used only when ``pid`` is also given
    :param pid: optional product id
    :returns: list of devices
    :raises ImportError: when the pyftdi/pyusb requirement is missing
    :raises: :py:class:`~alarmdecoder.util.CommError`
    """
    if not have_pyftdi:
        raise ImportError('The USBDevice class has been disabled due to missing requirement: pyftdi or pyusb.')
    cls.__devices = []
    # default to the class-level list of known (vid, pid) pairs
    query = cls.PRODUCT_IDS
    if vid and pid:
        query = [(vid, pid)]
    try:
        # nocache forces a fresh USB bus enumeration
        cls.__devices = Ftdi.find_all(query, nocache=True)
    except (usb.core.USBError, FtdiError) as err:
        raise CommError('Error enumerating AD2USB devices: {0}'.format(str(err)), err)
    return cls.__devices
|
def send_keyevents(self, keyevent: int) -> None:
    '''Simulate typing keyevents via `adb shell input keyevent`.'''
    adb_args = ('-s', self.device_sn, 'shell', 'input', 'keyevent', str(keyevent))
    self._execute(*adb_args)
|
def get_csig(self, calc=None):
    """Because we're a Python value node and don't have a real
    timestamp, we get to ignore the calculator and just use the
    value contents.

    :param calc: ignored; kept for signature compatibility with other nodes
    :return: the content signature, cached on the node's NodeInfo
    """
    try:
        # fast path: signature already cached on the node info
        return self.ninfo.csig
    except AttributeError:
        pass
    contents = self.get_contents()
    # cache the signature for subsequent calls
    self.get_ninfo().csig = contents
    return contents
|
def unregister(self, observers):
    u"""Concrete method of Subject.unregister().

    Unregister observers as an argument to self.observers.

    :param observers: a list/tuple of observers, or a single
        ``base.Observer`` instance
    :raises ValueError: when ``observers`` is of an unsupported type
    """
    # BUG FIX: all three str.format() calls below previously passed a
    # positional argument for the *named* placeholder `{observer}`, which
    # raised KeyError at runtime; they now bind the keyword explicitly.
    if isinstance(observers, (list, tuple)):
        for observer in observers:
            try:
                index = self._observers.index(observer)
                self._observers.remove(self._observers[index])
            except ValueError:
                # logging
                print('{observer} not in list...'.format(observer=observer))
    elif isinstance(observers, base.Observer):
        try:
            index = self._observers.index(observers)
            self._observers.remove(self._observers[index])
        except ValueError:
            # logging
            print('{observer} not in list...'.format(observer=observers))
    else:
        err_message = ('ConfigReader.register support'
                       'ListType, TupleType and {observer} Object.'
                       ''.format(observer=base.Observer.__name__))
        raise ValueError(err_message)
|
def _generate_author_query(self, author_name):
    """Generate a nested Elasticsearch query specialized for authors.

    The ``match`` clause alone is generic enough to return many results,
    so a ``filter`` clause on minimal name variations truncates them,
    imitating legacy's more "exact" behaviour: searching ``Smith, John``
    should not return papers of ``Smith, Bob``.
    """
    variations = []
    for variation in generate_minimal_name_variations(author_name):
        variations.append(variation.lower())

    author_field = ElasticSearchVisitor.KEYWORD_TO_ES_FIELDNAME['author']
    variations_field = ElasticSearchVisitor.AUTHORS_NAME_VARIATIONS_FIELD

    if author_name_contains_fullnames(author_name):
        # Query holds full names (e.g. ``Mele, Salvatore`` rather than
        # ``Mele, S`` or ``Mele``): pair every variation with an
        # AND-matched full-name query so that authors sharing only the
        # non-lastname prefix (e.g. ``Mele, Samuele``) are filtered out.
        author_filters = []
        for term_variation, match_variation in product(variations, variations):
            author_filters.append({
                'bool': {
                    'must': [
                        {'term': {variations_field: term_variation}},
                        generate_match_query(author_field, match_variation, with_operator_and=True),
                    ]
                }
            })
    else:
        # Initials or single-lastname search: filter only on variations.
        author_filters = [
            {'term': {variations_field: variation}}
            for variation in variations
        ]

    query = {
        'bool': {
            'filter': {'bool': {'should': author_filters}},
            'must': {'match': {author_field: author_name}},
        }
    }
    return generate_nested_query(ElasticSearchVisitor.AUTHORS_NESTED_QUERY_PATH, query)
|
def _setup_source_conn(self, source_conn_id, source_bucket_name=None):
    """Resolve and store the source connection for *source_conn_id*.

    Validates that the connection belongs to a supported type; for s3
    connections it also records the bucket name.

    :param source_conn_id: Airflow connection id to resolve.
    :param source_bucket_name: bucket to use when the connection is s3.
    :raises AttributeError: when an s3 connection has no bucket, or the
        resolved hook is neither a DbApiHook nor an S3Hook.
    """
    self.source_conn = BaseHook.get_hook(source_conn_id)
    self.source_conn_id = source_conn_id
    # Workaround: get_hook silently returns None for s3 connections, so
    # inspect the raw connection and construct the S3Hook explicitly.
    # See https://issues.apache.org/jira/browse/AIRFLOW-2316 for details.
    raw_connection = BaseHook._get_connection_from_env(source_conn_id)
    self.log.info(raw_connection.extra_dejson)
    if raw_connection.conn_type == 's3':
        self.log.info("Setting up s3 connection {0}".format(source_conn_id))
        self.source_conn = S3Hook(aws_conn_id=source_conn_id)
        # End Workaround
        if source_bucket_name is None:
            raise AttributeError("Missing source bucket for s3 connection")
        self.source_bucket_name = source_bucket_name
    if not isinstance(self.source_conn, (DbApiHook, S3Hook)):
        raise AttributeError("Only s3_csv, local and sql connection types are allowed, not {0}".format(type(self.source_conn)))
|
def delete ( self , event ) :
    """Delete the stored event and abort its Celery task if still running.

    :param event: event whose ``id`` is also used as the Celery task id.
    """

    # Remove the event record via the parent receiver first ...
    super ( CeleryReceiver , self ) . delete ( event )
    # ... then revoke the matching Celery task, terminating it if it is
    # currently executing (a no-op for tasks that already finished).
    AsyncResult ( event . id ) . revoke ( terminate = True )
|
def get_view_name(self):
    """Return the view name, as used in OPTIONS responses and in the
    browsable API."""
    # Delegate to the configured naming function; ``suffix`` is optional.
    name_func = self.settings.VIEW_NAME_FUNCTION
    suffix = getattr(self, 'suffix', None)
    return name_func(self.__class__, suffix)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.