signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
---|---|
def _set_xfpe(self, v, load=False):
    """Setter method for xfpe, mapped from the YANG variable
    /brocade_interface_ext_rpc/get_media_detail/output/interface/xfpe (container).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_xfpe is considered a private method. Backends looking to
    populate this variable should do so by calling thisObj._set_xfpe()
    directly.

    :param v: new value for the container (will be coerced via YANGDynClass)
    :param load: True when called while loading a config (unused here)
    :raises ValueError: if `v` cannot be coerced to the expected container type
    """
    # If the value carries its own YANG unified-type wrapper, coerce through it first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Wrap the value in the generated YANG container class; any type
        # mismatch surfaces as TypeError/ValueError and is re-raised below
        # with a structured error payload.
        t = YANGDynClass(v, base=xfpe.xfpe, is_container='container', presence=False, yang_name="xfpe", rest_name="xfpe", parent=self, choice=(u'interface-identifier', u'xfpe'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """xfpe must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=xfpe.xfpe, is_container='container', presence=False, yang_name="xfpe", rest_name="xfpe", parent=self, choice=(u'interface-identifier', u'xfpe'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='container', is_config=True)""",
        })
    self.__xfpe = t
    # Notify the parent object (if it supports change notification).
    if hasattr(self, '_set'):
        self._set()
|
def _diff(self, cursor, tokenizer, output_fh):
    """Return `output_fh` populated with reduced diff results.

    The raw rows from `cursor` are first spooled into a temporary CSV
    file before being reduced, so that the results are never held in
    memory twice.

    :param cursor: database cursor containing raw diff data
    :type cursor: `sqlite3.Cursor`
    :param tokenizer: tokenizer for the n-grams
    :type tokenizer: `Tokenizer`
    :type output_fh: file-like object
    :rtype: file-like object
    """
    spool_path = self._csv_temp(cursor, constants.QUERY_FIELDNAMES)
    output_fh = self._reduce_diff_results(spool_path, tokenizer, output_fh)
    # Best-effort cleanup of the spool file; a failure is logged but not fatal.
    try:
        os.remove(spool_path)
    except OSError as exc:
        self._logger.error(
            'Failed to remove temporary file containing '
            'unreduced results: {}'.format(exc))
    return output_fh
|
def _fill_diagonals ( m , diag_indices ) :
"""Fills diagonals of ` nsites ` matrices in ` m ` so rows sum to 0."""
|
assert m . ndim == 3 , "M must have 3 dimensions"
assert m . shape [ 1 ] == m . shape [ 2 ] , "M must contain square matrices"
for r in range ( m . shape [ 0 ] ) :
scipy . fill_diagonal ( m [ r ] , 0 )
m [ r ] [ diag_indices ] -= scipy . sum ( m [ r ] , axis = 1 )
|
def from_memdb_file(path):
    """Create a sourcemap view from a MemDB file at the given path."""
    encoded_path = to_bytes(path)
    view_ptr = rustcall(_lib.lsm_view_from_memdb_file, encoded_path)
    return View._from_ptr(view_ptr)
|
def get_current_state(self):
    """Return the current state for the user session, or None if the
    session does not exist."""
    try:
        return self.session_machines.current_state(session.sessionId)
    except UninitializedStateMachine as err:
        # No state machine for this session; log and fall through to None.
        logger.error(err)
|
def load(source, **kwargs):
    """Deserialize and load a model from a pickle file.

    Example:
        end_model = EndModel.load("my_end_model.pkl")
        end_model.score(...)
    """
    with open(source, "rb") as fh:
        model = torch.load(fh, **kwargs)
    return model
|
def run(self, node):
    """Apply this transformation (and its dependencies), then fix the
    source locations of any newly created AST nodes."""
    result = super(Transformation, self).run(node)
    if self.update:
        # New nodes lack line/column info; fill it in and drop stale caches.
        ast.fix_missing_locations(result)
        self.passmanager._cache.clear()
    return result
|
def transactions(self, cursor=None, order='asc', limit=10, sse=False):
    """Retrieve the transactions JSON for the account associated with
    this :class:`Address` from the instance's Horizon server.

    :param cursor: A paging token, specifying where to start returning
        records from. When streaming, this can be set to "now" to stream
        objects created since your request time.
    :type cursor: int, str
    :param str order: The order in which to return rows, "asc" or "desc".
    :param int limit: Maximum number of records to return.
    :param bool sse: Use server-side events for streaming responses.
    """
    return self.horizon.account_transactions(
        self.address,
        cursor=cursor,
        order=order,
        limit=limit,
        sse=sse,
    )
|
async def close_interface(self, conn_id, interface):
    """Close an interface on this IOTile device.

    See :meth:`AbstractDeviceAdapter.close_interface`.
    """
    self._ensure_connection(conn_id, True)
    conn_string = self._get_property(conn_id, "connection_string")
    payload = dict(interface=interface, connection_string=conn_string)
    await self._send_command(
        OPERATIONS.CLOSE_INTERFACE, payload, COMMANDS.CloseInterfaceResponse)
|
def estimate_entropy(X, epsilon=None):
    r"""Estimate a dataset's Shannon entropy.

    This function can take datasets of mixed discrete and continuous
    features, and uses a set of heuristics to determine which functions
    to apply to each.

    Because this function is a subroutine in a mutual information estimator,
    we employ the Kozachenko Estimator [1] for continuous features when this
    function is _not_ used for mutual information and an adaptation of the
    Kraskov Estimator [2] when it is.

    Let X be made of continuous features c and discrete features d.
    To deal with both continuous and discrete features, we use the
    following reworking of entropy:

        $H(X) = H(c, d) = \sum_{x \in d} p(x) \times H(c(x)) + H(d)$

    where c(x) is a dataset that represents the rows of the continuous dataset
    in the same row as a discrete column with value x in the original dataset.

    Args:
        X (array-like): An array-like (np arr, pandas df, etc.) with shape
            (n_samples, n_features) or (n_samples,)
        epsilon (array-like): An array with shape (n_samples, 1) that is
            the epsilon used in the Kraskov Estimator. Represents the
            chebyshev distance from an element to its k-th nearest neighbor
            in the full dataset.

    Returns:
        float: the entropy of X. If the dataset is fully discrete, an exact
            calculation is done. Otherwise, if epsilon is not provided this
            is the Kozachenko Estimator of the dataset's entropy; if epsilon
            is provided this is a partial estimation of the Kraskov entropy
            estimator (the bias cancels when computing mutual information).

    References:
    .. [1] A. Kraskov, H. Stogbauer and P. Grassberger, "Estimating mutual
           information". Phys. Rev. E 69, 2004.
    .. [2] L. F. Kozachenko, N. N. Leonenko, "Sample Estimate of the Entropy
           of a Random Vector", Probl. Peredachi Inf., 23:2 (1987), 9-16.
    """
    X = asarray2d(X)
    n_samples, n_features = X.shape
    # Degenerate dataset: no features means no information content.
    if n_features < 1:
        return 0
    disc_mask = _get_discrete_columns(X)
    cont_mask = ~disc_mask
    # If our dataset is fully discrete/continuous, do something easier
    if np.all(disc_mask):
        return calculate_disc_entropy(X)
    elif np.all(cont_mask):
        return estimate_cont_entropy(X, epsilon)
    # Separate the dataset into discrete and continuous datasets d, c
    disc_features = asarray2d(X[:, disc_mask])
    cont_features = asarray2d(X[:, cont_mask])
    entropy = 0
    # Each unique row of the discrete part defines one conditioning value x.
    uniques, counts = np.unique(disc_features, axis=0, return_counts=True)
    empirical_p = counts / n_samples
    # $\sum_{x \in d} p(x) \times H(c(x))$
    for i in range(counts.size):
        # Rows whose discrete part equals this unique combination.
        unique_mask = np.all(disc_features == uniques[i], axis=1)
        selected_cont_samples = cont_features[unique_mask, :]
        # Subset epsilon to the same rows so neighbor distances stay aligned.
        if epsilon is None:
            selected_epsilon = None
        else:
            selected_epsilon = epsilon[unique_mask, :]
        conditional_cont_entropy = estimate_cont_entropy(
            selected_cont_samples, selected_epsilon)
        entropy += empirical_p[i] * conditional_cont_entropy
    # H(d)
    entropy += calculate_disc_entropy(disc_features)
    if epsilon is None:
        # The Kozachenko estimate can go slightly negative from noise;
        # clamp only in the standalone (non-Kraskov) case.
        entropy = max(0, entropy)
    return entropy
|
def delete_refund(self, refund_id):
    """Delete an existing refund transaction."""
    endpoint = 'transactions/refunds/' + str(refund_id)
    return self.responder(self._delete(endpoint))
|
def is_zip_file(models):
    r'''Return True if *models* names a single '.zip' file.

    The path list is considered a zip file when:
    - its length is 1, and
    - that single entry's extension is '.zip'

    :param models: list of file paths
    :return: bool
    '''
    # Check the length first: the original indexed models[0] unconditionally,
    # which raised IndexError on an empty list.
    if len(models) != 1:
        return False
    return os.path.splitext(models[0])[1] == '.zip'
|
def is_feature_enabled(self, feature_key, user_id, attributes=None):
    """Returns true if the feature is enabled for the given user.

    Args:
        feature_key: The key of the feature for which we are determining
            if it is enabled or not for the given user.
        user_id: ID for user.
        attributes: Dict representing user attributes.

    Returns:
        True if the feature is enabled for the user. False otherwise.
    """
    # Guard clauses: an invalid datafile or invalid inputs always yield False.
    if not self.is_valid:
        self.logger.error(enums.Errors.INVALID_DATAFILE.format('is_feature_enabled'))
        return False
    if not validator.is_non_empty_string(feature_key):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('feature_key'))
        return False
    if not isinstance(user_id, string_types):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id'))
        return False
    if not self._validate_user_inputs(attributes):
        return False
    # Unknown feature key: treat as disabled.
    feature = self.config.get_feature_from_key(feature_key)
    if not feature:
        return False
    feature_enabled = False
    source_info = {}
    decision = self.decision_service.get_variation_for_feature(feature, user_id, attributes)
    is_source_experiment = decision.source == enums.DecisionSources.FEATURE_TEST
    if decision.variation:
        if decision.variation.featureEnabled is True:
            feature_enabled = True
        # Send event if Decision came from an experiment.
        if is_source_experiment:
            source_info = {'experiment_key': decision.experiment.key, 'variation_key': decision.variation.key}
            self._send_impression_event(decision.experiment, decision.variation, user_id, attributes)
    if feature_enabled:
        self.logger.info('Feature "%s" is enabled for user "%s".' % (feature_key, user_id))
    else:
        self.logger.info('Feature "%s" is not enabled for user "%s".' % (feature_key, user_id))
    # Decision listeners are notified regardless of the outcome.
    self.notification_center.send_notifications(enums.NotificationTypes.DECISION, enums.DecisionNotificationTypes.FEATURE, user_id, attributes or {}, {'feature_key': feature_key, 'feature_enabled': feature_enabled, 'source': decision.source, 'source_info': source_info})
    return feature_enabled
|
def header(args):
    """%prog header map conversion_table

    Rename lines in the map header. The mapping of old names to new names are
    stored in two-column `conversion_table`.
    """
    from jcvi.formats.base import DictFile

    p = OptionParser(header.__doc__)
    p.add_option("--prefix", default="",
                 help="Prepend text to line number [default: %default]")
    p.add_option("--ids", help="Write ids to file [default: %default]")
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    mstmap, conversion_table = args
    data = MSTMap(mstmap)
    hd = data.header
    conversion = DictFile(conversion_table)
    # Fall back to the original name when there is no conversion entry.
    newhd = [opts.prefix + conversion.get(x, x) for x in hd]
    print("\t".join(hd))
    print("--->")
    print("\t".join(newhd))
    ids = opts.ids
    if ids:
        # Use a context manager so the file is closed even if printing
        # fails (the original opened/closed the handle manually).
        with open(ids, "w") as fw:
            print("\n".join(newhd), file=fw)
|
def query_clients(self):
    """Query the connected clients.

    Returns:
        A DataFrame of client info with the 'commandLine' and 'processId'
        columns dropped; an empty DataFrame with the expected columns when
        no clients are connected; or a ``(False, exception)`` tuple if the
        call fails.
    """
    columns = ['id', 'name', 'windowsTitle', 'accountInfo', 'status']
    try:
        data = self.call("clients", {'client': 'None'})
        if not len(data) > 0:
            return pd.DataFrame(None, columns=columns)
        return pd.DataFrame(data).drop(['commandLine', 'processId'], axis=1)
    except Exception as exc:
        # NOTE(review): the (False, exc) tuple is inconsistent with the
        # DataFrame returns above; callers must handle both shapes.
        return False, exc
|
def descriptions(self):
    """Human readable word descriptions."""
    results = []
    for postag, form in zip(self.postags, self.forms):
        desc = VERB_TYPES.get(form, '')
        if not desc:
            # Not a known verb form; try "<plurality> <case>" lookup instead.
            parts = form.split(' ')
            if len(parts) == 2:
                pieces = []
                plur_desc = PLURALITY.get(parts[0], None)
                case_desc = CASES.get(parts[1], None)
                if plur_desc is not None:
                    pieces.append(plur_desc)
                if case_desc is not None:
                    pieces.append(case_desc)
                desc = ' '.join(pieces)
        results.append(desc)
    return results
|
def update_plot_limits(ax, white_space):
    """Set the limit options of a matplotlib plot.

    Args:
        ax: matplotlib axes
        white_space (float): whitespace added to surround the tight limit
            of the data

    Note: this relies on ax.dataLim (in 2d) and ax.[xy,zz]_dataLim being
    set in 3d.
    """
    def padded(lo, span):
        # Expand a (start, extent) pair by white_space on both sides.
        return lo - white_space, lo + span + white_space

    if hasattr(ax, 'zz_dataLim'):
        xy = ax.xy_dataLim.bounds
        ax.set_xlim(*padded(xy[0], xy[2]))
        ax.set_ylim(*padded(xy[1], xy[3]))
        zz = ax.zz_dataLim.bounds
        ax.set_zlim(*padded(zz[0], zz[2]))
    else:
        bounds = ax.dataLim.bounds
        assert not any(map(np.isinf, bounds)), 'Cannot set bounds if dataLim has infinite elements'
        ax.set_xlim(*padded(bounds[0], bounds[2]))
        ax.set_ylim(*padded(bounds[1], bounds[3]))
|
def convert_data_element_to_data_and_metadata_1(data_element) -> DataAndMetadata.DataAndMetadata:
    """Convert a data element to xdata. No data copying occurs.

    The data element can have the following keys:
        data (required)
        is_sequence, collection_dimension_count, datum_dimension_count
            (optional description of the data)
        spatial_calibrations (optional list of spatial calibration dicts:
            scale, offset, units)
        intensity_calibration (optional intensity calibration dict:
            scale, offset, units)
        metadata (optional)
        properties (get stored into metadata.hardware_source)
        one of either timestamp or datetime_modified

    If datetime_modified (dst, tz) is present it is converted and used as
    the timestamp; the timezone then gets stored into
    metadata.description.timezone.
    """
    # data. takes ownership.
    data = data_element["data"]
    dimensional_shape = Image.dimensional_shape_from_data(data)
    is_sequence = data_element.get("is_sequence", False)
    dimension_count = len(Image.dimensional_shape_from_data(data))
    # A sequence dimension does not count toward collection/datum dimensions.
    adjusted_dimension_count = dimension_count - (1 if is_sequence else 0)
    # Default: 3- or 4-dimensional data (after sequence adjustment) is
    # treated as a 2d collection; anything else as a plain datum.
    collection_dimension_count = data_element.get("collection_dimension_count", 2 if adjusted_dimension_count in (3, 4) else 0)
    datum_dimension_count = data_element.get("datum_dimension_count", adjusted_dimension_count - collection_dimension_count)
    data_descriptor = DataAndMetadata.DataDescriptor(is_sequence, collection_dimension_count, datum_dimension_count)
    # dimensional calibrations
    dimensional_calibrations = None
    if "spatial_calibrations" in data_element:
        dimensional_calibrations_list = data_element.get("spatial_calibrations")
        # Only use the calibrations when one is supplied per dimension.
        if len(dimensional_calibrations_list) == len(dimensional_shape):
            dimensional_calibrations = list()
            for dimension_calibration in dimensional_calibrations_list:
                offset = float(dimension_calibration.get("offset", 0.0))
                scale = float(dimension_calibration.get("scale", 1.0))
                units = dimension_calibration.get("units", "")
                units = str(units) if units is not None else str()
                # A zero scale is invalid; substitute a default calibration.
                if scale != 0.0:
                    dimensional_calibrations.append(Calibration.Calibration(offset, scale, units))
                else:
                    dimensional_calibrations.append(Calibration.Calibration())
    # intensity calibration
    intensity_calibration = None
    if "intensity_calibration" in data_element:
        intensity_calibration_dict = data_element.get("intensity_calibration")
        offset = float(intensity_calibration_dict.get("offset", 0.0))
        scale = float(intensity_calibration_dict.get("scale", 1.0))
        units = intensity_calibration_dict.get("units", "")
        units = str(units) if units is not None else str()
        if scale != 0.0:
            intensity_calibration = Calibration.Calibration(offset, scale, units)
    # properties (general tags)
    metadata = dict()
    if "metadata" in data_element:
        metadata.update(Utility.clean_dict(data_element.get("metadata")))
    if "properties" in data_element and data_element["properties"]:
        hardware_source_metadata = metadata.setdefault("hardware_source", dict())
        hardware_source_metadata.update(Utility.clean_dict(data_element.get("properties")))
    # dates are _local_ time and must use this specific ISO 8601 format. 2013-11-17T08:43:21.389391
    # time zones are offsets (east of UTC) in the following format "+HHMM" or "-HHMM"
    # daylight savings times are time offset (east of UTC) in format "+MM" or "-MM"
    # timezone is for conversion and is the Olson timezone string.
    # datetime.datetime.strptime(datetime.datetime.isoformat(datetime.datetime.now()), "%Y-%m-%dT%H:%M:%S.%f")
    # datetime_modified, datetime_modified_tz, datetime_modified_dst, datetime_modified_tzname is the time at which this image was modified.
    # datetime_original, datetime_original_tz, datetime_original_dst, datetime_original_tzname is the time at which this image was created.
    timestamp = data_element.get("timestamp", datetime.datetime.utcnow())
    datetime_item = data_element.get("datetime_modified", Utility.get_datetime_item_from_utc_datetime(timestamp))
    local_datetime = Utility.get_datetime_from_datetime_item(datetime_item)
    dst_value = datetime_item.get("dst", "+00")
    tz_value = datetime_item.get("tz", "+0000")
    timezone = datetime_item.get("timezone")
    time_zone = {"dst": dst_value, "tz": tz_value}
    if timezone is not None:
        time_zone["timezone"] = timezone
    # note: dst is informational only; tz already include dst
    # Parse "+HHMM"/"-HHMM" into signed minutes east of UTC.
    tz_adjust = (int(tz_value[1:3]) * 60 + int(tz_value[3:5])) * (-1 if tz_value[0] == '-' else 1)
    utc_datetime = local_datetime - datetime.timedelta(minutes=tz_adjust)
    # tz_adjust already contains dst_adjust
    timestamp = utc_datetime
    return DataAndMetadata.new_data_and_metadata(data, intensity_calibration=intensity_calibration, dimensional_calibrations=dimensional_calibrations, metadata=metadata, timestamp=timestamp, data_descriptor=data_descriptor, timezone=timezone, timezone_offset=tz_value)
|
def parse_mapping(value):
    """Parse the given VCF header line mapping.

    Such a mapping consists of "key=value" pairs, separated by commas and
    wrapped into angular brackets ("<...>"). Strings are usually quoted;
    for certain known keys, exceptions are made, depending on the tag key.
    This, however, only gets important when serializing.

    :raises: :py:class:`vcfpy.exceptions.InvalidHeaderException` if
        there was a problem parsing the file
    """
    if not (value.startswith("<") and value.endswith(">")):
        raise exceptions.InvalidHeaderException("Header mapping value was not wrapped in angular brackets")
    # Split the comma-separated list into pairs, ignoring commas inside quotes.
    pairs = split_quoted_string(value[1:-1], delim=",", quote='"')
    # Convert each pair into a key/value entry; bare flags map to True.
    mapping = OrderedDict()
    for pair in pairs:
        if "=" not in pair:
            mapping[pair] = True
            continue
        key, val = split_mapping(pair)
        if val.startswith('"') and val.endswith('"'):
            # Quoted string: unquote via literal evaluation.
            val = ast.literal_eval(val)
        elif val.startswith("[") and val.endswith("]"):
            # Bracketed list: split on commas and strip whitespace.
            val = [elem.strip() for elem in val[1:-1].split(",")]
        mapping[key] = val
    return mapping
|
def get_all_templates(self, params=None):
    """Get all templates.

    This iterates over all pages until it gets all elements, so if the
    rate limit is exceeded it will throw an exception and you will get
    nothing.

    :param params: search params
    :return: list
    """
    search_params = params if params else {}
    return self._iterate_through_pages(
        self.get_templates_per_page,
        resource=TEMPLATES,
        params=search_params,
    )
|
def load_class(alias):
    """Find the class registered to the alias.

    The search is done in order:
      1. Checks if the class name has been registered via L{register_class}
         or L{register_package}.
      2. Checks all functions registered via L{register_class_loader}.
      3. Attempts to load the class via standard module loading techniques.

    @param alias: The class name.
    @type alias: C{string}
    @raise UnknownClassAlias: The C{alias} was not found.
    @raise TypeError: Expecting class type or L{ClassAlias} from loader.
    @return: Class registered to the alias.
    @rtype: C{classobj}
    """
    # Try the CLASS_CACHE first
    try:
        return CLASS_CACHE[alias]
    except KeyError:
        pass
    for loader in CLASS_LOADERS:
        klass = loader(alias)
        if klass is None:
            continue
        if isinstance(klass, python.class_types):
            return register_class(klass, alias)
        elif isinstance(klass, ClassAlias):
            CLASS_CACHE[klass.alias] = klass
            CLASS_CACHE[klass.klass] = klass
            return klass
        # BUGFIX: this raise belongs *inside* the loop (a loader returned
        # something that is neither a class nor a ClassAlias). Previously it
        # sat after the loop, firing unconditionally and making the module
        # loading fallback below unreachable.
        raise TypeError("Expecting class object or ClassAlias from loader")
    # Fall back to standard module loading techniques.
    mod_class = alias.split('.')
    if mod_class:
        module = '.'.join(mod_class[:-1])
        klass = mod_class[-1]
        try:
            module = util.get_module(module)
        except (ImportError, AttributeError):
            pass
        else:
            klass = getattr(module, klass)
            if isinstance(klass, python.class_types):
                return register_class(klass, alias)
            elif isinstance(klass, ClassAlias):
                CLASS_CACHE[klass.alias] = klass
                CLASS_CACHE[klass.klass] = klass
                return klass.klass
            else:
                raise TypeError("Expecting class type or ClassAlias from loader")
    # All available methods for finding the class have been exhausted
    raise UnknownClassAlias("Unknown alias for %r" % (alias,))
|
def load_stdlib():
    '''Scan sys.path for standard library modules.'''
    # Cached from a previous call.
    if _stdlib:
        return _stdlib
    prefixes = tuple({
        os.path.abspath(p)
        for p in (
            sys.prefix,
            getattr(sys, 'real_prefix', sys.prefix),
            getattr(sys, 'base_prefix', sys.prefix),
        )
    })
    for entry in sys.path:
        if entry:
            _import_paths.append(os.path.abspath(entry))
    # Keep only interpreter-owned paths, excluding site-packages.
    stdpaths = tuple({
        p for p in _import_paths
        if p.startswith(prefixes) and 'site-packages' not in p
    })
    _stdlib.update(sys.builtin_module_names)
    for stdpath in stdpaths:
        if not os.path.isdir(stdpath):
            continue
        for item in os.listdir(stdpath):
            if item.startswith('.') or item == 'site-packages':
                continue
            full = os.path.join(stdpath, item)
            # Accept package directories and .py/.so module files only.
            if not os.path.isdir(full) and not item.endswith(('.py', '.so')):
                continue
            _stdlib.add(item.split('.', 1)[0])
    return _stdlib
|
def _generate_list_skippers(self):
    """Generate the list of skippers of the page.

    :return: The list of skippers of the page.
    :rtype: hatemile.util.html.htmldomelement.HTMLDOMElement
    """
    container = self.parser.find(
        '#' + AccessibleNavigationImplementation.ID_CONTAINER_SKIPPERS
    ).first_result()
    skipper_list = None
    if container is None:
        # No skipper container yet: create one at the top of <body>.
        body = self.parser.find('body').first_result()
        if body is not None:
            container = self.parser.create_element('div')
            container.set_attribute(
                'id', AccessibleNavigationImplementation.ID_CONTAINER_SKIPPERS)
            body.prepend_element(container)
    if container is not None:
        skipper_list = self.parser.find(container).find_children('ul').first_result()
        if skipper_list is None:
            skipper_list = self.parser.create_element('ul')
            container.append_element(skipper_list)
    self.list_skippers_added = True
    return skipper_list
|
def by_credentials(cls, session, login, password):
    """Get a user from given credentials.

    :param session: SQLAlchemy session
    :type session: :class:`sqlalchemy.Session`
    :param login: username
    :type login: unicode
    :param password: user password
    :type password: unicode
    :return: associated user, or None when the login is unknown or the
        password does not match
    :rtype: :class:`pyshop.models.User`
    """
    user = cls.by_login(session, login, local=True)
    # Implicitly returns None for an unknown login or a failed check.
    if user and crypt.check(user.password, password):
        return user
|
def parse_args(options=None, *args, **kwds):
    """Parser of arguments.

    dict options {
        int min_items: Min of required items to fold one tuple. (default: 1)
        int max_items: Count of items in one tuple. Last `max_items - min_items`
            items is by default set to None. (default: 1)
        bool allow_dict: Flag allowing dictionary as first (and only one)
            argument or dictionary as **kwds. (default: False)
        bool allow_list: Flag allowing list as first (and only one) argument.
            (default: False)
    }

    Examples:
        calling with min_items=1, max_items=2, allow_dict=False:
            arg1, arg2 => ((arg1, None), (arg2, None))
            (arg1a, arg1b), arg2 => ((arg1a, arg1b), (arg2, None))
            arg1=val1 => FAIL
            {key1: val1} => FAIL
        calling with min_items=2, max_items=3, allow_dict=True:
            arg1, arg2 => ((arg1, arg2, None),)
            arg1, arg2, arg3 => ((arg1, arg2, arg3),)
            (arg1a, arg1b, arg1c) => ((arg1a, arg1b, arg1c),)
            arg1=val1, arg2=val2 => ((arg1, val1, None), (arg2, val2, None))
            {key1: val1, key2: val2} => ((key1, val1, None), (key2, val2, None))
            (arg1a, arg1b), arg2a, arg2b => FAIL
    """
    # BUGFIX: the default was the mutable literal `options={}`, shared across
    # all calls; use None and substitute a fresh dict per call instead.
    if options is None:
        options = {}
    parser_options = ParserOptions(options)
    parser_input = ParserInput(args, kwds)
    parser = Parser(parser_options, parser_input)
    parser.parse()
    return parser.output_data
|
def request(self, method, url, body=None, headers=None, *args, **kwargs):
    '''Persist the request metadata in self._vcr_request.'''
    self._vcr_request = Request(
        method=method,
        uri=self._uri(url),
        body=body,
        headers=headers or {},
    )
    log.debug('Got {}'.format(self._vcr_request))
    # The request may not actually be finished at this point, so the real
    # request is deferred until getresponse(). That allows the entire
    # response to be compared against what exists in the cassette.
    self._sock = VCRFakeSocket()
|
def setViewMode(self, state=True):
    """Start or stop the view mode used for moving around the scene."""
    # No-op when the mode is unchanged.
    if self._viewMode == state:
        return
    self._viewMode = state
    drag_mode = (self._mainView.ScrollHandDrag if state
                 else self._mainView.RubberBandDrag)
    self._mainView.setDragMode(drag_mode)
    self.emitViewModeChanged()
|
def _insert_breathe_configs ( c , * , project_name , doxygen_xml_dirname ) :
"""Add breathe extension configurations to the state ."""
|
if doxygen_xml_dirname is not None :
c [ 'breathe_projects' ] = { project_name : doxygen_xml_dirname }
c [ 'breathe_default_project' ] = project_name
return c
|
def _or_join ( self , terms ) :
"""Joins terms using OR operator .
Args :
terms ( list ) : terms to join
Examples :
self . _ or _ join ( [ ' term1 ' , ' term2 ' ] ) - > ' term1 | term2'
Returns :
str"""
|
from six import text_type
if isinstance ( terms , ( tuple , list ) ) :
if len ( terms ) > 1 :
return ' | ' . join ( text_type ( t ) for t in terms )
else :
return terms [ 0 ]
else :
return terms
|
def _set_fcoeport(self, v, load=False):
    """Setter method for fcoeport, mapped from the YANG variable
    /interface/port_channel/fcoeport (container).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_fcoeport is considered a private method. Backends looking to
    populate this variable should do so by calling thisObj._set_fcoeport()
    directly.

    :param v: new value for the container (will be coerced via YANGDynClass)
    :param load: True when called while loading a config (unused here)
    :raises ValueError: if `v` cannot be coerced to the expected container type
    """
    # If the value carries its own YANG unified-type wrapper, coerce through it first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Wrap the value in the generated YANG container class; any type
        # mismatch surfaces as TypeError/ValueError and is re-raised below
        # with a structured error payload.
        t = YANGDynClass(v, base=fcoeport.fcoeport, is_container='container', presence=False, yang_name="fcoeport", rest_name="fcoeport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the LAG to enable FCoE', u'callpoint': u'fcoeport_attr_lag_cp', u'sort-priority': u'138', u'display-when': u'(/vcsmode/vcs-mode = "true")'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """fcoeport must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=fcoeport.fcoeport, is_container='container', presence=False, yang_name="fcoeport", rest_name="fcoeport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the LAG to enable FCoE', u'callpoint': u'fcoeport_attr_lag_cp', u'sort-priority': u'138', u'display-when': u'(/vcsmode/vcs-mode = "true")'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True)""",
        })
    self.__fcoeport = t
    # Notify the parent object (if it supports change notification).
    if hasattr(self, '_set'):
        self._set()
|
def info(name):
    '''Get information about a service on the system

    Args:
        name (str): The name of the service. This is not the display name. Use
            ``get_service_name`` to find the service name.

    Returns:
        dict: A dictionary containing information about the service.

    CLI Example:

    .. code-block:: bash

        salt '*' service.info spooler
    '''
    # Connect to the Service Control Manager.
    try:
        handle_scm = win32service.OpenSCManager(None, None, win32service.SC_MANAGER_CONNECT)
    except pywintypes.error as exc:
        raise CommandExecutionError('Failed to connect to the SCM: {0}'.format(exc.strerror))
    # Open the service with the access rights needed for the queries below.
    try:
        handle_svc = win32service.OpenService(handle_scm, name, win32service.SERVICE_ENUMERATE_DEPENDENTS | win32service.SERVICE_INTERROGATE | win32service.SERVICE_QUERY_CONFIG | win32service.SERVICE_QUERY_STATUS)
    except pywintypes.error as exc:
        raise CommandExecutionError('Failed To Open {0}: {1}'.format(name, exc.strerror))
    # Query config/status; handles are always closed in the finally block.
    try:
        config_info = win32service.QueryServiceConfig(handle_svc)
        status_info = win32service.QueryServiceStatusEx(handle_svc)
        try:
            description = win32service.QueryServiceConfig2(handle_svc, win32service.SERVICE_CONFIG_DESCRIPTION)
        except pywintypes.error:
            description = 'Failed to get description'
        delayed_start = win32service.QueryServiceConfig2(handle_svc, win32service.SERVICE_CONFIG_DELAYED_AUTO_START_INFO)
    finally:
        win32service.CloseServiceHandle(handle_scm)
        win32service.CloseServiceHandle(handle_svc)
    ret = dict()
    # Resolve the per-service virtual account SID, if available.
    try:
        sid = win32security.LookupAccountName('', 'NT Service\\{0}'.format(name))[0]
        ret['sid'] = win32security.ConvertSidToStringSid(sid)
    except pywintypes.error:
        ret['sid'] = 'Failed to get SID'
    # QueryServiceConfig returns a positional tuple; map it to named keys.
    ret['BinaryPath'] = config_info[3]
    ret['LoadOrderGroup'] = config_info[4]
    ret['TagID'] = config_info[5]
    ret['Dependencies'] = config_info[6]
    ret['ServiceAccount'] = config_info[7]
    ret['DisplayName'] = config_info[8]
    ret['Description'] = description
    ret['Status_ServiceCode'] = status_info['ServiceSpecificExitCode']
    ret['Status_CheckPoint'] = status_info['CheckPoint']
    ret['Status_WaitHint'] = status_info['WaitHint']
    ret['StartTypeDelayed'] = delayed_start
    # Decode the service-type bitmask into readable flag names.
    flags = list()
    for bit in SERVICE_TYPE:
        if isinstance(bit, int):
            if config_info[0] & bit:
                flags.append(SERVICE_TYPE[bit])
    # Fall back to the raw value when no known flag matched.
    ret['ServiceType'] = flags if flags else config_info[0]
    # Decode the accepted-controls bitmask the same way.
    flags = list()
    for bit in SERVICE_CONTROLS:
        if status_info['ControlsAccepted'] & bit:
            flags.append(SERVICE_CONTROLS[bit])
    ret['ControlsAccepted'] = flags if flags else status_info['ControlsAccepted']
    # Translate numeric codes to names where known, raw values otherwise.
    try:
        ret['Status_ExitCode'] = SERVICE_ERRORS[status_info['Win32ExitCode']]
    except KeyError:
        ret['Status_ExitCode'] = status_info['Win32ExitCode']
    try:
        ret['StartType'] = SERVICE_START_TYPE[config_info[1]]
    except KeyError:
        ret['StartType'] = config_info[1]
    try:
        ret['ErrorControl'] = SERVICE_ERROR_CONTROL[config_info[2]]
    except KeyError:
        ret['ErrorControl'] = config_info[2]
    try:
        ret['Status'] = SERVICE_STATE[status_info['CurrentState']]
    except KeyError:
        ret['Status'] = status_info['CurrentState']
    return ret
|
def conv_to_json(obj, fields=None):
    """Serialize a CDX record as a JSON dictionary string (one line).

    :param obj: mapping of CDX field names to values
    :param fields: optional list of field names to emit; when ``None``,
        every stored field except private ones (leading ``_``) is
        included in storage order.
    :return: JSON-encoded string terminated by a newline
    """
    if fields is None:
        # Skip private bookkeeping fields, keep the stored order.
        public = OrderedDict((key, obj[key]) for key in obj if not key.startswith('_'))
        return json_encode(public) + '\n'
    selected = OrderedDict((key, obj[key]) for key in fields if key in obj)
    return json_encode(selected) + '\n'
|
def login(self, usr, pwd):
    """Log in with a username and password.

    The credentials are stored on the instance for use by later calls.
    """
    self._usr, self._pwd = usr, pwd
|
def _bselect(self, selection, start_bindex, end_bindex):
    """Add the given buffer index range to *selection* in both the byte
    and the char panes, keeping the two views in sync."""
    model = self._model
    for to_qindex in (model.index2qindexb, model.index2qindexc):
        selection.select(to_qindex(start_bindex), to_qindex(end_bindex))
|
def process_update(self, update):
    """Apply a JSON-encoded update from a remote NetworkTables peer.

    The payload carries the entry key under ``"k"`` and its new value
    under ``"v"``.
    """
    payload = json.loads(update)
    entry = NetworkTables.getEntry(payload["k"])
    entry.setValue(payload["v"])
|
def has_attribute(module_name, attribute_name):
    """Return True if *attribute_name* appears in the module's __init__.py.

    This is a textual (substring-per-line) check, not an import-based one.

    :param module_name: path of the package directory
    :param attribute_name: text to look for
    """
    init_file = '%s/__init__.py' % module_name
    # Context manager closes the handle promptly (the original leaked it);
    # the generator lets any() stop reading at the first match.
    with open(init_file) as init:
        return any(attribute_name in init_line for init_line in init)
|
def select_one(select, tag, namespaces=None, flags=0, **kwargs):
    """Select a single tag.

    The selector is compiled first, then applied to *tag* through the
    compiled pattern's ``select_one``.
    """
    pattern = compile(select, namespaces, flags, **kwargs)
    return pattern.select_one(tag)
|
def plat_specific_errors(*errnames):
    """Return the numeric values of the named errors on this platform.

    The :mod:`errno` module exposes different constants depending on the
    OS; names that do not exist here are silently skipped.

    :return: a de-duplicated view of the matching error numbers
    """
    available = dir(errno)
    # dict.fromkeys de-dupes while keeping first-seen order.
    return dict.fromkeys(
        getattr(errno, name) for name in errnames if name in available
    ).keys()
|
def get_current(self, channel, unit='A'):
    '''Read the current of *channel*, converted to the requested unit.

    ``unit`` may be 'raw', 'raw_iq', 'A', 'mA' or 'uA'; any other value
    raises :class:`TypeError`.
    '''
    adc_kwargs = self._ch_map[channel]['ADCI']
    raw = self._get_adc_value(**adc_kwargs)
    cal = self._ch_cal[channel]['ADCI']
    voltage = self.get_voltage(channel)
    # Quiescent-current (IQ) compensation: the IQ contribution scales
    # with the channel voltage.
    raw_iq = raw - (cal['iq_offset'] + cal['iq_gain'] * voltage)
    amps = (raw_iq - cal['offset']) / cal['gain']
    if unit == 'raw':
        return raw
    if unit == 'raw_iq':
        return raw_iq
    if unit == 'A':
        return amps
    if unit == 'mA':
        return amps * 1000
    if unit == 'uA':
        return amps * 1000000
    raise TypeError("Invalid unit type.")
|
def register_download_command(self, download_func):
    """Add the 'download' command for downloading a project to a directory.

    For non-empty directories the download replaces local files with the
    remote versions.

    :param download_func: function to run when the user chooses this option
    """
    # Fixed duplicated word in the user-visible help text ("remote remote").
    description = "Download the contents of a remote project to a local folder."
    download_parser = self.subparsers.add_parser('download', description=description)
    add_project_name_or_id_arg(download_parser, help_text_suffix="download")
    _add_folder_positional_arg(download_parser)
    # --include and --exclude filters are mutually exclusive.
    include_or_exclude = download_parser.add_mutually_exclusive_group(required=False)
    _add_include_arg(include_or_exclude)
    _add_exclude_arg(include_or_exclude)
    download_parser.set_defaults(func=download_func)
|
def _cast ( self , value ) :
"""Cast the specifief value to the type of this setting ."""
|
if self . type != 'text' :
value = utils . cast ( self . TYPES . get ( self . type ) [ 'cast' ] , value )
return value
|
def write_Bar(file, bar, bpm=120, repeat=0, verbose=False):
    """Write a mingus.Bar to a MIDI file.

    Both the key and the meter are written to the file as well.

    :param repeat: number of extra plays; the bar is played once plus
        ``repeat`` repetitions.
    """
    midi = MidiFile()
    track = MidiTrack(bpm)
    midi.tracks = [track]
    for _ in range(repeat + 1):
        track.play_Bar(bar)
    return midi.write_file(file, verbose)
|
def _get_valid_mount_strings(self):
    """Return a tuple of potential mount strings for this share.

    Casper Admin mounts in several notations -- hostname/share,
    fqdn/share, and possibly IPAddress/share -- each optionally carrying
    the port, giving up to six valid addresses that ``mount`` may report.
    """
    url = self.connection["url"]
    share_name = urllib.quote(self.connection["share_name"], safe="~()*!.'")
    port = self.connection["port"]
    # gethostbyname() returns an IP whether we were given an IP address,
    # an FQDN, or a .local name.
    ip_address = socket.gethostbyname(url)
    candidates = [
        url,                        # URL from python-jss form
        ip_address,                 # IP address form
        url.split(".")[0],          # bare domain-name form
        # getfqdn() may resolve back to the IP or the original URL;
        # the set below simply absorbs any duplicates.
        socket.getfqdn(ip_address),
    ]
    results = set()
    for host in candidates:
        results.add(os.path.join(host, share_name))
        results.add(os.path.join("%s:%s" % (host, port), share_name))
    return tuple(results)
|
def firmware_download_input_protocol_type_sftp_protocol_sftp_host(self, **kwargs):
    """Auto Generated Code.

    Builds the payload for firmware-download/input/protocol-type/
    sftp-protocol/sftp/host and passes it to the callback.

    Required kwarg: ``host``; optional ``callback`` overrides
    ``self._callback``.
    """
    firmware_download = ET.Element("firmware_download")
    # NOTE(review): the generated code sends the firmware_download
    # element itself as the config root (no enclosing <config>).
    config = firmware_download
    input_el = ET.SubElement(firmware_download, "input")
    protocol_type = ET.SubElement(input_el, "protocol-type")
    sftp_protocol = ET.SubElement(protocol_type, "sftp-protocol")
    sftp = ET.SubElement(sftp_protocol, "sftp")
    host = ET.SubElement(sftp, "host")
    host.text = kwargs.pop('host')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def graph_from_dot_file(path):
    """Load a graph as defined by a DOT file.

    The file is assumed to be in DOT format. It will be loaded, parsed,
    and a Dot class instance representing the graph will be returned.

    :param path: path of the DOT file to read
    """
    # Use open() in a context manager: the Python 2 file() builtin no
    # longer exists, and the handle is now closed even on error.
    with open(path, 'rb') as fd:
        data = fd.read()
    return graph_from_dot_data(data)
|
def empbayes_fit(z0, fitargs, **minargs):
    """Return the fit and ``z`` that maximize ``logGBF``.

    Maximizes the logarithm of the Bayes Factor from fit
    ``lsqfit.nonlinear_fit(**fitargs(z))`` by varying ``z``, starting at
    ``z0``. The fit is redone for each value of ``z`` tried, in order to
    determine ``logGBF``. The Bayes Factor is proportional to the
    probability that the data came from the model (fit function and
    priors) used in the fit; this function finds the model or data that
    maximizes that probability.

    A typical application is adding an unknown uncertainty ``dy = y * z``
    to data ``y`` and letting the fit choose the most plausible ``z``::

        def fitargs(z):
            newy = gv.gvar(y, y * z)
            return dict(data=(x, newy), fcn=fcn, prior=prior)

        fit, z = lsqfit.empbayes_fit(0.1, fitargs)

    Args:
        z0 (number, array or dict): Starting point for the search.
        fitargs (callable): Function of ``z`` that returns a dictionary
            ``args`` containing the :class:`lsqfit.nonlinear_fit`
            arguments corresponding to ``z``; ``z`` has the same layout
            (number, array or dictionary) as ``z0``. ``fitargs(z)`` can
            instead return a tuple ``(args, plausibility)``, where
            ``plausibility`` is the logarithm of the *a priori*
            probability that ``z`` is sensible. When provided, the sum
            ``logGBF + plausibility`` is maximized, steering selections
            away from completely implausible values of ``z``.
        minargs (dict): Optional argument dictionary, passed on to the
            minimizer (:class:`lsqfit.gsl_multiminex` or
            :class:`lsqfit.scipy_multiminex`).

    Returns:
        A tuple containing the best fit (an object of type
        :class:`lsqfit.nonlinear_fit`) and the optimal value for
        parameter ``z``.
    """
    # Remember the last good z and best-fit parameters so that (a) later
    # fits can be warm-started through p0, and (b) the search can recover
    # when the minimizer hits a region where logGBF is NaN.
    save = dict(lastz=None, lastp0=None)
    if hasattr(z0, 'keys'):
        # z is a dictionary: minimize over its flattened buffer.
        if not isinstance(z0, gvar.BufferDict):
            z0 = gvar.BufferDict(z0)
        z0buf = z0.buf
        def convert(zbuf):
            return gvar.BufferDict(z0, buf=zbuf)
    elif numpy.shape(z0) == ():
        # z is a number: wrap in a length-1 array for the minimizer.
        z0buf = numpy.array([z0])
        def convert(zbuf):
            return zbuf[0]
    else:
        # z is an array.
        z0 = numpy.asarray(z0)
        z0buf = z0
        def convert(zbuf):
            return zbuf
    def minfcn(zbuf, save=save, convert=convert):
        # Objective: -(logGBF + plausibility) for the fit at this z.
        z = convert(zbuf)
        args = fitargs(z)
        if not hasattr(args, 'keys'):
            # fitargs returned (args, plausibility).
            args, plausibility = args
        else:
            plausibility = 0.0
        if save['lastp0'] is not None:
            # Warm-start from the previous best-fit parameters.
            args['p0'] = save['lastp0']
        fit = lsqfit.nonlinear_fit(**args)
        if numpy.isnan(fit.logGBF):
            raise ValueError
        else:
            save['lastz'] = z
            save['lastp0'] = fit.pmean
        return -fit.logGBF - plausibility
    try:
        z = convert(_multiminex(z0buf, minfcn, **minargs).x)
    except ValueError:
        # Minimizer stumbled on a NaN logGBF; fall back to the last z
        # that produced a finite value.
        print('*** empbayes_fit warning: null logGBF')
        z = save['lastz']
    # Redo the fit at the optimal z (warm-started) and return it.
    args = fitargs(z)
    if not hasattr(args, 'keys'):
        args, plausibility = args
    if save['lastp0'] is not None:
        args['p0'] = save['lastp0']
    return lsqfit.nonlinear_fit(**args), z
|
def remove(self, vehID, reason=tc.REMOVE_VAPORIZED):
    '''Remove the vehicle with the given ID for the given reason.

    Reasons are defined in module constants and start with REMOVE_.
    '''
    cmd_args = (tc.CMD_SET_VEHICLE_VARIABLE, tc.REMOVE, vehID, reason)
    self._connection._sendByteCmd(*cmd_args)
|
def remove_update_callback(self, callback, device):
    """Remove a previously registered update callback, if present."""
    entry = [callback, device]
    if entry in self._update_callbacks:
        self._update_callbacks.remove(entry)
        _LOGGER.debug('Removed update callback %s for %s', callback, device)
|
def list_devices(connection: ForestConnection = None):
    """Query the Forest 2.0 server for a list of underlying QPU devices.

    NOTE: These can't directly be used to manufacture pyQuil Device
    objects, but this gives a list of legal values that can be supplied
    to list_lattices to filter its (potentially very noisy) output.

    :return: A sorted list of device names.
    """
    # The returned "devices" dictionary is keyed on device names; each
    # value carries "is_online", "is_retuning", "specs", "isa" and
    # "noise_model" entries describing that device.
    if connection is None:
        connection = ForestConnection()
    url = connection.forest_cloud_endpoint + '/devices'
    devices = get_json(connection.session, url)["devices"]
    return sorted(devices.keys())
|
def check_positive_integer(name, value):
    """Check that a value is a positive integer.

    Returns the converted value if so, raises ValueError otherwise.

    :param name: label used in error messages
    :param value: value to validate (anything int() accepts)
    :raises ValueError: when the value is not an integer or not positive
    """
    try:
        value = int(value)
    except (TypeError, ValueError):
        # TypeError covers inputs like None that int() rejects outright;
        # the original let that propagate instead of raising ValueError.
        raise ValueError('%s should be an integer; got %r' % (name, value))
    if value > 0:
        return value
    raise ValueError('%s should be positive; got %r' % (name, value))
|
def _set_default_key ( mapping ) :
"""Replace the field with the most features with a ' default ' field ."""
|
key_longest = sorted ( [ ( len ( v ) , k ) for k , v in mapping . items ( ) ] , reverse = True ) [ 0 ] [ 1 ]
mapping [ 'default' ] = key_longest
del ( mapping [ key_longest ] )
|
def data(self):
    """Get the data, after performing post-processing if necessary.

    Enveloped child serializers are wrapped in a sideloaded ReturnDict;
    otherwise a plain ReturnList is produced.
    """
    raw = super(DynamicListSerializer, self).data
    if self.child.envelope:
        wrapped = ReturnDict(SideloadingProcessor(self, raw).data, serializer=self)
    else:
        wrapped = ReturnList(raw, serializer=self)
    return post_process(wrapped)
|
def is_deb_package_installed(pkg):
    """Return True when dpkg-query reports *pkg* as installed."""
    quiet = settings(hide('warnings', 'running', 'stdout', 'stderr'),
                     warn_only=True, capture=True)
    with quiet:
        # grep -q exits 0 when an installed ("ii"-style) line is present.
        result = sudo('dpkg-query -l "%s" | grep -q ^.i' % pkg)
    return not bool(result.return_code)
|
def remove_ignore(path, use_sudo=False, force=False):
    """Recursively remove a file or directory, ignoring any errors that
    may occur. Should only be used for temporary files that can be
    assumed to be cleaned up at a later point.

    :param path: Path to file or directory to remove.
    :type path: unicode
    :param use_sudo: Use the `sudo` command.
    :type use_sudo: bool
    :param force: Force the removal.
    :type force: bool
    """
    runner = sudo if use_sudo else run
    runner(rm(path, recursive=True, force=force), warn_only=True)
|
def fill_rawq(self):
    """Fill the raw queue from exactly one recv() system call.

    Blocks if no data is immediately available. Sets ``self.eof`` when
    the connection is closed (recv returns an empty byte string).
    """
    if self.irawq >= len(self.rawq):
        # Everything buffered has been consumed; start a fresh buffer.
        self.rawq = b''
        self.irawq = 0
    # The buffer size should be fairly small so as to avoid quadratic
    # behavior in process_rawq() above.
    buf = self.sock.recv(64)
    self.msg("recv %s", repr(buf))
    self.eof = not buf
    self.rawq += buf
|
def get_forecast(self):
    '''Return (low, high) forecast temperatures for the configured station.

    If forecasts are disabled, or the API response lacks the expected
    structure, two empty strings are returned instead.
    '''
    no_data = ('', '')
    if not self.forecast:
        return no_data
    query_url = STATION_QUERY_URL % (self.api_key, 'forecast', self.station_id)
    try:
        day = self.api_request(query_url)['forecast']
        day = day['simpleforecast']['forecastday'][0]
    except (KeyError, IndexError, TypeError):
        # Malformed or missing response structure.
        self.logger.error('No forecast data found for %s', self.station_id)
        self.data['update_error'] = self.update_error
        return no_data
    unit = 'celsius' if self.units == 'metric' else 'fahrenheit'
    return day.get('low', {}).get(unit, ''), day.get('high', {}).get(unit, '')
|
def _writeWMSDatasets(self, session, directory, wmsDatasetCards, name=None):
    """GSSHAPY Project Write WMS Datasets to File Method.

    Writes every WMS dataset file referenced by a project card listed in
    *wmsDatasetCards* to *directory*; *name*, when given, replaces the
    base of the output filenames.

    :param session: SQLAlchemy session used to look up dataset records
    :param directory: destination directory for the written files
    :param wmsDatasetCards: card names that identify WMS dataset files
    :param name: optional replacement base name for output files
    """
    if self.mapType in self.MAP_TYPES_SUPPORTED:
        for card in self.projectCards:
            # Only cards with a real (non-None, non-numeric) file value
            # are written.
            if (card.name in wmsDatasetCards) and self._noneOrNumValue(card.value):
                filename = card.value.strip('"')
                # Determine new filename
                filename = self._replaceNewFilename(filename, name)
                # Handle case where fileIO interfaces with multiple files
                # Retrieve File using FileIO and file extension
                extension = filename.split('.')[1]
                # Get mask map file
                maskMap = session.query(RasterMapFile).filter(RasterMapFile.projectFile == self).filter(RasterMapFile.fileExtension == 'msk').one()
                # Default wms dataset
                wmsDataset = None
                try:
                    wmsDataset = session.query(WMSDatasetFile).filter(WMSDatasetFile.projectFile == self).filter(WMSDatasetFile.fileExtension == extension).one()
                except NoResultFound:
                    # Handle case when there is no file in database but
                    # the card is listed in the project file
                    log.warning('{0} listed as card in project file, ' 'but the file is not found in the database.'.format(filename))
                except MultipleResultsFound:
                    # Write all instances, then stop processing cards.
                    self._invokeWriteForMultipleOfType(directory, extension, WMSDatasetFile, filename, session, maskMap=maskMap)
                    return
                # Initiate Write Method on File
                if wmsDataset is not None and maskMap is not None:
                    wmsDataset.write(session=session, directory=directory, name=filename, maskMap=maskMap)
    else:
        log.error('Could not write WMS Dataset files. ' 'MAP_TYPE {0} not supported.'.format(self.mapType))
|
def reconnect(self):
    """(Re)establish the connection and channel to the rabbitmq server."""
    parsed = urlparse.urlparse(self.amqp_url)
    vhost = unquote(parsed.path.lstrip('/') or '%2F')
    self.connection = amqp.Connection(
        host="%s:%s" % (parsed.hostname, parsed.port or 5672),
        userid=parsed.username or 'guest',
        password=parsed.password or 'guest',
        virtual_host=vhost)
    self.channel = self.connection.channel()
    try:
        self.channel.queue_declare(self.name)
    except amqp.exceptions.PreconditionFailed:
        # NOTE(review): declaration conflicts (queue already exists with
        # different properties) are deliberately ignored.
        pass
|
def delete(self, tag, params={}, **options):
    """Delete a specific, existing tag by making a DELETE request on the
    URL for that tag.

    Returns an empty data record.

    Parameters
    tag : {Id} The tag to delete.
    """
    # NOTE(review): mutable default ``params`` kept for interface
    # compatibility; it is not mutated here.
    return self.client.delete("/tags/%s" % (tag), params, **options)
|
def configure_alias(self, ns, definition):
    """Register an alias endpoint which will redirect to a resource's
    retrieve endpoint.

    Note that the retrieve endpoint MUST be registered prior to the
    alias endpoint.

    The definition's func should be a retrieve function, which must:
    - accept kwargs for path data
    - return a resource

    :param ns: the namespace
    :param definition: the endpoint definition
    """
    @self.add_route(ns.alias_path, Operation.Alias, ns)
    @qs(definition.request_schema)
    @wraps(definition.func)
    def retrieve(**path_data):
        # request_schema is optional for Alias
        request_data = (load_query_string_data(definition.request_schema) if definition.request_schema else dict())
        # Resolve the resource, then redirect to its canonical
        # retrieve URL built from the resource id.
        resource = definition.func(**merge_data(path_data, request_data))
        kwargs = dict()
        identifier = "{}_id".format(name_for(ns.subject))
        kwargs[identifier] = resource.id
        url = ns.url_for(Operation.Retrieve, **kwargs)
        return redirect(url)
    retrieve.__doc__ = "Alias a {} by name".format(ns.subject_name)
|
def angle(self):
    """Get the angle (in radians) describing the magnitude of the
    quaternion rotation about its rotation axis.

    This is guaranteed to be within the range (-pi:pi) with the direction
    of rotation indicated by the sign. When a particular rotation
    describes a 180 degree rotation about an arbitrary axis vector `v`,
    the conversion to axis/angle representation may jump discontinuously
    between all permutations of `(-pi, pi)` and `(-v, v)`, each being
    geometrically equivalent (see Note in documentation).

    Returns:
        A real number in the range (-pi:pi) describing the angle of
        rotation in radians about the object's axis of rotation.

    Note:
        This feature only makes sense when referring to a unit
        quaternion; calling it implicitly normalises the object to a
        unit quaternion if it is not already one.
    """
    self._normalise()
    vector_norm = np.linalg.norm(self.vector)
    theta = 2.0 * atan2(vector_norm, self.scalar)
    return self._wrap_angle(theta)
|
def add_inote(self, msg, idx, off=None):
    """Add a message to a specific instruction.

    :param msg: the message
    :type msg: string
    :param idx: index of the instruction (the position in the list of
        the instruction); ignored when *off* is given
    :type idx: int
    :param off: address of the instruction, translated to an index via
        ``off_to_pos`` when provided
    :type off: int
    """
    if off is not None:
        idx = self.off_to_pos(off)
    self.notes.setdefault(idx, []).append(msg)
|
def apply_deduct(self, body, total_fee, contract_id, notify_url, out_trade_no=None, detail=None, attach=None, fee_type='CNY', goods_tag=None, clientip=None, deviceid=None, mobile=None, email=None, qq=None, openid=None, creid=None, outerid=None):
    """Apply for a deduction (WeChat Pay entrusted-deduction API).

    :param body: product description (brief description of the goods or
        the payment order)
    :param total_fee: total order amount in fen (integer only)
    :param contract_id: entrusted-deduction contract id returned by
        WeChat after a successful signing
    :param notify_url: URL that receives the asynchronous deduction
        result callback
    :param out_trade_no: optional merchant order number, unique within
        the merchant system, at most 32 characters; generated
        automatically when omitted
    :param detail: optional itemized list of goods
    :param attach: optional attached data, returned unchanged by the
        query API and in payment notifications (for merchant-defined
        order data)
    :param fee_type: optional currency type, ISO 4217 three-letter code;
        defaults to CNY (renminbi)
    :param goods_tag: optional goods tag, a parameter for the voucher /
        instant-discount features
    :param clientip: optional client IP in dotted format
    :param deviceid: optional device id (Android: md5 of the imei;
        iOS: md5 of the idfa)
    :param mobile: optional user mobile number
    :param email: optional user email address
    :param qq: optional user QQ number
    :param openid: optional user WeChat open ID
    :param creid: optional user national ID number
    :param outerid: optional merchant-side user identifier
    :return: the response from the deduction-application endpoint
    """
    # Trade type PAP: WeChat entrusted-deduction payment.
    trade_type = 'PAP'
    # 10-digit unix timestamp.
    timestamp = int(time.time())
    # Terminal IP: the IP of the machine calling the WeChat Pay API.
    spbill_create_ip = get_external_ip()
    if not out_trade_no:
        # Generate an order number from the merchant id, a
        # Shanghai-local timestamp and a random suffix.
        now = datetime.fromtimestamp(time.time(), tz=timezone('Asia/Shanghai'))
        out_trade_no = '{0}{1}{2}'.format(self.mch_id, now.strftime('%Y%m%d%H%M%S'), random.randint(1000, 10000))
    data = {"appid": self.appid, "mch_id": self.mch_id, "body": body, "out_trade_no": out_trade_no, "total_fee": total_fee, "trade_type": trade_type, "contract_id": contract_id, "notify_url": notify_url, "detail": detail, "attach": attach, "fee_type": fee_type, "goods_tag": goods_tag, "clientip": clientip, "deviceid": deviceid, "mobile": mobile, "email": email, "qq": qq, "openid": openid, "creid": creid, "outerid": outerid, "timestamp": timestamp, "spbill_create_ip": spbill_create_ip, }
    return self._post("pay/pappayapply", data=data)
|
def clean_with_zeros(self, x):
    """Set rows of *x* containing NaN or inf values to zero, in place.

    :param x: 2-D numeric array
    :return: the same array, with every row that held a NaN/inf zeroed
    """
    # The original inverted the mask (``~``), zeroing the CLEAN rows and
    # leaving the NaN/inf rows intact -- the opposite of the docstring.
    x[np.any(np.isnan(x) | np.isinf(x), axis=1)] = 0
    return x
|
def normalize_package_path(cls, package_path):
    """Return a normalized version of the given package path.

    The root package might be denoted by '' or '.' and is normalized
    to ''. All other packages are of the form 'path' or 'path/subpath',
    etc.

    :param string package_path: The Go package path to normalize.
    :raises: `ValueError` if the given path is absolute or relative
        (includes the parent dir signifier, '..') and therefore cannot
        be normalized.
    """
    if package_path.startswith(os.pardir + os.sep):
        raise ValueError('Relative package paths are not allowed. Given: {!r}'.format(package_path))
    if os.path.isabs(package_path):
        raise ValueError('Absolute package paths are not allowed. Given: {!r}'.format(package_path))
    if not package_path or package_path == os.curdir:
        return ''
    return package_path.lstrip('/')
|
def next(self):
    '''Return the timedelta until the next run; never raise StopIteration.

    The first call computes the delay until the next scheduled time
    (today, or rolled over by one day/hour when that time has already
    passed); every later call returns the fixed repeat interval.
    '''
    if self.started is False:
        self.started = True
        now_ = datetime.now()
        if self.hour:  # Fixed hour in a day
            scheduled = now_.replace(hour=self.hour, minute=self.minute, second=self.second, microsecond=0)
            if scheduled == now_:
                return timedelta(seconds=0)
            elif scheduled < now_:  # Scheduled time is passed
                # timedelta arithmetic instead of replace(day=day + 1):
                # the latter raises ValueError on the last day of a month.
                return scheduled + timedelta(days=1) - now_
        else:  # Every hour in a day
            scheduled = now_.replace(minute=self.minute, second=self.second, microsecond=0)
            if scheduled == now_:
                return timedelta(seconds=0)
            elif scheduled < now_:  # Scheduled time is passed
                # timedelta arithmetic instead of replace(hour=hour + 1):
                # the latter raises ValueError at hour 23.
                return scheduled + timedelta(hours=1) - now_
        return scheduled - now_
    else:
        if self.hour:
            # next day
            return timedelta(days=1)
        # next hour
        return timedelta(hours=1)
|
def complete(self):
    """Return True if either the scan or the item limit has been reached."""
    scan_done = self.scan_limit is not None and self.scan_limit == 0
    item_done = self.item_limit is not None and self.item_limit == 0
    return scan_done or item_done
|
def get_document(self, doc_id):
    '''Download and return the document with the given id.'''
    database = self.agency._database
    return database.get_connection().get_document(doc_id)
|
def started(self):
    """Datetime at which the job was started.

    :rtype: ``datetime.datetime``, or ``NoneType``
    :returns: the start time (None until set from the server).
    """
    statistics = self._properties.get("statistics")
    if statistics is None:
        return None
    millis = statistics.get("startTime")
    if millis is None:
        return None
    return _helpers._datetime_from_microseconds(millis * 1000.0)
|
def update_note(self, note_id, revision, content):
    '''Update the note with the given ID to carry the given content.'''
    args = (note_id, revision, content)
    return notes_endpoint.update_note(self, *args)
|
def spi_configure_mode(self, spi_mode):
    """Configure the SPI interface using one of the well-known SPI modes.

    Only SPI_MODE_0 and SPI_MODE_3 are supported; anything else raises
    RuntimeError.
    """
    if spi_mode == SPI_MODE_0:
        args = (SPI_POL_RISING_FALLING, SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)
    elif spi_mode == SPI_MODE_3:
        args = (SPI_POL_FALLING_RISING, SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)
    else:
        raise RuntimeError('SPI Mode not supported')
    self.spi_configure(*args)
|
def generate_jid(name, append_date=None):
    """Generate a valid JID based on the room name.

    :param append_date: when given, the date is appended to the JID as
        a ``-YYYY-MM-DD`` suffix
    """
    base = sanitize_jid(name)
    if append_date:
        return '{}-{}'.format(base, append_date.strftime('%Y-%m-%d'))
    return base
|
def ip_hide_as_path_holder_as_path_access_list_ip_action(self, **kwargs):
    """Auto Generated Code.

    Builds the payload for ip/hide-as-path-holder/as-path/access-list/
    ip-action and passes it to the callback.

    Required kwargs: ``name``, ``seq_keyword``, ``instance``,
    ``ip_action``; optional ``callback`` overrides ``self._callback``.
    """
    config = ET.Element("config")
    ip = ET.SubElement(config, "ip", xmlns="urn:brocade.com:mgmt:brocade-common-def")
    hide_holder = ET.SubElement(ip, "hide-as-path-holder", xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
    access_list = ET.SubElement(ET.SubElement(hide_holder, "as-path"), "access-list")
    # Key leaves identifying the access-list instance, then the action.
    for tag, arg in (("name", "name"), ("seq-keyword", "seq_keyword"), ("instance", "instance")):
        leaf = ET.SubElement(access_list, tag)
        leaf.text = kwargs.pop(arg)
    action_leaf = ET.SubElement(access_list, "ip-action")
    action_leaf.text = kwargs.pop('ip_action')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def new_request_session(config, cookies):
    """Create a new request session configured from *config*.

    :param config: mapping providing "maxhttpredirects", "useragent"
        and "cookiefile" entries
    :param cookies: cookie jar to install on the session (may be falsy)
    """
    session = requests.Session()
    if cookies:
        session.cookies = cookies
    session.max_redirects = config["maxhttpredirects"]
    session.headers.update({"User-Agent": config["useragent"]})
    if config["cookiefile"]:
        # Merge every cookie found in the configured cookie file into
        # the session's jar.
        for cookie in cookies.from_file(config["cookiefile"]):
            session.cookies = requests.cookies.merge_cookies(session.cookies, cookie)
    return session
|
def get_key(self, compressed=None):
    """Get the hex-encoded public key.

    :param compressed: False for the standard 65-byte key (the most
        common option), True for the compressed 33-byte SEC1 form.
        Defaults to None, which falls back to ``self.compressed``.
    :type compressed: bool

    Public keys consist of an ID byte followed by the x (and, for
    uncompressed keys, y) coordinates on the elliptic curve.
    Uncompressed keys use ID byte 04. Compressed keys use the SEC1
    format: ID byte 03 when y is odd, else 02. This is documented in
    http://www.secg.org/collateral/sec1_final.pdf.
    """
    if compressed is None:
        compressed = self.compressed
    if not compressed:
        # 0x04 || X || Y
        return ensure_bytes(b'04' + long_to_hex(self.x, 64) + long_to_hex(self.y, 64))
    # 0x02 for even Y, 0x03 for odd Y, followed by X only.
    parity = 2 + (self.y & 1)
    return ensure_bytes(long_to_hex(parity, 2) + long_to_hex(self.x, 64))
|
def _key_to_pb(self, pb):
    """Internal helper to copy the key into a protobuf.

    When ``self._key`` is unset, a reference containing just the kind is
    synthesized directly into ``pb``'s key field; otherwise the existing
    key's reference is copied in. The entity group element is only set
    for complete keys (see the SDK workaround note below).
    """
    key = self._key
    if key is None:
        pairs = [(self._get_kind(), None)]
        ref = key_module._ReferenceFromPairs(pairs, reference=pb.mutable_key())
    else:
        ref = key.reference()
        pb.mutable_key().CopyFrom(ref)
    group = pb.mutable_entity_group()
    # Must initialize this.
    # To work around an SDK issue, only set the entity group if the
    # full key is complete.  TODO: Remove the top test once fixed.
    if key is not None and key.id():
        elem = ref.path().element(0)
        if elem.id() or elem.name():
            group.add_element().CopyFrom(elem)
|
def compute(self):
    """Compute and return the signature according to the given data.

    :raises RuntimeError: if a signature is already present in the
        parameters, or the signature version is unsupported.
    """
    if "Signature" in self.params:
        raise RuntimeError("Existing signature in parameters")
    version = (self.signature_version if self.signature_version is not None
               else self.params["SignatureVersion"])
    if str(version) == "1":
        payload = self.old_signing_text()
        hash_type = "sha1"
    elif str(version) == "2":
        payload = self.signing_text()
        method = (self.signature_method if self.signature_method is not None
                  else self.params["SignatureMethod"])
        # e.g. "HmacSHA256" -> "sha256"
        hash_type = method[len("Hmac"):].lower()
    else:
        raise RuntimeError("Unsupported SignatureVersion: '%s'" % version)
    return self.creds.sign(payload, hash_type)
|
def compile_regex(self, fmt, query):
    """Turn glob (graphite) queries into a compiled regex.

    ``*`` becomes ``[^.]*`` (any run of non-dot characters), ``.`` is
    escaped, and ``{a,b}`` alternation becomes ``(a|b)``.

    :param fmt: anchoring format string; must contain exactly one ``{0}``!
    :param query: object exposing a ``pattern`` glob string.
    :return: the compiled regular expression.
    """
    # FIX: use raw strings for the backslash sequences; '\.' in a plain
    # literal is an invalid escape (SyntaxWarning on modern Python).
    pattern = (
        query.pattern
        .replace('.', r'\.')
        .replace('*', r'[^\.]*')
        .replace('{', '(')
        .replace(',', '|')
        .replace('}', ')')
    )
    return re.compile(fmt.format(pattern))
|
def calculate_size(name, expected, updated):
    """Calculates the request payload size"""
    # Name string plus two fixed-width longs (expected / updated values).
    return calculate_size_str(name) + 2 * LONG_SIZE_IN_BYTES
|
def check_str(obj):
    """Returns a string for various input types

    Strings pass through unchanged; floats are truncated to an int
    before conversion; anything else is converted with ``str``.
    """
    if isinstance(obj, str):
        return obj
    return str(int(obj)) if isinstance(obj, float) else str(obj)
|
def full_name(self):
    """Returns the full name of this element by visiting every
    non-None parent in its ancestor chain.  The result is computed
    once and cached in ``self._full_name``."""
    if self._full_name is None:
        parts = [self.name]
        node = self.parent
        # Walk upward until we run out of parents or hit the parser root.
        while node is not None and type(node).__name__ != "CodeParser":
            parts.append(node.name)
            node = node.parent
        parts.reverse()
        self._full_name = ".".join(parts)
    return self._full_name
|
def add_plugin(self, plugin, call):
    """Add plugin to list of plugins.

    Will be added if it has the attribute I'm bound to."""
    bound = getattr(plugin, call, None)
    if bound is None:
        return
    self.plugins.append((plugin, bound))
|
def getBothEdges(self, label=None):
    """Gets all the edges of the node. If the label
    parameter is provided, only the edges of that label are
    returned.
    @params label: Optional parameter to filter the edges
    @returns A generator yielding Edge wrappers"""
    relationships = self.neoelement.relationships
    raw = relationships.all(types=[label]) if label else relationships.all()
    for rel in raw:
        yield Edge(rel)
|
def build_parser():
    """Build the script's argument parser."""
    parser = argparse.ArgumentParser(description="The IOTile task supervisor")
    for flags, options in (
        (('-c', '--config'), dict(help="config json with options")),
        (('-v', '--verbose'), dict(action="count", default=0, help="Increase logging verbosity")),
    ):
        parser.add_argument(*flags, **options)
    return parser
|
def loadFromString(self, body):
    """Load config data (i.e. JSON text) from the given string

    :param str body: config data in JSON format
    :raises ConfigException: if ``body`` cannot be parsed as JSON
    """
    try:
        self._data = json.loads(body)
    except Exception as e:
        # FIX: Exception.message does not exist on Python 3; format the
        # exception object itself instead.
        raise ConfigException('%s: invalid config body: %s' % (self._path, e))
    self._doDefaults()
|
def import_setting(self):
    """Import setting to a file.

    Opens a file picker for a JSON settings file and, after the user
    confirms a warning dialog, replaces the current InaSAFE settings
    with the file's contents.  NOTE(review): the summary says "to a
    file" but the code reads settings *from* a file — presumably a
    docstring typo; confirm against the exporter counterpart.
    """
    LOGGER.debug('Import button clicked')
    home_directory = os.path.expanduser('~')
    # Second return value (the selected name filter) is unused.
    file_path, __ = QFileDialog.getOpenFileName(self, self.tr('Import InaSAFE settings'), home_directory, self.tr('JSON File (*.json)'))
    if file_path:
        title = tr('Import InaSAFE Settings.')
        question = tr('This action will replace your current InaSAFE settings with ' 'the setting from the file. This action is not reversible. ' 'Are you sure to import InaSAFE Setting?')
        answer = QMessageBox.question(self, title, question, QMessageBox.Yes | QMessageBox.No)
        if answer == QMessageBox.Yes:
            LOGGER.debug('Import from %s' % file_path)
            # Resolves to the module-level import_setting helper, not
            # this method (plain name lookup, not self.).
            import_setting(file_path)
|
def replace_git_url(generator):
    """Replace gist tags in the article content.

    Scans every article for git tags (matched by the module-level
    ``git_regex``), fetches the referenced file — from cache when
    ``GIT_CACHE_ENABLED`` is set — renders it through the GIT_TEMPLATE
    Jinja2 template, and substitutes the rendered block for the raw tag
    in the article body.

    :param generator: Pelican generator exposing ``context`` and
        ``articles``.
    """
    template = g_jinja2.get_template(GIT_TEMPLATE)
    should_cache = generator.context.get('GIT_CACHE_ENABLED')
    cache_location = generator.context.get('GIT_CACHE_LOCATION')
    for article in generator.articles:
        for match in git_regex.findall(article._content):
            # Only odd-numbered groups are consumed: 0=whole tag,
            # 1=repo, 3=filename, 5=branch, 7=hash.  Presumably the
            # even groups are wrappers — confirm against git_regex.
            params = collections.defaultdict(str)
            repo = match[1]
            filename = match[3]
            branch = match[5]
            hash = match[7]
            params['repo'] = match[1]
            params['filename'] = match[3]
            if match[5]:
                params['branch'] = match[5]
            if match[7]:
                params['hash'] = match[7]
            logger.info('[git]: Found repo {}, filename {}, branch {} and hash {}'.format(repo, filename, branch, hash))
            logger.info('[git]: {}'.format(params))
            # Try the cache first; an empty/missing body forces a fetch.
            body = None if not should_cache else get_cache(cache_location, **params)
            # Fetch the git
            if not body:
                logger.info('[git]: Git did not exist in cache, fetching...')
                response = fetch_git(**params)
                body = get_body(response)
                if should_cache:
                    logger.info('[git]: Saving git to cache...')
                    # Copy params so the cache-only 'body' key does not
                    # leak into the git_url(**params) call below.
                    cache_params = copy.copy(params)
                    cache_params['body'] = body
                    set_cache(cache_location, **cache_params)
            else:
                logger.info('[git]: Found git in cache.')
            # Create a context to render with
            context = generator.context.copy()
            context.update({'code': body, 'footer': 'full', 'base': __url__, 'filename': filename, 'url': git_url(**params)})
            replacement = template.render(context)
            # Swap the raw tag (match[0]) for the rendered HTML.
            article._content = article._content.replace(match[0], replacement)
|
async def _workaround_1695335(self, delta, old, new, model):
    """This is a (hacky) temporary work around for a bug in Juju where the
    instance status and agent version fields don't get updated properly
    by the AllWatcher.

    Deltas never contain a value for ``data['agent-status']['version']``,
    and once the ``instance-status`` reaches ``pending``, we no longer get
    any updates for it (the deltas come in, but the ``instance-status``
    data is always the same after that).

    To work around this, whenever a delta comes in for this machine, we
    query FullStatus and use the data from there if and only if it's newer.
    Luckily, the timestamps on the ``since`` field does seem to be accurate.

    See https://bugs.launchpad.net/juju/+bug/1695335
    """
    if delta.data.get('synthetic', False):
        # prevent infinite loops re-processing already processed deltas
        return
    # Query FullStatus, but abort promptly if the watcher is shutting down.
    full_status = await utils.run_with_interrupt(model.get_status(), model._watch_stopping, loop=model.loop)
    if model._watch_stopping.is_set():
        return
    # Nothing to reconcile if FullStatus doesn't know this machine or has
    # no timestamp to compare against.
    if self.id not in full_status.machines:
        return
    if not full_status.machines[self.id]['instance-status']['since']:
        return
    machine = full_status.machines[self.id]
    change_log = []
    # Maps FullStatus field names to the delta's field names.
    key_map = {'status': 'current', 'info': 'message', 'since': 'since', }
    # handle agent version specially, because it's never set in
    # deltas, and we don't want even a newer delta to clear it
    agent_version = machine['agent-status']['version']
    if agent_version:
        delta.data['agent-status']['version'] = agent_version
        change_log.append(('agent-version', '', agent_version))
    # only update (other) delta fields if status data is newer
    status_since = pyrfc3339.parse(machine['instance-status']['since'])
    delta_since = pyrfc3339.parse(delta.data['instance-status']['since'])
    if status_since > delta_since:
        for status_key in ('status', 'info', 'since'):
            delta_key = key_map[status_key]
            status_value = machine['instance-status'][status_key]
            delta_value = delta.data['instance-status'][delta_key]
            change_log.append((delta_key, delta_value, status_value))
            delta.data['instance-status'][delta_key] = status_value
    if change_log:
        log.debug('Overriding machine delta with FullStatus data')
        for log_item in change_log:
            log.debug('  {}: {} -> {}'.format(*log_item))
        # Mark the delta as synthetic so the re-dispatch below doesn't
        # recurse back into this workaround.
        delta.data['synthetic'] = True
        old_obj, new_obj = self.model.state.apply_delta(delta)
        await model._notify_observers(delta, old_obj, new_obj)
|
def do_output(self, *args):
    """Pass a command directly to the current output processor"""
    if not args:
        return
    action, params = args[0], args[1:]
    log.debug("Pass %s directly to output with %s", action, params)
    # Dispatch to the output processor's matching do_<action> method,
    # silently ignoring unknown actions.
    handler = getattr(self.output, "do_" + action, None)
    if handler:
        handler(*params)
|
def _standard_frame_length ( header ) :
"""Calculates the length of a standard ciphertext frame , given a complete header .
: param header : Complete message header object
: type header : aws _ encryption _ sdk . structures . MessageHeader
: rtype : int"""
|
frame_length = 4
# Sequence Number
frame_length += header . algorithm . iv_len
# IV
frame_length += header . frame_length
# Encrypted Content
frame_length += header . algorithm . auth_len
# Authentication Tag
return frame_length
|
def compute_integrated_acquisition(acquisition, x):
    '''Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).

    Averages the acquisition value over every stored HMC sample by
    loading each sample into the kernel hyper-parameters in turn.

    :param acquisition: acquisition function with GpyOpt model type GP_MCMC.
    :param x: location where the acquisition is evaluated.
    '''
    model = acquisition.model
    total = 0
    for sample_idx in range(model.num_hmc_samples):
        # Install this hyper-parameter sample into the kernel in place.
        model.model.kern[:] = model.hmc_samples[sample_idx, :]
        total += acquisition.acquisition_function(x)
    return total / model.num_hmc_samples
|
def __import_vars(self, env_file):
    """Actual importing function.

    Reads KEY=VALUE lines (optionally prefixed with ``export``) from
    ``env_file`` and stores them in the Flask app config, stripping a
    single pair of surrounding quotes from each value.
    """
    with open(env_file, "r") as f:  # pylint: disable=invalid-name
        for raw_line in f:
            stripped = raw_line.lstrip()
            if stripped.startswith('export'):
                stripped = stripped.replace('export', '', 1)
            try:
                key, val = stripped.strip().split('=', 1)
            except ValueError:
                # Blank and comment lines have no '=' to split on.
                continue
            if callable(val):
                continue
            if self.verbose_mode:
                if key in self.app.config:
                    print(" * Overwriting an existing config var:"
                          " {0}".format(key))
                else:
                    print(" * Setting an entirely new config var:"
                          " {0}".format(key))
            # Drop one leading and one trailing quote, if present.
            self.app.config[key] = re.sub(r"\A[\"']|[\"']\Z", "", val)
|
def get_event_tags(self, id, **kwargs):  # noqa: E501
    """Get all tags associated with a specific event  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_event_tags(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: ResponseContainerTagsResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths delegate to the same call and
    # return its result (thread object when async_req is set, data
    # otherwise), so a single call suffices.
    kwargs['_return_http_data_only'] = True
    return self.get_event_tags_with_http_info(id, **kwargs)  # noqa: E501
|
def get_relationship_admin_session_for_family(self, family_id=None):
    """Gets the ``OsidSession`` associated with the relationship administration service for the given family.

    arg:    family_id (osid.id.Id): the ``Id`` of the ``Family``
    return: (osid.relationship.RelationshipAdminSession) - a
            ``RelationshipAdminSession``
    raise:  NotFound - no family found by the given ``Id``
    raise:  NullArgument - ``family_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_relationship_admin()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_relationship_admin()`` and
    ``supports_visible_federation()`` are ``true``*
    """
    if not family_id:
        raise NullArgument
    if not self.supports_relationship_admin():
        raise Unimplemented()
    # Need to include check to see if the familyId is found otherwise raise NotFound
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed()
    try:
        return sessions.RelationshipAdminSession(
            family_id, proxy=self._proxy, runtime=self._runtime)
    except AttributeError:
        raise OperationFailed()
|
def actionAngleTorus_xvFreqs_c(pot, jr, jphi, jz, angler, anglephi, anglez, tol=0.003):
    """NAME:

       actionAngleTorus_xvFreqs_c

    PURPOSE:

       compute configuration (x,v) and frequencies of a set of angles on a single torus

    INPUT:

       pot - Potential object or list thereof

       jr - radial action (scalar)

       jphi - azimuthal action (scalar)

       jz - vertical action (scalar)

       angler - radial angle (array [N])

       anglephi - azimuthal angle (array [N])

       anglez - vertical angle (array [N])

       tol= (0.003) goal for |dJ|/|J| along the torus

    OUTPUT:

       (R,vR,vT,z,vz,phi,Omegar,Omegaphi,Omegaz,flag)

    HISTORY:

       2015-08-05/07 - Written - Bovy (UofT)
    """
    # Parse the potential into the (count, type codes, args) triple the
    # C library expects.
    from galpy.orbit.integrateFullOrbit import _parse_pot
    npot, pot_type, pot_args = _parse_pot(pot, potfortorus=True)
    # Set up result arrays: one phase-space value per input angle, and
    # scalar (length-1) arrays for the three frequencies.
    R = numpy.empty(len(angler))
    vR = numpy.empty(len(angler))
    vT = numpy.empty(len(angler))
    z = numpy.empty(len(angler))
    vz = numpy.empty(len(angler))
    phi = numpy.empty(len(angler))
    Omegar = numpy.empty(1)
    Omegaphi = numpy.empty(1)
    Omegaz = numpy.empty(1)
    # Error flag written by the C code (passed by reference below).
    flag = ctypes.c_int(0)
    # Set up the C code: declare the argument types of the C entry point.
    ndarrayFlags = ('C_CONTIGUOUS', 'WRITEABLE')
    actionAngleTorus_xvFreqsFunc = _lib.actionAngleTorus_xvFreqs
    actionAngleTorus_xvFreqsFunc.argtypes = [ctypes.c_double, ctypes.c_double, ctypes.c_double, ctypes.c_int, ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ctypes.c_int, ndpointer(dtype=numpy.int32, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ctypes.c_double, ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ndpointer(dtype=numpy.float64, flags=ndarrayFlags), ctypes.POINTER(ctypes.c_int)]
    # Array requirements, first store old order so the caller's arrays
    # can be restored to Fortran order afterwards if they started that way.
    f_cont = [angler.flags['F_CONTIGUOUS'], anglephi.flags['F_CONTIGUOUS'], anglez.flags['F_CONTIGUOUS']]
    angler = numpy.require(angler, dtype=numpy.float64, requirements=['C', 'W'])
    anglephi = numpy.require(anglephi, dtype=numpy.float64, requirements=['C', 'W'])
    anglez = numpy.require(anglez, dtype=numpy.float64, requirements=['C', 'W'])
    R = numpy.require(R, dtype=numpy.float64, requirements=['C', 'W'])
    vR = numpy.require(vR, dtype=numpy.float64, requirements=['C', 'W'])
    vT = numpy.require(vT, dtype=numpy.float64, requirements=['C', 'W'])
    z = numpy.require(z, dtype=numpy.float64, requirements=['C', 'W'])
    vz = numpy.require(vz, dtype=numpy.float64, requirements=['C', 'W'])
    phi = numpy.require(phi, dtype=numpy.float64, requirements=['C', 'W'])
    Omegar = numpy.require(Omegar, dtype=numpy.float64, requirements=['C', 'W'])
    Omegaphi = numpy.require(Omegaphi, dtype=numpy.float64, requirements=['C', 'W'])
    Omegaz = numpy.require(Omegaz, dtype=numpy.float64, requirements=['C', 'W'])
    # Run the C code; outputs are written into the arrays in place.
    actionAngleTorus_xvFreqsFunc(ctypes.c_double(jr), ctypes.c_double(jphi), ctypes.c_double(jz), ctypes.c_int(len(angler)), angler, anglephi, anglez, ctypes.c_int(npot), pot_type, pot_args, ctypes.c_double(tol), R, vR, vT, z, vz, phi, Omegar, Omegaphi, Omegaz, ctypes.byref(flag))
    # Reset input arrays to Fortran order where that was their original layout.
    if f_cont[0]:
        angler = numpy.asfortranarray(angler)
    if f_cont[1]:
        anglephi = numpy.asfortranarray(anglephi)
    if f_cont[2]:
        anglez = numpy.asfortranarray(anglez)
    return (R, vR, vT, z, vz, phi, Omegar[0], Omegaphi[0], Omegaz[0], flag.value)
|
def devices(self, value):
    """{"PathOnHost": "/dev/deviceName", "PathInContainer": "/dev/deviceName", "CgroupPermissions": "mrw"}

    Accepts None or a list of device mapping strings in one of the forms
    ``path_on_host``, ``path_on_host:path_in_container``, or
    ``path_on_host:path_in_container:permissions``, and normalizes each
    entry to the full three-part form (permissions default to ``rwm``).

    :raises TypeError: if ``value`` is neither None nor a list of str
    :raises ValueError: on a malformed entry or invalid permissions
    """
    if value is None:
        self._devices = None
    elif isinstance(value, list):
        results = []
        delimiter = ':'
        for device in value:
            # NOTE: original checked six.string_types; on Python 3 that
            # is exactly str.
            if not isinstance(device, str):
                raise TypeError("each device must be a str. {0} was passed".format(device))
            occurrences = device.count(delimiter)
            permissions = 'rwm'
            # FIX: counts were compared with `is`, an identity check that
            # only works for small ints by CPython accident (and warns on
            # modern Python); use equality.
            if occurrences == 0:
                path_on_host = device
                path_in_container = device
            elif occurrences == 1:
                path_on_host, path_in_container = device.split(delimiter)
            elif occurrences == 2:
                path_on_host, path_in_container, permissions = device.split(delimiter)
                # FIX: the old substring test (`permissions not in 'rwm'`)
                # rejected valid orderings such as 'rm'; check the
                # character set instead.
                if not set(permissions) <= set('rwm'):
                    raise ValueError("only permissions supported for devices are any combination of 'r' 'w' 'm'.")
            else:
                raise ValueError("""When passing devices they must be in one of the
                following formats: path_on_host, path_on_host:path_in_container,
                or path_on_host:path_in_container:permissions""")
            results.append("{0}:{1}:{2}".format(path_on_host, path_in_container, permissions))
        self._devices = results
    else:
        raise TypeError("devices must be a list or None.")
|
def get_address_reachability(self, address: Address) -> AddressReachability:
    """Return the current reachability state for ``address``.

    Unknown addresses map to ``AddressReachability.UNKNOWN``.
    """
    reachability_by_address = self._address_to_reachability
    return reachability_by_address.get(address, AddressReachability.UNKNOWN)
|
def remove_page_boundary_lines(docbody):
    """Try to locate page breaks, headers and footers within a document body,
    and remove the array cells at which they are found.
    @param docbody: (list) of strings, each string being a line in the
     document's body.
    @return: (list) of strings. The document body, hopefully with page-
     breaks, headers and footers removed. Each string in the list once more
     represents a line in the document.
    """
    # A document made purely of whitespace has no headers/footers that
    # can be stripped safely; return it untouched.
    if not document_contains_text(docbody):
        return docbody
    # Locate page breaks, then measure repeated header/footer runs
    # around them, and finally strip all three.
    break_positions = get_page_break_positions(docbody)
    head_count = get_number_header_lines(docbody, break_positions)
    foot_count = get_number_footer_lines(docbody, break_positions)
    return strip_headers_footers_pagebreaks(
        docbody, break_positions, head_count, foot_count)
|
def get_help(self, about, help_type='Operation'):
    """Return information about the Mechanical Turk Service
    operations and response group NOTE - this is basically useless
    as it just returns the URL of the documentation

    help_type: either 'Operation' or 'ResponseGroup'
    """
    return self._process_request('Help', {'About': about, 'HelpType': help_type})
|
def update(callback=None, path=None, method=Method.PUT, resource=None, tags=None, summary="Update specified resource.", middleware=None):  # type: (Callable, Path, Methods, Resource, Tags, str, List[Any]) -> Operation
    """Decorator to configure an operation that updates a resource.

    Usable both bare (``@update``) and with arguments (``@update(...)``).
    """
    def decorate(func):
        operation = ResourceOperation(func, path or PathParam('{key_field}'), method, resource, tags, summary, middleware)
        for response in (
            Response(HTTPStatus.NO_CONTENT, "{name} has been updated."),
            Response(HTTPStatus.BAD_REQUEST, "Validation failed.", Error),
            Response(HTTPStatus.NOT_FOUND, "Not found", Error),
        ):
            operation.responses.add(response)
        return operation
    if callback:
        return decorate(callback)
    return decorate
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.