| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34 |
###
# Copyright (c) 2002-2004, Jeremiah Fincher
# Copyright (c) 2010, James Vega
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
import supybot.ircdb as ircdb
class ChannelStatsTestCase(ChannelPluginTestCase):
plugins = ('ChannelStats', 'User')
def setUp(self):
ChannelPluginTestCase.setUp(self)
self.prefix = 'foo!bar@baz'
self.nick = 'foo'
self.irc.feedMsg(ircmsgs.privmsg(self.irc.nick,
'register foo bar',
prefix=self.prefix))
_ = self.irc.takeMsg()
chanop = ircdb.makeChannelCapability(self.channel, 'op')
ircdb.users.getUser(self.nick).addCapability(chanop)
def test(self):
self.assertNotError('channelstats')
self.assertNotError('channelstats')
self.assertNotError('channelstats')
def testStats(self):
self.assertError('channelstats stats %s' % self.nick)
self.assertNotError('channelstats stats %s' % self.nick)
self.assertNotError('channelstats stats %s' % self.nick.upper())
self.assertNotError('channelstats stats')
self.assertRegexp('channelstats stats', self.nick)
def testSelfStats(self):
self.assertError('channelstats stats %s' % self.irc.nick)
self.assertNotError('channelstats stats %s' % self.irc.nick)
self.assertNotError('channelstats stats %s' % self.irc.nick)
self.assertNotError('channelstats stats %s' % self.irc.nick.upper())
self.assertRegexp('channelstats rank chars', self.irc.nick)
u = ircdb.users.getUser(self.prefix)
u.addCapability(ircdb.makeChannelCapability(self.channel, 'op'))
ircdb.users.setUser(u)
try:
conf.supybot.plugins.ChannelStats.selfStats.setValue(False)
m1 = self.getMsg('channelstats stats %s' % self.irc.nick)
m2 = self.getMsg('channelstats stats %s' % self.irc.nick)
self.assertEqual(m1.args[1], m2.args[1])
finally:
conf.supybot.plugins.ChannelStats.selfStats.setValue(True)
def testNoKeyErrorStats(self):
self.assertNotRegexp('stats sweede', 'KeyError')
def testRank(self):
self.assertError('channelstats stats %s' % self.irc.nick)
self.assertNotError('channelstats stats %s' % self.irc.nick)
self.assertNotError('channelstats stats %s' % self.irc.nick)
self.assertNotError('channelstats stats %s' % self.irc.nick.upper())
self.assertNotError('channelstats stats %s' % self.nick)
self.assertNotError('channelstats stats %s' % self.nick.upper())
self.assertNotError('channelstats stats')
self.assertNotError('channelstats rank chars / msgs')
self.assertNotError('channelstats rank kicks/kicked') # Tests inf
self.assertNotError('channelstats rank log(msgs)')
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| mazaclub/mazabot-core | plugins/ChannelStats/test.py | Python | bsd-3-clause | 4,460 | 0.000897 |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from fairseq.data.encoders import register_tokenizer
from fairseq.dataclass import FairseqDataclass
@register_tokenizer("nltk", dataclass=FairseqDataclass)
class NLTKTokenizer(object):
def __init__(self, *unused):
try:
from nltk.tokenize import word_tokenize
self.word_tokenize = word_tokenize
except ImportError:
raise ImportError("Please install nltk with: pip install nltk")
def encode(self, x: str) -> str:
return " ".join(self.word_tokenize(x))
def decode(self, x: str) -> str:
return x
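# Usage sketch (illustrative, not part of the original module; requires the
# optional nltk dependency and its "punkt" tokenizer data to be installed):
if __name__ == "__main__":
    tok = NLTKTokenizer()
    encoded = tok.encode("Hello, world!")   # tokens re-joined with single spaces
    print(encoded)                          # e.g. "Hello , world !"
    print(tok.decode(encoded))              # decode() is the identity here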
| pytorch/fairseq | fairseq/data/encoders/nltk_tokenizer.py | Python | mit | 755 | 0 |
from viterbi import Viterbi
from rtree import Rtree
from spatialfunclib import *
class GPSMatcher:
def __init__(self, hmm, emission_probability, constraint_length=10, MAX_DIST=500, priors=None, smallV=0.00000000001):
# initialize spatial index
self.previous_obs = None
if priors == None:
priors=dict([(state,1.0/len(hmm)) for state in hmm])
state_spatial_index = Rtree()
unlocated_states = []
id_to_state = {}
id = 0
for state in hmm:
geom=self.geometry_of_state(state)
if not geom:
unlocated_states.append(state)
else:
((lat1,lon1),(lat2,lon2))=geom
state_spatial_index.insert(id,
(min(lon1, lon2), min(lat1, lat2),
max(lon1, lon2), max(lat1, lat2)))
id_to_state[id]=state
id=id+1
def candidate_states(obs): #was (lat,lon) in place of obs
geom = self.geometry_of_observation(obs)
if geom == None:
return hmm.keys()
else:
(lat,lon)=geom
nearby_states = state_spatial_index.intersection((lon-MAX_DIST/METERS_PER_DEGREE_LONGITUDE,
lat-MAX_DIST/METERS_PER_DEGREE_LATITUDE,
lon+MAX_DIST/METERS_PER_DEGREE_LONGITUDE,
lat+MAX_DIST/METERS_PER_DEGREE_LATITUDE))
candidates = [id_to_state[id] for id in nearby_states]+unlocated_states
return candidates
self.viterbi = Viterbi(hmm,emission_probability,
constraint_length=constraint_length,
priors=priors,
candidate_states=candidate_states,
smallV=smallV)
def step(self,obs,V,p):
if self.previous_obs != None:
for int_obs in self.interpolated_obs(self.previous_obs, obs):
V,p = self.viterbi.step(int_obs,V,p)
V,p = self.viterbi.step(obs,V,p)
self.previous_obs = obs
return V,p
def interpolated_obs(self,prev,obs):
return []
def geometry_of_observation(self, obs):
return obs
def geometry_of_state(self, state):
""" Subclasses should override this method to return the geometry of a given state, typically an edge."""
if state == 'unknown': return None
else:
return state
| Vanuan/gpx_to_road_map | biagoni2012/gpsmatcher.py | Python | apache-2.0 | 2,740 | 0.016788 |
import sys
sys.path.insert(1, "../../../")
import h2o
def nb_prostate(ip, port):
print "Importing prostate.csv data..."
prostate = h2o.upload_file(h2o.locate("smalldata/logreg/prostate.csv"))
print "Converting CAPSULE, RACE, DCAPS, and DPROS to categorical"
prostate['CAPSULE'] = prostate['CAPSULE'].asfactor()
prostate['RACE'] = prostate['RACE'].asfactor()
prostate['DCAPS'] = prostate['DCAPS'].asfactor()
prostate['DPROS'] = prostate['DPROS'].asfactor()
print "Compare with Naive Bayes when x = 3:9, y = 2"
prostate_nb = h2o.naive_bayes(x=prostate[2:9], y=prostate[1], laplace = 0)
prostate_nb.show()
print "Predict on training data"
prostate_pred = prostate_nb.predict(prostate)
prostate_pred.head()
if __name__ == "__main__":
h2o.run_test(sys.argv, nb_prostate)
| weaver-viii/h2o-3 | h2o-py/tests/testdir_algos/naivebayes/pyunit_prostateNB.py | Python | apache-2.0 | 836 | 0.008373 |
from django import forms
class ContactForm(forms.Form):
name = forms.CharField(max_length=100)
email = forms.EmailField()
message = forms.CharField(widget=forms.Textarea)
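# Usage sketch (illustrative, not part of the original file; assumes a configured
# Django project, and send_contact_email is a hypothetical helper):
#
#   form = ContactForm(data={
#       "name": "Ada",
#       "email": "ada@example.com",
#       "message": "Hello!",
#   })
#   if form.is_valid():
#       send_contact_email(**form.cleaned_data)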
| UTAlan/ginniBeam.net | gin/contact/models.py | Python | gpl-2.0 | 189 | 0.015873 |
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django.db import models
from shoop.utils.analog import define_log_model, BaseLogEntry
class FakeModel(models.Model):
pass
def test_analog():
FakeModelLogEntry = define_log_model(FakeModel)
assert FakeModelLogEntry.__module__ == FakeModel.__module__
assert FakeModelLogEntry._meta.get_field("target").rel.to is FakeModel
assert FakeModel.log_entries.related.model is FakeModel
assert FakeModel.log_entries.related.related_model is FakeModelLogEntry
assert issubclass(FakeModelLogEntry, BaseLogEntry)
assert isinstance(FakeModelLogEntry(), BaseLogEntry)
| arth-co/shoop | shoop_tests/utils/test_analog.py | Python | agpl-3.0 | 837 | 0 |
#!/usr/bin/env python
#########################################################################################
#
# Get or set orientation of nifti 3d or 4d data.
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Authors: Julien Cohen-Adad
# Modified: 2014-10-18
#
# About the license: see the file LICENSE.TXT
#########################################################################################
import sys
import os
import getopt
import commands
import sct_utils as sct
import time
# DEFAULT PARAMETERS
class Param:
## The constructor
def __init__(self):
self.debug = 0
self.fname_data = ''
self.fname_out = ''
self.orientation = ''
self.list_of_correct_orientation = 'RIP LIP RSP LSP RIA LIA RSA LSA IRP ILP SRP SLP IRA ILA SRA SLA RPI LPI RAI LAI RPS LPS RAS LAS PRI PLI ARI ALI PRS PLS ARS ALS IPR SPR IAR SAR IPL SPL IAL SAL PIR PSR AIR ASR PIL PSL AIL ASL'
self.change_header = ''
self.verbose = 0
self.remove_tmp_files = 1
# main
#=======================================================================================================================
def main():
# Parameters for debug mode
if param.debug:
print '\n*** WARNING: DEBUG MODE ON ***\n'
# get path of the testing data
status, path_sct_data = commands.getstatusoutput('echo $SCT_TESTING_DATA_DIR')
param.fname_data = path_sct_data+'/dmri/dwi_moco_mean.nii.gz'
param.orientation = ''
param.change_header = ''
param.remove_tmp_files = 0
param.verbose = 1
else:
# Check input parameters
try:
opts, args = getopt.getopt(sys.argv[1:], 'hi:o:r:s:a:v:')
except getopt.GetoptError:
usage()
if not opts:
usage()
for opt, arg in opts:
if opt == '-h':
usage()
elif opt in '-i':
param.fname_data = arg
elif opt in '-o':
param.fname_out = arg
elif opt in '-r':
param.remove_tmp_files = int(arg)
elif opt in '-s':
param.orientation = arg
elif opt in '-t':
param.threshold = arg
elif opt in '-a':
param.change_header = arg
elif opt in '-v':
param.verbose = int(arg)
# run main program
get_or_set_orientation()
# get_or_set_orientation
#=======================================================================================================================
def get_or_set_orientation():
fsloutput = 'export FSLOUTPUTTYPE=NIFTI; ' # for faster processing, all outputs are in NIFTI
# display usage if a mandatory argument is not provided
if param.fname_data == '':
sct.printv('ERROR: All mandatory arguments are not provided. See usage.', 1, 'error')
# check existence of input files
sct.printv('\ncheck existence of input files...', param.verbose)
sct.check_file_exist(param.fname_data, param.verbose)
# find what to do
if param.orientation == '' and param.change_header == '':
todo = 'get_orientation'
else:
todo = 'set_orientation'
# check if orientation is correct
if check_orientation_input():
sct.printv('\nERROR in '+os.path.basename(__file__)+': orientation is not recognized. Use one of the following orientation: '+param.list_of_correct_orientation+'\n', 1, 'error')
sys.exit(2)
# display input parameters
sct.printv('\nInput parameters:', param.verbose)
sct.printv(' data ..................'+param.fname_data, param.verbose)
# Extract path/file/extension
path_data, file_data, ext_data = sct.extract_fname(param.fname_data)
if param.fname_out == '':
# path_out, file_out, ext_out = '', file_data+'_'+param.orientation, ext_data
fname_out = path_data+file_data+'_'+param.orientation+ext_data
else:
fname_out = param.fname_out
# create temporary folder
sct.printv('\nCreate temporary folder...', param.verbose)
path_tmp = sct.slash_at_the_end('tmp.'+time.strftime("%y%m%d%H%M%S"), 1)
sct.run('mkdir '+path_tmp, param.verbose)
# Copying input data to tmp folder and convert to nii
# NB: cannot use c3d here because c3d cannot convert 4D data.
sct.printv('\nCopying input data to tmp folder and convert to nii...', param.verbose)
sct.run('cp '+param.fname_data+' '+path_tmp+'data'+ext_data, param.verbose)
# go to tmp folder
os.chdir(path_tmp)
# convert to nii format
sct.run('fslchfiletype NIFTI data', param.verbose)
# Get dimensions of data
sct.printv('\nGet dimensions of data...', param.verbose)
nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension('data.nii')
sct.printv(' ' + str(nx) + ' x ' + str(ny) + ' x ' + str(nz)+ ' x ' + str(nt), param.verbose)
# if 4d, loop across the data
if nt == 1:
if todo == 'set_orientation':
# set orientation
sct.printv('\nChange orientation...', param.verbose)
if param.change_header == '':
set_orientation('data.nii', param.orientation, 'data_orient.nii')
else:
set_orientation('data.nii', param.change_header, 'data_orient.nii', True)
elif todo == 'get_orientation':
# get orientation
sct.printv('\nGet orientation...', param.verbose)
sct.printv(get_orientation('data.nii'), 1)
else:
# split along T dimension
sct.printv('\nSplit along T dimension...', param.verbose)
sct.run(fsloutput+'fslsplit data data_T', param.verbose)
if todo == 'set_orientation':
# set orientation
sct.printv('\nChange orientation...', param.verbose)
for it in range(nt):
file_data_split = 'data_T'+str(it).zfill(4)+'.nii'
file_data_split_orient = 'data_orient_T'+str(it).zfill(4)+'.nii'
set_orientation(file_data_split, param.orientation, file_data_split_orient)
# Merge files back
sct.printv('\nMerge file back...', param.verbose)
cmd = fsloutput+'fslmerge -t data_orient'
for it in range(nt):
file_data_split_orient = 'data_orient_T'+str(it).zfill(4)+'.nii'
cmd = cmd+' '+file_data_split_orient
sct.run(cmd, param.verbose)
elif todo == 'get_orientation':
sct.printv('\nGet orientation...', param.verbose)
sct.printv(get_orientation('data_T0000.nii'), 1)
# come back to parent folder
os.chdir('..')
# Generate output files
if todo == 'set_orientation':
sct.printv('\nGenerate output files...', param.verbose)
sct.generate_output_file(path_tmp+'data_orient.nii', fname_out)
# Remove temporary files
if param.remove_tmp_files == 1:
sct.printv('\nRemove temporary files...', param.verbose)
sct.run('rm -rf '+path_tmp, param.verbose)
# to view results
if todo == 'set_orientation':
sct.printv('\nDone! To view results, type:', param.verbose)
sct.printv('fslview '+fname_out+' &', param.verbose, 'code')
print
# check_orientation_input
# ==========================================================================================
def check_orientation_input():
"""check if orientation input by user is correct"""
if param.orientation in param.list_of_correct_orientation:
return 0
else:
return -1
# get_orientation (uses FSL)
# ==========================================================================================
def get_orientation(fname):
status, output = sct.run('fslhd '+fname, 0)
# status, output = sct.run('isct_orientation3d -i '+fname+' -get', 0)
# orientation = output[26:]
orientation = output[output.find('sform_xorient')+15:output.find('sform_xorient')+16]+ \
output[output.find('sform_yorient')+15:output.find('sform_yorient')+16]+ \
output[output.find('sform_zorient')+15:output.find('sform_zorient')+16]
# check if orientation is specified in an other part of the header
if orientation == 'UUU':
orientation = output[output.find('qform_xorient')+15:output.find('qform_xorient')+16]+ \
output[output.find('qform_yorient')+15:output.find('qform_yorient')+16]+ \
output[output.find('qform_zorient')+15:output.find('qform_zorient')+16]
return orientation
# set_orientation
# ==========================================================================================
def set_orientation(fname_in, orientation, fname_out, inversion=False):
if not inversion:
sct.run('isct_orientation3d -i '+fname_in+' -orientation '+orientation+' -o '+fname_out, 0)
else:
from msct_image import Image
input_image = Image(fname_in)
input_image.change_orientation(orientation, True)
input_image.setFileName(fname_out)
input_image.save()
# return full path
return os.path.abspath(fname_out)
# Print usage
# ==========================================================================================
def usage():
print """
"""+os.path.basename(__file__)+"""
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Part of the Spinal Cord Toolbox <https://sourceforge.net/projects/spinalcordtoolbox>
DESCRIPTION
Get or set orientation of 3D or 4D data. Available orientations are:
"""+param_default.list_of_correct_orientation+"""
USAGE
Get orientation: """+os.path.basename(__file__)+""" -i <data>
Set orientation: """+os.path.basename(__file__)+""" -i <data> -s <orient>
MANDATORY ARGUMENTS
-i <file> image to get or set orientation from. Can be 3D or 4D.
OPTIONAL ARGUMENTS
-s <orient> orientation. Default=None.
-o <fname_out> output file name. Default=<file>_<orient>.<ext>.
-a <orient> actual orientation of image data (for corrupted data). Change the data
orientation to match orientation in the header.
-r {0,1} remove temporary files. Default="""+str(param_default.remove_tmp_files)+"""
-v {0,1} verbose. Default="""+str(param_default.verbose)+"""
-h help. Show this message
EXAMPLE
"""+os.path.basename(__file__)+""" -i dwi.nii.gz -s RPI\n"""
# exit program
sys.exit(2)
#=======================================================================================================================
# Start program
#=======================================================================================================================
if __name__ == "__main__":
# initialize parameters
param = Param()
param_default = Param()
# call main function
main()
| benjamindeleener/scad | scripts/sct_orientation.py | Python | mit | 11,010 | 0.003724 |
from model.project import Project
class ProjectHelper:
def __init__(self, app):
self.app = app
self.project_list_cache = None
def open_page_manage_projects(self):
wd = self.app.wd
wd.find_element_by_link_text("Manage").click()
wd.find_element_by_link_text("Manage Projects").click()
def create(self, project):
wd = self.app.wd
self.open_page_manage_projects()
wd.find_element_by_xpath("//table[3]/tbody/tr[1]/td/form/input[2]").click()
self.fill_add_project_form(project)
wd.find_element_by_css_selector("input.button").click()
wd.find_element_by_link_text("Proceed").click()
self.project_cache = None
def fill_add_project_form(self, project):
wd = self.app.wd
self.change_field_value("name", project.name)
self.change_field_value("description", project.description)
def change_field_value(self, field_name, text):
wd = self.app.wd
if text is not None:
wd.find_element_by_name(field_name).click()
wd.find_element_by_name(field_name).clear()
wd.find_element_by_name(field_name).send_keys(text)
def delete(self, project):
wd = self.app.wd
self.open_page_manage_projects()
wd.find_element_by_link_text(project.name).click()
wd.find_element_by_css_selector("form > input.button").click()
wd.find_element_by_css_selector("input.button").click()
def get_project_list(self):
wd = self.app.wd
self.open_page_manage_projects()
projects_list = wd.find_elements_by_xpath("//table[3]/tbody/tr")[2:]
return [Project(name=p.find_element_by_xpath("td[1]").text) for p in projects_list]
def count(self):
wd = self.app.wd
self.open_page_manage_projects()
return len(wd.find_elements_by_css_selector(".fa.fa-check.fa-lg"))
project_cache = None
| AndreyBalabanov/python_training_mantisBT | fixture/project.py | Python | apache-2.0 | 1,944 | 0.002572 |
#!/usr/bin/env python
def backword(s):
length = len(s)
i = -1
t = s
while i >= -length:
t += s[i]
i -= 1
return t
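# Example (illustrative, not part of the original file): the loop walks the
# string with negative indices and appends each character to a copy of s, so
# the result is s followed by its reverse.
if __name__ == '__main__':
    print(backword('abc'))   # -> 'abccba'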
| opensvn/test | src/study/python/backward.py | Python | gpl-2.0 | 151 | 0 |
#!/usr/bin/env python
#
# Copyright 2007 Doug Hellmann.
#
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of Doug
# Hellmann not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
"""Including content from other files in the token stream.
"""
__version__ = "$Id$"
#end_pymotw_header
import shlex
text = """This text says to source quotes.txt before continuing."""
print 'ORIGINAL:', repr(text)
print
lexer = shlex.shlex(text)
lexer.wordchars += '.'
lexer.source = 'source'
print 'TOKENS:'
for token in lexer:
print repr(token)
| qilicun/python | python2/PyMOTW-1.132/PyMOTW/shlex/shlex_source.py | Python | gpl-3.0 | 1,405 | 0.000712 |
from communityalmanac.tests import *
class TestUsersController(TestController):
def test_index(self):
response = self.app.get(url(controller='users', action='index'))
# Test response...
| openplans/community-almanac | communityalmanac/tests/functional/test_users.py | Python | agpl-3.0 | 208 | 0.004808 |
"""
Called by tox.ini: uses the generated executable to run the tests in ./tests/
directory.
.. note:: somehow calling "build/runtests_script" directly from tox doesn't
seem to work (at least on Windows).
"""
if __name__ == '__main__':
import os
import sys
executable = os.path.join(os.getcwd(), 'build', 'runtests_script')
if sys.platform.startswith('win'):
executable += '.exe'
sys.exit(os.system('%s tests' % executable))
| Yukarumya/Yukarum-Redfoxes | testing/web-platform/tests/tools/pytest/testing/cx_freeze/tox_run.py | Python | mpl-2.0 | 464 | 0.002155 |
# coding: utf-8
'''
Name : ThammeGowda Narayanaswamy
USCID: 2074669439
'''
import math
from scipy.stats import multivariate_normal
import numpy as np
from matplotlib import pyplot as plt
import matplotlib.patches as mpatches
import scipy as sp
from scipy import spatial
from scipy import stats
from pprint import pprint
blob_file = "hw5_blob.csv"
circle_file = "hw5_circle.csv"
def load_points(f_name):
with open(f_name) as f:
res = []
for l in f:
x,y = l.split(",")
res.append([float(x), float(y)])
return np.array(res)
blobs = load_points(blob_file)
circles = load_points(circle_file)
'''
# In[4]:
plt.plot(*zip(*circles), marker='o', color='r', ls='')
plt.show()
plt.plot(*zip(*blobs), marker='o', color='b', ls='')
plt.show()
'''
# In[5]:
def k_means(k, pts, get_indices=False, silent=True, tol=1e-5):
N = len(pts)
assert k <= N
print("K=%d, N=%d" % (k, N))
# pick random k points
pos = set()
while len(pos) < k:
r = np.random.randint(N)
pos.add(r)
centroids = []
for p in pos:
centroids.append(tuple(pts[p]))
change = float('inf')
conv_tol = 1e-5
itr, max_iters = 0, 100
while change > tol and itr < max_iters:
itr += 1
# assign cluster to each point
asgn = {}
indices = {}
for ct in centroids:
asgn[ct] = []
indices[ct] = []
for idx, pt in enumerate(pts):
mindist = float('inf')
a = None
for ct in centroids:
dist = spatial.distance.cdist([ct], [pt])
if dist < mindist:
mindist = dist
a = ct
asgn[a].append(pt)
indices[a].append(idx)
# compute means of each cluster
oldcentr = centroids
centroids = []
for ct, cluster in asgn.items():
centroids.append(tuple(np.array(cluster).mean(axis=0)))
dist_matrix = spatial.distance.cdist(oldcentr, centroids)
# has distance between each pair of {new, old} centroids
# need the diagonal values
change = dist_matrix.trace()
if not silent:
print("Movement in centroids", change)
return indices if get_indices else asgn
# In[6]:
print("# K Means")
colors = ['r', 'g', 'b', 'y', 'c', 'k']
plt.figure(1, figsize=(15, 10))
plt.title("K Means")
ks = {2,3,5}
dss = {'Blobs': blobs, 'Circles': circles}
j = 1
for title, ds in dss.items():
for k in ks:
clstrs = k_means(k, ds)
plt.subplot(2, 3, j)
i = 0
for cnt, cpts in clstrs.items():
plt.plot(*zip(*cpts), marker='o', color=colors[i], ls='')
i += 1
plt.title("%s , K=%d" % (title, k))
j += 1
plt.show()
# # Kernel
'''
# ## Feature Mapping
# In[7]:
center = [0.0, 0.0]
newdim = sp.spatial.distance.cdist([center], circles).transpose()
clusters = k_means(2, newdim, get_indices=True)
i = 0
for cnt, cpts in clusters.items():
cpts = map(lambda x: circles[x], cpts)
plt.plot(*zip(*cpts), marker='o', color=colors[i], ls='')
i += 1
plt.show()
'''
# ## Kernel K Means
#
# Kernel used:
#     1 - min(r(x1), r(x2)) / max(r(x1), r(x2)),   where r(x) is the squared distance of x from the center
#
# Putting the smaller radius in the numerator and the larger in the denominator keeps the kernel symmetric and bounded in [0, 1); points on the same circle score 0, so they look "close" to each other.
print("Kernel K means")
class KernelKMeans(object):
def kernel_matrix(self, data, kernel_func):
''' Computes kernel matrix
: params:
data - data points
kernel_func - kernel function
:returns: nxn matrix
'''
n = data.shape[0]
K = np.zeros((n,n), dtype=float)
for i in range(n):
for j in range(n):
K[i,j] = kernel_func(data[i], data[j])
return K
def cluster(self, X, k, kernel_func, max_itr=100, tol=1e-3):
'''
Clusters the points
:params:
X - data points
k - number of clusters
kernel_func - kernel function that outputs smaller values for points in same cluster
:returns: Nx1 vector of assignments
'''
# N
N = X.shape[0]
# NxN matrix from kernel funnction element wise
K = self.kernel_matrix(X, kernel_func)
# equal weightage to all
cluster_weights = np.ones(N)
# Assignments : random assignments to begin with
A = np.random.randint(k, size=N)
for it in xrange(max_itr): # stuck up between 2 local minimas, abort after maxiter
# N x k matrix that stores distance between every point and cluster center
dist = self.compute_dist(K, k, A, sw=cluster_weights)
oldA, A = A, dist.argmin(axis=1)
# Check if it is conveged
n_same = np.sum(np.abs(A - oldA) == 0)
if 1 - float(n_same) / N < tol:
print "Converged at iteration:", it + 1
break
return A
def compute_dist(self, K, k, A, sw):
"""
Computes Nxk distance matrix using kernel matrix
: params:
K - NxN kernel Matrix
k - number of clusters
A - Nx1 Assignments
sw - sample weights
: returns : Nxk distance matrix
"""
dist = np.zeros((K.shape[0], k))
for cl in xrange(k):
mask = A == cl
if np.sum(mask) == 0:
raise ValueError("ERROR: cluster '%d' is empty. Looks like we can't make %d clusters" % (cl, k))
N_ = sw[mask].sum()
KK = K[mask][:, mask]
dist[:, cl] += np.sum(np.outer(sw[mask], sw[mask]) * KK / (N_*N_))
dist[:, cl] -= 2 * np.sum(sw[mask] * K[:, mask], axis=1) / N_
return dist
def distance(x1, x2):
'''Squared Eucledian distance between 2 points
:params:
x1 - point1
x2 - point2
'''
return np.sum((x1 - x2) ** 2)
def circular_kernel(x1, x2, center=None):
'''This kernel outputs lesser distance for the points that are from circumference
:params:
x1 - first point
x2 - second point
center - center of circle(default = origin (0,0,...))
'''
if center is None:
center = np.zeros(len(x1))
dist1 = distance(x1, center)
dist2 = distance(x2, center)
return 1.0 - min(dist1, dist2) / max(dist1, dist2)
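# Worked example (illustrative, not part of the original script): with the default
# center at the origin, points at squared distances 1.0 and 4.0 from it give
# 1 - 1/4 = 0.75, while two points on the same circle give 1 - 1/1 = 0.0, so the
# kernel scores equal-radius points as "close" regardless of angle:
#   circular_kernel(np.array([1.0, 0.0]), np.array([0.0, 2.0]))   # -> 0.75
#   circular_kernel(np.array([0.0, 1.0]), np.array([1.0, 0.0]))   # -> 0.0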
clusters = KernelKMeans().cluster(circles, 2, circular_kernel)
for i in range(2): # two clusters were requested in the call above
cpts = circles[clusters == i]
plt.plot(*zip(*cpts), marker='o', color=colors[i], ls='')
i += 1
plt.show()
# # EM Algorithm with GMM
print("EM Algorithm")
# In[62]:
def multivar_gaussian_pdf(x, mu, covar):
return multivariate_normal.pdf(x, mean=mu, cov=covar)
class EM_GMM(object):
def __init__(self, data, k):
self.data = data
self.k = k
self.N = data.shape[0]
# theta param
self.mean, self.cov, self.weight = [], [], []
# random initialization
A = np.random.randint(k, size=data.shape[0])
for c in range(k):
cpts = data[A == c]
self.mean.append(np.mean(cpts, axis=0))
self.cov.append(np.cov(np.array(cpts).transpose()))
self.weight.append(1.0 * cpts.shape[0] / data.shape[0])
def compute_gamma(self):
gamma = np.zeros((self.N, self.k), dtype=float)
for idx, pt in enumerate(self.data):
pdf = []
for ct in range(self.k):
temp = multivar_gaussian_pdf(pt, self.mean[ct], self.cov[ct])
pdf.append(temp * self.weight[ct])
gamma[idx] = np.array(pdf) / sum(pdf)
return gamma
def update_theta(self, P):
weights = P.sum(axis=0)/P.sum()
means = []
covs = []
for i in range(self.k):
nr_mu = (P[:, i:i+1] * self.data).sum(axis=0)
dr_mu = P[:, i].sum(axis=0)
pt_mu = nr_mu / dr_mu
means.append(pt_mu)
for i in range(self.k):
nr_cov = (P[:, i:i+1] * (self.data - means[i])).transpose().dot(self.data - means[i])
dr_cov = P[:, i].sum(axis=0)
covs.append(nr_cov / dr_cov)
self.mean= means
self.cov = covs
self.weight = weights
def log_likelihood(self):
log_sum = 0.
for _, pt in enumerate(self.data):
row_sum = []
for ct in range(self.k):
p_X_given_N = multivar_gaussian_pdf(pt, self.mean[ct], self.cov[ct])
p_N = self.weight[ct]
joint = p_N * p_X_given_N
row_sum.append(joint)
res = sum(row_sum)
log_sum += math.log(res)
return log_sum
def gmm(self, max_itr = 50):
ll = []
for itr in range(max_itr):
old_means = self.mean # used for convergance test
gamma = self.compute_gamma()
self.update_theta(gamma)
ll.append(self.log_likelihood())
if np.sum(np.abs(np.array(self.mean) - np.array(old_means))) < 1e-3:
break
return gamma, ll
data = blobs
max_ll = float('-inf')
plt.figure(1, figsize=(8, 6))
legends = []
k = 3
for i in range(1,6):
em = EM_GMM(data, k)
gamma, ll = em.gmm()
if ll[-1] >= max_ll:
best_gamma = gamma
best = em
max_ll = ll[-1]
print "Converged: ", len(ll)
plt.plot(range(len(ll)), ll , '-', color=colors[i])
legends.append(mpatches.Patch(color=colors[i], label='Iteration: %d' % i))
plt.legend(handles=legends)
plt.show()
idx = best_gamma.argmax(axis=1)
print "Best parameters: "
print "Mean:", best.mean
print "Covar:", best.cov
plt.scatter(data[:,0], data[:,1], color=[colors[i] for i in idx] )
plt.show()
| thammegowda/algos | usc-csci-ml/hw5/src/CSCI567_hw5_fall16.py | Python | apache-2.0 | 9,745 | 0.003797 |
# encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python3/dist-packages/PyQt4/QtGui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
class QApplication(__PyQt4_QtCore.QCoreApplication):
"""
QApplication(list-of-str)
QApplication(list-of-str, bool)
QApplication(list-of-str, QApplication.Type)
QApplication(Display, int visual=0, int colormap=0)
QApplication(Display, list-of-str, int visual=0, int cmap=0)
"""
def aboutQt(self): # real signature unknown; restored from __doc__
""" QApplication.aboutQt() """
pass
def activeModalWidget(self): # real signature unknown; restored from __doc__
""" QApplication.activeModalWidget() -> QWidget """
return QWidget
def activePopupWidget(self): # real signature unknown; restored from __doc__
""" QApplication.activePopupWidget() -> QWidget """
return QWidget
def activeWindow(self): # real signature unknown; restored from __doc__
""" QApplication.activeWindow() -> QWidget """
return QWidget
def alert(self, QWidget, int_msecs=0): # real signature unknown; restored from __doc__
""" QApplication.alert(QWidget, int msecs=0) """
pass
def allWidgets(self): # real signature unknown; restored from __doc__
""" QApplication.allWidgets() -> list-of-QWidget """
pass
def autoSipEnabled(self): # real signature unknown; restored from __doc__
""" QApplication.autoSipEnabled() -> bool """
return False
def beep(self): # real signature unknown; restored from __doc__
""" QApplication.beep() """
pass
def changeOverrideCursor(self, QCursor): # real signature unknown; restored from __doc__
""" QApplication.changeOverrideCursor(QCursor) """
pass
def clipboard(self): # real signature unknown; restored from __doc__
""" QApplication.clipboard() -> QClipboard """
return QClipboard
def closeAllWindows(self): # real signature unknown; restored from __doc__
""" QApplication.closeAllWindows() """
pass
def colorSpec(self): # real signature unknown; restored from __doc__
""" QApplication.colorSpec() -> int """
return 0
def commitData(self, QSessionManager): # real signature unknown; restored from __doc__
""" QApplication.commitData(QSessionManager) """
pass
def commitDataRequest(self, *args, **kwargs): # real signature unknown
""" QApplication.commitDataRequest[QSessionManager] [signal] """
pass
def cursorFlashTime(self): # real signature unknown; restored from __doc__
""" QApplication.cursorFlashTime() -> int """
return 0
def desktop(self): # real signature unknown; restored from __doc__
""" QApplication.desktop() -> QDesktopWidget """
return QDesktopWidget
def desktopSettingsAware(self): # real signature unknown; restored from __doc__
""" QApplication.desktopSettingsAware() -> bool """
return False
def doubleClickInterval(self): # real signature unknown; restored from __doc__
""" QApplication.doubleClickInterval() -> int """
return 0
def event(self, QEvent): # real signature unknown; restored from __doc__
""" QApplication.event(QEvent) -> bool """
return False
def exec(self): # real signature unknown; restored from __doc__
""" QApplication.exec() -> int """
return 0
def exec_(self): # real signature unknown; restored from __doc__
""" QApplication.exec_() -> int """
return 0
def focusChanged(self, *args, **kwargs): # real signature unknown
""" QApplication.focusChanged[QWidget, QWidget] [signal] """
pass
def focusWidget(self): # real signature unknown; restored from __doc__
""" QApplication.focusWidget() -> QWidget """
return QWidget
def font(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
"""
QApplication.font() -> QFont
QApplication.font(QWidget) -> QFont
QApplication.font(str) -> QFont
"""
return QFont
def fontDatabaseChanged(self, *args, **kwargs): # real signature unknown
""" QApplication.fontDatabaseChanged [signal] """
pass
def fontMetrics(self): # real signature unknown; restored from __doc__
""" QApplication.fontMetrics() -> QFontMetrics """
return QFontMetrics
def globalStrut(self): # real signature unknown; restored from __doc__
""" QApplication.globalStrut() -> QSize """
pass
def inputContext(self): # real signature unknown; restored from __doc__
""" QApplication.inputContext() -> QInputContext """
return QInputContext
def isEffectEnabled(self, Qt_UIEffect): # real signature unknown; restored from __doc__
""" QApplication.isEffectEnabled(Qt.UIEffect) -> bool """
return False
def isLeftToRight(self): # real signature unknown; restored from __doc__
""" QApplication.isLeftToRight() -> bool """
return False
def isRightToLeft(self): # real signature unknown; restored from __doc__
""" QApplication.isRightToLeft() -> bool """
return False
def isSessionRestored(self): # real signature unknown; restored from __doc__
""" QApplication.isSessionRestored() -> bool """
return False
def keyboardInputDirection(self): # real signature unknown; restored from __doc__
""" QApplication.keyboardInputDirection() -> Qt.LayoutDirection """
pass
def keyboardInputInterval(self): # real signature unknown; restored from __doc__
""" QApplication.keyboardInputInterval() -> int """
return 0
def keyboardInputLocale(self): # real signature unknown; restored from __doc__
""" QApplication.keyboardInputLocale() -> QLocale """
pass
def keyboardModifiers(self): # real signature unknown; restored from __doc__
""" QApplication.keyboardModifiers() -> Qt.KeyboardModifiers """
pass
def lastWindowClosed(self, *args, **kwargs): # real signature unknown
""" QApplication.lastWindowClosed [signal] """
pass
def layoutDirection(self): # real signature unknown; restored from __doc__
""" QApplication.layoutDirection() -> Qt.LayoutDirection """
pass
def mouseButtons(self): # real signature unknown; restored from __doc__
""" QApplication.mouseButtons() -> Qt.MouseButtons """
pass
def notify(self, QObject, QEvent): # real signature unknown; restored from __doc__
""" QApplication.notify(QObject, QEvent) -> bool """
return False
def overrideCursor(self): # real signature unknown; restored from __doc__
""" QApplication.overrideCursor() -> QCursor """
return QCursor
def palette(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
"""
QApplication.palette() -> QPalette
QApplication.palette(QWidget) -> QPalette
QApplication.palette(str) -> QPalette
"""
return QPalette
def queryKeyboardModifiers(self): # real signature unknown; restored from __doc__
""" QApplication.queryKeyboardModifiers() -> Qt.KeyboardModifiers """
pass
def quitOnLastWindowClosed(self): # real signature unknown; restored from __doc__
""" QApplication.quitOnLastWindowClosed() -> bool """
return False
def restoreOverrideCursor(self): # real signature unknown; restored from __doc__
""" QApplication.restoreOverrideCursor() """
pass
def saveState(self, QSessionManager): # real signature unknown; restored from __doc__
""" QApplication.saveState(QSessionManager) """
pass
def saveStateRequest(self, *args, **kwargs): # real signature unknown
""" QApplication.saveStateRequest[QSessionManager] [signal] """
pass
def sessionId(self): # real signature unknown; restored from __doc__
""" QApplication.sessionId() -> str """
return ""
def sessionKey(self): # real signature unknown; restored from __doc__
""" QApplication.sessionKey() -> str """
return ""
def setActiveWindow(self, QWidget): # real signature unknown; restored from __doc__
""" QApplication.setActiveWindow(QWidget) """
pass
def setAutoSipEnabled(self, bool): # real signature unknown; restored from __doc__
""" QApplication.setAutoSipEnabled(bool) """
pass
def setColorSpec(self, p_int): # real signature unknown; restored from __doc__
""" QApplication.setColorSpec(int) """
pass
def setCursorFlashTime(self, p_int): # real signature unknown; restored from __doc__
""" QApplication.setCursorFlashTime(int) """
pass
def setDesktopSettingsAware(self, bool): # real signature unknown; restored from __doc__
""" QApplication.setDesktopSettingsAware(bool) """
pass
def setDoubleClickInterval(self, p_int): # real signature unknown; restored from __doc__
""" QApplication.setDoubleClickInterval(int) """
pass
def setEffectEnabled(self, Qt_UIEffect, bool_enabled=True): # real signature unknown; restored from __doc__
""" QApplication.setEffectEnabled(Qt.UIEffect, bool enabled=True) """
pass
def setFont(self, QFont, str_className=None): # real signature unknown; restored from __doc__
""" QApplication.setFont(QFont, str className=None) """
pass
def setGlobalStrut(self, QSize): # real signature unknown; restored from __doc__
""" QApplication.setGlobalStrut(QSize) """
pass
def setGraphicsSystem(self, p_str): # real signature unknown; restored from __doc__
""" QApplication.setGraphicsSystem(str) """
pass
def setInputContext(self, QInputContext): # real signature unknown; restored from __doc__
""" QApplication.setInputContext(QInputContext) """
pass
def setKeyboardInputInterval(self, p_int): # real signature unknown; restored from __doc__
""" QApplication.setKeyboardInputInterval(int) """
pass
def setLayoutDirection(self, Qt_LayoutDirection): # real signature unknown; restored from __doc__
""" QApplication.setLayoutDirection(Qt.LayoutDirection) """
pass
def setOverrideCursor(self, QCursor): # real signature unknown; restored from __doc__
""" QApplication.setOverrideCursor(QCursor) """
pass
def setPalette(self, QPalette, str_className=None): # real signature unknown; restored from __doc__
""" QApplication.setPalette(QPalette, str className=None) """
pass
def setQuitOnLastWindowClosed(self, bool): # real signature unknown; restored from __doc__
""" QApplication.setQuitOnLastWindowClosed(bool) """
pass
def setStartDragDistance(self, p_int): # real signature unknown; restored from __doc__
""" QApplication.setStartDragDistance(int) """
pass
def setStartDragTime(self, p_int): # real signature unknown; restored from __doc__
""" QApplication.setStartDragTime(int) """
pass
def setStyle(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
"""
QApplication.setStyle(QStyle)
QApplication.setStyle(str) -> QStyle
"""
return QStyle
def setStyleSheet(self, p_str): # real signature unknown; restored from __doc__
""" QApplication.setStyleSheet(str) """
pass
def setWheelScrollLines(self, p_int): # real signature unknown; restored from __doc__
""" QApplication.setWheelScrollLines(int) """
pass
def setWindowIcon(self, QIcon): # real signature unknown; restored from __doc__
""" QApplication.setWindowIcon(QIcon) """
pass
def startDragDistance(self): # real signature unknown; restored from __doc__
""" QApplication.startDragDistance() -> int """
return 0
def startDragTime(self): # real signature unknown; restored from __doc__
""" QApplication.startDragTime() -> int """
return 0
def style(self): # real signature unknown; restored from __doc__
""" QApplication.style() -> QStyle """
return QStyle
def styleSheet(self): # real signature unknown; restored from __doc__
""" QApplication.styleSheet() -> str """
return ""
def syncX(self): # real signature unknown; restored from __doc__
""" QApplication.syncX() """
pass
def topLevelAt(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
"""
QApplication.topLevelAt(QPoint) -> QWidget
QApplication.topLevelAt(int, int) -> QWidget
"""
return QWidget
def topLevelWidgets(self): # real signature unknown; restored from __doc__
""" QApplication.topLevelWidgets() -> list-of-QWidget """
pass
def type(self): # real signature unknown; restored from __doc__
""" QApplication.type() -> QApplication.Type """
pass
def wheelScrollLines(self): # real signature unknown; restored from __doc__
""" QApplication.wheelScrollLines() -> int """
return 0
def widgetAt(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
"""
QApplication.widgetAt(QPoint) -> QWidget
QApplication.widgetAt(int, int) -> QWidget
"""
return QWidget
def windowIcon(self): # real signature unknown; restored from __doc__
""" QApplication.windowIcon() -> QIcon """
return QIcon
def x11EventFilter(self, sip_voidptr): # real signature unknown; restored from __doc__
""" QApplication.x11EventFilter(sip.voidptr) -> bool """
return False
def x11ProcessEvent(self, sip_voidptr): # real signature unknown; restored from __doc__
""" QApplication.x11ProcessEvent(sip.voidptr) -> int """
return 0
def __init__(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
pass
ColorSpec = None # (!) real value is ''
CustomColor = 1
GuiClient = 1
GuiServer = 2
ManyColor = 2
NormalColor = 0
Tty = 0
Type = None # (!) real value is ''
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyQt4/QtGui/QApplication.py | Python | gpl-2.0 | 14,490 | 0.009455 |
from coinpy.lib.serialization.common.field import Field
from coinpy.lib.serialization.common.structure import Structure
from coinpy.lib.blockchain.bsddb.objects.disktxpos import DiskTxPos
class DiskTxPosSerializer():
DISKTXPOS = Structure([Field("<I", "file"),
Field("<I", "blockpos"),
Field("<I", "txpos")], "disktxpos")
def serialize(self, disktxpos_obj):
return (self.DISKTXPOS.serialize([disktxpos_obj.file,
disktxpos_obj.blockpos,
disktxpos_obj.txpos]))
def deserialize(self, data, cursor=0):
(file, nblockpos, ntxpos), cursor = self.DISKTXPOS.deserialize(data, cursor)
return (DiskTxPos(file, nblockpos, ntxpos), cursor)
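# Round-trip sketch (illustrative, not part of the original module): the three
# fields are declared as little-endian uint32 ("<I") values, so serializing a
# DiskTxPos and deserializing the result should reproduce the same position.
if __name__ == "__main__":
    serializer = DiskTxPosSerializer()
    blob = serializer.serialize(DiskTxPos(1, 80, 446))
    pos, cursor = serializer.deserialize(blob)
    print("%s %s %s" % (pos.file, pos.blockpos, pos.txpos))   # expected: 1 80 446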
| sirk390/coinpy | coinpy-lib/src/coinpy/lib/blockchain/bsddb/serialization/s11n_disktxpos.py | Python | lgpl-3.0 | 804 | 0.002488 |
# -*- coding: utf-8 -*-
#
# Total Open Station documentation build configuration file, created by
# sphinx-quickstart on Sat Feb 28 23:03:04 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('../totalopenstation'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Total Open Station'
copyright = '2015-2020, Stefano Costa, Damien Gaignon and Luca Bianconi'
author = 'Stefano Costa'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.5'
# The full version, including alpha/beta/rc tags.
release = '0.5.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build',
'global.rst',
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
rst_prolog = """
.. include:: /global.rst
"""
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'github_user': 'totalopenstation',
'github_repo': 'totalopenstation',
'github_type': 'star',
'github_count': 'true',
'github_button': True,
'description': 'Download and export field survey data from your total station'
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "tops.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
'donate.html',
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'TotalOpenStationdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '12pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'TotalOpenStation.tex', 'Total Open Station Documentation',
'Stefano Costa, Damien Gaignon, Luca Bianconi', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('totalopenstation-cli-parser',
'totalopenstation-cli-parser',
'Total Open Station command line converter',
['Stefano Costa, Luca Bianconi'],
1),
('totalopenstation-cli-connector',
'totalopenstation-cli-connector',
'Total Open Station command line downloader',
['Stefano Costa, Luca Bianconi'],
1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'TotalOpenStation', 'Total Open Station Documentation',
'Stefano Costa, Damien Gaignon, Luca Bianconi', 'TotalOpenStation', 'Total Open Station downloads data from your total station into common formats',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_use_ivar = True
napoleon_use_param = False
| steko/totalopenstation | docs/conf.py | Python | gpl-3.0 | 9,411 | 0.006057 |
print '... Importing simuvex/plugins/cgc.py ...'
from angr.state_plugins.cgc import *
| Ruide/angr-dev | simuvex/simuvex/plugins/cgc.py | Python | bsd-2-clause | 86 | 0.011628 |
"""SI tune correction configuration.
Values in _template_dict are arbitrary. They are used just to compare with
corresponding values when a new configuration is tried to be inserted in the
servconf database.
"""
from copy import deepcopy as _dcopy
def get_dict():
"""Return configuration type dictionary."""
module_name = __name__.split('.')[-1]
_dict = {
'config_type_name': module_name,
'value': _dcopy(_template_dict)
}
return _dict
# Tune Correction Parameters for Storage Ring
#
#   | DeltaTuneX |   | m00 m01 ... m07 |   | KL SI QFA  |
#   |            | = |                 | * |     .      |
#   | DeltaTuneY |   | m10 m11 ... m17 |   |     .      |
#                                          |     .      |
#                                          | KL SI QDP2 |
#
# Where (1+f)*KL = KL + DeltaKL.
#
# Correction Matrix of Svd and Additional Method
# (obtained by matlab lnls_calc_tunecorr_params routine)
# m(0,0) m(0,1)...m(0,7)
# m(1,0) m(1,1)...m(1,7)
#
# Nominals KLs
# [quadrupole_order QFA QFB QFP QDA QDB1 QDB2 QDP1 QDP2]
_template_dict = {
'matrix': [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
'nominal KLs': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
}
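# Illustration (hypothetical, not part of the original module): the relation
# sketched above is a plain 2x8 matrix product, so with a filled-in 'matrix' the
# two tune shifts follow directly from the eight quadrupole-family KL terms.
if __name__ == '__main__':
    import numpy as np
    m = np.array(_template_dict['matrix'])        # 2x8 response matrix (all zeros in the template)
    kl = np.array(_template_dict['nominal KLs'])  # eight KL values, one per quadrupole family
    print(m.dot(kl))                              # (DeltaTuneX, DeltaTuneY) per the comment above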
| lnls-sirius/dev-packages | siriuspy/siriuspy/clientconfigdb/types/si_tunecorr_params.py | Python | gpl-3.0 | 1,290 | 0 |
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from command import PagedCommand
class Diff(PagedCommand):
common = True
helpSummary = "Show changes between commit and working tree"
helpUsage = """
%prog [<project>...]
The -u option causes '%prog' to generate diff output with file paths
relative to the repository root, so the output can be applied
to the Unix 'patch' command.
"""
def _Options(self, p):
p.add_option('-u', '--absolute',
dest='absolute', action='store_true',
help='Paths are relative to the repository root')
def Execute(self, opt, args):
ret = 0
for project in self.GetProjects(args):
if not project.PrintWorkTreeDiff(opt.absolute):
ret = 1
return ret
| TheQtCompany/git-repo | subcmds/diff.py | Python | apache-2.0 | 1,328 | 0.004518 |
# Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from . import models
| OCA/stock-logistics-warehouse | stock_request_picking_type/__init__.py | Python | agpl-3.0 | 133 | 0 |
# Copyright 2012 Spanish National Research Council
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from keystone import config
from keystone import exception as ks_exc
from keystone.assignment import controllers
from keystone import middleware
from keystone import tests
from keystone.tests import default_fixtures
from keystone.tests.ksfixtures import database
from keystone.tests import test_auth
from keystone.tests import test_middleware
import keystone_voms.core as ks_voms
from keystone_voms import exception
CONF = config.CONF
# fake proxy from a fake cert from a fake ca
user_dn = "/C=ES/O=FAKE CA/CN=Fake User"
user_vo = "dteam"
valid_cert = """-----BEGIN CERTIFICATE-----
MIIGNjCCBZ+gAwIBAgIUI6TVyFmQEXRIq6FOHrmHtb56XDMwDQYJKoZIhvcNAQEF
BQAwMzELMAkGA1UEBhMCRVMxEDAOBgNVBAoTB0ZBS0UgQ0ExEjAQBgNVBAMTCUZh
a2UgVXNlcjAeFw0xMjA4MzAxNDI2MjBaFw0yNDAxMjcwNTMxMjBaMEgxCzAJBgNV
BAYTAkVTMRAwDgYDVQQKEwdGQUtFIENBMRIwEAYDVQQDEwlGYWtlIFVzZXIxEzAR
BgNVBAMTCjE3MDAwOTE3MTMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBALNI
YdjO2XueOPtSEp2GeshPQuRvXl4937vz4WPu9hVemuxS83kXfi2PP9FAoP5lQv4g
+RXStuOy47Cr2Qc6OYg6+YUPTWlQAIFVnLlDgsNvxhqG4YvQwIEsy6n1Q/TjnbKZ
LG2qNRMfUR+I7EhPKqyZW1PLUoKP30MNo++eJW8XAgMBAAGjggQwMIIELDCCA94G
CisGAQQBvkVkZAUEggPOMIIDyjCCA8YwggPCMIIDKwIBATA+oDwwN6Q1MDMxCzAJ
BgNVBAYTAkVTMRAwDgYDVQQKEwdGQUtFIENBMRIwEAYDVQQDEwlGYWtlIFVzZXIC
AQagSjBIpEYwRDELMAkGA1UEBhMCRVMxEDAOBgNVBAoTB0ZBS0UgQ0ExIzAhBgNV
BAMTGmhvc3QvZmFrZS52b21zLXNlcnZlci5mYWtlMA0GCSqGSIb3DQEBBQUAAgEB
MCIYDzIwMTIwODMwMTQzMTIwWhgPMjAyNDAxMjcwNTMxMjBaMEIwQAYKKwYBBAG+
RWRkBDEyMDCgCoYIZHRlYW06Ly8wIgQgL2R0ZWFtL1JvbGU9TlVMTC9DYXBhYmls
aXR5PU5VTEwwggIeMIIB7gYKKwYBBAG+RWRkCgSCAd4wggHaMIIB1jCCAdIwggE7
AgEEMA0GCSqGSIb3DQEBBAUAMB8xEDAOBgNVBAoTB0ZBS0UgQ0ExCzAJBgNVBAYT
AkVTMB4XDTEyMDgyOTE3MzY0OVoXDTQwMDExNDE3MzY0OVowRDELMAkGA1UEBhMC
RVMxEDAOBgNVBAoTB0ZBS0UgQ0ExIzAhBgNVBAMTGmhvc3QvZmFrZS52b21zLXNl
cnZlci5mYWtlMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC/9bo6pA8fcuo4
2+CDV430nKykGB4mqsKqHkFCD8kRduW4eFdWrSXitqKRlw9/8hLmbsu5abPa/P99
VekJPCbZwtIm+3M1qGlJ+TonTWbBQakvOmPnoLH+/uppssyRulGj61AlnR20ByRo
2DbrSTThbdkztGOmZmQf2gzRGGtbxQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAH/g
EMVvDtgNaxzH5UYRubvapReeqspS5mYndaGFaztOJQ6pv1Qa7/LpkeYOxrXX+xWm
dYdXvHIYbMkc/pO0PyV/TIOb8EcgC/Gs3idZSHUxhcsk8IcpcwCrPczpu2JC+N5z
LTkbcREjevF7WFlPMlOq2IVEIVBo95uQaS3TdmJHMAkGA1UdOAQCBQAwHwYDVR0j
BBgwFoAUMXhLHLSgWZoV/Y8KaT6VOIQNVNQwDQYJKoZIhvcNAQEFBQADgYEAbngH
D69ViU3UsIbUlmr8a7pMhRSJRnXsO0xzg0rwy3g5KPqJM1zYYdNufHJkOdW+gjd5
w52n/zbwtXOwAW7xf9w+xQ1/gyj5Kb8Ob/iW3x4Qs0a3OEaWFyqTvN7J3vP91Qaz
S12lLPSLPdP6sFe0ODf3ZQOv19aN/eW8On2WIHMwDQYDVR0PAQH/BAMDAQAwDAYD
VR0TAQH/BAIwADAJBgNVHSMEAjAAMCAGCCsGAQUFBwEOAQH/BBEwDwIBATAKBggr
BgEFBQcVATANBgkqhkiG9w0BAQUFAAOBgQCPjeviQf/CbAh4z+0KtIgd7YLOiZiw
FcJwC/Z2+zm54d1SCCFMCCygKe5tu/gSLaEcRky6P1lG/0vG/7DxLiu37xQ15Mae
O32z0LuL+XkC3k8C+3aH0ht1cW+zwR4bBQax7rphByuY2Wgwf1TFlYdMU0eZ7akj
W5Rbega2GkADBQ==
-----END CERTIFICATE----- """
valid_cert_chain = """-----BEGIN CERTIFICATE-----
MIIBwTCCASoCAQYwDQYJKoZIhvcNAQEEBQAwHzEQMA4GA1UEChMHRkFLRSBDQTEL
MAkGA1UEBhMCRVMwHhcNMTIwODMwMTIxMjU0WhcNNDAwMTE1MTIxMjU0WjAzMQsw
CQYDVQQGEwJFUzEQMA4GA1UEChMHRkFLRSBDQTESMBAGA1UEAxMJRmFrZSBVc2Vy
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDE4WuwYDT+ke9olzMIZ+gTwDl1
cajAIvp6jVl40YYV2CgUdQo0CSj/bmB+y6U3GCdpp0HKNoAbCuYsyyUtqedgMy2D
x+We/3f005jQvSLtrnK3k8Nw2qwkClObKhyLw5j0iH0sx0PWbr4mIcic2AY8gWiM
OshoESxjXETMkqgQpQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAA9KBCfPLuWJWKN/
X+MgdJfMtg9MbfrKwQbmoxIS7qCEe2OUNs4BvHEnp7lBMJkaoSjhvFDOFMKaXmfz
Kl441BisyU4Pz8fHU5dj4Z7pPD7i71f1oK/42kZZWFEkoJxOU4Vu/fHr9DXdrBVH
9sFWctb8TM20AtJmYE/n+M1G6Foj
-----END CERTIFICATE-----"""
valid_cert_no_tenant = """-----BEGIN CERTIFICATE-----
MIIGMDCCBZmgAwIBAgIUdvt3rmPnrq2Kyoi6oKdeSb7Ye4EwDQYJKoZIhvcNAQEF
BQAwMzELMAkGA1UEBhMCRVMxEDAOBgNVBAoTB0ZBS0UgQ0ExEjAQBgNVBAMTCUZh
a2UgVXNlcjAeFw0xMjA4MzAxNDI5NTVaFw0yNDAxMjcwNTM0NTVaMEgxCzAJBgNV
BAYTAkVTMRAwDgYDVQQKEwdGQUtFIENBMRIwEAYDVQQDEwlGYWtlIFVzZXIxEzAR
BgNVBAMTCjE3MDAwOTE3MTMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBALgC
BIZSxt4X4Hxuapff694eHx9pjpdpfnuU5E/zuv6qNjoZn10WzngonodRG6YGjY5r
yWZm2yplAWVXuZNMD7qOo3ToeBVhl5sK8dS/dCtrCrKcAoQCAq3CdOM/cUJyDW3m
I7hYvw0BfyuOAgqZuz2trGoObHhS3HrwuNgzAYnZAgMBAAGjggQqMIIEJjCCA9gG
CisGAQQBvkVkZAUEggPIMIIDxDCCA8AwggO8MIIDJQIBATA+oDwwN6Q1MDMxCzAJ
BgNVBAYTAkVTMRAwDgYDVQQKEwdGQUtFIENBMRIwEAYDVQQDEwlGYWtlIFVzZXIC
AQagSjBIpEYwRDELMAkGA1UEBhMCRVMxEDAOBgNVBAoTB0ZBS0UgQ0ExIzAhBgNV
BAMTGmhvc3QvZmFrZS52b21zLXNlcnZlci5mYWtlMA0GCSqGSIb3DQEBBQUAAgEB
MCIYDzIwMTIwODMwMTQzNDU1WhgPMjAyNDAxMjcwNTM0NTVaMDwwOgYKKwYBBAG+
RWRkBDEsMCqgFIYSbm9fc3VwcG9ydGVkX3ZvOi8vMBIEEC9ub19zdXBwb3J0ZWRf
dm8wggIeMIIB7gYKKwYBBAG+RWRkCgSCAd4wggHaMIIB1jCCAdIwggE7AgEEMA0G
CSqGSIb3DQEBBAUAMB8xEDAOBgNVBAoTB0ZBS0UgQ0ExCzAJBgNVBAYTAkVTMB4X
DTEyMDgyOTE3MzY0OVoXDTQwMDExNDE3MzY0OVowRDELMAkGA1UEBhMCRVMxEDAO
BgNVBAoTB0ZBS0UgQ0ExIzAhBgNVBAMTGmhvc3QvZmFrZS52b21zLXNlcnZlci5m
YWtlMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC/9bo6pA8fcuo42+CDV430
nKykGB4mqsKqHkFCD8kRduW4eFdWrSXitqKRlw9/8hLmbsu5abPa/P99VekJPCbZ
wtIm+3M1qGlJ+TonTWbBQakvOmPnoLH+/uppssyRulGj61AlnR20ByRo2DbrSTTh
bdkztGOmZmQf2gzRGGtbxQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAH/gEMVvDtgN
axzH5UYRubvapReeqspS5mYndaGFaztOJQ6pv1Qa7/LpkeYOxrXX+xWmdYdXvHIY
bMkc/pO0PyV/TIOb8EcgC/Gs3idZSHUxhcsk8IcpcwCrPczpu2JC+N5zLTkbcREj
evF7WFlPMlOq2IVEIVBo95uQaS3TdmJHMAkGA1UdOAQCBQAwHwYDVR0jBBgwFoAU
MXhLHLSgWZoV/Y8KaT6VOIQNVNQwDQYJKoZIhvcNAQEFBQADgYEACztWoNeofMnd
das5pTFA8WJgrMXa8BslqM+hm/VPwA+4MoGMxQadDQGzuLSp9yMHcYzvj+Gimjs4
WZHAshZdd6E9S2hQoDRUpQguu5CNeKdJ7uUb+QQinTD6y3DjdxCFE10pFunYEMnY
2JSJbEqm32ybnFPdBBqqYlb3gXGEVQwwDQYDVR0PAQH/BAMDAQAwDAYDVR0TAQH/
BAIwADAJBgNVHSMEAjAAMCAGCCsGAQUFBwEOAQH/BBEwDwIBATAKBggrBgEFBQcV
ATANBgkqhkiG9w0BAQUFAAOBgQAQjXxCkLajAedCNqIYw1L5qlWT71sF2FgSoyEk
B7iMyweroDP90CzR1DIwWj5yGr138Z3jvDvFRzQpUrZa4hsPck/zmO/lTB+6iA/U
V5PvMZQ8wMyfMlSiFQNvWm7weNlFpvUpNRHQQj3FLb8L55RhtONIYFRzTIS9du3P
c8Dc+w==
-----END CERTIFICATE-----"""
def get_auth_body(tenant=None):
d = {"auth": {"voms": True}}
if tenant is not None:
d["auth"]["tenantName"] = tenant
return d
def prepare_request(body=None, cert=None, chain=None):
req = test_middleware.make_request()
if body:
req.environ[middleware.PARAMS_ENV] = body
if cert:
req.environ[ks_voms.SSL_CLIENT_CERT_ENV] = cert
if chain:
req.environ[ks_voms.SSL_CLIENT_CERT_CHAIN_ENV_PREFIX +
"0"] = chain
return req
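# Hedged illustration (not part of the original tests): a request prepared as
#     req = prepare_request(get_auth_body(tenant="demo"), valid_cert, valid_cert_chain)
# carries, in req.environ:
#     middleware.PARAMS_ENV                          -> {"auth": {"voms": True, "tenantName": "demo"}}
#     ks_voms.SSL_CLIENT_CERT_ENV                    -> the PEM proxy certificate above
#     ks_voms.SSL_CLIENT_CERT_CHAIN_ENV_PREFIX + "0" -> the first certificate of the chain
# "demo" is a placeholder tenant name used only in this illustration.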
class MiddlewareVomsAuthn(tests.TestCase):
def setUp(self):
super(MiddlewareVomsAuthn, self).setUp()
self.config([tests.dirs.etc('keystone.conf.sample'),
tests.dirs.tests_conf('keystone_voms.conf')])
self.useFixture(database.Database())
self.load_backends()
self.load_fixtures(default_fixtures)
self.tenant_name = default_fixtures.TENANTS[0]['name']
CONF.voms.voms_policy = tests.dirs.tests_conf("voms.json")
def test_middleware_proxy_unscoped(self):
"""Verify unscoped request."""
req = prepare_request(get_auth_body(),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
user_out = req.environ['REMOTE_USER']
params = req.environ[middleware.PARAMS_ENV]
self.assertEqual(user_out, user_dn)
self.assertNotIn("tenantName", params)
def test_middleware_proxy_scoped(self):
"""Verify scoped request."""
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
user_out = req.environ['REMOTE_USER']
self.assertEqual(user_out, user_dn)
def test_middleware_proxy_scoped_bad_tenant(self):
"""Verify request not matching tenant."""
req = prepare_request(get_auth_body(tenant=uuid.uuid4().hex),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
self.assertRaises(
ks_exc.Unauthorized,
aux._process_request,
req)
def test_middleware_proxy_tenant_not_found(self):
"""Verify that mapping to a non existing tenant raises ks_exc."""
CONF.voms.voms_policy = tests.dirs.tests_conf("voms_no_tenant.json")
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
self.assertRaises(
ks_exc.Unauthorized,
aux._process_request,
req)
def test_middleware_proxy_vo_not_found(self):
"""Verify that no VO-tenant mapping raises ks_exc."""
CONF.voms.voms_policy = tests.dirs.tests_conf("voms_no_vo.json")
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
self.assertRaises(
ks_exc.Unauthorized,
aux._process_request,
req)
def test_middleware_proxy_vo_not_found_unscoped(self):
"""Verify that no VO-tenant mapping raises ks_exc."""
CONF.voms.voms_policy = tests.dirs.tests_conf("voms_no_vo.json")
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
self.assertRaises(
ks_exc.Unauthorized,
aux._process_request,
req)
def test_middleware_proxy_user_not_found_autocreate(self):
"""Verify that user is autocreated."""
CONF.voms.autocreate_users = True
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
user_out = req.environ['REMOTE_USER']
self.assertEqual(user_out, user_dn)
def test_middleware_proxy_user_not_found_autocreate_once(self):
"""Verify that user is autocreated only once."""
CONF.voms.autocreate_users = True
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
user_out = req.environ['REMOTE_USER']
self.assertEqual(user_out, user_dn)
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux._process_request(req)
user_out = req.environ['REMOTE_USER']
self.assertEqual(user_out, user_dn)
def test_middleware_proxy_user_not_found_autocreate_unscoped(self):
"""Verify that user is autocreated with unscoped request."""
CONF.voms.autocreate_users = True
req = prepare_request(get_auth_body(),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
user_out = req.environ['REMOTE_USER']
self.assertEqual(user_out, user_dn)
def test_middleware_proxy_user_not_found_autocreate_chain(self):
"""Verify that an unscoped req creates the user in the tenant."""
CONF.voms.autocreate_users = True
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
user_out = req.environ['REMOTE_USER']
self.assertEqual(user_out, user_dn)
        # Ensure that we are getting the user already created
CONF.voms.autocreate_users = False
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
user_out = req.environ['REMOTE_USER']
self.assertEqual(user_out, user_dn)
def test_middleware_proxy_user_not_found_not_autocreate(self):
"""Verify that user is not autocreated."""
CONF.voms.autocreate_users = False
req = prepare_request(get_auth_body(),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
self.assertRaises(
ks_exc.UserNotFound,
aux._process_request,
req)
def test_middleware_proxy_user_not_found_not_autocreate_unscoped(self):
"""Verify that user is not autocreated with unscoped request."""
CONF.voms.autocreate_users = False
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
self.assertRaises(
ks_exc.UserNotFound,
aux._process_request,
req)
def test_middleware_proxy_unknown(self):
"""Verify that an unknown proxy raises ks_exc."""
req = prepare_request(get_auth_body(),
valid_cert,
valid_cert_chain)
self.assertRaises(
exception.VomsError,
ks_voms.VomsAuthNMiddleware(None)._process_request,
req)
def test_middleware_no_proxy(self):
"""Verify that no proxy raises ks_exc."""
req = prepare_request()
req.environ[middleware.PARAMS_ENV] = get_auth_body()
self.assertRaises(
ks_exc.ValidationError,
ks_voms.VomsAuthNMiddleware(None)._process_request,
req)
def test_middleware_incorrect_json(self):
"""Verify that bad JSON raises ks_exc."""
req = prepare_request()
req.environ[middleware.PARAMS_ENV] = {"auth": {"voms": "True"}}
self.assertRaises(
ks_exc.ValidationError,
ks_voms.VomsAuthNMiddleware(None)._process_request,
req)
def test_middleware_no_params(self):
"""Verify that empty request returns none."""
req = prepare_request()
ret = ks_voms.VomsAuthNMiddleware(None)._process_request(req)
self.assertIsNone(ret)
def test_middleware_remote_user_set(self):
"""Verify that if REMOTE_USER already set we skip the auth."""
req = prepare_request()
req.environ["REMOTE_USER"] = "Fake"
ret = ks_voms.VomsAuthNMiddleware(None)._process_request(req)
self.assertIsNone(ret)
def test_no_json_data(self):
"""Verify that no JSON data raises ks_exc."""
CONF.voms.voms_policy = None
self.assertRaises(
ks_exc.UnexpectedError,
ks_voms.VomsAuthNMiddleware,
None)
class VomsTokenService(test_auth.AuthTest):
def setUp(self):
super(VomsTokenService, self).setUp()
self.config([tests.dirs.etc('keystone.conf.sample'),
tests.dirs.tests_conf('keystone_voms.conf')])
self.tenant_name = default_fixtures.TENANTS[0]['name']
self.tenant_id = default_fixtures.TENANTS[0]['id']
CONF.voms.voms_policy = tests.dirs.tests_conf("voms.json")
def test_unscoped_remote_authn(self):
"""Verify unscoped request."""
req = prepare_request(get_auth_body(),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
params = req.environ[middleware.PARAMS_ENV]
context = {"environment": req.environ}
remote_token = self.controller.authenticate(context,
params["auth"])
self.assertEqual(user_dn, remote_token["access"]["user"]["username"])
self.assertNotIn("tenant", remote_token["access"])
def test_unscoped_remote_authn_existing_user_in_tenant(self):
"""Verify unscoped request for existing user, already in a tenant."""
user = {
"name": user_dn,
"enabled": True,
"domain_id": default_fixtures.DEFAULT_DOMAIN_ID,
}
tenant_id = default_fixtures.TENANTS[-1]["id"]
# Create the user
user = self.identity_api.create_user(user)
        # Add the user to a tenant different from the mapped one
self.assignment_api.add_user_to_project(tenant_id, user["id"])
req = prepare_request(get_auth_body(),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
params = req.environ[middleware.PARAMS_ENV]
context = {"environment": req.environ}
remote_token = self.controller.authenticate(context,
params["auth"])
# this is deprecated!
tenant_controller = controllers.TenantAssignment()
fake_context = {
"token_id": remote_token["access"]["token"]["id"],
"query_string": {"limit": None},
}
tenants = tenant_controller.get_projects_for_token(fake_context)
self.assertItemsEqual(
(self.tenant_id, tenant_id), # User tenants
[i["id"].lower() for i in tenants["tenants"]]
)
def test_scoped_remote_authn(self):
"""Verify unscoped request."""
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
params = req.environ[middleware.PARAMS_ENV]
context = {"environment": req.environ}
remote_token = self.controller.authenticate(context,
params["auth"])
self.assertEqual(user_dn,
remote_token["access"]["user"]["username"])
self.assertEqual(self.tenant_name,
remote_token["access"]["token"]["tenant"]["name"])
def test_scoped_remote_authn_add_roles_created_user(self):
"""Verify roles are added when user is created on authentication."""
CONF.voms.add_roles = True
CONF.voms.user_roles = ["role1", "role2"]
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
params = req.environ[middleware.PARAMS_ENV]
context = {"environment": req.environ}
remote_token = self.controller.authenticate(context,
params["auth"])
roles = [r['name'] for r in remote_token['access']['user']['roles']]
self.assertIn("role1", roles)
self.assertIn("role2", roles)
def test_scoped_remote_authn_add_roles_existing_user(self):
"""Verify roles are updated for existing user."""
CONF.voms.add_roles = True
CONF.voms.user_roles = ["role1", "role2"]
user = {
"name": user_dn,
"enabled": True,
"domain_id": default_fixtures.DEFAULT_DOMAIN_ID,
}
self.identity_api.create_user(user)
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
params = req.environ[middleware.PARAMS_ENV]
context = {"environment": req.environ}
remote_token = self.controller.authenticate(context,
params["auth"])
roles = [r['name'] for r in remote_token['access']['user']['roles']]
self.assertIn("role1", roles)
self.assertIn("role2", roles)
def test_scoped_remote_authn_update_roles_existing_user(self):
"""Verify roles are not re-added to existing user."""
CONF.voms.add_roles = True
CONF.voms.user_roles = ["role1", "role2"]
user = {
"name": user_dn,
"enabled": True,
"domain_id": default_fixtures.DEFAULT_DOMAIN_ID,
}
# Create the user and add to tenant
user = self.identity_api.create_user(user)
self.assignment_api.add_user_to_project(self.tenant_id, user["id"])
# create roles and add them to user
for r in CONF.voms.user_roles:
self.assignment_api.create_role(r, {'id': r, 'name': r})
self.assignment_api.add_role_to_user_and_project(user["id"],
self.tenant_id,
r)
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
params = req.environ[middleware.PARAMS_ENV]
context = {"environment": req.environ}
remote_token = self.controller.authenticate(context,
params["auth"])
roles = [r['name'] for r in remote_token['access']['user']['roles']]
self.assertIn("role1", roles)
self.assertIn("role2", roles)
def test_scoped_remote_authn_add_roles_disabled(self):
"""Verify plugin does not try to add roles to user if disabled."""
CONF.voms.add_roles = False
CONF.voms.user_roles = ["role1", "role2"]
req = prepare_request(get_auth_body(tenant=self.tenant_name),
valid_cert,
valid_cert_chain)
aux = ks_voms.VomsAuthNMiddleware(None)
aux._no_verify = True
aux._process_request(req)
params = req.environ[middleware.PARAMS_ENV]
context = {"environment": req.environ}
remote_token = self.controller.authenticate(context,
params["auth"])
roles = [r['name'] for r in remote_token['access']['user']['roles']]
self.assertNotIn("role1", roles)
self.assertNotIn("role2", roles)
|
enolfc/keystone-voms
|
tests/test_middleware_voms_authn.py
|
Python
|
apache-2.0
| 23,887 | 0 |
# -*- coding: utf-8 -*-
from .permissions import SignedPermission # noqa
from .signing import sign_filter_permissions # noqa
from .views import SignedViewSetMixin # noqa
|
chewse/djangorestframework-signed-permissions
|
signedpermissions/__init__.py
|
Python
|
mit
| 174 | 0 |
__author__ = 'kevin'
import re
from attacksurfacemeter.loaders.base_line_parser import BaseLineParser
class CflowLineParser(BaseLineParser):
""""""
_instance = None
@staticmethod
def get_instance(cflow_line=None):
if CflowLineParser._instance is None:
CflowLineParser._instance = CflowLineParser()
CflowLineParser._instance.load(cflow_line)
return CflowLineParser._instance
indent = " "
def __init__(self):
super(CflowLineParser, self).__init__()
self._level = 0
def load(self, cflow_line):
self.__init__()
split_line = cflow_line.split(CflowLineParser.indent)
function_info = split_line[-1].strip()
self._level = len(split_line) - 1
function_name = re.search(r"(\w+\(\))", function_info).group(0)
self._function_name = function_name[:function_name.index('(')]
match = re.search(r"(?:at\s)(\..*)(?::\d+>)", function_info)
if match:
self._function_signature = match.group(1)
def get_level(self, cflow_line=None):
self._load_if_new(cflow_line)
return self._level
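# Hedged usage sketch (illustrative, not part of the original module). The sample
# line and the peek at private attributes are assumptions; helpers such as
# _load_if_new come from BaseLineParser and are not shown here. Real cflow lines
# also carry a "<... at ./file.c:NN>" suffix, which populates _function_signature.
if __name__ == '__main__':
    sample = CflowLineParser.indent * 2 + "greet()"
    parser = CflowLineParser.get_instance(sample)
    print(parser._level)          # 2 -> two leading indent units
    print(parser._function_name)  # greet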
|
andymeneely/attack-surface-metrics
|
attacksurfacemeter/loaders/cflow_line_parser.py
|
Python
|
mit
| 1,154 | 0 |
"""
Experiment for XGBoost + CF
Aim: To find the best tc(max_depth), mb(min_child_weight), mf(colsample_bytree * 93), ntree
tc: [13, 15, 17]
mb: [5, 7, 9]
mf: [40, 45, 50, 55, 60]
ntree: [160, 180, 200, 220, 240, 260, 280, 300, 320, 340, 360]
Averaging 20 models
Summary
Best
loss ntree
mf 40 45 50 55 60 40 45 50 55 60
tc mb
13 5 0.4471 0.4471 0.4473 0.4471 0.4476 300 300 280 280 260
7 0.4477 0.4475 0.4469 0.4472 0.4481 340 320 300 300 300
9 0.4485 0.4484 0.4487 0.4488 0.4487 360 360 340 340 340
15 5 0.4471 *0.4465* 0.4471 0.4476 0.4478 260 *260* 240 240 240
7 0.4473 0.4468 0.4473 0.4474 0.4478 300 280 260 260 260
9 0.4483 0.4480 0.4483 0.4484 0.4492 340 320 300 300 280
17 5 0.4471 0.4472 0.4474 0.4476 0.4478 240 240 220 220 200
7 0.4474 0.4470 0.4468 0.4475 0.4473 280 260 260 240 240
9 0.4481 0.4480 0.4476 0.4480 0.4486 320 300 280 260 260
Time: 1 day, 7:37:21 on i7-4790k 32G MEM GTX660
"""
import numpy as np
import scipy as sp
import pandas as pd
from sklearn.cross_validation import StratifiedKFold
from sklearn.metrics import log_loss
from datetime import datetime
import os
from sklearn.grid_search import ParameterGrid
import xgboost as xgb
from utility import *
path = os.getcwd() + '/'
path_log = path + 'logs/'
file_train = path + 'train.csv'
training = pd.read_csv(file_train, index_col = 0)
num_train = training.shape[0]
y = training['target'].values
yMat = pd.get_dummies(training['target']).values
X = training.iloc[:,:93].values
kf = StratifiedKFold(y, n_folds=5, shuffle = True, random_state = 345)
for train_idx, valid_idx in kf:
break
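# Hedged note (not in the original script): the immediate `break` keeps only the
# first of the five stratified folds, i.e. a single fixed 80/20 train/validation
# split that every grid point below is evaluated on.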
y_train_1 = yMat[train_idx].argmax(1)
y_train = yMat[train_idx]
y_valid = yMat[valid_idx]
X2, ignore = count_feature(X)
dtrain , dvalid= xgb.DMatrix(X2[train_idx], label = y_train_1), xgb.DMatrix(X2[valid_idx])
#
nIter = 20
nt = 360
nt_lst = range(160, 370, 20)
nt_len = len(nt_lst)
bf = .8 # subsample
sh = .1 # eta
# tc:max_depth, mb:min_child_weight, mf(max features):colsample_bytree * 93
param_grid = {'tc':[13, 15, 17], 'mb':[5, 7, 9], 'mf':[40, 45, 50, 55, 60]}
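# Hedged note (not in the original script): ParameterGrid expands the dict above
# into all 3 * 3 * 5 = 45 combinations, e.g. {'tc': 13, 'mb': 5, 'mf': 40}; for
# each one, nIter = 20 boosters are trained and their predicted probabilities are
# averaged before the log loss is computed at every ntree checkpoint.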
scores = []
t0 = datetime.now()
for params in ParameterGrid(param_grid):
tc = params['tc']
mb = params['mb']
mf = params['mf']
cs = float(mf) / X.shape[1]
print tc, mb, mf
predAll = [np.zeros(y_valid.shape) for k in range(nt_len)]
for i in range(nIter):
seed = 112233 + i
param = {'bst:max_depth':tc, 'bst:eta':sh,'objective':'multi:softprob','num_class':9,
'min_child_weight':mb, 'subsample':bf, 'colsample_bytree':cs,
'silent':1, 'nthread':8, 'seed':seed}
plst = param.items()
bst = xgb.train(plst, dtrain, nt)
for s in range(nt_len):
ntree = nt_lst[s]
pred = bst.predict(dvalid, ntree_limit = ntree).reshape(y_valid.shape)
predAll[s] += pred
scores.append({'tc':tc, 'mb':mb, 'mf':mf, 'ntree':ntree, 'nModels':i+1, 'seed':seed,
'valid':log_loss(y_valid, pred),
'valid_avg':log_loss(y_valid, predAll[s] / (i+1))})
print scores[-4], datetime.now() - t0
df = pd.DataFrame(scores)
if os.path.exists(path_log) is False:
print 'mkdir', path_log
os.mkdir(path_log)
df.to_csv(path_log + 'exp_XGB_CF_tc_mb_mf_ntree.csv')
keys = ['tc', 'mb', 'mf', 'ntree']
grouped = df.groupby(keys)
pd.set_option('display.precision', 5)
print pd.DataFrame({'loss':grouped['valid_avg'].last().unstack().min(1),
'ntree':grouped['valid_avg'].last().unstack().idxmin(1)}).unstack()
# loss ntree
# mf 40 45 50 55 60 40 45 50 55 60
# tc mb
# 13 5 0.4471 0.4471 0.4473 0.4471 0.4476 300 300 280 280 260
# 7 0.4477 0.4475 0.4469 0.4472 0.4481 340 320 300 300 300
# 9 0.4485 0.4484 0.4487 0.4488 0.4487 360 360 340 340 340
# 15 5 0.4471 0.4465 0.4471 0.4476 0.4478 260 260 240 240 240
# 7 0.4473 0.4468 0.4473 0.4474 0.4478 300 280 260 260 260
# 9 0.4483 0.4480 0.4483 0.4484 0.4492 340 320 300 300 280
# 17 5 0.4471 0.4472 0.4474 0.4476 0.4478 240 240 220 220 200
# 7 0.4474 0.4470 0.4468 0.4475 0.4473 280 260 260 240 240
# 9 0.4481 0.4480 0.4476 0.4480 0.4486 320 300 280 260 260
print pd.DataFrame({'loss':grouped['valid'].mean().unstack().min(1),
'ntree':grouped['valid'].mean().unstack().idxmin(1)}).unstack()
# loss ntree
# mf 40 45 50 55 60 40 45 50 55 60
# tc mb
# 13 5 0.4563 0.4564 0.4564 0.4561 0.4566 280 260 260 260 240
# 7 0.4565 0.4563 0.4557 0.4561 0.4569 320 300 300 300 280
# 9 0.4571 0.4569 0.4571 0.4573 0.4570 340 340 320 300 300
# 15 5 0.4567 0.4559 0.4565 0.4571 0.4571 260 240 240 220 220
# 7 0.4565 0.4558 0.4562 0.4564 0.4568 280 260 260 260 240
# 9 0.4570 0.4567 0.4570 0.4570 0.4577 300 300 280 280 260
# 17 5 0.4568 0.4569 0.4570 0.4572 0.4574 220 220 200 200 200
# 7 0.4567 0.4563 0.4559 0.4567 0.4564 260 240 240 220 220
# 9 0.4571 0.4569 0.4565 0.4567 0.4573 280 280 260 260 240
#
criterion = df.apply(lambda x: x['tc']==15 and x['mb']==5 and x['mf']==45, axis = 1)
grouped = df[criterion].groupby('ntree')
g = grouped[['valid']].mean()
g['valid_avg'] = grouped['valid_avg'].last()
print g
# valid valid_avg
# ntree
# 160 0.461023 0.452912
# 180 0.458513 0.450111
# 200 0.456939 0.448232
# 220 0.456147 0.447141
# 240 0.455870 0.446598
# 260 0.456097 0.446525
# 280 0.456657 0.446827
# 300 0.457434 0.447327
# 320 0.458462 0.448101
# 340 0.459635 0.449036
# 360 0.460977 0.450160
ax = g.plot()
ax.set_title('XGB+CF max_depth=15\n min_child_weight=5, colsample_bytree=45/93.')
ax.set_ylabel('Logloss')
fig = ax.get_figure()
fig.savefig(path_log + 'exp_XGB_CF_tc_mb_mf_ntree.png')
|
tks0123456789/kaggle-Otto
|
exp_XGB_CF_tc_mb_mf_ntree.py
|
Python
|
mit
| 6,574 | 0.009735 |
import math
from collections import deque
def run():
N, K, W = list(map(int, input().split()))
Ls = list(map(int, input().split()))
A_L, B_L, C_L, D_L = list(map(int, input().split()))
Hs = list(map(int, input().split()))
A_H, B_H, C_H, D_H = list(map(int, input().split()))
for i in range(K+1, N+1):
Li = ((A_L * Ls[-2] + B_L * Ls[-1] + C_L) % D_L) + 1
Ls.append(Li)
Hi = ((A_H * Hs[-2] + B_H * Hs[-1] + C_H) % D_H) + 1
Hs.append(Hi)
MODULO = 1000000007
height_queue = deque()
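    # Hedged explanatory note (not in the original solution): height_queue stores
    # (height, right_edge_x) pairs for buildings whose right edges have not been
    # passed yet, so later iterations can tell how much of a new building's left
    # wall is hidden by earlier, overlapping buildings.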
curr_perim = 2 * W + 2 * Hs[0]
ending_x = Ls[0] + W
height_queue_len = 1
height_queue.append((Hs[0], ending_x))
ans = curr_perim
# print("\ti = 0 curr = {}".format(curr_perim))
for i in range(1, N):
starting_x = Ls[i]
while height_queue_len > 0 and height_queue[0][1] < starting_x:
height_queue.popleft()
height_queue_len -= 1
# print(height_queue)
if starting_x > ending_x:
ending_x = Ls[i] + W
curr_perim += 2 * W + 2 * Hs[i]
height_queue.append((Hs[i], ending_x))
height_queue_len += 1
else:
# print("\tCase 2 [Adding = {} {} {}]".format(Ls[i], W, Hs[i]))
new_ending_x = Ls[i] + W
excess_x = (ending_x - starting_x)
curr_perim -= 2 * excess_x
# print("-2*{}".format(excess_x))
curr_perim += 2 * W
# print("+2*{}".format(W))
"""
2 5
___________
| ___|_________
| | |
4| | | 3
|_______|____________|
4 7
"""
max_height = -1
while height_queue_len > 0 and Hs[i] >= height_queue[0][0]:
max_height = max(max_height, height_queue[0][0])
height_queue.popleft()
height_queue_len -= 1
height_queue.append((Hs[i], new_ending_x))
height_queue_len += 1
if max_height > -1:
curr_perim -= max_height
curr_perim += abs(Hs[i] - max_height) + Hs[i]
# print("adding {} {}".format(abs(Hs[i] - max_height), Hs[i]))
""" ________________
| |
2 5| |
___________| |
| ___|_________ | 6
| | | | |
4| | | | 3 |
|_______|__|_________|____|
3 4 5 7 8
"""
ending_x = new_ending_x
# print("\ti = {} {}".format(i, curr_perim))
ans = (ans * curr_perim) % MODULO
return ans
if __name__ == '__main__':
T = int(input())
for i in range(1, T+1):
print("Case #{}: {}".format(i, run()))
|
mjenrungrot/competitive_programming
|
Facebook Hackercup/2020/Round 1/A1.py
|
Python
|
mit
| 3,016 | 0.001658 |
"""
Examples
========
ModularFocalNetwork(8, [1600, 800], 4).plot() => 8 modules, 4 connections to each neuron
"""
import numpy as np
from Plotters import plot_connectivity_matrix
def range_from_base(base, size):
return xrange(base, base + size)
class ModularFocalNetwork(object):
def __init__(self, C, dim, focal_width):
"""
Generates connectivity matrix for a modular network with...
C -- # communities/modules
dim -- dimensions of matrix, [nodes_in_target_layer, nodes_in_input_layer]
        focal_width -- how many connections per node in the target layer
        Each community will have an equal number of nodes, and each node has focal_width
        connections from randomly chosen nodes in the input layer.
CIJ[i,j] represents the connection from node j in input layer to node i in this layer.
"""
self.C = C
self.dim = dim
self.module_dim = [layer_size / C for layer_size in dim]
self.focal_width = focal_width
self.CIJ = np.zeros(dim)
for i in range(C):
self.init_module(i)
def init_module(self, module_index):
"""
Initialises the target module with connections from the input layer.
"""
target_dim, input_dim = self.module_dim
input_nodes = range_from_base(module_index * input_dim, input_dim)
target_nodes = range_from_base(module_index * target_dim, target_dim)
for i in target_nodes:
nodes_to_connect = np.random.choice(input_nodes, self.focal_width, replace=False)
self.CIJ[i, nodes_to_connect] = 1
def plot(self):
"""
Uses pyplot to draw a plot of the connectivity matrix
"""
plot_connectivity_matrix(self.CIJ, self.dim).show()
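# Hedged usage sketch (illustrative, not part of the original module; the module
# itself targets Python 2, since it relies on xrange). Builds a tiny two-module
# network and checks that every target-layer node gets exactly focal_width inputs.
if __name__ == '__main__':
    net = ModularFocalNetwork(C=2, dim=[8, 16], focal_width=3)
    assert net.CIJ.shape == (8, 16)
    # every row (target-layer node) carries focal_width incoming connections
    assert (net.CIJ.sum(axis=1) == 3).all()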
|
lawrencejones/neuro
|
iz/ModularFocalNetwork.py
|
Python
|
gpl-3.0
| 1,792 | 0.00279 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'festina.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^resume/$', include('resume.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
wbrefvem/festina
|
festina/urls.py
|
Python
|
mit
| 345 | 0.011594 |
#!/usr/bin/env python
"""
Dispatch the likelihood scan to a cluster.
"""
import os,sys
from os.path import join, exists
import shutil
import subprocess
import time
import glob
import numpy as np
import healpy as hp
import ugali.utils.config
import ugali.utils.skymap
import ugali.utils.batch
from ugali.utils.projector import gal2cel,cel2gal
from ugali.utils.healpix import subpixel,superpixel,query_disc
from ugali.utils.healpix import pix2ang,ang2vec,read_partial_map
from ugali.utils.logger import logger
from ugali.utils.shell import mkdir
class Farm:
""" Class for organizing and submitting likelihood scan jobs. """
def __init__(self, configfile, verbose=False):
self.configfile = configfile
self.config = ugali.utils.config.Config(configfile)
self._setup()
self.verbose = verbose
def _setup(self):
self.nside_catalog = self.config['coords']['nside_catalog']
self.nside_likelihood = self.config['coords']['nside_likelihood']
self.nside_pixel = self.config['coords']['nside_pixel']
self.filenames = self.config.getFilenames()
self.skip = "Outfile already exists. Skipping..."
# Might consider storing only the good filenames
# self.filenames = self.filenames.compress(~self.filenames.mask['pix'])
self.catalog_pixels = self.filenames['pix'].compressed()
def command(self, outfile, configfile, pix):
"""
Generate the command for running the likelihood scan.
"""
params = dict(script=self.config['scan']['script'],
config=configfile, outfile=outfile,
nside=self.nside_likelihood, pix=pix,
verbose='-v' if self.verbose else '')
cmd = '%(script)s %(config)s %(outfile)s --hpx %(nside)i %(pix)i %(verbose)s'%params
return cmd
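    # Hedged example (illustrative, not from the original file): with
    # script='ugali/scan/run.py', configfile='config.yaml', nside_likelihood=256
    # and pix=12345, command() returns roughly
    #     ugali/scan/run.py config.yaml <outfile> --hpx 256 12345 -v
    # where '-v' is present only when the Farm was created with verbose=True.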
def submit_all(self, coords=None, queue=None, debug=False):
"""
Submit likelihood analyses on a set of coordinates. If
coords is `None`, submit all coordinates in the footprint.
Inputs:
coords : Array of target locations in Galactic coordinates.
queue : Overwrite submit queue.
debug : Don't run.
"""
if coords is None:
pixels = np.arange(hp.nside2npix(self.nside_likelihood))
else:
lon,lat,radius = coords['lon'],coords['lat'],coords['radius']
#ADW: coords are always parsed in GAL, so convert to CEL if necessary
if self.config['coords']['coordsys'].lower() == 'cel':
lon,lat = gal2cel(lon,lat)
vec = ang2vec(lon,lat)
pixels = np.zeros(0, dtype=int)
for v,r in zip(vec,radius):
pix = query_disc(self.nside_likelihood,v,r,inclusive=True,fact=32)
pixels = np.hstack([pixels, pix])
#pixels = np.unique(pixels)
inside = ugali.utils.skymap.inFootprint(self.config,pixels)
if inside.sum() != len(pixels):
logger.warning("Ignoring pixels outside survey footprint:\n"+str(pixels[~inside]))
if inside.sum() == 0:
logger.warning("No pixels inside footprint.")
return
# Only write the configfile once
outdir = mkdir(self.config['output']['likedir'])
# Actually copy config instead of re-writing
shutil.copy(self.config.filename,outdir)
configfile = join(outdir,os.path.basename(self.config.filename))
pixels = pixels[inside]
self.submit(pixels,queue=queue,debug=debug,configfile=configfile)
def submit(self, pixels, queue=None, debug=False, configfile=None):
"""
Submit the likelihood job for the given pixel(s).
"""
# For backwards compatibility
batch = self.config['scan'].get('batch',self.config['batch'])
queue = batch.get('default','medium') if queue is None else queue
# Need to develop some way to take command line arguments...
self.batch = ugali.utils.batch.batchFactory(queue,**batch.get(queue,{}))
self.batch.max_jobs = self.config['scan'].get('max_jobs',200)
if np.isscalar(pixels): pixels = np.array([pixels])
outdir = mkdir(self.config['output']['likedir'])
logdir = mkdir(join(outdir,'log'))
subdir = mkdir(join(outdir,'sub'))
# Save the current configuation settings; avoid writing
# file multiple times if configfile passed as argument.
if configfile is None:
shutil.copy(self.config.filename,outdir)
configfile = join(outdir,os.path.basename(self.config.filename))
lon,lat = pix2ang(self.nside_likelihood,pixels)
commands = []
chunk = self.config['scan'].get('chunk',25)
istart = 0
logger.info('=== Submit Likelihood ===')
for ii,pix in enumerate(pixels):
msg = ' (%i/%i) pixel=%i nside=%i; (lon, lat) = (%.2f, %.2f)'
msg = msg%(ii+1,len(pixels),pix, self.nside_likelihood,lon[ii],lat[ii])
logger.info(msg)
# Create outfile name
outfile = self.config.likefile%(pix,self.config['coords']['coordsys'].lower())
outbase = os.path.basename(outfile)
jobname = batch.get('jobname','ugali')
# Submission command
sub = not os.path.exists(outfile)
cmd = self.command(outfile,configfile,pix)
commands.append([ii,cmd,lon[ii],lat[ii],sub])
if chunk == 0:
# No chunking
command = cmd
submit = sub
logfile = join(logdir,os.path.splitext(outbase)[0]+'.log')
elif (len(commands)%chunk==0) or (ii+1 == len(pixels)):
# End of chunk, create submission script
commands = np.array(commands,dtype=object)
istart, iend = commands[0][0], commands[-1][0]
subfile = join(subdir,'submit_%08i_%08i.sh'%(istart,iend))
logfile = join(logdir,'submit_%08i_%08i.log'%(istart,iend))
command = "sh %s"%subfile
submit = np.any(commands[:,-1])
if submit: self.write_script(subfile,commands)
else:
# Not end of chunk
continue
commands=[]
# Actual job submission
if not submit:
logger.info(self.skip)
continue
else:
job = self.batch.submit(command,jobname,logfile)
logger.info(" "+job)
time.sleep(0.5)
def write_script(self, filename, commands):
""" Write a batch submission script.
Parameters
----------
filename : filename of batch script
commands : list of commands to execute
Returns
-------
None
"""
info = 'echo "{0:=^60}";\n'
hline = info.format("")
newline = 'echo;\n'
shebang = "#!/usr/bin/env bash"
# Limit the memory based on SLAC 4 GB per node (defined in KB)
# Careful, shell arithmetic is weird.
memory_limit = """
if [ -n "$LSB_CG_MEMLIMIT" ] & [ -n "$LSB_HOSTS" ]; then
mlimit=$(( $(wc -w <<< $LSB_HOSTS) * $LSB_CG_MEMLIMIT/1024 * 9/10 ))
ulimit -v ${mlimit}; ulimit -H -v ${mlimit};
fi
"""
memory_usage=r"""free -m | awk 'NR==2{printf "Memory Usage: %.2f/%.2fGB (%.2f%%)\n",$3/1024,$2/1024,$3*100/$2}';"""
memory_usage=r"""ps -U $USER --no-headers -o rss | awk '{sum+=$1} END {print "Memory Usage: " int(sum/1024**2) "GB"}'"""
istart, iend = commands[0][0], commands[-1][0]
script = open(filename,'w')
script.write(shebang)
#script.write(memory_limit)
script.write(hline)
script.write(info.format('Submit Jobs %i to %i'%(istart,iend)))
script.write(hline)
script.write(newline)
script.write('status=0;\n')
for i,cmd,lon,lat,sub in commands:
script.write(info.format('Job %i: (%.2f, %.2f)'%(i,lon,lat)))
if sub:
script.write(memory_usage+'\n')
script.write('%s; [ $? -ne 0 ] && status=1;\n'%cmd)
else:
script.write('echo "%s";\n'%self.skip)
script.write(hline)
script.write(newline)
script.write('exit $status;\n')
script.close()
if __name__ == "__main__":
import ugali.utils.parser
parser = ugali.utils.parser.Parser(description=__doc__)
parser.add_config()
parser.add_debug()
parser.add_queue()
parser.add_verbose()
parser.add_coords(required=True,radius=True,targets=True)
opts = parser.parse_args()
farm = Farm(opts.config,verbose=opts.verbose)
x = farm.submit_all(coords=opts.coords,queue=opts.queue,debug=opts.debug)
|
kadrlica/ugali
|
ugali/analysis/farm.py
|
Python
|
mit
| 8,918 | 0.013007 |
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Generate template values for methods.
Extends IdlArgument with property |default_cpp_value|.
Extends IdlTypeBase and IdlUnionType with property |union_arguments|.
Design doc: http://www.chromium.org/developers/design-documents/idl-compiler
"""
from idl_definitions import IdlArgument, IdlOperation
from idl_types import IdlTypeBase, IdlUnionType, inherits_interface
from v8_globals import includes
import v8_types
import v8_utilities
from v8_utilities import (has_extended_attribute_value, is_unforgeable,
is_legacy_interface_type_checking)
# Methods with any of these require custom method registration code in the
# interface's configure*Template() function.
CUSTOM_REGISTRATION_EXTENDED_ATTRIBUTES = frozenset([
'DoNotCheckSecurity',
'DoNotCheckSignature',
])
def use_local_result(method):
extended_attributes = method.extended_attributes
idl_type = method.idl_type
return (has_extended_attribute_value(method, 'CallWith', 'ScriptState') or
'ImplementedInPrivateScript' in extended_attributes or
'RaisesException' in extended_attributes or
idl_type.is_union_type or
idl_type.is_explicit_nullable)
def method_context(interface, method, is_visible=True):
arguments = method.arguments
extended_attributes = method.extended_attributes
idl_type = method.idl_type
is_static = method.is_static
name = method.name
if is_visible:
idl_type.add_includes_for_type(extended_attributes)
this_cpp_value = cpp_value(interface, method, len(arguments))
is_implemented_in_private_script = 'ImplementedInPrivateScript' in extended_attributes
if is_implemented_in_private_script:
includes.add('bindings/core/v8/PrivateScriptRunner.h')
includes.add('core/frame/LocalFrame.h')
includes.add('platform/ScriptForbiddenScope.h')
# [OnlyExposedToPrivateScript]
is_only_exposed_to_private_script = 'OnlyExposedToPrivateScript' in extended_attributes
is_call_with_script_arguments = has_extended_attribute_value(method, 'CallWith', 'ScriptArguments')
if is_call_with_script_arguments:
includes.update(['bindings/core/v8/ScriptCallStackFactory.h',
'core/inspector/ScriptArguments.h'])
is_call_with_script_state = has_extended_attribute_value(method, 'CallWith', 'ScriptState')
is_call_with_this_value = has_extended_attribute_value(method, 'CallWith', 'ThisValue')
if is_call_with_script_state or is_call_with_this_value:
includes.add('bindings/core/v8/ScriptState.h')
# [CheckSecurity]
is_do_not_check_security = 'DoNotCheckSecurity' in extended_attributes
is_check_security_for_receiver = (
has_extended_attribute_value(interface, 'CheckSecurity', 'Receiver') and
not is_do_not_check_security)
is_check_security_for_return_value = (
has_extended_attribute_value(method, 'CheckSecurity', 'ReturnValue'))
if is_check_security_for_receiver or is_check_security_for_return_value:
includes.add('bindings/core/v8/BindingSecurity.h')
is_custom_element_callbacks = 'CustomElementCallbacks' in extended_attributes
if is_custom_element_callbacks:
includes.add('core/dom/custom/CustomElementProcessingStack.h')
is_raises_exception = 'RaisesException' in extended_attributes
is_custom_call_prologue = has_extended_attribute_value(method, 'Custom', 'CallPrologue')
is_custom_call_epilogue = has_extended_attribute_value(method, 'Custom', 'CallEpilogue')
is_post_message = 'PostMessage' in extended_attributes
if is_post_message:
includes.add('bindings/core/v8/SerializedScriptValueFactory.h')
includes.add('core/dom/DOMArrayBuffer.h')
includes.add('core/dom/MessagePort.h')
includes.add('core/frame/ImageBitmap.h')
if 'LenientThis' in extended_attributes:
raise Exception('[LenientThis] is not supported for operations.')
if 'APIExperimentEnabled' in extended_attributes:
includes.add('core/experiments/ExperimentalFeatures.h')
includes.add('core/inspector/ConsoleMessage.h')
argument_contexts = [
argument_context(interface, method, argument, index, is_visible=is_visible)
for index, argument in enumerate(arguments)]
return {
'activity_logging_world_list': v8_utilities.activity_logging_world_list(method), # [ActivityLogging]
'api_experiment_enabled': v8_utilities.api_experiment_enabled_function(method), # [APIExperimentEnabled]
'api_experiment_enabled_per_interface': v8_utilities.api_experiment_enabled_function(interface), # [APIExperimentEnabled]
'arguments': argument_contexts,
'argument_declarations_for_private_script':
argument_declarations_for_private_script(interface, method),
'cpp_type': (v8_types.cpp_template_type('Nullable', idl_type.cpp_type)
if idl_type.is_explicit_nullable else idl_type.cpp_type),
'cpp_value': this_cpp_value,
'cpp_type_initializer': idl_type.cpp_type_initializer,
'custom_registration_extended_attributes':
CUSTOM_REGISTRATION_EXTENDED_ATTRIBUTES.intersection(
extended_attributes.iterkeys()),
'deprecate_as': v8_utilities.deprecate_as(method), # [DeprecateAs]
'exposed_test': v8_utilities.exposed(method, interface), # [Exposed]
# TODO(yukishiino): Retire has_custom_registration flag. Should be
# replaced with V8DOMConfiguration::PropertyLocationConfiguration.
'has_custom_registration':
v8_utilities.has_extended_attribute(
method, CUSTOM_REGISTRATION_EXTENDED_ATTRIBUTES),
'has_exception_state':
is_raises_exception or
is_check_security_for_receiver or
any(argument for argument in arguments
if (argument.idl_type.name == 'SerializedScriptValue' or
argument_conversion_needs_exception_state(method, argument))),
'has_optional_argument_without_default_value':
any(True for argument_context in argument_contexts
if argument_context['is_optional_without_default_value']),
'idl_type': idl_type.base_type,
'is_api_experiment_enabled': v8_utilities.api_experiment_enabled_function(method) or v8_utilities.api_experiment_enabled_function(interface), # [APIExperimentEnabled]
'is_call_with_execution_context': has_extended_attribute_value(method, 'CallWith', 'ExecutionContext'),
'is_call_with_script_arguments': is_call_with_script_arguments,
'is_call_with_script_state': is_call_with_script_state,
'is_call_with_this_value': is_call_with_this_value,
'is_check_security_for_receiver': is_check_security_for_receiver,
'is_check_security_for_return_value': is_check_security_for_return_value,
'is_custom': 'Custom' in extended_attributes and
not (is_custom_call_prologue or is_custom_call_epilogue),
'is_custom_call_prologue': is_custom_call_prologue,
'is_custom_call_epilogue': is_custom_call_epilogue,
'is_custom_element_callbacks': is_custom_element_callbacks,
'is_do_not_check_security': is_do_not_check_security,
'is_do_not_check_signature': 'DoNotCheckSignature' in extended_attributes,
'is_explicit_nullable': idl_type.is_explicit_nullable,
'is_implemented_in_private_script': is_implemented_in_private_script,
'is_partial_interface_member':
'PartialInterfaceImplementedAs' in extended_attributes,
'is_per_world_bindings': 'PerWorldBindings' in extended_attributes,
'is_post_message': is_post_message,
'is_raises_exception': is_raises_exception,
'is_static': is_static,
'is_unforgeable': is_unforgeable(interface, method),
'is_variadic': arguments and arguments[-1].is_variadic,
'measure_as': v8_utilities.measure_as(method, interface), # [MeasureAs]
'name': name,
'number_of_arguments': len(arguments),
'number_of_required_arguments': len([
argument for argument in arguments
if not (argument.is_optional or argument.is_variadic)]),
'number_of_required_or_variadic_arguments': len([
argument for argument in arguments
if not argument.is_optional]),
'on_instance': v8_utilities.on_instance(interface, method),
'on_interface': v8_utilities.on_interface(interface, method),
'on_prototype': v8_utilities.on_prototype(interface, method),
'only_exposed_to_private_script': is_only_exposed_to_private_script,
'private_script_v8_value_to_local_cpp_value': idl_type.v8_value_to_local_cpp_value(
extended_attributes, 'v8Value', 'cppValue', isolate='scriptState->isolate()', bailout_return_value='false'),
'property_attributes': property_attributes(interface, method),
'returns_promise': method.returns_promise,
'runtime_enabled_function': v8_utilities.runtime_enabled_function_name(method), # [RuntimeEnabled]
'should_be_exposed_to_script': not (is_implemented_in_private_script and is_only_exposed_to_private_script),
'use_output_parameter_for_result': idl_type.use_output_parameter_for_result,
'use_local_result': use_local_result(method),
'v8_set_return_value': v8_set_return_value(interface.name, method, this_cpp_value),
'v8_set_return_value_for_main_world': v8_set_return_value(interface.name, method, this_cpp_value, for_main_world=True),
'visible': is_visible,
'world_suffixes': ['', 'ForMainWorld'] if 'PerWorldBindings' in extended_attributes else [''], # [PerWorldBindings],
}
def argument_context(interface, method, argument, index, is_visible=True):
extended_attributes = argument.extended_attributes
idl_type = argument.idl_type
if is_visible:
idl_type.add_includes_for_type(extended_attributes)
this_cpp_value = cpp_value(interface, method, index)
is_variadic_wrapper_type = argument.is_variadic and idl_type.is_wrapper_type
# [LegacyInterfaceTypeChecking]
has_type_checking_interface = (
not is_legacy_interface_type_checking(interface, method) and
idl_type.is_wrapper_type)
if ('ImplementedInPrivateScript' in extended_attributes and
not idl_type.is_wrapper_type and
not idl_type.is_basic_type):
raise Exception('Private scripts supports only primitive types and DOM wrappers.')
set_default_value = argument.set_default_value
this_cpp_type = idl_type.cpp_type_args(extended_attributes=extended_attributes,
raw_type=True,
used_as_variadic_argument=argument.is_variadic)
context = {
'cpp_type': (
v8_types.cpp_template_type('Nullable', this_cpp_type)
if idl_type.is_explicit_nullable and not argument.is_variadic
else this_cpp_type),
'cpp_value': this_cpp_value,
# FIXME: check that the default value's type is compatible with the argument's
'set_default_value': set_default_value,
'enum_type': idl_type.enum_type,
'enum_values': idl_type.enum_values,
'handle': '%sHandle' % argument.name,
# FIXME: remove once [Default] removed and just use argument.default_value
'has_default': 'Default' in extended_attributes or set_default_value,
'has_type_checking_interface': has_type_checking_interface,
# Dictionary is special-cased, but arrays and sequences shouldn't be
'idl_type': idl_type.base_type,
'idl_type_object': idl_type,
'index': index,
'is_callback_function': idl_type.is_callback_function,
'is_callback_interface': idl_type.is_callback_interface,
# FIXME: Remove generic 'Dictionary' special-casing
'is_dictionary': idl_type.is_dictionary or idl_type.base_type == 'Dictionary',
'is_explicit_nullable': idl_type.is_explicit_nullable,
'is_nullable': idl_type.is_nullable,
'is_optional': argument.is_optional,
'is_variadic': argument.is_variadic,
'is_variadic_wrapper_type': is_variadic_wrapper_type,
'is_wrapper_type': idl_type.is_wrapper_type,
'name': argument.name,
'private_script_cpp_value_to_v8_value': idl_type.cpp_value_to_v8_value(
argument.name, isolate='scriptState->isolate()',
creation_context='scriptState->context()->Global()'),
'use_permissive_dictionary_conversion': 'PermissiveDictionaryConversion' in extended_attributes,
'v8_set_return_value': v8_set_return_value(interface.name, method, this_cpp_value),
'v8_set_return_value_for_main_world': v8_set_return_value(interface.name, method, this_cpp_value, for_main_world=True),
'v8_value_to_local_cpp_value': v8_value_to_local_cpp_value(method, argument, index),
}
context.update({
'is_optional_without_default_value':
context['is_optional'] and
not context['has_default'] and
not context['is_dictionary'] and
not context['is_callback_interface'],
})
return context
def argument_declarations_for_private_script(interface, method):
argument_declarations = ['LocalFrame* frame']
argument_declarations.append('%s* holderImpl' % interface.name)
argument_declarations.extend(['%s %s' % (argument.idl_type.cpp_type_args(
used_as_rvalue_type=True), argument.name) for argument in method.arguments])
if method.idl_type.name != 'void':
argument_declarations.append('%s* %s' % (method.idl_type.cpp_type, 'result'))
return argument_declarations
################################################################################
# Value handling
################################################################################
def cpp_value(interface, method, number_of_arguments):
def cpp_argument(argument):
idl_type = argument.idl_type
if idl_type.name == 'EventListener':
return argument.name
if (idl_type.name in ['NodeFilter', 'NodeFilterOrNull',
'XPathNSResolver', 'XPathNSResolverOrNull']):
# FIXME: remove this special case
return '%s.release()' % argument.name
return argument.name
# Truncate omitted optional arguments
arguments = method.arguments[:number_of_arguments]
cpp_arguments = []
if 'ImplementedInPrivateScript' in method.extended_attributes:
cpp_arguments.append('toLocalFrame(toFrameIfNotDetached(info.GetIsolate()->GetCurrentContext()))')
cpp_arguments.append('impl')
if method.is_constructor:
call_with_values = interface.extended_attributes.get('ConstructorCallWith')
else:
call_with_values = method.extended_attributes.get('CallWith')
cpp_arguments.extend(v8_utilities.call_with_arguments(call_with_values))
# Members of IDL partial interface definitions are implemented in C++ as
# static member functions, which for instance members (non-static members)
# take *impl as their first argument
if ('PartialInterfaceImplementedAs' in method.extended_attributes and
'ImplementedInPrivateScript' not in method.extended_attributes and
not method.is_static):
cpp_arguments.append('*impl')
cpp_arguments.extend(cpp_argument(argument) for argument in arguments)
if 'ImplementedInPrivateScript' in method.extended_attributes:
if method.idl_type.name != 'void':
cpp_arguments.append('&result')
elif ('RaisesException' in method.extended_attributes or
(method.is_constructor and
has_extended_attribute_value(interface, 'RaisesException', 'Constructor'))):
cpp_arguments.append('exceptionState')
# If a method returns an IDL dictionary or union type, the return value is
# passed as an argument to impl classes.
idl_type = method.idl_type
if idl_type and idl_type.use_output_parameter_for_result:
cpp_arguments.append('result')
if method.name == 'Constructor':
base_name = 'create'
elif method.name == 'NamedConstructor':
base_name = 'createForJSConstructor'
elif 'ImplementedInPrivateScript' in method.extended_attributes:
base_name = '%sMethod' % method.name
else:
base_name = v8_utilities.cpp_name(method)
cpp_method_name = v8_utilities.scoped_name(interface, method, base_name)
return '%s(%s)' % (cpp_method_name, ', '.join(cpp_arguments))
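# Hedged illustration (not part of the original file): for a plain instance
# operation such as `void scrollBy(long x, long y)` on a non-static, non-partial
# interface, cpp_value() yields roughly "impl->scrollBy(x, y)"; adding
# [RaisesException] appends an "exceptionState" argument. The exact callee name
# comes from v8_utilities.scoped_name and [ImplementedAs], so treat this only as
# an orientation aid, not the definitive generated code.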
def v8_set_return_value(interface_name, method, cpp_value, for_main_world=False):
idl_type = method.idl_type
extended_attributes = method.extended_attributes
if not idl_type or idl_type.name == 'void':
# Constructors and void methods don't have a return type
return None
if ('ImplementedInPrivateScript' in extended_attributes and
not idl_type.is_wrapper_type and
not idl_type.is_basic_type):
raise Exception('Private scripts supports only primitive types and DOM wrappers.')
release = False
# [CallWith=ScriptState], [RaisesException]
if use_local_result(method):
if idl_type.is_explicit_nullable:
# result is of type Nullable<T>
cpp_value = 'result.get()'
else:
cpp_value = 'result'
release = idl_type.release
script_wrappable = 'impl' if inherits_interface(interface_name, 'Node') else ''
return idl_type.v8_set_return_value(cpp_value, extended_attributes, script_wrappable=script_wrappable, release=release, for_main_world=for_main_world, is_static=method.is_static)
def v8_value_to_local_cpp_variadic_value(method, argument, index, return_promise):
assert argument.is_variadic
idl_type = argument.idl_type
this_cpp_type = idl_type.cpp_type
if method.returns_promise:
check_expression = 'exceptionState.hadException()'
else:
check_expression = 'exceptionState.throwIfNeeded()'
if idl_type.is_dictionary or idl_type.is_union_type:
vector_type = 'HeapVector'
else:
vector_type = 'Vector'
return {
'assign_expression': 'toImplArguments<%s<%s>>(info, %s, exceptionState)' % (vector_type, this_cpp_type, index),
'check_expression': check_expression,
'cpp_type': this_cpp_type,
'cpp_name': argument.name,
'declare_variable': False,
}
def v8_value_to_local_cpp_value(method, argument, index, return_promise=False, restricted_float=False):
extended_attributes = argument.extended_attributes
idl_type = argument.idl_type
name = argument.name
if argument.is_variadic:
return v8_value_to_local_cpp_variadic_value(method, argument, index, return_promise)
return idl_type.v8_value_to_local_cpp_value(extended_attributes, 'info[%s]' % index,
name, index=index, declare_variable=False,
use_exception_state=method.returns_promise,
restricted_float=restricted_float)
################################################################################
# Auxiliary functions
################################################################################
# [NotEnumerable], [Unforgeable]
def property_attributes(interface, method):
extended_attributes = method.extended_attributes
property_attributes_list = []
if 'NotEnumerable' in extended_attributes:
property_attributes_list.append('v8::DontEnum')
if is_unforgeable(interface, method):
property_attributes_list.append('v8::ReadOnly')
property_attributes_list.append('v8::DontDelete')
return property_attributes_list
def argument_set_default_value(argument):
idl_type = argument.idl_type
default_value = argument.default_value
if not default_value:
return None
if idl_type.is_dictionary:
if not argument.default_value.is_null:
raise Exception('invalid default value for dictionary type')
return None
if idl_type.is_array_or_sequence_type:
if default_value.value != '[]':
raise Exception('invalid default value for sequence type: %s' % default_value.value)
# Nothing to do when we set an empty sequence as default value, but we
# need to return non-empty value so that we don't generate method calls
# without this argument.
return '/* Nothing to do */'
if idl_type.is_union_type:
if argument.default_value.is_null:
if not idl_type.includes_nullable_type:
raise Exception('invalid default value for union type: null for %s'
% idl_type.name)
# Union container objects are "null" initially.
return '/* null default value */'
if isinstance(default_value.value, basestring):
member_type = idl_type.string_member_type
elif isinstance(default_value.value, (int, float)):
member_type = idl_type.numeric_member_type
elif isinstance(default_value.value, bool):
member_type = idl_type.boolean_member_type
else:
member_type = None
if member_type is None:
raise Exception('invalid default value for union type: %r for %s'
% (default_value.value, idl_type.name))
member_type_name = (member_type.inner_type.name
if member_type.is_nullable else
member_type.name)
return '%s.set%s(%s)' % (argument.name, member_type_name,
member_type.literal_cpp_value(default_value))
return '%s = %s' % (argument.name,
idl_type.literal_cpp_value(default_value))
IdlArgument.set_default_value = property(argument_set_default_value)
def method_returns_promise(method):
return method.idl_type and method.idl_type.name == 'Promise'
IdlOperation.returns_promise = property(method_returns_promise)
def argument_conversion_needs_exception_state(method, argument):
idl_type = argument.idl_type
return (idl_type.v8_conversion_needs_exception_state or
argument.is_variadic or
(method.returns_promise and idl_type.is_string_type))
|
js0701/chromium-crosswalk
|
third_party/WebKit/Source/bindings/scripts/v8_methods.py
|
Python
|
bsd-3-clause
| 24,023 | 0.002622 |
#!/usr/bin/env python
"""Server run file.
Run by './server.py'
Access properties as 'config.property'
"""
import pkgutil, sys
from flask import Flask, Blueprint, render_template, request
import config
app = Flask(__name__)
modules = pkgutil.iter_modules(path=[config.modules_directory_name])
for loader, mod_name, ispkg in modules:
if mod_name not in sys.modules:
loaded_mod = __import__(config.modules_directory_name + '.' + mod_name, fromlist=[mod_name])
for obj in vars(loaded_mod).values():
if isinstance(obj, Blueprint):
app.register_blueprint(obj)
app.run(debug=config.debug, host=config.host, port=config.port)
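# Illustrative sketch (hypothetical values, not part of this file): the config
# module imported above is assumed to expose plain module-level attributes,
# roughly like:
#
#     # config.py
#     debug = True
#     host = '0.0.0.0'
#     port = 5000
#     modules_directory_name = 'modules'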
|
rogerhoward/funcaas
|
server.py
|
Python
|
mit
| 673 | 0.002972 |
# Copyright (c) 2014 OpenStack Foundation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
import six
from neutron.api.v2 import attributes
from neutron.callbacks import events
from neutron.callbacks import exceptions
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import constants as l3_const
from neutron.common import exceptions as n_exc
from neutron.common import utils as n_utils
from neutron.db import l3_attrs_db
from neutron.db import l3_db
from neutron.db import l3_dvrscheduler_db as l3_dvrsched_db
from neutron.db import models_v2
from neutron.extensions import l3
from neutron.extensions import portbindings
from neutron.i18n import _LI
from neutron import manager
from neutron.plugins.common import constants
from neutron.plugins.common import utils as p_utils
LOG = logging.getLogger(__name__)
router_distributed_opts = [
cfg.BoolOpt('router_distributed',
default=False,
help=_("System-wide flag to determine the type of router "
"that tenants can create. Only admin can override.")),
]
cfg.CONF.register_opts(router_distributed_opts)
class L3_NAT_with_dvr_db_mixin(l3_db.L3_NAT_db_mixin,
l3_attrs_db.ExtraAttributesMixin):
"""Mixin class to enable DVR support."""
router_device_owners = (
l3_db.L3_NAT_db_mixin.router_device_owners +
(l3_const.DEVICE_OWNER_DVR_INTERFACE,
l3_const.DEVICE_OWNER_ROUTER_SNAT,
l3_const.DEVICE_OWNER_AGENT_GW))
extra_attributes = (
l3_attrs_db.ExtraAttributesMixin.extra_attributes + [{
'name': "distributed",
'default': cfg.CONF.router_distributed
}])
def _create_router_db(self, context, router, tenant_id):
"""Create a router db object with dvr additions."""
router['distributed'] = is_distributed_router(router)
with context.session.begin(subtransactions=True):
router_db = super(
L3_NAT_with_dvr_db_mixin, self)._create_router_db(
context, router, tenant_id)
self._process_extra_attr_router_create(context, router_db, router)
return router_db
def _validate_router_migration(self, context, router_db, router_res):
"""Allow centralized -> distributed state transition only."""
if (router_db.extra_attributes.distributed and
router_res.get('distributed') is False):
LOG.info(_LI("Centralizing distributed router %s "
"is not supported"), router_db['id'])
raise n_exc.NotSupported(msg=_("Migration from distributed router "
"to centralized"))
elif (not router_db.extra_attributes.distributed and
router_res.get('distributed')):
# router should be disabled in order for upgrade
if router_db.admin_state_up:
msg = _('Cannot upgrade active router to distributed. Please '
'set router admin_state_up to False prior to upgrade.')
raise n_exc.BadRequest(resource='router', msg=msg)
# Notify advanced services of the imminent state transition
# for the router.
try:
kwargs = {'context': context, 'router': router_db}
registry.notify(
resources.ROUTER, events.BEFORE_UPDATE, self, **kwargs)
except exceptions.CallbackFailure as e:
with excutils.save_and_reraise_exception():
# NOTE(armax): preserve old check's behavior
if len(e.errors) == 1:
raise e.errors[0].error
raise l3.RouterInUse(router_id=router_db['id'],
reason=e)
def _update_distributed_attr(
self, context, router_id, router_db, data, gw_info):
"""Update the model to support the dvr case of a router."""
if data.get('distributed'):
old_owner = l3_const.DEVICE_OWNER_ROUTER_INTF
new_owner = l3_const.DEVICE_OWNER_DVR_INTERFACE
for rp in router_db.attached_ports.filter_by(port_type=old_owner):
rp.port_type = new_owner
rp.port.device_owner = new_owner
def _update_router_db(self, context, router_id, data, gw_info):
with context.session.begin(subtransactions=True):
router_db = super(
L3_NAT_with_dvr_db_mixin, self)._update_router_db(
context, router_id, data, gw_info)
migrating_to_distributed = (
not router_db.extra_attributes.distributed and
data.get('distributed') is True)
self._validate_router_migration(context, router_db, data)
router_db.extra_attributes.update(data)
self._update_distributed_attr(
context, router_id, router_db, data, gw_info)
if migrating_to_distributed:
if router_db['gw_port_id']:
# If the Legacy router is getting migrated to a DVR
# router, make sure to create corresponding
# snat interface ports that are to be consumed by
# the Service Node.
if not self._create_snat_intf_ports_if_not_exists(
context.elevated(), router_db):
LOG.debug("SNAT interface ports not created: %s",
router_db['id'])
cur_agents = self.list_l3_agents_hosting_router(
context, router_db['id'])['agents']
for agent in cur_agents:
self._unbind_router(context, router_db['id'],
agent['id'])
return router_db
def _delete_current_gw_port(self, context, router_id, router, new_network):
"""
        Overridden here to handle deletion of dvr internal ports.
If there is a valid router update with gateway port to be deleted,
then go ahead and delete the csnat ports and the floatingip
agent gateway port associated with the dvr router.
"""
gw_ext_net_id = (
router.gw_port['network_id'] if router.gw_port else None)
super(L3_NAT_with_dvr_db_mixin,
self)._delete_current_gw_port(context, router_id,
router, new_network)
if (is_distributed_router(router) and
gw_ext_net_id != new_network):
self.delete_csnat_router_interface_ports(
context.elevated(), router)
# NOTE(Swami): Delete the Floatingip agent gateway port
# on all hosts when it is the last gateway port in the
# given external network.
filters = {'network_id': [gw_ext_net_id],
'device_owner': [l3_const.DEVICE_OWNER_ROUTER_GW]}
ext_net_gw_ports = self._core_plugin.get_ports(
context.elevated(), filters)
if not ext_net_gw_ports:
self.delete_floatingip_agent_gateway_port(
context.elevated(), None, gw_ext_net_id)
def _create_gw_port(self, context, router_id, router, new_network,
ext_ips):
super(L3_NAT_with_dvr_db_mixin,
self)._create_gw_port(context, router_id, router, new_network,
ext_ips)
# Make sure that the gateway port exists before creating the
# snat interface ports for distributed router.
if router.extra_attributes.distributed and router.gw_port:
snat_p_list = self._create_snat_intf_ports_if_not_exists(
context.elevated(), router)
if not snat_p_list:
LOG.debug("SNAT interface ports not created: %s", snat_p_list)
def _get_device_owner(self, context, router=None):
"""Get device_owner for the specified router."""
router_is_uuid = isinstance(router, six.string_types)
if router_is_uuid:
router = self._get_router(context, router)
if is_distributed_router(router):
return l3_const.DEVICE_OWNER_DVR_INTERFACE
return super(L3_NAT_with_dvr_db_mixin,
self)._get_device_owner(context, router)
def _get_interface_ports_for_network(self, context, network_id):
router_intf_qry = context.session.query(l3_db.RouterPort)
router_intf_qry = router_intf_qry.join(models_v2.Port)
return router_intf_qry.filter(
models_v2.Port.network_id == network_id,
l3_db.RouterPort.port_type.in_(l3_const.ROUTER_INTERFACE_OWNERS)
)
def _update_fip_assoc(self, context, fip, floatingip_db, external_port):
"""Override to create floating agent gw port for DVR.
Floating IP Agent gateway port will be created when a
floatingIP association happens.
"""
fip_port = fip.get('port_id')
super(L3_NAT_with_dvr_db_mixin, self)._update_fip_assoc(
context, fip, floatingip_db, external_port)
associate_fip = fip_port and floatingip_db['id']
if associate_fip and floatingip_db.get('router_id'):
admin_ctx = context.elevated()
router_dict = self.get_router(
admin_ctx, floatingip_db['router_id'])
# Check if distributed router and then create the
# FloatingIP agent gateway port
if router_dict.get('distributed'):
vm_hostid = self._get_vm_port_hostid(
context, fip_port)
if vm_hostid:
# FIXME (Swami): This FIP Agent Gateway port should be
# created only once and there should not be a duplicate
# for the same host. Until we find a good solution for
# augmenting multiple server requests we should use the
# existing flow.
fip_agent_port = (
self.create_fip_agent_gw_port_if_not_exists(
admin_ctx, external_port['network_id'],
vm_hostid))
LOG.debug("FIP Agent gateway port: %s", fip_agent_port)
def _get_floatingip_on_port(self, context, port_id=None):
"""Helper function to retrieve the fip associated with port."""
fip_qry = context.session.query(l3_db.FloatingIP)
floating_ip = fip_qry.filter_by(fixed_port_id=port_id)
return floating_ip.first()
def add_router_interface(self, context, router_id, interface_info):
add_by_port, add_by_sub = self._validate_interface_info(interface_info)
router = self._get_router(context, router_id)
device_owner = self._get_device_owner(context, router)
# This should be True unless adding an IPv6 prefix to an existing port
new_port = True
if add_by_port:
port, subnets = self._add_interface_by_port(
context, router, interface_info['port_id'], device_owner)
elif add_by_sub:
port, subnets, new_port = self._add_interface_by_subnet(
context, router, interface_info['subnet_id'], device_owner)
if new_port:
if router.extra_attributes.distributed and router.gw_port:
try:
admin_context = context.elevated()
self._add_csnat_router_interface_port(
admin_context, router, port['network_id'],
port['fixed_ips'][-1]['subnet_id'])
except Exception:
with excutils.save_and_reraise_exception():
# we need to preserve the original state prior
# the request by rolling back the port creation
# that led to new_port=True
self._core_plugin.delete_port(
admin_context, port['id'])
with context.session.begin(subtransactions=True):
router_port = l3_db.RouterPort(
port_id=port['id'],
router_id=router.id,
port_type=device_owner
)
context.session.add(router_port)
router_interface_info = self._make_router_interface_info(
router_id, port['tenant_id'], port['id'], subnets[-1]['id'],
[subnet['id'] for subnet in subnets])
self.notify_router_interface_action(
context, router_interface_info, 'add')
return router_interface_info
def _port_has_ipv6_address(self, port):
"""Overridden to return False if DVR SNAT port."""
if port['device_owner'] == l3_const.DEVICE_OWNER_ROUTER_SNAT:
return False
return super(L3_NAT_with_dvr_db_mixin,
self)._port_has_ipv6_address(port)
def _check_dvr_router_remove_required_and_notify_agent(
self, context, router, port, subnets):
if router.extra_attributes.distributed:
if router.gw_port and subnets[0]['id']:
self.delete_csnat_router_interface_ports(
context.elevated(), router, subnet_id=subnets[0]['id'])
plugin = manager.NeutronManager.get_service_plugins().get(
constants.L3_ROUTER_NAT)
l3_agents = plugin.get_l3_agents_hosting_routers(context,
[router['id']])
for l3_agent in l3_agents:
if not plugin.check_ports_exist_on_l3agent(context, l3_agent,
router['id']):
plugin.remove_router_from_l3_agent(
context, l3_agent['id'], router['id'])
router_interface_info = self._make_router_interface_info(
router['id'], port['tenant_id'], port['id'], subnets[0]['id'],
[subnet['id'] for subnet in subnets])
self.notify_router_interface_action(
context, router_interface_info, 'remove')
return router_interface_info
def remove_router_interface(self, context, router_id, interface_info):
remove_by_port, remove_by_subnet = (
self._validate_interface_info(interface_info, for_removal=True)
)
port_id = interface_info.get('port_id')
subnet_id = interface_info.get('subnet_id')
router = self._get_router(context, router_id)
device_owner = self._get_device_owner(context, router)
if remove_by_port:
port, subnets = self._remove_interface_by_port(
context, router_id, port_id, subnet_id, device_owner)
# remove_by_subnet is not used here, because the validation logic of
        # _validate_interface_info ensures that at least one of remove_by_*
# is True.
else:
port, subnets = self._remove_interface_by_subnet(
context, router_id, subnet_id, device_owner)
router_interface_info = (
self._check_dvr_router_remove_required_and_notify_agent(
context, router, port, subnets))
return router_interface_info
def _get_snat_sync_interfaces(self, context, router_ids):
"""Query router interfaces that relate to list of router_ids."""
if not router_ids:
return []
qry = context.session.query(l3_db.RouterPort)
qry = qry.filter(
l3_db.RouterPort.router_id.in_(router_ids),
l3_db.RouterPort.port_type == l3_const.DEVICE_OWNER_ROUTER_SNAT
)
interfaces = collections.defaultdict(list)
for rp in qry:
interfaces[rp.router_id].append(
self._core_plugin._make_port_dict(rp.port, None))
LOG.debug("Return the SNAT ports: %s", interfaces)
return interfaces
def _build_routers_list(self, context, routers, gw_ports):
# Perform a single query up front for all routers
if not routers:
return []
router_ids = [r['id'] for r in routers]
snat_binding = l3_dvrsched_db.CentralizedSnatL3AgentBinding
query = (context.session.query(snat_binding).
filter(snat_binding.router_id.in_(router_ids))).all()
bindings = dict((b.router_id, b) for b in query)
for rtr in routers:
gw_port_id = rtr['gw_port_id']
# Collect gw ports only if available
if gw_port_id and gw_ports.get(gw_port_id):
rtr['gw_port'] = gw_ports[gw_port_id]
if 'enable_snat' in rtr[l3.EXTERNAL_GW_INFO]:
rtr['enable_snat'] = (
rtr[l3.EXTERNAL_GW_INFO]['enable_snat'])
binding = bindings.get(rtr['id'])
if not binding:
rtr['gw_port_host'] = None
LOG.debug('No snat is bound to router %s', rtr['id'])
continue
rtr['gw_port_host'] = binding.l3_agent.host
return routers
def _process_routers(self, context, routers):
routers_dict = {}
snat_intfs_by_router_id = self._get_snat_sync_interfaces(
context, [r['id'] for r in routers])
for router in routers:
routers_dict[router['id']] = router
if router['gw_port_id']:
snat_router_intfs = snat_intfs_by_router_id[router['id']]
LOG.debug("SNAT ports returned: %s ", snat_router_intfs)
router[l3_const.SNAT_ROUTER_INTF_KEY] = snat_router_intfs
return routers_dict
def _process_floating_ips_dvr(self, context, routers_dict,
floating_ips, host, agent):
fip_sync_interfaces = None
LOG.debug("FIP Agent : %s ", agent.id)
for floating_ip in floating_ips:
router = routers_dict.get(floating_ip['router_id'])
if router:
router_floatingips = router.get(l3_const.FLOATINGIP_KEY, [])
if router['distributed']:
if floating_ip.get('host', None) != host:
continue
LOG.debug("Floating IP host: %s", floating_ip['host'])
router_floatingips.append(floating_ip)
router[l3_const.FLOATINGIP_KEY] = router_floatingips
if not fip_sync_interfaces:
fip_sync_interfaces = self._get_fip_sync_interfaces(
context, agent.id)
LOG.debug("FIP Agent ports: %s", fip_sync_interfaces)
router[l3_const.FLOATINGIP_AGENT_INTF_KEY] = (
fip_sync_interfaces)
def _get_fip_sync_interfaces(self, context, fip_agent_id):
"""Query router interfaces that relate to list of router_ids."""
if not fip_agent_id:
return []
filters = {'device_id': [fip_agent_id],
'device_owner': [l3_const.DEVICE_OWNER_AGENT_GW]}
interfaces = self._core_plugin.get_ports(context.elevated(), filters)
LOG.debug("Return the FIP ports: %s ", interfaces)
return interfaces
def _get_dvr_sync_data(self, context, host, agent, router_ids=None,
active=None):
routers, interfaces, floating_ips = self._get_router_info_list(
context, router_ids=router_ids, active=active,
device_owners=l3_const.ROUTER_INTERFACE_OWNERS)
port_filter = {portbindings.HOST_ID: [host]}
ports = self._core_plugin.get_ports(context, port_filter)
port_dict = dict((port['id'], port) for port in ports)
# Add the port binding host to the floatingip dictionary
for fip in floating_ips:
vm_port = port_dict.get(fip['port_id'], None)
if vm_port:
fip['host'] = self._get_vm_port_hostid(context, fip['port_id'],
port=vm_port)
routers_dict = self._process_routers(context, routers)
self._process_floating_ips_dvr(context, routers_dict,
floating_ips, host, agent)
ports_to_populate = []
for router in routers_dict.values():
if router.get('gw_port'):
ports_to_populate.append(router['gw_port'])
if router.get(l3_const.FLOATINGIP_AGENT_INTF_KEY):
ports_to_populate += router[l3_const.FLOATINGIP_AGENT_INTF_KEY]
if router.get(l3_const.SNAT_ROUTER_INTF_KEY):
ports_to_populate += router[l3_const.SNAT_ROUTER_INTF_KEY]
ports_to_populate += interfaces
self._populate_subnets_for_ports(context, ports_to_populate)
self._process_interfaces(routers_dict, interfaces)
return list(routers_dict.values())
def _get_vm_port_hostid(self, context, port_id, port=None):
"""Return the portbinding host_id."""
vm_port_db = port or self._core_plugin.get_port(context, port_id)
device_owner = vm_port_db['device_owner'] if vm_port_db else ""
if (n_utils.is_dvr_serviced(device_owner) or
device_owner == l3_const.DEVICE_OWNER_AGENT_GW):
return vm_port_db[portbindings.HOST_ID]
def _get_agent_gw_ports_exist_for_network(
self, context, network_id, host, agent_id):
"""Return agent gw port if exist, or None otherwise."""
if not network_id:
LOG.debug("Network not specified")
return
filters = {
'network_id': [network_id],
'device_id': [agent_id],
'device_owner': [l3_const.DEVICE_OWNER_AGENT_GW]
}
ports = self._core_plugin.get_ports(context, filters)
if ports:
return ports[0]
def _get_router_ids(self, context):
"""Function to retrieve router IDs for a context without joins"""
query = self._model_query(context, l3_db.Router.id)
return [row[0] for row in query]
def delete_floatingip_agent_gateway_port(
self, context, host_id, ext_net_id):
"""Function to delete FIP gateway port with given ext_net_id."""
# delete any fip agent gw port
device_filter = {'device_owner': [l3_const.DEVICE_OWNER_AGENT_GW],
'network_id': [ext_net_id]}
ports = self._core_plugin.get_ports(context,
filters=device_filter)
for p in ports:
if not host_id or p[portbindings.HOST_ID] == host_id:
self._core_plugin.ipam.delete_port(context, p['id'])
if host_id:
return
def create_fip_agent_gw_port_if_not_exists(
self, context, network_id, host):
"""Function to return the FIP Agent GW port.
This function will create a FIP Agent GW port
if required. If the port already exists, it
will return the existing port and will not
create a new one.
"""
l3_agent_db = self._get_agent_by_type_and_host(
context, l3_const.AGENT_TYPE_L3, host)
if l3_agent_db:
LOG.debug("Agent ID exists: %s", l3_agent_db['id'])
f_port = self._get_agent_gw_ports_exist_for_network(
context, network_id, host, l3_agent_db['id'])
if not f_port:
LOG.info(_LI('Agent Gateway port does not exist,'
' so create one: %s'), f_port)
port_data = {'tenant_id': '',
'network_id': network_id,
'device_id': l3_agent_db['id'],
'device_owner': l3_const.DEVICE_OWNER_AGENT_GW,
'binding:host_id': host,
'admin_state_up': True,
'name': ''}
agent_port = p_utils.create_port(self._core_plugin, context,
{'port': port_data})
if agent_port:
self._populate_subnets_for_ports(context, [agent_port])
return agent_port
msg = _("Unable to create the Agent Gateway Port")
raise n_exc.BadRequest(resource='router', msg=msg)
else:
self._populate_subnets_for_ports(context, [f_port])
return f_port
def _get_snat_interface_ports_for_router(self, context, router_id):
"""Return all existing snat_router_interface ports."""
qry = context.session.query(l3_db.RouterPort)
qry = qry.filter_by(
router_id=router_id,
port_type=l3_const.DEVICE_OWNER_ROUTER_SNAT
)
ports = [self._core_plugin._make_port_dict(rp.port, None)
for rp in qry]
return ports
def _add_csnat_router_interface_port(
self, context, router, network_id, subnet_id, do_pop=True):
"""Add SNAT interface to the specified router and subnet."""
port_data = {'tenant_id': '',
'network_id': network_id,
'fixed_ips': [{'subnet_id': subnet_id}],
'device_id': router.id,
'device_owner': l3_const.DEVICE_OWNER_ROUTER_SNAT,
'admin_state_up': True,
'name': ''}
snat_port = p_utils.create_port(self._core_plugin, context,
{'port': port_data})
if not snat_port:
msg = _("Unable to create the SNAT Interface Port")
raise n_exc.BadRequest(resource='router', msg=msg)
with context.session.begin(subtransactions=True):
router_port = l3_db.RouterPort(
port_id=snat_port['id'],
router_id=router.id,
port_type=l3_const.DEVICE_OWNER_ROUTER_SNAT
)
context.session.add(router_port)
if do_pop:
return self._populate_subnets_for_ports(context, [snat_port])
return snat_port
def _create_snat_intf_ports_if_not_exists(self, context, router):
"""Function to return the snat interface port list.
This function will return the snat interface port list
if it exists. If the port does not exist it will create
new ports and then return the list.
"""
port_list = self._get_snat_interface_ports_for_router(
context, router.id)
if port_list:
self._populate_subnets_for_ports(context, port_list)
return port_list
port_list = []
int_ports = (
rp.port for rp in
router.attached_ports.filter_by(
port_type=l3_const.DEVICE_OWNER_DVR_INTERFACE
)
)
LOG.info(_LI('SNAT interface port list does not exist,'
' so create one: %s'), port_list)
for intf in int_ports:
if intf.fixed_ips:
                # Pass the subnet for the port to make sure the IPs are
                # assigned on the right subnet if multiple subnets exist
snat_port = self._add_csnat_router_interface_port(
context, router, intf['network_id'],
intf['fixed_ips'][0]['subnet_id'], do_pop=False)
port_list.append(snat_port)
if port_list:
self._populate_subnets_for_ports(context, port_list)
return port_list
def dvr_vmarp_table_update(self, context, port_dict, action):
"""Notify L3 agents of VM ARP table changes.
When a VM goes up or down, look for one DVR router on the port's
subnet, and send the VM's ARP details to all L3 agents hosting the
router.
"""
# Check this is a valid VM or service port
if not (n_utils.is_dvr_serviced(port_dict['device_owner']) and
port_dict['fixed_ips']):
return
ip_address = port_dict['fixed_ips'][0]['ip_address']
subnet = port_dict['fixed_ips'][0]['subnet_id']
filters = {'fixed_ips': {'subnet_id': [subnet]}}
ports = self._core_plugin.get_ports(context, filters=filters)
for port in ports:
if port['device_owner'] == l3_const.DEVICE_OWNER_DVR_INTERFACE:
router_id = port['device_id']
router_dict = self._get_router(context, router_id)
if router_dict.extra_attributes.distributed:
arp_table = {'ip_address': ip_address,
'mac_address': port_dict['mac_address'],
'subnet_id': subnet}
if action == "add":
notify_action = self.l3_rpc_notifier.add_arp_entry
elif action == "del":
notify_action = self.l3_rpc_notifier.del_arp_entry
notify_action(context, router_id, arp_table)
return
def delete_csnat_router_interface_ports(self, context,
router, subnet_id=None):
# Each csnat router interface port is associated
# with a subnet, so we need to pass the subnet id to
# delete the right ports.
# TODO(markmcclain): This is suboptimal but was left to reduce
# changeset size since it is late in cycle
ports = (
rp.port.id for rp in
router.attached_ports.filter_by(
port_type=l3_const.DEVICE_OWNER_ROUTER_SNAT)
if rp.port
)
c_snat_ports = self._core_plugin.get_ports(
context,
filters={'id': ports}
)
for p in c_snat_ports:
if subnet_id is None:
self._core_plugin.delete_port(context,
p['id'],
l3_port_check=False)
else:
if p['fixed_ips'][0]['subnet_id'] == subnet_id:
LOG.debug("Subnet matches: %s", subnet_id)
self._core_plugin.delete_port(context,
p['id'],
l3_port_check=False)
def is_distributed_router(router):
"""Return True if router to be handled is distributed."""
try:
# See if router is a DB object first
requested_router_type = router.extra_attributes.distributed
except AttributeError:
# if not, try to see if it is a request body
requested_router_type = router.get('distributed')
if attributes.is_attr_set(requested_router_type):
return requested_router_type
return cfg.CONF.router_distributed
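# Illustrative usage (hypothetical request bodies, not part of the original
# module): an explicit 'distributed' flag wins; otherwise the deployment-wide
# cfg.CONF.router_distributed default is returned.
def _example_is_distributed_router():
    explicit = is_distributed_router({'distributed': True})  # -> True
    implicit = is_distributed_router({'name': 'router1'})    # -> cfg.CONF.router_distributed
    return explicit, implicit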
|
shahbazn/neutron
|
neutron/db/l3_dvr_db.py
|
Python
|
apache-2.0
| 31,701 | 0.000315 |
# (c) Copyright IBM Corp. 2021
# (c) Copyright Instana Inc. 2020
"""
Module to handle the work related to the various AWS Lambda triggers.
"""
import gzip
import json
import base64
from io import BytesIO
import opentracing as ot
from ...log import logger
STR_LAMBDA_TRIGGER = 'lambda.trigger'
def get_context(tracer, event):
# TODO: Search for more types of trigger context
is_proxy_event = is_api_gateway_proxy_trigger(event) or \
is_api_gateway_v2_proxy_trigger(event) or \
is_application_load_balancer_trigger(event)
if is_proxy_event:
return tracer.extract(ot.Format.HTTP_HEADERS, event.get('headers', {}), disable_w3c_trace_context=True)
return tracer.extract(ot.Format.HTTP_HEADERS, event, disable_w3c_trace_context=True)
def is_api_gateway_proxy_trigger(event):
for key in ["resource", "path", "httpMethod"]:
if key not in event:
return False
return True
def is_api_gateway_v2_proxy_trigger(event):
for key in ["version", "requestContext"]:
if key not in event:
return False
if event["version"] != "2.0":
return False
for key in ["apiId", "stage", "http"]:
if key not in event["requestContext"]:
return False
return True
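# Illustrative sketch (hypothetical payload, not part of the original module):
# the minimal shape of an API Gateway v2.0 event accepted by the check above.
_EXAMPLE_API_GW_V2_EVENT = {
    "version": "2.0",
    "routeKey": "GET /pets",
    "requestContext": {
        "apiId": "abc123",
        "stage": "$default",
        "http": {"method": "GET", "path": "/pets"},
    },
}  # is_api_gateway_v2_proxy_trigger(_EXAMPLE_API_GW_V2_EVENT) -> True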
def is_application_load_balancer_trigger(event):
if 'requestContext' in event and 'elb' in event['requestContext']:
return True
return False
def is_cloudwatch_trigger(event):
if "source" in event and 'detail-type' in event:
if event["source"] == 'aws.events' and event['detail-type'] == 'Scheduled Event':
return True
return False
def is_cloudwatch_logs_trigger(event):
if hasattr(event, 'get') and event.get("awslogs", False) is not False:
return True
else:
return False
def is_s3_trigger(event):
if "Records" in event:
if len(event["Records"]) > 0 and event["Records"][0]["eventSource"] == 'aws:s3':
return True
return False
def is_sqs_trigger(event):
if "Records" in event:
if len(event["Records"]) > 0 and event["Records"][0]["eventSource"] == 'aws:sqs':
return True
return False
def read_http_query_params(event):
"""
Used to parse the Lambda QueryString formats.
@param event: lambda event dict
@return: String in the form of "a=b&c=d"
"""
params = []
try:
if event is None or type(event) is not dict:
return ""
mvqsp = event.get('multiValueQueryStringParameters', None)
qsp = event.get('queryStringParameters', None)
if mvqsp is not None and type(mvqsp) is dict:
for key in mvqsp:
params.append("%s=%s" % (key, mvqsp[key]))
return "&".join(params)
elif qsp is not None and type(qsp) is dict:
for key in qsp:
params.append("%s=%s" % (key, qsp[key]))
return "&".join(params)
else:
return ""
except Exception:
logger.debug("read_http_query_params: ", exc_info=True)
return ""
def capture_extra_headers(event, span, extra_headers):
"""
Capture the headers specified in `extra_headers` from `event` and log them
as a tag in the span.
@param event: the lambda event
@param span: the lambda entry span
@param extra_headers: a list of http headers to capture
@return: None
"""
try:
event_headers = event.get("headers", None)
if event_headers is not None:
for custom_header in extra_headers:
for key in event_headers:
if key.lower() == custom_header.lower():
span.set_tag("http.header.%s" % custom_header, event_headers[key])
except Exception:
logger.debug("capture_extra_headers: ", exc_info=True)
def enrich_lambda_span(agent, span, event, context):
"""
Extract the required information about this Lambda run (and the trigger) and store the data
on `span`.
@param agent: the AWSLambdaAgent in use
@param span: the Lambda entry span
@param event: the lambda handler event
@param context: the lambda handler context
@return: None
"""
try:
span.set_tag('lambda.arn', agent.collector.get_fq_arn())
span.set_tag('lambda.name', context.function_name)
span.set_tag('lambda.version', context.function_version)
if event is None or type(event) is not dict:
logger.debug("enrich_lambda_span: bad event %s", type(event))
return
if is_api_gateway_proxy_trigger(event):
logger.debug("Detected as API Gateway Proxy Trigger")
span.set_tag(STR_LAMBDA_TRIGGER, 'aws:api.gateway')
span.set_tag('http.method', event["httpMethod"])
span.set_tag('http.url', event["path"])
span.set_tag('http.path_tpl', event["resource"])
span.set_tag('http.params', read_http_query_params(event))
if agent.options.extra_http_headers is not None:
capture_extra_headers(event, span, agent.options.extra_http_headers)
elif is_api_gateway_v2_proxy_trigger(event):
logger.debug("Detected as API Gateway v2.0 Proxy Trigger")
reqCtx = event["requestContext"]
# trim optional HTTP method prefix
route_path = event["routeKey"].split(" ", 2)[-1]
span.set_tag(STR_LAMBDA_TRIGGER, 'aws:api.gateway')
span.set_tag('http.method', reqCtx["http"]["method"])
span.set_tag('http.url', reqCtx["http"]["path"])
span.set_tag('http.path_tpl', route_path)
span.set_tag('http.params', read_http_query_params(event))
if agent.options.extra_http_headers is not None:
capture_extra_headers(event, span, agent.options.extra_http_headers)
elif is_application_load_balancer_trigger(event):
logger.debug("Detected as Application Load Balancer Trigger")
span.set_tag(STR_LAMBDA_TRIGGER, 'aws:application.load.balancer')
span.set_tag('http.method', event["httpMethod"])
span.set_tag('http.url', event["path"])
span.set_tag('http.params', read_http_query_params(event))
if agent.options.extra_http_headers is not None:
capture_extra_headers(event, span, agent.options.extra_http_headers)
elif is_cloudwatch_trigger(event):
logger.debug("Detected as Cloudwatch Trigger")
span.set_tag(STR_LAMBDA_TRIGGER, 'aws:cloudwatch.events')
span.set_tag('data.lambda.cw.events.id', event['id'])
resources = event['resources']
resource_count = len(event['resources'])
if resource_count > 3:
resources = event['resources'][:3]
span.set_tag('lambda.cw.events.more', True)
else:
span.set_tag('lambda.cw.events.more', False)
report = []
for item in resources:
if len(item) > 200:
item = item[:200]
report.append(item)
span.set_tag('lambda.cw.events.resources', report)
elif is_cloudwatch_logs_trigger(event):
logger.debug("Detected as Cloudwatch Logs Trigger")
span.set_tag(STR_LAMBDA_TRIGGER, 'aws:cloudwatch.logs')
try:
if 'awslogs' in event and 'data' in event['awslogs']:
data = event['awslogs']['data']
decoded_data = base64.b64decode(data)
decompressed_data = gzip.GzipFile(fileobj=BytesIO(decoded_data)).read()
log_data = json.loads(decompressed_data.decode('utf-8'))
span.set_tag('lambda.cw.logs.group', log_data.get('logGroup', None))
span.set_tag('lambda.cw.logs.stream', log_data.get('logStream', None))
if len(log_data['logEvents']) > 3:
span.set_tag('lambda.cw.logs.more', True)
events = log_data['logEvents'][:3]
else:
events = log_data['logEvents']
event_data = []
for item in events:
msg = item.get('message', None)
if len(msg) > 200:
msg = msg[:200]
event_data.append(msg)
span.set_tag('lambda.cw.logs.events', event_data)
except Exception as e:
span.set_tag('lambda.cw.logs.decodingError', repr(e))
elif is_s3_trigger(event):
logger.debug("Detected as S3 Trigger")
span.set_tag(STR_LAMBDA_TRIGGER, 'aws:s3')
if "Records" in event:
events = []
for item in event["Records"][:3]:
bucket_name = "Unknown"
if "s3" in item and "bucket" in item["s3"]:
bucket_name = item["s3"]["bucket"]["name"]
object_name = ""
if "s3" in item and "object" in item["s3"]:
object_name = item["s3"]["object"].get("key", "Unknown")
if len(object_name) > 200:
object_name = object_name[:200]
events.append({"event": item['eventName'],
"bucket": bucket_name,
"object": object_name})
span.set_tag('lambda.s3.events', events)
elif is_sqs_trigger(event):
logger.debug("Detected as SQS Trigger")
span.set_tag(STR_LAMBDA_TRIGGER, 'aws:sqs')
if "Records" in event:
events = []
for item in event["Records"][:3]:
events.append({'queue': item['eventSourceARN']})
span.set_tag('lambda.sqs.messages', events)
else:
logger.debug("Detected as Unknown Trigger: %s" % event)
span.set_tag(STR_LAMBDA_TRIGGER, 'unknown')
except Exception:
logger.debug("enrich_lambda_span: ", exc_info=True)
|
instana/python-sensor
|
instana/instrumentation/aws/triggers.py
|
Python
|
mit
| 10,212 | 0.001469 |
import argparse
import ui.output
def help_format_cloudcredgrab(prog):
kwargs = dict()
kwargs['width'] = ui.output.columns()
kwargs['max_help_position'] = 34
format = argparse.HelpFormatter(prog, **kwargs)
return (format)
def parse(args):
parser = argparse.ArgumentParser(prog="cloudcredgrab", add_help=False, usage=argparse.SUPPRESS)
parser.formatter_class = help_format_cloudcredgrab
parser.add_argument('-u', '--username',
metavar="<USER>", default=None)
parser.add_argument('platform')
    options = vars(parser.parse_args(args))
    return options
|
nil0x42/phpsploit
|
plugins/credentials/cloudcredgrab/plugin_args.py
|
Python
|
gpl-3.0
| 596 | 0.003356 |
"""
WSGI config for repomaker project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "repomaker.settings")
application = get_wsgi_application()
|
fdroidtravis/repomaker
|
repomaker/wsgi.py
|
Python
|
agpl-3.0
| 396 | 0 |
import psycopg2
import urlparse
import os
def server_db():
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port)
cur = conn.cursor()
return cur
def local_db():
conn = psycopg2.connect(host="", user="", password="", dbname="")
cur = conn.cursor()
return cur
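# Illustrative sketch (hypothetical credentials, not part of this file):
# server_db() expects a Heroku-style URL in the environment, e.g.
#     DATABASE_URL=postgres://user:secret@db.example.com:5432/blogdb
# which urlparse splits into username, password, hostname, port and the
# database name (url.path[1:] strips the leading '/').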
|
debasishbai/django_blog
|
blog/database_config.py
|
Python
|
mit
| 454 | 0.002203 |
#!/usr/bin/env python3
'''
@file: bin_pack.py
@auth: Sprax Lines
@date: 2018-02-07 00:19:39 Wed 07 Feb
Can the space requirements specified by bits be packed into the specified bins?
'''
from __future__ import print_function
from itertools import islice
# import pdb
# from pdb import set_trace
from datetime import datetime
from num import fibonaccis
from num import prime_gen
def excess_space(bins, bits):
''' total excess space '''
return sum(bins) - sum(bits)
def can_pack_track_rec(bins, num_usable, bits, num_unpacked, usable_space, needed_space):
'''
* Sorted recursion. Early return if largest item cannot fit in largest
remaining bin.
* @param bins
* @param num_usable
* @param bits
* @param num_unpacked
* @return True if can pack, else False
'''
if num_unpacked < 1:
return True
if num_usable < 1:
return False
j = num_unpacked - 1
k = num_usable - 1
# return False if the largest remaining bin cannot fit the largest
# num_unpacked item.
if bins[k] < bits[j]:
return False
# Use reverse order, assuming the inputs were sorted in ascending order.
for k in reversed(range(num_usable)):
diff_k_j = bins[k] - bits[j]
# expected to be True at beginning of loop
if diff_k_j >= 0:
swapping = False
# If the space left in this bin would be less than the
if diff_k_j < bits[0]:
# smallest item, then this bin would become unusable.
usable_space -= diff_k_j
# If the remaining usable space would not suffice,
if usable_space < needed_space:
# return False immediately, without decrementing, etc.
return False
# Need to swap the diminished bins[k] off the active list.
swapping = True
usable_space -= bits[j]
needed_space -= bits[j]
bins[k] = diff_k_j
if swapping:
num_usable -= 1
bins[k] = bins[num_usable]
bins[num_usable] = diff_k_j
else:
# Otherwise, sort the list by re-inserting diminished bin[k]
# value where it now belongs.
for rdx in reversed(range(k)):
if diff_k_j < bins[rdx]:
bins[rdx + 1] = bins[rdx]
else:
bins[rdx + 1] = diff_k_j
break
else:
# set_trace()
bins[0] = diff_k_j
# Exhaustive recursion: check all remaining solutions that start
            # with item[j] packed in bin[k]
if can_pack_track_rec(bins, num_usable, bits, j, usable_space, needed_space):
return True
# failed, so swap back and increment.
if swapping:
bins[num_usable] = bins[k]
bins[k] = diff_k_j
usable_space += diff_k_j
num_usable += 1
usable_space += bits[j]
needed_space += bits[j]
bins[k] += bits[j]
return False
def can_pack_track(bins, bits):
'''returns True IFF bits can be packed into bins'''
usable_space = sum(bins)
needed_space = sum(bits)
excess = usable_space - needed_space
if excess < 0:
return False # return early: insufficient total space
sbins = sorted(bins) # make a sorted copy
sbits = sorted(bits)
if sbins[-1] < sbits[-1]:
return False # return early: max bin < max bit
if can_pack_track_rec(sbins, len(sbins), sbits, len(sbits), usable_space, needed_space):
        # Update the caller's list in place (rebinding the name via
        # bins = sbins would not be visible to the caller).
for idx, sbin in enumerate(sbins):
bins[idx] = sbin
return True
print("sbins after failure:", sbins)
return False
def can_pack(bins, bits):
''' uses the best method here '''
return can_pack_track(bins, bits)
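# Illustrative usage (sizes borrowed from the tests below): can_pack() answers
# whether every item fits and, on success, leaves the leftover capacity in bins.
def _example_can_pack():
    bins = [8, 16, 8, 32]
    bits = [18, 4, 8, 4, 6, 6, 8, 8]
    fits = can_pack(bins, bits)  # -> True; bins now holds the remaining space
    return fits, bins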
def can_pack_naive(bins, bits):
''' uses naive method '''
packed = [False] * len(bits)
return can_pack_naive_rec(bins, bits, packed)
def can_pack_naive_rec(bins, bits, packed):
'''
Naive exhaustive recursion, no early failure (as when sum(bins) <
sum(bits)), no sorting.
Implementation: Naive exhaustive recursion with supplementary array.
Complexity: Time O(N!), additional space O(N).
* Tries to fit bits into bins in the original order given.
* @param bins
* @param bits
* @param packed
* @return
'''
if all(packed):
return True
for i in range(len(bits)):
if not packed[i]:
# Exhaustive: check all remaining solutions that start with item[i]
# packed in some bin[j]
packed[i] = True
for j in range(len(bins)):
if bins[j] >= bits[i]:
# deduct item amount from bin and try to pack the rest
bins[j] -= bits[i]
if can_pack_naive_rec(bins, bits, packed):
return True # success: return
bins[j] += bits[i] # failure: restore item amount to bin
packed[i] = False
return False
###############################################################################
def show_wrong(result, expected):
''' show result if unexpected '''
if result == expected:
return 0
print("Wrong result: %s, expected: %s\n" % (result, expected))
return 1
def test_can_pack(packer, bins, bits, verbose, name, number, expected):
''' the basic test function '''
result = False
excess = excess_space(bins, bits)
if verbose > 0:
print(" Test can_pack: %s: %d" % (name, number))
print("bins to fill:", bins)
print("bits to pack:", bits)
sum_bins = sum(bins)
sum_bits = sum(bits)
diff = sum_bins - sum_bits
assert diff == excess
print("bin space - bits space: %d - %d = %d" % (sum_bins, sum_bits, diff))
if excess < 0:
print("Insufficient total bin space.")
else:
# Test the interface function:
beg_time = datetime.now()
result = packer(bins, bits)
run_time = datetime.now() - beg_time
if verbose > 0:
print("Pack bits in bins?", result)
print("Bin space after:", bins)
print("Run time millis: %7.2f" % (run_time.total_seconds() * 1000))
if result:
assert sum(bins) == excess
return show_wrong(result, expected)
def pass_fail(num_wrong):
''' pass or fail string '''
return "PASS" if num_wrong == 0 else "FAIL"
def test_packer(packer, packer_name, level):
''' tests a can_pack method '''
test_name = "test_packer(" + packer_name + ")"
num_wrong = 0
test_num = 0
if level < 1:
test_num += 1
bins = [1, 1, 4]
bits = [2, 3]
num_wrong += test_can_pack(packer, bins, bits, 1, test_name, test_num, False)
test_num += 1
bins = [2, 2, 37]
bits = [4, 37]
num_wrong += test_can_pack(packer, bins, bits, 1, test_name, test_num, False)
test_num += 1
bins = [8, 16, 8, 32]
bits = [18, 4, 8, 4, 6, 6, 8, 8]
num_wrong += test_can_pack(packer, bins, bits, 1, test_name, test_num, True)
test_num += 1
limits = [1, 3]
needs = [4]
num_wrong += test_can_pack(packer, limits, needs, 1, test_name, test_num, False)
test_num += 1
duffels = [2, 5, 2, 2, 6]
bags = [3, 3, 5]
num_wrong += test_can_pack(packer, duffels, bags, 1, test_name, test_num, True)
test_num += 1
sashes = [1, 2, 3, 4, 5, 6, 8, 9]
badges = [1, 4, 6, 6, 8, 8]
num_wrong += test_can_pack(packer, sashes, badges, 1, test_name, test_num, False)
if level > 0:
test_num += 1
crates = list(fibonaccis.fib_generate(11, 1))
boxes = list(islice(prime_gen.sieve(), 12))
boxes.append(27)
num_wrong += test_can_pack(packer, crates, boxes, 1, test_name, test_num, False)
if level > 1: # A naive algorithm may take a very long time...
test_num += 1
fibs = list(fibonaccis.fib_generate(12, 1))
mems = list(islice(prime_gen.sieve(), 47))
print("%s:\t%d\n" % (test_name, test_num))
num_wrong += test_can_pack(packer, fibs, mems, 1, test_name, test_num, True)
test_num += 1
frames = list(fibonaccis.fib_generate(13, 1))
photos = list(islice(prime_gen.sieve(), 70))
num_wrong += test_can_pack(packer, frames, photos, 1, test_name, test_num, False)
test_num += 1
blocks = list(fibonaccis.fib_generate(14, 1))
allocs = list(islice(prime_gen.sieve(), 2, 90))
num_wrong += test_can_pack(packer, blocks, allocs, 1, test_name, test_num, False)
test_num += 1
frames = list(fibonaccis.fib_generate(15, 1))
photos = list(islice(prime_gen.sieve(), 24))
num_wrong += test_can_pack(packer, frames, photos, 1, test_name, test_num, False)
test_num += 1
frames = list(fibonaccis.fib_generate(15, 1))
photos[0] = 4
num_wrong += test_can_pack(packer, frames, photos, 1, test_name, test_num, False)
test_num += 1
frames = list(fibonaccis.fib_generate(36, 1))
photos = list(islice(prime_gen.sieve(), 27650))
for j in range(min(1500, len(photos))):
photos[j] += 1
num_wrong += test_can_pack(packer, frames, photos, 1, test_name, test_num, False)
print("END %s, wrong %d, %s\n" % (test_name, num_wrong, pass_fail(num_wrong)))
return num_wrong
def unit_test(level):
''' generic unit test '''
test_name = "BinPack.unit_test"
print("BEGIN:", test_name)
num_wrong = 0
num_wrong += test_packer(can_pack_track, "can_pack_track", level)
num_wrong += test_packer(can_pack_naive, "can_pack_naive", level)
print("END: ", test_name, num_wrong)
return num_wrong
def main():
''' test driver '''
unit_test(1)
if __name__ == '__main__':
main()
|
sprax/python
|
bin_pack.py
|
Python
|
lgpl-3.0
| 10,413 | 0.001825 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteSink
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-logging
# [START logging_v2_generated_ConfigServiceV2_DeleteSink_sync]
from google.cloud import logging_v2
def sample_delete_sink():
# Create a client
client = logging_v2.ConfigServiceV2Client()
# Initialize request argument(s)
request = logging_v2.DeleteSinkRequest(
sink_name="sink_name_value",
)
# Make the request
client.delete_sink(request=request)
# [END logging_v2_generated_ConfigServiceV2_DeleteSink_sync]
|
googleapis/gapic-generator-python
|
tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py
|
Python
|
apache-2.0
| 1,381 | 0.000724 |
from nose.tools import eq_
import inflect
def test_ancient_1():
p = inflect.engine()
# DEFAULT...
eq_(p.plural_noun('wildebeest'), 'wildebeests', msg="classical 'herd' not active")
# "person" PLURALS ACTIVATED...
p.classical(herd=True)
eq_(p.plural_noun('wildebeest'), 'wildebeest', msg="classical 'herd' active")
# OTHER CLASSICALS NOT ACTIVATED...
eq_(p.plural_noun('formula'), 'formulas', msg="classical 'ancient' active")
eq_(p.plural_noun('error', 0), 'errors', msg="classical 'zero' not active")
eq_(p.plural_noun('Sally'), 'Sallys', msg="classical 'names' active")
eq_(p.plural_noun('brother'), 'brothers', msg="classical 'all' not active")
eq_(p.plural_noun('person'), 'people', msg="classical 'persons' not active")
|
GeneralizedLearningUtilities/SuperGLU
|
python_module/SuperGLU/Services/TextProcessing/Tests/Inflect/test_classical_herd.py
|
Python
|
mit
| 835 | 0.008383 |
#!/usr/bin/env python
"""
A credential management component for AutoPyFactory
"""
import logging
import math
import os
import pwd, grp
import sys
import threading
import time
import socket
# Added to support running module as script from arbitrary location.
from os.path import dirname, realpath, sep, pardir
fullpathlist = realpath(__file__).split(sep)
prepath = sep.join(fullpathlist[:-2])
sys.path.insert(0, prepath)
import pluginmanager
import autopyfactory
###from autopyfactory.plugins.auth.X509 import X509
###from autopyfactory.plugins.auth.SSH import SSH
from autopyfactory.apfexceptions import InvalidAuthFailure
from autopyfactory.configloader import Config, ConfigsDiff
class AuthManager(object):
"""
Manager to maintain multiple credential Handlers, one for each target account.
For some handlers, if they need to perform periodic checks, they will be run
as threads. Others, which only hold information, will just be objects.
"""
def __init__(self, factory=None):
self.log = logging.getLogger('autopyfactory')
self.log.info("Creating new authmanager...")
self.aconfig = Config()
self.handlers = []
self.factory = factory
if factory:
self.sleep = int(self.factory.fcl.get('Factory', 'authmanager.sleep'))
else:
self.sleep = 5
def reconfig(self, newconfig):
hdiff = ConfigsDiff(self.aconfig, newconfig)
self.aconfig = newconfig
self._addhandlers(hdiff.added())
def _addhandlers(self, newsections):
for sect in newsections:
try:
pclass = self.aconfig.get(sect, 'plugin')
            except Exception as e:
                self.log.warn("No plugin attribute for section %s" % sect)
                continue
if pclass == 'X509':
self.log.debug("Creating X509 handler for %s" % sect )
authpluginname = self.aconfig.get(sect, 'plugin')
x509h = pluginmanager.getplugin(['autopyfactory', 'plugins', 'authmanager', 'auth'], authpluginname, self, self.aconfig, sect)
self.handlers.append(x509h)
elif pclass == 'SSH':
self.log.debug("Creating SSH handler for %s" % sect )
authpluginname = self.aconfig.get(sect, 'plugin')
sshh = pluginmanager.getplugin(['autopyfactory', 'plugins', 'authmanager', 'auth'], authpluginname, self, self.aconfig, sect)
self.handlers.append(sshh)
else:
self.log.warn("Unrecognized auth plugin %s" % pclass )
def activate(self):
"""
start all Handlers, if needed
"""
for ah in self.handlers:
if isinstance(ah, threading.Thread) :
self.log.debug("Handler [%s] is a thread. Starting..." % ah.name)
ah.start()
else:
self.log.debug("Handler [%s] is not a thread. No action." % ah.name)
def listNames(self):
"""
Returns list of valid names of Handlers in this Manager.
"""
names = []
for h in self.handlers:
names.append(h.name)
return names
#
# API for X509Handler
#
def getProxyPath(self, profilelist):
"""
Check all the handlers for matching profile name(s).
profiles argument is a list
"""
pp = None
for profile in profilelist:
self.log.debug("Getting proxy path for profile %s" % profile)
ph = None
for h in self.handlers:
self.log.debug("Finding handler. Checking %s" % h.name)
if h.name == profile:
ph = h
break
if ph:
self.log.debug("Found handler %s. Getting proxypath..." % ph.name)
pp = ph.getProxyPath()
self.log.debug("Proxypath is %s" % pp)
if pp:
break
if not pp:
subject = "Proxy problem on %s" % self.factory.factoryid
messagestring = "Unable to get valid proxy from configured profiles: %s" % profilelist
self.factory.sendAdminEmail(subject, messagestring)
raise InvalidAuthFailure("Problem getting proxy for profile %s" % profilelist)
return pp
#
# API for SSHKeyHandler
#
def getSSHKeyPair(self, profile):
"""
Returns tuple (public, private, pass) key/phrase string from profile.
"""
pass
def getSSHKeyPairPaths(self, profile):
"""
Returns tuple (public, private, pass) key/passfile paths to files from profile.
"""
h = self._getHandler(profile)
pub = h.getSSHPubKeyFilePath()
priv = h.getSSHPrivKeyFilePath()
pasf = h.getSSHPassFilePath()
self.log.info('Got file paths for pub, priv, pass for SSH profile %s' % profile)
return (pub,priv,pasf)
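    # Illustrative usage (hypothetical profile name, not part of the original
    # module):
    #     pub, priv, passfile = authmanager.getSSHKeyPairPaths('grid-ssh')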
def _getHandler(self, profile):
"""
"""
handler = None
for h in self.handlers:
self.log.debug("Finding handler. Checking %s" % h.name)
if h.name == profile:
self.log.debug("Found handler for %s" % h.name)
handler = h
if handler is None:
raise InvalidAuthFailure('No handler for %s ' % profile)
return handler
if __name__ == '__main__':
import getopt
import sys
import os
from ConfigParser import ConfigParser, SafeConfigParser
debug = 0
info = 0
aconfig_file = None
default_configfile = os.path.expanduser("~/etc/auth.conf")
usage = """Usage: authmanager.py [OPTIONS]
OPTIONS:
-h --help Print this message
-d --debug Debug messages
-v --verbose Verbose information
-c --config Config file [~/etc/auth.conf]"""
# Handle command line options
argv = sys.argv[1:]
try:
opts, args = getopt.getopt(argv,
"c:hdvt",
["config=",
"help",
"debug",
"verbose",
])
except getopt.GetoptError as error:
print( str(error))
print( usage )
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
print(usage)
sys.exit()
elif opt in ("-c", "--config"):
aconfig_file = arg
elif opt in ("-d", "--debug"):
debug = 1
elif opt in ("-v", "--verbose"):
info = 1
# Check python version
major, minor, release, st, num = sys.version_info
# Set up logging, handle differences between Python versions...
# In Python 2.3, logging.basicConfig takes no args
#
FORMAT23="[ %(levelname)s ] %(asctime)s %(filename)s (Line %(lineno)d): %(message)s"
FORMAT24=FORMAT23
FORMAT25="[%(levelname)s] %(asctime)s %(module)s.%(funcName)s(): %(message)s"
FORMAT26=FORMAT25
if major == 2:
if minor ==3:
formatstr = FORMAT23
elif minor == 4:
formatstr = FORMAT24
elif minor == 5:
formatstr = FORMAT25
elif minor == 6:
formatstr = FORMAT26
elif minor == 7:
formatstr = FORMAT26
log = logging.getLogger('autopyfactory')
hdlr = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter(FORMAT23)
hdlr.setFormatter(formatter)
log.addHandler(hdlr)
if debug:
log.setLevel(logging.DEBUG) # Override with command line switches
if info:
log.setLevel(logging.INFO) # Override with command line switches
log.debug("Logging initialized.")
# Read in config file
aconfig=ConfigParser()
if not aconfig_file:
aconfig_file = os.path.expanduser(default_configfile)
else:
aconfig_file = os.path.expanduser(aconfig_file)
got_config = aconfig.read(aconfig_file)
log.debug("Read config file %s, return value: %s" % (aconfig_file, got_config))
am = AuthManager(aconfig)
am.reconfig(aconfig)
log.info("Authmanager created. Starting handlers...")
    am.activate()
#am.start()
try:
while True:
time.sleep(2)
#log.debug('Checking for interrupt.')
except KeyboardInterrupt:
log.debug("Shutdown via Ctrl-C or -INT signal.")
|
PanDAWMS/autopyfactory
|
autopyfactory/authmanager.py
|
Python
|
apache-2.0
| 8,906 | 0.011004 |
import pytest
from sovrin_common.types import ClientPoolUpgradeOperation
from collections import OrderedDict
from plenum.common.messages.fields import ConstantField, ChooseField, VersionField, MapField, Sha256HexField, \
NonNegativeNumberField, LimitedLengthStringField, BooleanField
EXPECTED_ORDERED_FIELDS = OrderedDict([
("type", ConstantField),
('action', ChooseField),
("version", VersionField),
('schedule', MapField),
('sha256', Sha256HexField),
('timeout', NonNegativeNumberField),
('justification', LimitedLengthStringField),
("name", LimitedLengthStringField),
("force", BooleanField),
("reinstall", BooleanField),
])
def test_has_expected_fields():
actual_field_names = OrderedDict(ClientPoolUpgradeOperation.schema).keys()
assert actual_field_names == EXPECTED_ORDERED_FIELDS.keys()
def test_has_expected_validators():
schema = dict(ClientPoolUpgradeOperation.schema)
for field, validator in EXPECTED_ORDERED_FIELDS.items():
assert isinstance(schema[field], validator)
|
keenondrums/sovrin-node
|
sovrin_common/test/types/test_pool_upg_schema.py
|
Python
|
apache-2.0
| 1,055 | 0.000948 |
"""
WSGI config for ECAPlanet project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ECARUSS.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
SOMA-PainKiller/ECAReview
|
ECARUSS/wsgi.py
|
Python
|
mit
| 391 | 0.002558 |
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
"""
Contains functions related to working with the Nectar downloading library.
"""
from functools import partial
from nectar.config import DownloaderConfig
from pulp.common.plugins import importer_constants as constants
def importer_config_to_nectar_config(importer_config):
"""
Translates the Pulp standard importer configuration into a DownloaderConfig instance.
:param importer_config: use the PluginCallConfiguration.flatten method to retrieve a
single dict view on the configuration
:type importer_config: dict
:rtype: nectar.config.DownloaderConfig
"""
# Mapping of importer config key to downloader config key
translations = (
(constants.KEY_SSL_CA_CERT, 'ssl_ca_cert'),
(constants.KEY_SSL_VALIDATION, 'ssl_validation'),
(constants.KEY_SSL_CLIENT_CERT, 'ssl_client_cert'),
(constants.KEY_SSL_CLIENT_KEY, 'ssl_client_key'),
(constants.KEY_PROXY_HOST, 'proxy_url'),
(constants.KEY_PROXY_PORT, 'proxy_port'),
(constants.KEY_PROXY_USER, 'proxy_username'),
(constants.KEY_PROXY_PASS, 'proxy_password'),
(constants.KEY_MAX_DOWNLOADS, 'max_concurrent'),
(constants.KEY_MAX_SPEED, 'max_speed'),
)
download_config_kwargs = {}
adder = partial(_safe_add_arg, importer_config, download_config_kwargs)
map(adder, translations)
download_config = DownloaderConfig(**download_config_kwargs)
return download_config
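# Illustrative usage (hypothetical values, not part of the original module):
# a flattened importer config is translated key-by-key into DownloaderConfig
# keyword arguments.
def _example_importer_config_to_nectar_config():
    flat_config = {
        constants.KEY_PROXY_HOST: 'http://proxy.example.com',
        constants.KEY_PROXY_PORT: 3128,
        constants.KEY_MAX_DOWNLOADS: 5,
    }
    return importer_config_to_nectar_config(flat_config)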
def _safe_add_arg(importer_config, dl_config, keys_tuple):
"""
Utility to only set values in the downloader config if they are present in the importer's
config.
:type importer_config: dict
:type dl_config: dict
:param keys_tuple: tuple of importer key to download config key
:type keys_tuple: (str, str)
"""
if keys_tuple[0] in importer_config:
dl_config[keys_tuple[1]] = importer_config[keys_tuple[0]]
|
beav/pulp
|
server/pulp/plugins/util/nectar_config.py
|
Python
|
gpl-2.0
| 2,490 | 0.001205 |
# -*- coding: utf-8 -*-
"""
Created on 27/04/2015
@author: C&C - HardSoft
"""
from util.HOFs import *
from util.CobolPatterns import *
from util.homogenize import Homogenize
def calc_length(copy):
if isinstance(copy, list):
book = copy
else:
if isinstance(copy, str):
book = copy.splitlines()
else:
book = []
lines = Homogenize(book)
havecopy = filter(isCopy, lines)
if havecopy:
bkm = ''.join(havecopy[0].split('COPY')[1].replace('.', '').split())
msg = 'COPY {} deve ser expandido.'.format(bkm)
return {'retorno': False, 'msg': msg, 'lrecl': 0}
lrecl = 0
redefines = False
occurs = 0
dicoccurs = {}
level_redefines = 0
for line in lines:
match = CobolPatterns.row_pattern.match(line.strip())
if not match:
continue
match = match.groupdict()
if not match['level']:
continue
if 'REDEFINES' in line and not match['redefines']:
match['redefines'] = CobolPatterns.row_pattern_redefines.search(line).groupdict().get('redefines')
if 'OCCURS' in line and not match['occurs']:
match['occurs'] = CobolPatterns.row_pattern_occurs.search(line).groupdict().get('occurs')
level = int(match['level'])
if redefines:
if level > level_redefines:
continue
redefines = False
level_redefines = 0
if match['redefines']:
level_redefines = level
redefines = True
continue
if occurs:
if level > dicoccurs[occurs]['level']:
if match['occurs']:
occurs += 1
attrib = {}
attrib['occ'] = int(match['occurs'])
attrib['level'] = level
attrib['length'] = 0
dicoccurs[occurs] = attrib
if match['pic']:
dicoccurs[occurs]['length'] += FieldLength(match['pic'], match['usage'])
continue
while True:
if occurs == 1:
lrecl += dicoccurs[occurs]['length'] * dicoccurs[occurs]['occ']
else:
dicoccurs[occurs-1]['length'] += dicoccurs[occurs]['length'] * dicoccurs[occurs]['occ']
del dicoccurs[occurs]
occurs -= 1
if not occurs:
break
if level > dicoccurs[occurs]['level']:
break
if match['occurs']:
occurs += 1
attrib = {}
attrib['occ'] = int(match['occurs'])
attrib['level'] = level
attrib['length'] = 0
dicoccurs[occurs] = attrib
if match['pic']:
if occurs:
dicoccurs[occurs]['length'] += FieldLength(match['pic'], match['usage'])
else:
lrecl += FieldLength(match['pic'], match['usage'])
return {'retorno': True, 'msg': None, 'lrecl': lrecl}
def FieldLength(pic_str, usage):
if pic_str[0] == 'S':
pic_str = pic_str[1:]
while True:
match = CobolPatterns.pic_pattern_repeats.search(pic_str)
if not match:
break
match = match.groupdict()
expanded_str = match['constant'] * int(match['repeat'])
pic_str = CobolPatterns.pic_pattern_repeats.sub(expanded_str, pic_str, 1)
len_field = len(pic_str.replace('V', ''))
if not usage:
usage = 'DISPLAY'
if 'COMP-3' in usage or 'COMPUTATIONAL-3' in usage:
len_field = len_field / 2 + 1
elif 'COMP' in usage or 'COMPUTATIONAL' in usage or 'BINARY' in usage:
len_field = len_field / 2
elif 'SIGN' in usage:
len_field += 1
return len_field
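# A hedged usage sketch (the copybook lines below are illustrative and assume the
# regular expressions in util.CobolPatterns accept this layout): calc_length takes the
# expanded copybook text and reports the computed record length (LRECL).
if __name__ == '__main__':
    sample_copybook = '\n'.join([
        '       01  REG-CLIENTE.',
        '           05  CLI-NOME    PIC X(30).',
        '           05  CLI-SALDO   PIC S9(7)V99 COMP-3.',
    ])
    print(calc_length(sample_copybook))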
|
flavio-casacurta/File-FixedS
|
calc_length.py
|
Python
|
mit
| 3,848 | 0.002339 |
# -*- coding: utf-8 -*-
'''
Manage X509 certificates
.. versionadded:: 2015.8.0
'''
# Import python libs
from __future__ import absolute_import
import os
import logging
import hashlib
import glob
import random
import ctypes
import tempfile
import yaml
import re
import datetime
import ast
# Import salt libs
import salt.utils
import salt.exceptions
import salt.ext.six as six
from salt.utils.odict import OrderedDict
from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin
from salt.state import STATE_INTERNAL_KEYWORDS as _STATE_INTERNAL_KEYWORDS
# Import 3rd Party Libs
try:
import M2Crypto
HAS_M2 = True
except ImportError:
HAS_M2 = False
__virtualname__ = 'x509'
log = logging.getLogger(__name__)
EXT_NAME_MAPPINGS = OrderedDict([
('basicConstraints', 'X509v3 Basic Constraints'),
('keyUsage', 'X509v3 Key Usage'),
('extendedKeyUsage', 'X509v3 Extended Key Usage'),
('subjectKeyIdentifier', 'X509v3 Subject Key Identifier'),
('authorityKeyIdentifier', 'X509v3 Authority Key Identifier'),
    ('issuerAltName', 'X509v3 Issuer Alternative Name'),
('authorityInfoAccess', 'X509v3 Authority Info Access'),
('subjectAltName', 'X509v3 Subject Alternative Name'),
('crlDistributionPoints', 'X509v3 CRL Distribution Points'),
('issuingDistributionPoint', 'X509v3 Issuing Distribution Point'),
('certificatePolicies', 'X509v3 Certificate Policies'),
('policyConstraints', 'X509v3 Policy Constraints'),
('inhibitAnyPolicy', 'X509v3 Inhibit Any Policy'),
('nameConstraints', 'X509v3 Name Constraints'),
('noCheck', 'X509v3 OCSP No Check'),
('nsComment', 'Netscape Comment'),
('nsCertType', 'Netscape Certificate Type'),
])
CERT_DEFAULTS = {'days_valid': 365, 'version': 3, 'serial_bits': 64, 'algorithm': 'sha256'}
def __virtual__():
'''
only load this module if m2crypto is available
'''
if HAS_M2:
return __virtualname__
else:
return (False, 'Could not load x509 module, m2crypto unavailable')
class _Ctx(ctypes.Structure):
'''
This is part of an ugly hack to fix an ancient bug in M2Crypto
https://bugzilla.osafoundation.org/show_bug.cgi?id=7530#c13
'''
# pylint: disable=too-few-public-methods
_fields_ = [('flags', ctypes.c_int),
('issuer_cert', ctypes.c_void_p),
('subject_cert', ctypes.c_void_p),
('subject_req', ctypes.c_void_p),
('crl', ctypes.c_void_p),
('db_meth', ctypes.c_void_p),
('db', ctypes.c_void_p),
]
def _fix_ctx(m2_ctx, issuer=None):
'''
This is part of an ugly hack to fix an ancient bug in M2Crypto
https://bugzilla.osafoundation.org/show_bug.cgi?id=7530#c13
'''
ctx = _Ctx.from_address(int(m2_ctx)) # pylint: disable=no-member
ctx.flags = 0
ctx.subject_cert = None
ctx.subject_req = None
ctx.crl = None
if issuer is None:
ctx.issuer_cert = None
else:
ctx.issuer_cert = int(issuer.x509)
def _new_extension(name, value, critical=0, issuer=None, _pyfree=1):
'''
    Create a new X509_Extension. This is required because M2Crypto doesn't support
    getting the public key identifier from the issuer, which is needed to create the
    authorityKeyIdentifier extension.
'''
if name == 'subjectKeyIdentifier' and \
            value.strip('0123456789abcdefABCDEF:') != '':
raise salt.exceptions.SaltInvocationError('value must be precomputed hash')
lhash = M2Crypto.m2.x509v3_lhash() # pylint: disable=no-member
ctx = M2Crypto.m2.x509v3_set_conf_lhash(lhash) # pylint: disable=no-member
#ctx not zeroed
_fix_ctx(ctx, issuer)
x509_ext_ptr = M2Crypto.m2.x509v3_ext_conf(lhash, ctx, name, value) # pylint: disable=no-member
#ctx,lhash freed
if x509_ext_ptr is None:
raise Exception
x509_ext = M2Crypto.X509.X509_Extension(x509_ext_ptr, _pyfree)
x509_ext.set_critical(critical)
return x509_ext
# The next four functions are more hacks because M2Crypto doesn't support getting
# Extensions from CSRs. https://github.com/martinpaljak/M2Crypto/issues/63
def _parse_openssl_req(csr_filename):
'''
    Parses openssl command line output; this is a workaround for M2Crypto's
    inability to read extensions from CSR objects.
'''
cmd = ('openssl req -text -noout -in {0}'.format(csr_filename))
output = __salt__['cmd.run_stdout'](cmd)
output = re.sub(r': rsaEncryption', ':', output)
output = re.sub(r'[0-9a-f]{2}:', '', output)
return yaml.safe_load(output)
def _get_csr_extensions(csr):
'''
Returns a list of dicts containing the name, value and critical value of
any extension contained in a csr object.
'''
ret = OrderedDict()
csrtempfile = tempfile.NamedTemporaryFile()
csrtempfile.write(csr.as_pem())
csrtempfile.flush()
csryaml = _parse_openssl_req(csrtempfile.name)
csrtempfile.close()
if csryaml and 'Requested Extensions' in csryaml['Certificate Request']['Data']:
csrexts = csryaml['Certificate Request']['Data']['Requested Extensions']
for short_name, long_name in six.iteritems(EXT_NAME_MAPPINGS):
if long_name in csrexts:
ret[short_name] = csrexts[long_name]
return ret
# None of the Python libraries read CRLs, so again we have to shell out to the openssl CLI
def _parse_openssl_crl(crl_filename):
'''
    Parses openssl command line output; this is a workaround for M2Crypto's
    inability to read CRL objects directly.
'''
cmd = ('openssl crl -text -noout -in {0}'.format(crl_filename))
output = __salt__['cmd.run_stdout'](cmd)
crl = {}
for line in output.split('\n'):
line = line.strip()
if line.startswith('Version '):
crl['Version'] = line.replace('Version ', '')
if line.startswith('Signature Algorithm: '):
crl['Signature Algorithm'] = line.replace('Signature Algorithm: ', '')
if line.startswith('Issuer: '):
line = line.replace('Issuer: ', '')
subject = {}
for sub_entry in line.split('/'):
if '=' in sub_entry:
sub_entry = sub_entry.split('=')
subject[sub_entry[0]] = sub_entry[1]
crl['Issuer'] = subject
if line.startswith('Last Update: '):
crl['Last Update'] = line.replace('Last Update: ', '')
last_update = datetime.datetime.strptime(
crl['Last Update'], "%b %d %H:%M:%S %Y %Z")
crl['Last Update'] = last_update.strftime("%Y-%m-%d %H:%M:%S")
if line.startswith('Next Update: '):
crl['Next Update'] = line.replace('Next Update: ', '')
next_update = datetime.datetime.strptime(
crl['Next Update'], "%b %d %H:%M:%S %Y %Z")
crl['Next Update'] = next_update.strftime("%Y-%m-%d %H:%M:%S")
if line.startswith('Revoked Certificates:'):
break
if 'No Revoked Certificates.' in output:
crl['Revoked Certificates'] = []
return crl
output = output.split('Revoked Certificates:')[1]
output = output.split('Signature Algorithm:')[0]
rev = []
for revoked in output.split('Serial Number: '):
if not revoked.strip():
continue
rev_sn = revoked.split('\n')[0].strip()
revoked = rev_sn + ':\n' + '\n'.join(revoked.split('\n')[1:])
rev_yaml = yaml.safe_load(revoked)
for rev_item, rev_values in six.iteritems(rev_yaml): # pylint: disable=unused-variable
if 'Revocation Date' in rev_values:
rev_date = datetime.datetime.strptime(
rev_values['Revocation Date'], "%b %d %H:%M:%S %Y %Z")
rev_values['Revocation Date'] = rev_date.strftime("%Y-%m-%d %H:%M:%S")
rev.append(rev_yaml)
crl['Revoked Certificates'] = rev
return crl
def _get_signing_policy(name):
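    # Pillar-defined policies take precedence; otherwise fall back to the minion/master config.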
policies = __salt__['pillar.get']('x509_signing_policies', None)
if policies:
signing_policy = policies.get(name)
if signing_policy:
return signing_policy
return __salt__['config.get']('x509_signing_policies', {}).get(name)
def _pretty_hex(hex_str):
'''
Nicely formats hex strings
'''
if len(hex_str) % 2 != 0:
hex_str = '0' + hex_str
return ':'.join([hex_str[i:i+2] for i in range(0, len(hex_str), 2)]).upper()
def _dec2hex(decval):
'''
Converts decimal values to nicely formatted hex strings
'''
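    # e.g. _dec2hex(4095) -> '0F:FF' (odd-length hex strings get a leading zero before grouping)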
return _pretty_hex('{0:X}'.format(decval))
def _text_or_file(input_):
'''
Determines if input is a path to a file, or a string with the content to be parsed.
'''
if os.path.isfile(input_):
return salt.utils.fopen(input_).read()
else:
return input_
def _parse_subject(subject):
'''
Returns a dict containing all values in an X509 Subject
'''
ret = {}
nids = []
for nid_name, nid_num in six.iteritems(subject.nid):
if nid_num in nids:
continue
val = getattr(subject, nid_name)
if val:
ret[nid_name] = val
nids.append(nid_num)
return ret
def _get_certificate_obj(cert):
'''
Returns a certificate object based on PEM text.
'''
if isinstance(cert, M2Crypto.X509.X509):
return cert
text = _text_or_file(cert)
text = get_pem_entry(text, pem_type='CERTIFICATE')
return M2Crypto.X509.load_cert_string(text)
def _get_private_key_obj(private_key):
'''
Returns a private key object based on PEM text.
'''
private_key = _text_or_file(private_key)
private_key = get_pem_entry(private_key)
rsaprivkey = M2Crypto.RSA.load_key_string(private_key)
evpprivkey = M2Crypto.EVP.PKey()
evpprivkey.assign_rsa(rsaprivkey)
return evpprivkey
def _get_request_obj(csr):
'''
Returns a CSR object based on PEM text.
'''
text = _text_or_file(csr)
text = get_pem_entry(text, pem_type='CERTIFICATE REQUEST')
return M2Crypto.X509.load_request_string(text)
def _get_pubkey_hash(cert):
'''
Returns the sha1 hash of the modulus of a public key in a cert
Used for generating subject key identifiers
'''
sha_hash = hashlib.sha1(cert.get_pubkey().get_modulus()).hexdigest()
return _pretty_hex(sha_hash)
def get_pem_entry(text, pem_type=None):
'''
Returns a properly formatted PEM string from the input text fixing
any whitespace or line-break issues
text:
Text containing the X509 PEM entry to be returned or path to a file containing the text.
pem_type:
If specified, this function will only return a pem of a certain type, for example
'CERTIFICATE' or 'CERTIFICATE REQUEST'.
CLI Example:
.. code-block:: bash
salt '*' x509.get_pem_entry "-----BEGIN CERTIFICATE REQUEST-----MIICyzCC Ar8CAQI...-----END CERTIFICATE REQUEST"
'''
text = _text_or_file(text)
if not pem_type:
# Split based on headers
        if len(text.split('-----')) != 5:
            raise salt.exceptions.SaltInvocationError('PEM text not valid:\n{0}'.format(text))
        pem_header = '-----'+text.split('-----')[1]+'-----'
        pem_footer = '-----'+text.split('-----')[3]+'-----'
else:
pem_header = '-----BEGIN {0}-----'.format(pem_type)
pem_footer = '-----END {0}-----'.format(pem_type)
# Split based on defined headers
    if (len(text.split(pem_header)) != 2 or
            len(text.split(pem_footer)) != 2):
raise salt.exceptions.SaltInvocationError(
'PEM does not contain a single entry of type {0}:\n'
'{1}'.format(pem_type, text))
pem_body = text.split(pem_header)[1].split(pem_footer)[0]
# Remove all whitespace from body
pem_body = ''.join(pem_body.split())
# Generate correctly formatted pem
ret = pem_header+'\n'
for i in range(0, len(pem_body), 64):
ret += pem_body[i:i+64]+'\n'
ret += pem_footer+'\n'
return ret
def get_pem_entries(glob_path):
'''
Returns a dict containing PEM entries in files matching a glob
glob_path:
A path to certificates to be read and returned.
CLI Example:
.. code-block:: bash
salt '*' x509.read_pem_entries "/etc/pki/*.crt"
'''
ret = {}
for path in glob.glob(glob_path):
if os.path.isfile(path):
try:
ret[path] = get_pem_entry(text=path)
except ValueError:
pass
return ret
def read_certificate(certificate):
'''
Returns a dict containing details of a certificate. Input can be a PEM string or file path.
certificate:
The certificate to be read. Can be a path to a certificate file, or a string containing
the PEM formatted text of the certificate.
CLI Example:
.. code-block:: bash
salt '*' x509.read_certificate /etc/pki/mycert.crt
'''
cert = _get_certificate_obj(certificate)
ret = {
# X509 Version 3 has a value of 2 in the field.
# Version 2 has a value of 1.
# https://tools.ietf.org/html/rfc5280#section-4.1.2.1
'Version': cert.get_version()+1,
# Get size returns in bytes. The world thinks of key sizes in bits.
'Key Size': cert.get_pubkey().size()*8,
'Serial Number': _dec2hex(cert.get_serial_number()),
'SHA-256 Finger Print': _pretty_hex(cert.get_fingerprint(md='sha256')),
'MD5 Finger Print': _pretty_hex(cert.get_fingerprint(md='md5')),
'SHA1 Finger Print': _pretty_hex(cert.get_fingerprint(md='sha1')),
'Subject': _parse_subject(cert.get_subject()),
'Subject Hash': _dec2hex(cert.get_subject().as_hash()),
'Issuer': _parse_subject(cert.get_issuer()),
'Issuer Hash': _dec2hex(cert.get_issuer().as_hash()),
'Not Before': cert.get_not_before().get_datetime().strftime('%Y-%m-%d %H:%M:%S'),
'Not After': cert.get_not_after().get_datetime().strftime('%Y-%m-%d %H:%M:%S'),
'Public Key': get_public_key(cert)
}
exts = OrderedDict()
for ext_index in range(0, cert.get_ext_count()):
ext = cert.get_ext_at(ext_index)
name = ext.get_name()
val = ext.get_value()
if ext.get_critical():
val = 'critical ' + val
exts[name] = val
if exts:
ret['X509v3 Extensions'] = exts
return ret
def read_certificates(glob_path):
'''
    Returns a dict containing details of all certificates matching a glob
glob_path:
A path to certificates to be read and returned.
CLI Example:
.. code-block:: bash
salt '*' x509.read_certificates "/etc/pki/*.crt"
'''
ret = {}
for path in glob.glob(glob_path):
if os.path.isfile(path):
try:
ret[path] = read_certificate(certificate=path)
except ValueError:
pass
return ret
def read_csr(csr):
'''
Returns a dict containing details of a certificate request.
:depends: - OpenSSL command line tool
csr:
A path or PEM encoded string containing the CSR to read.
CLI Example:
.. code-block:: bash
salt '*' x509.read_csr /etc/pki/mycert.csr
'''
csr = _get_request_obj(csr)
ret = {
# X509 Version 3 has a value of 2 in the field.
# Version 2 has a value of 1.
# https://tools.ietf.org/html/rfc5280#section-4.1.2.1
'Version': csr.get_version()+1,
# Get size returns in bytes. The world thinks of key sizes in bits.
'Subject': _parse_subject(csr.get_subject()),
'Subject Hash': _dec2hex(csr.get_subject().as_hash()),
}
ret['X509v3 Extensions'] = _get_csr_extensions(csr)
return ret
def read_crl(crl):
'''
Returns a dict containing details of a certificate revocation list. Input can be a PEM string or file path.
:depends: - OpenSSL command line tool
    crl:
        A path or PEM encoded string containing the CRL to read.
CLI Example:
.. code-block:: bash
salt '*' x509.read_crl /etc/pki/mycrl.crl
'''
text = _text_or_file(crl)
text = get_pem_entry(text, pem_type='X509 CRL')
crltempfile = tempfile.NamedTemporaryFile()
crltempfile.write(text)
crltempfile.flush()
crlparsed = _parse_openssl_crl(crltempfile.name)
crltempfile.close()
return crlparsed
def get_public_key(key, asObj=False):
'''
Returns a string containing the public key in PEM format.
key:
A path or PEM encoded string containing a CSR, Certificate or Private Key from which
a public key can be retrieved.
CLI Example:
.. code-block:: bash
salt '*' x509.get_public_key /etc/pki/mycert.cer
'''
if isinstance(key, M2Crypto.X509.X509):
rsa = key.get_pubkey().get_rsa()
text = ''
else:
text = _text_or_file(key)
text = get_pem_entry(text)
if text.startswith('-----BEGIN PUBLIC KEY-----'):
if not asObj:
return text
bio = M2Crypto.BIO.MemoryBuffer()
bio.write(text)
rsa = M2Crypto.RSA.load_pub_key_bio(bio)
bio = M2Crypto.BIO.MemoryBuffer()
if text.startswith('-----BEGIN CERTIFICATE-----'):
cert = M2Crypto.X509.load_cert_string(text)
rsa = cert.get_pubkey().get_rsa()
if text.startswith('-----BEGIN CERTIFICATE REQUEST-----'):
csr = M2Crypto.X509.load_request_string(text)
rsa = csr.get_pubkey().get_rsa()
if (text.startswith('-----BEGIN PRIVATE KEY-----') or
text.startswith('-----BEGIN RSA PRIVATE KEY-----')):
rsa = M2Crypto.RSA.load_key_string(text)
if asObj:
evppubkey = M2Crypto.EVP.PKey()
evppubkey.assign_rsa(rsa)
return evppubkey
rsa.save_pub_key_bio(bio)
return bio.read_all()
def get_private_key_size(private_key):
'''
Returns the bit length of a private key in PEM format.
private_key:
A path or PEM encoded string containing a private key.
CLI Example:
.. code-block:: bash
salt '*' x509.get_private_key_size /etc/pki/mycert.key
'''
return _get_private_key_obj(private_key).size()*8
def write_pem(text, path, pem_type=None):
'''
Writes out a PEM string fixing any formatting or whitespace issues before writing.
text:
PEM string input to be written out.
path:
Path of the file to write the pem out to.
pem_type:
The PEM type to be saved, for example ``CERTIFICATE`` or ``PUBLIC KEY``. Adding this
will allow the function to take input that may contain multiple pem types.
CLI Example:
.. code-block:: bash
salt '*' x509.write_pem "-----BEGIN CERTIFICATE-----MIIGMzCCBBugA..." path=/etc/pki/mycert.crt
'''
old_umask = os.umask(0o77)
text = get_pem_entry(text, pem_type=pem_type)
salt.utils.fopen(path, 'w').write(text)
os.umask(old_umask)
return 'PEM written to {0}'.format(path)
def create_private_key(path=None, text=False, bits=2048):
'''
Creates a private key in PEM format.
path:
The path to write the file to, either ``path`` or ``text`` are required.
text:
If ``True``, return the PEM text without writing to a file. Default ``False``.
bits:
Length of the private key in bits. Default 2048
CLI Example:
.. code-block:: bash
salt '*' x509.create_private_key path=/etc/pki/mykey.key
'''
if not path and not text:
raise salt.exceptions.SaltInvocationError('Either path or text must be specified.')
if path and text:
raise salt.exceptions.SaltInvocationError('Either path or text must be specified, not both.')
rsa = M2Crypto.RSA.gen_key(bits, M2Crypto.m2.RSA_F4) # pylint: disable=no-member
bio = M2Crypto.BIO.MemoryBuffer()
rsa.save_key_bio(bio, cipher=None)
if path:
return write_pem(text=bio.read_all(), path=path,
pem_type='RSA PRIVATE KEY')
else:
return bio.read_all()
def create_crl(path=None, text=False, signing_private_key=None,
signing_cert=None, revoked=None, include_expired=False,
days_valid=100):
'''
Create a CRL
:depends: - PyOpenSSL Python module
path:
Path to write the crl to.
text:
If ``True``, return the PEM text without writing to a file. Default ``False``.
signing_private_key:
A path or string of the private key in PEM format that will be used to sign this crl.
This is required.
signing_cert:
A certificate matching the private key that will be used to sign this crl. This is
required.
revoked:
A list of dicts containing all the certificates to revoke. Each dict represents one
certificate. A dict must contain either the key ``serial_number`` with the value of
the serial number to revoke, or ``certificate`` with either the PEM encoded text of
        the certificate, or a path to the certificate to revoke.
        The dict can optionally contain the ``revocation_date`` key. If this key is omitted
        the revocation date will be set to now. It should be a string in the format "%Y-%m-%d %H:%M:%S".
        The dict can also optionally contain the ``not_after`` key. This is redundant if the
        ``certificate`` key is included. If the ``certificate`` key is not included, this
        can be used for the logic behind the ``include_expired`` parameter.
        It should be a string in the format "%Y-%m-%d %H:%M:%S".
The dict can also optionally contain the ``reason`` key. This is the reason code for the
revocation. Available choices are ``unspecified``, ``keyCompromise``, ``CACompromise``,
``affiliationChanged``, ``superseded``, ``cessationOfOperation`` and ``certificateHold``.
include_expired:
Include expired certificates in the CRL. Default is ``False``.
days_valid:
The number of days that the CRL should be valid. This sets the Next Update field in the CRL.
    .. note::
At this time the pyOpenSSL library does not allow choosing a signing algorithm for CRLs
See https://github.com/pyca/pyopenssl/issues/159
CLI Example:
.. code-block:: bash
salt '*' x509.create_crl path=/etc/pki/mykey.key signing_private_key=/etc/pki/ca.key \\
signing_cert=/etc/pki/ca.crt \\
revoked="{'compromized-web-key': {'certificate': '/etc/pki/certs/www1.crt', \\
'revocation_date': '2015-03-01 00:00:00'}}"
'''
    # pyOpenSSL is required for dealing with CRLs. It is imported inside this function so
    # that client operations that never create CRLs don't require pyOpenSSL.
    # Note: due to current limitations in pyOpenSSL it is impossible to specify a digest
    # for signing the CRL. This will hopefully be fixed soon: https://github.com/pyca/pyopenssl/pull/161
import OpenSSL
crl = OpenSSL.crypto.CRL()
if revoked is None:
revoked = []
for rev_item in revoked:
if 'certificate' in rev_item:
rev_cert = read_certificate(rev_item['certificate'])
rev_item['serial_number'] = rev_cert['Serial Number']
rev_item['not_after'] = rev_cert['Not After']
serial_number = rev_item['serial_number'].replace(':', '')
serial_number = str(int(serial_number, 16))
if 'not_after' in rev_item and not include_expired:
not_after = datetime.datetime.strptime(rev_item['not_after'], '%Y-%m-%d %H:%M:%S')
if datetime.datetime.now() > not_after:
continue
if 'revocation_date' not in rev_item:
rev_item['revocation_date'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
rev_date = datetime.datetime.strptime(rev_item['revocation_date'], '%Y-%m-%d %H:%M:%S')
rev_date = rev_date.strftime('%Y%m%d%H%M%SZ')
rev = OpenSSL.crypto.Revoked()
rev.set_serial(serial_number)
rev.set_rev_date(rev_date)
if 'reason' in rev_item:
rev.set_reason(rev_item['reason'])
crl.add_revoked(rev)
signing_cert = _text_or_file(signing_cert)
cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,
get_pem_entry(signing_cert, pem_type='CERTIFICATE'))
signing_private_key = _text_or_file(signing_private_key)
key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM,
get_pem_entry(signing_private_key))
crltext = crl.export(cert, key, OpenSSL.crypto.FILETYPE_PEM, days=days_valid)
if text:
return crltext
return write_pem(text=crltext, path=path,
pem_type='X509 CRL')
def sign_remote_certificate(argdic, **kwargs):
'''
Request a certificate to be remotely signed according to a signing policy.
argdic:
A dict containing all the arguments to be passed into the create_certificate function.
This will become kwargs when passed to create_certificate.
kwargs:
kwargs delivered from publish.publish
CLI Example:
.. code-block:: bash
salt '*' x509.sign_remote_certificate argdic="{'public_key': '/etc/pki/www.key', \\
'signing_policy': 'www'}" __pub_id='www1'
'''
if 'signing_policy' not in argdic:
return 'signing_policy must be specified'
if not isinstance(argdic, dict):
argdic = ast.literal_eval(argdic)
signing_policy = {}
if 'signing_policy' in argdic:
signing_policy = _get_signing_policy(argdic['signing_policy'])
if not signing_policy:
return 'Signing policy {0} does not exist.'.format(argdic['signing_policy'])
if isinstance(signing_policy, list):
dict_ = {}
for item in signing_policy:
dict_.update(item)
signing_policy = dict_
if 'minions' in signing_policy:
if '__pub_id' not in kwargs:
return 'minion sending this request could not be identified'
if not __salt__['match.glob'](signing_policy['minions'], kwargs['__pub_id']):
return '{0} not permitted to use signing policy {1}'.format(kwargs['__pub_id'], argdic['signing_policy'])
try:
return create_certificate(path=None, text=True, **argdic)
except Exception as except_: # pylint: disable=broad-except
return str(except_)
def get_signing_policy(signing_policy_name):
'''
    Returns the details of a named signing policy, including the text of the public key that will be used
to sign it. Does not return the private key.
CLI Example:
.. code-block:: bash
salt '*' x509.get_signing_policy www
'''
signing_policy = _get_signing_policy(signing_policy_name)
if not signing_policy:
return 'Signing policy {0} does not exist.'.format(signing_policy_name)
if isinstance(signing_policy, list):
dict_ = {}
for item in signing_policy:
dict_.update(item)
signing_policy = dict_
try:
del signing_policy['signing_private_key']
except KeyError:
pass
try:
signing_policy['signing_cert'] = get_pem_entry(signing_policy['signing_cert'], 'CERTIFICATE')
except KeyError:
pass
return signing_policy
def create_certificate(path=None, text=False, ca_server=None, **kwargs):
'''
Create an X509 certificate.
path:
Path to write the certificate to.
text:
If ``True``, return the PEM text without writing to a file. Default ``False``.
kwargs:
Any of the properties below can be included as additional keyword arguments.
ca_server:
Request a remotely signed certificate from ca_server. For this to work, a ``signing_policy`` must
be specified, and that same policy must be configured on the ca_server. See ``signing_policy`` for
details. Also the salt master must permit peers to call the ``sign_remote_certificate`` function.
Example:
/etc/salt/master.d/peer.conf
.. code-block:: yaml
peer:
.*:
- x509.sign_remote_certificate
subject properties:
        Any of the values below can be included to set subject properties
Any other subject properties supported by OpenSSL should also work.
C:
2 letter Country code
CN:
Certificate common name, typically the FQDN.
Email:
Email address
GN:
Given Name
L:
Locality
O:
Organization
OU:
Organization Unit
SN:
SurName
ST:
State or Province
signing_private_key:
A path or string of the private key in PEM format that will be used to sign this certificate.
        If none of ``signing_cert``, ``public_key``, or ``csr`` is included, it will be assumed that
this is a self-signed certificate, and the public key matching ``signing_private_key`` will
be used to create the certificate.
signing_cert:
A certificate matching the private key that will be used to sign this certificate. This is used
to populate the issuer values in the resulting certificate. Do not include this value for
self-signed certificates.
public_key:
The public key to be included in this certificate. This can be sourced from a public key,
certificate, csr or private key. If a private key is used, the matching public key from
the private key will be generated before any processing is done. This means you can request a
certificate from a remote CA using a private key file as your public_key and only the
public key will be sent across the network to the CA.
        If neither ``public_key`` nor ``csr`` is
specified, it will be assumed that this is a self-signed certificate, and the public key
derived from ``signing_private_key`` will be used. Specify either ``public_key`` or ``csr``,
not both. Because you can input a CSR as a public key or as a CSR, it is important to understand
the difference. If you import a CSR as a public key, only the public key will be added
to the certificate, subject or extension information in the CSR will be lost.
csr:
A file or PEM string containing a certificate signing request. This will be used to supply the
subject, extensions and public key of a certificate. Any subject or extensions specified
explicitly will overwrite any in the CSR.
basicConstraints:
X509v3 Basic Constraints extension.
extensions:
The following arguments set X509v3 Extension values. If the value starts with ``critical ``,
the extension will be marked as critical.
Some special extensions are ``subjectKeyIdentifier`` and ``authorityKeyIdentifier``.
``subjectKeyIdentifier`` can be an explicit value or it can be the special string ``hash``.
``hash`` will set the subjectKeyIdentifier equal to the SHA1 hash of the modulus of the
public key in this certificate. Note that this is not the exact same hashing method used by
OpenSSL when using the hash value.
``authorityKeyIdentifier`` Use values acceptable to the openssl CLI tools. This will
automatically populate ``authorityKeyIdentifier`` with the ``subjectKeyIdentifier`` of
``signing_cert``. If this is a self-signed cert these values will be the same.
basicConstraints:
X509v3 Basic Constraints
keyUsage:
X509v3 Key Usage
extendedKeyUsage:
X509v3 Extended Key Usage
subjectKeyIdentifier:
X509v3 Subject Key Identifier
issuerAltName:
X509v3 Issuer Alternative Name
subjectAltName:
X509v3 Subject Alternative Name
crlDistributionPoints:
X509v3 CRL distribution points
issuingDistributionPoint:
X509v3 Issuing Distribution Point
certificatePolicies:
X509v3 Certificate Policies
policyConstraints:
X509v3 Policy Constraints
inhibitAnyPolicy:
X509v3 Inhibit Any Policy
nameConstraints:
X509v3 Name Constraints
noCheck:
X509v3 OCSP No Check
nsComment:
Netscape Comment
nsCertType:
Netscape Certificate Type
days_valid:
The number of days this certificate should be valid. This sets the ``notAfter`` property
of the certificate. Defaults to 365.
version:
The version of the X509 certificate. Defaults to 3. This is automatically converted to the
version value, so ``version=3`` sets the certificate version field to 0x2.
serial_number:
The serial number to assign to this certificate. If omitted a random serial number of size
``serial_bits`` is generated.
serial_bits:
The number of bits to use when randomly generating a serial number. Defaults to 64.
algorithm:
The hashing algorithm to be used for signing this certificate. Defaults to sha256.
copypath:
An additional path to copy the resulting certificate to. Can be used to maintain a copy
of all certificates issued for revocation purposes.
signing_policy:
A signing policy that should be used to create this certificate. Signing policies should be defined
in the minion configuration, or in a minion pillar. It should be a yaml formatted list of arguments
which will override any arguments passed to this function. If the ``minions`` key is included in
the signing policy, only minions matching that pattern will be permitted to remotely request certificates
from that policy.
Example:
.. code-block:: yaml
x509_signing_policies:
www:
- minions: 'www*'
- signing_private_key: /etc/pki/ca.key
- signing_cert: /etc/pki/ca.crt
- C: US
- ST: Utah
- L: Salt Lake City
- basicConstraints: "critical CA:false"
- keyUsage: "critical cRLSign, keyCertSign"
- subjectKeyIdentifier: hash
- authorityKeyIdentifier: keyid,issuer:always
- days_valid: 90
- copypath: /etc/pki/issued_certs/
The above signing policy can be invoked with ``signing_policy=www``
CLI Example:
.. code-block:: bash
salt '*' x509.create_certificate path=/etc/pki/myca.crt \\
signing_private_key='/etc/pki/myca.key' csr='/etc/pki/myca.csr'}
'''
if not path and not text and ('testrun' not in kwargs or kwargs['testrun'] is False):
raise salt.exceptions.SaltInvocationError('Either path or text must be specified.')
if path and text:
raise salt.exceptions.SaltInvocationError('Either path or text must be specified, not both.')
if ca_server:
if 'signing_policy' not in kwargs:
            raise salt.exceptions.SaltInvocationError('signing_policy must be specified '
                'if requesting a remote certificate from ca_server {0}.'.format(ca_server))
if 'csr' in kwargs:
kwargs['csr'] = get_pem_entry(kwargs['csr'], pem_type='CERTIFICATE REQUEST').replace('\n', '')
if 'public_key' in kwargs:
# Strip newlines to make passing through as cli functions easier
kwargs['public_key'] = get_public_key(kwargs['public_key']).replace('\n', '')
# Remove system entries in kwargs
        # Including listen_in and prerequired because they are not included in STATE_INTERNAL_KEYWORDS
        # for salt 2014.7.2
        for ignore in list(_STATE_INTERNAL_KEYWORDS) + ['listen_in', 'prerequired']:
kwargs.pop(ignore, None)
cert_txt = __salt__['publish.publish'](tgt=ca_server,
fun='x509.sign_remote_certificate',
arg=str(kwargs))[ca_server]
if path:
return write_pem(text=cert_txt, path=path,
pem_type='CERTIFICATE')
else:
return cert_txt
signing_policy = {}
if 'signing_policy' in kwargs:
signing_policy = _get_signing_policy(kwargs['signing_policy'])
if isinstance(signing_policy, list):
dict_ = {}
for item in signing_policy:
dict_.update(item)
signing_policy = dict_
# Overwrite any arguments in kwargs with signing_policy
kwargs.update(signing_policy)
for prop, default in six.iteritems(CERT_DEFAULTS):
if prop not in kwargs:
kwargs[prop] = default
cert = M2Crypto.X509.X509()
# X509 Version 3 has a value of 2 in the field.
# Version 2 has a value of 1.
# https://tools.ietf.org/html/rfc5280#section-4.1.2.1
cert.set_version(kwargs['version'] - 1)
# Random serial number if not specified
if 'serial_number' not in kwargs:
kwargs['serial_number'] = _dec2hex(random.getrandbits(kwargs['serial_bits']))
cert.set_serial_number(int(kwargs['serial_number'].replace(':', ''), 16))
# Set validity dates
# pylint: disable=no-member
not_before = M2Crypto.m2.x509_get_not_before(cert.x509)
not_after = M2Crypto.m2.x509_get_not_after(cert.x509)
M2Crypto.m2.x509_gmtime_adj(not_before, 0)
M2Crypto.m2.x509_gmtime_adj(not_after, 60*60*24*kwargs['days_valid'])
# pylint: enable=no-member
# If neither public_key or csr are included, this cert is self-signed
if 'public_key' not in kwargs and 'csr' not in kwargs:
kwargs['public_key'] = kwargs['signing_private_key']
csrexts = {}
if 'csr' in kwargs:
kwargs['public_key'] = kwargs['csr']
csr = _get_request_obj(kwargs['csr'])
cert.set_subject(csr.get_subject())
csrexts = read_csr(kwargs['csr'])['X509v3 Extensions']
cert.set_pubkey(get_public_key(kwargs['public_key'], asObj=True))
subject = cert.get_subject()
for entry, num in six.iteritems(subject.nid): # pylint: disable=unused-variable
if entry in kwargs:
setattr(subject, entry, kwargs[entry])
if 'signing_cert' in kwargs:
signing_cert = _get_certificate_obj(kwargs['signing_cert'])
else:
signing_cert = cert
cert.set_issuer(signing_cert.get_subject())
for extname, extlongname in six.iteritems(EXT_NAME_MAPPINGS):
if (extname in kwargs or extlongname in kwargs or extname in csrexts or extlongname in csrexts) is False:
continue
# Use explicitly set values first, fall back to CSR values.
        extval = kwargs.get(extname) or kwargs.get(extlongname) or csrexts.get(extname) or csrexts.get(extlongname)
critical = False
if extval.startswith('critical '):
critical = True
extval = extval[9:]
if extname == 'subjectKeyIdentifier' and 'hash' in extval:
extval = extval.replace('hash', _get_pubkey_hash(cert))
issuer = None
if extname == 'authorityKeyIdentifier':
issuer = signing_cert
ext = _new_extension(name=extname, value=extval, critical=critical, issuer=issuer)
if not ext.x509_ext:
log.info('Invalid X509v3 Extension. {0}: {1}'.format(extname, extval))
continue
cert.add_ext(ext)
if 'testrun' in kwargs and kwargs['testrun'] is True:
cert_props = read_certificate(cert)
cert_props['Issuer Public Key'] = get_public_key(kwargs['signing_private_key'])
return cert_props
if not verify_private_key(kwargs['signing_private_key'], signing_cert):
        raise salt.exceptions.SaltInvocationError('signing_private_key: {0} '
                'does not match signing_cert: {1}'.format(kwargs['signing_private_key'],
kwargs['signing_cert']))
cert.sign(_get_private_key_obj(kwargs['signing_private_key']), kwargs['algorithm'])
if not verify_signature(cert, signing_pub_key=signing_cert):
raise salt.exceptions.SaltInvocationError('failed to verify certificate signature')
if 'copypath' in kwargs:
write_pem(text=cert.as_pem(), path=os.path.join(kwargs['copypath'], kwargs['serial_number']+'.crt'),
pem_type='CERTIFICATE')
if path:
return write_pem(text=cert.as_pem(), path=path,
pem_type='CERTIFICATE')
else:
return cert.as_pem()
def create_csr(path=None, text=False, **kwargs):
'''
Create a certificate signing request.
path:
Path to write the certificate to.
text:
If ``True``, return the PEM text without writing to a file. Default ``False``.
kwargs:
The subject, extension and version arguments from
:mod:`x509.create_certificate <salt.modules.x509.create_certificate>` can be used.
CLI Example:
.. code-block:: bash
        salt '*' x509.create_csr path=/etc/pki/myca.csr public_key='/etc/pki/myca.key' CN='My Cert'
'''
if not path and not text:
raise salt.exceptions.SaltInvocationError('Either path or text must be specified.')
if path and text:
raise salt.exceptions.SaltInvocationError('Either path or text must be specified, not both.')
csr = M2Crypto.X509.Request()
subject = csr.get_subject()
csr.set_version(kwargs['version'] - 1)
if 'public_key' not in kwargs:
raise salt.exceptions.SaltInvocationError('public_key is required')
csr.set_pubkey(get_public_key(kwargs['public_key'], asObj=True))
for entry, num in six.iteritems(subject.nid): # pylint: disable=unused-variable
if entry in kwargs:
setattr(subject, entry, kwargs[entry])
extstack = M2Crypto.X509.X509_Extension_Stack()
for extname, extlongname in six.iteritems(EXT_NAME_MAPPINGS):
        if extname not in kwargs and extlongname not in kwargs:
            continue
        extval = kwargs.get(extname) or kwargs.get(extlongname)
critical = False
if extval.startswith('critical '):
critical = True
extval = extval[9:]
issuer = None
ext = _new_extension(name=extname, value=extval, critical=critical, issuer=issuer)
if not ext.x509_ext:
log.info('Invalid X509v3 Extension. {0}: {1}'.format(extname, extval))
continue
extstack.push(ext)
csr.add_extensions(extstack)
if path:
return write_pem(text=csr.as_pem(), path=path,
pem_type='CERTIFICATE REQUEST')
else:
return csr.as_pem()
def verify_private_key(private_key, public_key):
'''
Verify that 'private_key' matches 'public_key'
private_key:
The private key to verify, can be a string or path to a private key in PEM format.
public_key:
The public key to verify, can be a string or path to a PEM formatted certificate, csr,
or another private key.
CLI Example:
.. code-block:: bash
salt '*' x509.verify_private_key private_key=/etc/pki/myca.key public_key=/etc/pki/myca.crt
'''
return bool(get_public_key(private_key) == get_public_key(public_key))
def verify_signature(certificate, signing_pub_key=None):
'''
Verify that ``certificate`` has been signed by ``signing_pub_key``
certificate:
The certificate to verify. Can be a path or string containing a PEM formatted certificate.
signing_pub_key:
The public key to verify, can be a string or path to a PEM formatted certificate, csr,
or private key.
CLI Example:
.. code-block:: bash
        salt '*' x509.verify_signature /etc/pki/mycert.pem signing_pub_key=/etc/pki/myca.crt
'''
cert = _get_certificate_obj(certificate)
if signing_pub_key:
signing_pub_key = get_public_key(signing_pub_key, asObj=True)
return bool(cert.verify(pkey=signing_pub_key) == 1)
def verify_crl(crl, cert):
'''
Validate a CRL against a certificate.
    Uses the openssl command line tool; this is a workaround for M2Crypto's
    lack of support for CRL verification.
crl:
The CRL to verify
cert:
The certificate to verify the CRL against
CLI Example:
.. code-block:: bash
salt '*' x509.verify_crl crl=/etc/pki/myca.crl cert=/etc/pki/myca.crt
'''
crltext = _text_or_file(crl)
crltext = get_pem_entry(crltext, pem_type='X509 CRL')
crltempfile = tempfile.NamedTemporaryFile()
crltempfile.write(crltext)
crltempfile.flush()
certtext = _text_or_file(cert)
certtext = get_pem_entry(certtext, pem_type='CERTIFICATE')
certtempfile = tempfile.NamedTemporaryFile()
certtempfile.write(certtext)
certtempfile.flush()
cmd = ('openssl crl -noout -in {0} -CAfile {1}'.format(crltempfile.name, certtempfile.name))
output = __salt__['cmd.run_stdout'](cmd)
crltempfile.close()
certtempfile.close()
if 'verify OK' in output:
return True
else:
return False
|
stephane-martin/salt-debian-packaging
|
salt-2016.3.2/salt/modules/x509.py
|
Python
|
apache-2.0
| 46,018 | 0.003607 |
# -*- coding: utf-8 -*-
import os
import httplib
import logging
import functools
from modularodm.exceptions import ValidationValueError
from framework.exceptions import HTTPError
from framework.analytics import update_counter
from website.addons.osfstorage import settings
logger = logging.getLogger(__name__)
LOCATION_KEYS = ['service', settings.WATERBUTLER_RESOURCE, 'object']
def update_analytics(node, file_id, version_idx):
"""
:param Node node: Root node to update
:param str file_id: The _id field of a filenode
:param int version_idx: Zero-based version index
"""
update_counter(u'download:{0}:{1}'.format(node._id, file_id))
update_counter(u'download:{0}:{1}:{2}'.format(node._id, file_id, version_idx))
def serialize_revision(node, record, version, index, anon=False):
"""Serialize revision for use in revisions table.
:param Node node: Root node
:param FileRecord record: Root file record
:param FileVersion version: The version to serialize
:param int index: One-based index of version
"""
if anon:
user = None
else:
user = {
'name': version.creator.fullname,
'url': version.creator.url,
}
return {
'user': user,
'index': index + 1,
'date': version.date_created.isoformat(),
'downloads': record.get_download_count(version=index),
'md5': version.metadata.get('md5'),
'sha256': version.metadata.get('sha256'),
}
SIGNED_REQUEST_ERROR = HTTPError(
httplib.SERVICE_UNAVAILABLE,
data={
'message_short': 'Upload service unavailable',
'message_long': (
'Upload service is not available; please retry '
'your upload in a moment'
),
},
)
def get_filename(version_idx, file_version, file_record):
"""Build name for downloaded file, appending version date if not latest.
:param int version_idx: One-based version index
:param FileVersion file_version: Version to name
:param FileRecord file_record: Root file object
"""
if version_idx == len(file_record.versions):
return file_record.name
name, ext = os.path.splitext(file_record.name)
return u'{name}-{date}{ext}'.format(
name=name,
date=file_version.date_created.isoformat(),
ext=ext,
)
def validate_location(value):
for key in LOCATION_KEYS:
if key not in value:
raise ValidationValueError
def must_be(_type):
"""A small decorator factory for OsfStorageFileNode. Acts as a poor mans
polymorphic inheritance, ensures that the given instance is of "kind" folder or file
"""
def _must_be(func):
@functools.wraps(func)
def wrapped(self, *args, **kwargs):
if not self.kind == _type:
raise ValueError('This instance is not a {}'.format(_type))
return func(self, *args, **kwargs)
return wrapped
return _must_be
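# A minimal usage sketch (the Thing class below is illustrative, not an OSF model):
# methods guarded with @must_be('folder') raise ValueError unless self.kind matches.
if __name__ == '__main__':
    class Thing(object):
        def __init__(self, kind):
            self.kind = kind
        @must_be('folder')
        def children_names(self):
            return []
    print(Thing('folder').children_names())  # ok -> []
    # Thing('file').children_names() would raise ValueError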
def copy_files(src, target_settings, parent=None, name=None):
"""Copy the files from src to the target nodesettings
:param OsfStorageFileNode src: The source to copy children from
:param OsfStorageNodeSettings target_settings: The node settings of the project to copy files to
    :param OsfStorageFileNode parent: The parent to attach the clone of src to, if applicable
"""
cloned = src.clone()
cloned.parent = parent
cloned.name = name or cloned.name
cloned.node_settings = target_settings
if src.is_file:
cloned.versions = src.versions
cloned.save()
if src.is_folder:
for child in src.children:
copy_files(child, target_settings, parent=cloned)
return cloned
|
ticklemepierce/osf.io
|
website/addons/osfstorage/utils.py
|
Python
|
apache-2.0
| 3,726 | 0.001074 |
# Copyright 2009-2014 Justin Riley
#
# This file is part of StarCluster.
#
# StarCluster is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# StarCluster is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with StarCluster. If not, see <http://www.gnu.org/licenses/>.
import os
import posixpath
from starcluster import utils
from starcluster import static
from starcluster import exception
from starcluster import clustersetup
from starcluster.logger import log
class CreateUsers(clustersetup.DefaultClusterSetup):
"""
Plugin for creating one or more cluster users
"""
DOWNLOAD_KEYS_DIR = os.path.join(static.STARCLUSTER_CFG_DIR, 'user_keys')
BATCH_USER_FILE = "/root/.users/users.txt"
def __init__(self, num_users=None, usernames=None, download_keys=None,
download_keys_dir=None):
if usernames:
usernames = [user.strip() for user in usernames.split(',')]
if num_users:
try:
num_users = int(num_users)
except ValueError:
raise exception.BaseException("num_users must be an integer")
elif usernames:
num_users = len(usernames)
else:
raise exception.BaseException(
"you must provide num_users or usernames or both")
if usernames and num_users and len(usernames) != num_users:
raise exception.BaseException(
"only %d usernames provided - %d required" %
(len(usernames), num_users))
self._num_users = num_users
if not usernames:
usernames = ['user%.3d' % i for i in range(1, num_users + 1)]
log.info("CreateUsers: %s" % usernames)
self._usernames = usernames
self._download_keys = str(download_keys).lower() == "true"
self._download_keys_dir = download_keys_dir or self.DOWNLOAD_KEYS_DIR
super(CreateUsers, self).__init__()
def run(self, nodes, master, user, user_shell, volumes):
self._nodes = nodes
self._master = master
self._user = user
self._user_shell = user_shell
self._volumes = volumes
log.info("Creating %d cluster users" % self._num_users)
newusers = self._get_newusers_batch_file(master, self._usernames,
user_shell)
for node in nodes:
self.pool.simple_job(node.ssh.execute,
("echo -n '%s' | xargs -L 1 -I '{}' sh -c 'echo {} | newusers'" % newusers),
jobid=node.alias)
self.pool.wait(numtasks=len(nodes))
for node in nodes:
add_group_str = "grep -q dl-fte /etc/group || groupadd -g 10123 dl-fte"
self.pool.simple_job(node.ssh.execute,
(add_group_str),
jobid=node.alias)
add_user_str = "; ".join(
["usermod -a -G docker,dl-fte %s" % u for u in self._usernames])
self.pool.simple_job(node.ssh.execute,
(add_user_str),
jobid=node.alias)
self.pool.wait(numtasks=len(nodes))
log.info("Configuring passwordless ssh for %d cluster users" %
self._num_users)
pbar = self.pool.progress_bar.reset()
pbar.maxval = self._num_users
for i, user in enumerate(self._usernames):
master.generate_key_for_user(user, auth_new_key=True,
auth_conn_key=True)
master.add_to_known_hosts(user, nodes)
pbar.update(i + 1)
pbar.finish()
self._setup_scratch(nodes, self._usernames)
if self._download_keys:
self._download_user_keys(master, self._usernames)
def _download_user_keys(self, master, usernames):
pardir = posixpath.dirname(self.BATCH_USER_FILE)
bfile = posixpath.basename(self.BATCH_USER_FILE)
if not master.ssh.isdir(pardir):
master.ssh.makedirs(pardir)
log.info("Tarring all SSH keys for cluster users...")
for user in usernames:
master.ssh.execute(
"cp /home/%(user)s/.ssh/id_rsa %(keydest)s" %
dict(user=user, keydest=posixpath.join(pardir, user + '.rsa')))
cluster_tag = master.cluster_groups[0].name.replace(
static.SECURITY_GROUP_PREFIX, '')
tarfile = "%s-%s.tar.gz" % (cluster_tag, master.region.name)
master.ssh.execute("tar -C %s -czf ~/%s . --exclude=%s" %
(pardir, tarfile, bfile))
if not os.path.exists(self._download_keys_dir):
os.makedirs(self._download_keys_dir)
log.info("Copying cluster users SSH keys to: %s" %
os.path.join(self._download_keys_dir, tarfile))
master.ssh.get(tarfile, self._download_keys_dir)
master.ssh.unlink(tarfile)
def _get_newusers_batch_file(self, master, usernames, shell,
batch_file=None):
batch_file = batch_file or self.BATCH_USER_FILE
# False here to avoid the incorrect optimization
# (when new users are added)
if False and master.ssh.isfile(batch_file):
bfile = master.ssh.remote_file(batch_file, 'r')
bfilecontents = bfile.read()
bfile.close()
return bfilecontents
bfilecontents = ''
tmpl = "%(username)s:%(password)s:%(uid)d:%(gid)d:"
tmpl += "Cluster user account %(username)s:"
tmpl += "/home/%(username)s:%(shell)s\n"
shpath = master.ssh.which(shell)[0]
ctx = dict(shell=shpath)
base_uid, base_gid = self._get_max_unused_user_id()
for user in usernames:
home_folder = '/home/%s' % user
if master.ssh.path_exists(home_folder):
s = master.ssh.stat(home_folder)
uid = s.st_uid
gid = s.st_gid
else:
uid = base_uid
gid = base_gid
base_uid += 1
base_gid += 1
passwd = utils.generate_passwd(8)
ctx.update(username=user, uid=uid, gid=gid, password=passwd)
bfilecontents += tmpl % ctx
pardir = posixpath.dirname(batch_file)
if not master.ssh.isdir(pardir):
master.ssh.makedirs(pardir)
bfile = master.ssh.remote_file(batch_file, 'w')
bfile.write(bfilecontents)
bfile.close()
return bfilecontents
def on_add_node(self, node, nodes, master, user, user_shell, volumes):
self._nodes = nodes
self._master = master
self._user = user
self._user_shell = user_shell
self._volumes = volumes
log.info("Creating %d users on %s" % (self._num_users, node.alias))
newusers = self._get_newusers_batch_file(master, self._usernames,
user_shell)
node.ssh.execute("echo -n '%s' | xargs -L 1 -I '{}' sh -c 'echo {} | newusers'" % newusers)
log.info("Adding %s to known_hosts for %d users" %
(node.alias, self._num_users))
pbar = self.pool.progress_bar.reset()
pbar.maxval = self._num_users
for i, user in enumerate(self._usernames):
master.add_to_known_hosts(user, [node])
pbar.update(i + 1)
pbar.finish()
add_group_str = "grep -q dl-fte /etc/group || groupadd -g 10123 dl-fte"
node.ssh.execute(add_group_str)
add_user_str = "; ".join(
["usermod -a -G docker,dl-fte %s" % u for u in self._usernames])
node.ssh.execute(add_user_str)
self._setup_scratch(nodes=[node], users=self._usernames)
def on_remove_node(self, node, nodes, master, user, user_shell, volumes):
raise NotImplementedError('on_remove_node method not implemented')
|
cancan101/StarCluster
|
starcluster/plugins/users.py
|
Python
|
lgpl-3.0
| 8,400 | 0.000476 |
from __future__ import absolute_import
import logging
import six
from django.core.urlresolvers import reverse
from sentry.exceptions import InvalidIdentity, PluginError
from sentry.integrations.exceptions import IntegrationError
from sentry.models import Deploy, LatestRelease, Release, ReleaseHeadCommit, Repository, User
from sentry.plugins import bindings
from sentry.tasks.base import instrumented_task, retry
from sentry.utils.email import MessageBuilder
from sentry.utils.http import absolute_uri
logger = logging.getLogger(__name__)
def generate_invalid_identity_email(identity, commit_failure=False):
new_context = {
"identity": identity,
"auth_url": absolute_uri(reverse("socialauth_associate", args=[identity.provider])),
"commit_failure": commit_failure,
}
return MessageBuilder(
subject="Unable to Fetch Commits" if commit_failure else "Action Required",
context=new_context,
template="sentry/emails/identity-invalid.txt",
html_template="sentry/emails/identity-invalid.html",
)
def generate_fetch_commits_error_email(release, error_message):
new_context = {"release": release, "error_message": error_message}
return MessageBuilder(
subject="Unable to Fetch Commits",
context=new_context,
template="sentry/emails/unable-to-fetch-commits.txt",
html_template="sentry/emails/unable-to-fetch-commits.html",
)
# we're future proofing this function a bit so it could be used with other code
def handle_invalid_identity(identity, commit_failure=False):
# email the user
msg = generate_invalid_identity_email(identity, commit_failure)
msg.send_async(to=[identity.user.email])
# now remove the identity, as its invalid
identity.delete()
@instrumented_task(
name="sentry.tasks.commits.fetch_commits",
queue="commits",
default_retry_delay=60 * 5,
max_retries=5,
)
@retry(exclude=(Release.DoesNotExist, User.DoesNotExist))
def fetch_commits(release_id, user_id, refs, prev_release_id=None, **kwargs):
    # TODO(dcramer): this function could use some cleanup/refactoring as it's a bit unwieldy
commit_list = []
release = Release.objects.get(id=release_id)
user = User.objects.get(id=user_id)
prev_release = None
if prev_release_id is not None:
try:
prev_release = Release.objects.get(id=prev_release_id)
except Release.DoesNotExist:
pass
for ref in refs:
try:
repo = Repository.objects.get(
organization_id=release.organization_id, name=ref["repository"]
)
except Repository.DoesNotExist:
logger.info(
"repository.missing",
extra={
"organization_id": release.organization_id,
"user_id": user_id,
"repository": ref["repository"],
},
)
continue
binding_key = (
"integration-repository.provider"
if is_integration_provider(repo.provider)
else "repository.provider"
)
try:
provider_cls = bindings.get(binding_key).get(repo.provider)
except KeyError:
continue
        # if a previous commit isn't provided, try to get it from the previous
        # release; otherwise, try to get recent commits from the provider api
start_sha = None
if ref.get("previousCommit"):
start_sha = ref["previousCommit"]
elif prev_release:
try:
start_sha = ReleaseHeadCommit.objects.filter(
organization_id=release.organization_id,
release=prev_release,
repository_id=repo.id,
).values_list("commit__key", flat=True)[0]
except IndexError:
pass
end_sha = ref["commit"]
provider = provider_cls(id=repo.provider)
try:
if is_integration_provider(provider.id):
repo_commits = provider.compare_commits(repo, start_sha, end_sha)
else:
repo_commits = provider.compare_commits(repo, start_sha, end_sha, actor=user)
except NotImplementedError:
pass
except Exception as exc:
logger.info(
"fetch_commits.error",
extra={
"organization_id": repo.organization_id,
"user_id": user_id,
"repository": repo.name,
"provider": provider.id,
"error": six.text_type(exc),
"end_sha": end_sha,
"start_sha": start_sha,
},
)
if isinstance(exc, InvalidIdentity) and getattr(exc, "identity", None):
handle_invalid_identity(identity=exc.identity, commit_failure=True)
elif isinstance(exc, (PluginError, InvalidIdentity, IntegrationError)):
msg = generate_fetch_commits_error_email(release, exc.message)
msg.send_async(to=[user.email])
else:
msg = generate_fetch_commits_error_email(
release, "An internal system error occurred."
)
msg.send_async(to=[user.email])
else:
logger.info(
"fetch_commits.complete",
extra={
"organization_id": repo.organization_id,
"user_id": user_id,
"repository": repo.name,
"end_sha": end_sha,
"start_sha": start_sha,
"num_commits": len(repo_commits or []),
},
)
commit_list.extend(repo_commits)
if commit_list:
release.set_commits(commit_list)
deploys = Deploy.objects.filter(
organization_id=release.organization_id, release=release, notified=False
).values_list("id", "environment_id", "date_finished")
    # XXX(dcramer): i don't know why this would have multiple environments, but for
    # our sanity let's assume it can
pending_notifications = []
last_deploy_per_environment = {}
for deploy_id, environment_id, date_finished in deploys:
last_deploy_per_environment[environment_id] = (deploy_id, date_finished)
pending_notifications.append(deploy_id)
repo_queryset = ReleaseHeadCommit.objects.filter(
organization_id=release.organization_id, release=release
).values_list("repository_id", "commit")
# we need to mark LatestRelease, but only if there's not a deploy which has completed
# *after* this deploy (given we might process commits out of order)
for repository_id, commit_id in repo_queryset:
for environment_id, (deploy_id, date_finished) in six.iteritems(
last_deploy_per_environment
):
if not Deploy.objects.filter(
id__in=LatestRelease.objects.filter(
repository_id=repository_id, environment_id=environment_id
).values("deploy_id"),
date_finished__gt=date_finished,
).exists():
LatestRelease.objects.create_or_update(
repository_id=repository_id,
environment_id=environment_id,
values={
"release_id": release.id,
"deploy_id": deploy_id,
"commit_id": commit_id,
},
)
for deploy_id in pending_notifications:
Deploy.notify_if_ready(deploy_id, fetch_complete=True)
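# Providers registered through the integrations framework use an
# "integrations:" prefix in their provider id (for example "integrations:github"),
# which is what the helper below checks when picking between the
# "integration-repository.provider" and "repository.provider" bindings above.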
def is_integration_provider(provider):
return provider and provider.startswith("integrations:")
|
mvaled/sentry
|
src/sentry/tasks/commits.py
|
Python
|
bsd-3-clause
| 7,974 | 0.001756 |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import getpass
import grp
import os
import pwd
import shutil
import subprocess
from abc import abstractmethod, abstractproperty
from twitter.common import log
from twitter.common.dirutil import safe_mkdir, safe_rmtree, touch
from twitter.common.lang import Interface
from gen.apache.aurora.api.constants import TASK_FILESYSTEM_MOUNT_POINT
class SandboxInterface(Interface):
class Error(Exception): pass
class CreationError(Error): pass
class DeletionError(Error): pass
@abstractproperty
def root(self):
"""Return the root path of the sandbox within the host filesystem."""
@abstractproperty
def container_root(self):
"""Return the root path of the sandbox as it's visible to the running task."""
@abstractproperty
def chrooted(self):
"""Returns whether or not the sandbox is a chroot."""
@abstractproperty
def is_filesystem_image(self):
"""Returns whether or not the task is using a filesystem image."""
@abstractmethod
def exists(self):
"""Returns true if the sandbox appears to exist."""
@abstractmethod
def create(self, *args, **kw):
"""Create the sandbox."""
@abstractmethod
def destroy(self, *args, **kw):
"""Destroy the sandbox."""
class SandboxProvider(Interface):
def _get_sandbox_user(self, assigned_task):
return assigned_task.task.job.role
@abstractmethod
def from_assigned_task(self, assigned_task):
"""Return the appropriate Sandbox implementation from an AssignedTask."""
class DefaultSandboxProvider(SandboxProvider):
MESOS_DIRECTORY_ENV_VARIABLE = 'MESOS_DIRECTORY'
def from_assigned_task(self, assigned_task, **kwargs):
mesos_dir = os.environ[self.MESOS_DIRECTORY_ENV_VARIABLE]
container = assigned_task.task.container
if container.docker:
return DockerDirectorySandbox(mesos_dir, **kwargs)
elif container.mesos and container.mesos.image:
return FileSystemImageSandbox(
mesos_dir,
user=self._get_sandbox_user(assigned_task),
**kwargs)
else:
return DirectorySandbox(mesos_dir, user=self._get_sandbox_user(assigned_task), **kwargs)
class DirectorySandbox(SandboxInterface):
""" Basic sandbox implementation using a directory on the filesystem """
SANDBOX_NAME = 'sandbox'
def __init__(self, mesos_dir, user=getpass.getuser(), **kwargs):
self._mesos_dir = mesos_dir
self._user = user
@property
def root(self):
return os.path.join(self._mesos_dir, self.SANDBOX_NAME)
@property
def container_root(self):
return self.root
@property
def chrooted(self):
return False
@property
def is_filesystem_image(self):
return False
def exists(self):
return os.path.exists(self.root)
def get_user_and_group(self):
try:
pwent = pwd.getpwnam(self._user)
grent = grp.getgrgid(pwent.pw_gid)
return (pwent, grent)
except KeyError:
raise self.CreationError(
'Could not create sandbox because user does not exist: %s' % self._user)
def create(self):
log.debug('DirectorySandbox: mkdir %s' % self.root)
try:
safe_mkdir(self.root)
except (IOError, OSError) as e:
raise self.CreationError('Failed to create the sandbox: %s' % e)
if self._user:
pwent, grent = self.get_user_and_group()
try:
# Mesos provides a sandbox directory with permission 0750 owned by the user of the executor.
# In case of Thermos this is `root`, as Thermos takes the responsibility to drop
        # privileges to the designated non-privileged user/role. To ensure non-privileged processes
# can still read their sandbox, Thermos must also update the permissions of the scratch
# directory created by Mesos.
# This is necessary since Mesos 1.6.0 (https://issues.apache.org/jira/browse/MESOS-8332).
log.debug('DirectorySandbox: chown %s:%s %s' % (self._user, grent.gr_name, self._mesos_dir))
os.chown(self._mesos_dir, pwent.pw_uid, pwent.pw_gid)
log.debug('DirectorySandbox: chown %s:%s %s' % (self._user, grent.gr_name, self.root))
os.chown(self.root, pwent.pw_uid, pwent.pw_gid)
log.debug('DirectorySandbox: chmod 700 %s' % self.root)
os.chmod(self.root, 0700)
except (IOError, OSError) as e:
raise self.CreationError('Failed to chown/chmod the sandbox: %s' % e)
def destroy(self):
try:
safe_rmtree(self.root)
except (IOError, OSError) as e:
raise self.DeletionError('Failed to destroy sandbox: %s' % e)
class DockerDirectorySandbox(DirectorySandbox):
""" A sandbox implementation that configures the sandbox correctly for docker containers. """
def __init__(self, mesos_dir, **kwargs):
# remove the user value from kwargs if it was set.
kwargs.pop('user', None)
super(DockerDirectorySandbox, self).__init__(mesos_dir, user=None, **kwargs)
def _create_symlinks(self):
# This sets up the container to have a similar directory structure to the host.
# It takes self._mesos_dir (e.g. "[exec-root]/runs/RUN1/") and:
# - Sets mesos_host_sandbox_root = "[exec-root]/runs/" (one level up from mesos_host_sandbox)
# - Creates mesos_host_sandbox_root (recursively)
# - Symlinks self._mesos_dir -> $MESOS_SANDBOX (typically /mnt/mesos/sandbox)
# $MESOS_SANDBOX is provided in the environment by the Mesos containerizer.
mesos_host_sandbox_root = os.path.dirname(self._mesos_dir)
try:
safe_mkdir(mesos_host_sandbox_root)
os.symlink(os.environ['MESOS_SANDBOX'], self._mesos_dir)
except (IOError, OSError) as e:
raise self.CreationError('Failed to create the sandbox root: %s' % e)
def create(self):
self._create_symlinks()
super(DockerDirectorySandbox, self).create()
class FileSystemImageSandbox(DirectorySandbox):
"""
A sandbox implementation that configures the sandbox correctly for tasks provisioned from a
filesystem image.
"""
# returncode from a `useradd` or `groupadd` call indicating that the uid/gid already exists.
_USER_OR_GROUP_ID_EXISTS = 4
# returncode from a `useradd` or `groupadd` call indicating that the user/group name
# already exists.
_USER_OR_GROUP_NAME_EXISTS = 9
def __init__(self, mesos_dir, **kwargs):
self._task_fs_root = os.path.join(mesos_dir, TASK_FILESYSTEM_MOUNT_POINT)
self._no_create_user = kwargs.pop('no_create_user', False)
self._mounted_volume_paths = kwargs.pop('mounted_volume_paths', None)
self._sandbox_mount_point = kwargs.pop('sandbox_mount_point', None)
if self._sandbox_mount_point is None:
raise self.Error(
'Failed to initialize FileSystemImageSandbox: no value specified for sandbox_mount_point')
super(FileSystemImageSandbox, self).__init__(mesos_dir, **kwargs)
def _verify_group_match_in_taskfs(self, group_id, group_name):
try:
result = subprocess.check_output(
['chroot', self._task_fs_root, 'getent', 'group', group_name])
except subprocess.CalledProcessError as e:
raise self.CreationError(
'Error when getting group id for name %s in task image: %s' % (
group_name, e))
splitted = result.split(':')
if (len(splitted) < 3 or splitted[0] != '%s' % group_name or
splitted[2] != '%s' % group_id):
raise self.CreationError(
'Group id result %s from image does not match name %s and id %s' % (
result, group_name, group_id))
def _verify_user_match_in_taskfs(self, user_id, user_name, group_id, group_name):
try:
result = subprocess.check_output(
['chroot', self._task_fs_root, 'id', '%s' % user_name])
except subprocess.CalledProcessError as e:
raise self.CreationError(
'Error when getting user id for name %s in task image: %s' % (
user_name, e))
expected_prefix = "uid=%s(%s) gid=%s(%s) groups=" % (user_id, user_name, group_id, group_name)
if not result.startswith(expected_prefix):
raise self.CreationError(
'User group result %s from task image does not start with expected prefix %s' % (
result, expected_prefix))
def _create_user_and_group_in_taskfs(self):
if self._user:
pwent, grent = self.get_user_and_group()
try:
subprocess.check_call(
['groupadd', '-R', self._task_fs_root, '-g', '%s' % grent.gr_gid, grent.gr_name])
except subprocess.CalledProcessError as e:
# If the failure was due to the group existing, we're ok to continue, otherwise bail out.
if e.returncode in [self._USER_OR_GROUP_ID_EXISTS, self._USER_OR_GROUP_NAME_EXISTS]:
self._verify_group_match_in_taskfs(grent.gr_gid, grent.gr_name)
log.info(
            'Group %s(%s) already exists in the task\'s filesystem, no need to create.' % (
grent.gr_name, grent.gr_gid))
else:
raise self.CreationError('Failed to create group in sandbox for task image: %s' % e)
try:
subprocess.check_call([
'useradd',
'-R',
self._task_fs_root,
'-u',
'%s' % pwent.pw_uid,
'-g', '%s' % pwent.pw_gid,
pwent.pw_name])
except subprocess.CalledProcessError as e:
# If the failure was due to the user existing, we're ok to continue, otherwise bail out.
if e.returncode in [self._USER_OR_GROUP_ID_EXISTS, self._USER_OR_GROUP_NAME_EXISTS]:
self._verify_user_match_in_taskfs(
pwent.pw_uid, pwent.pw_name, pwent.pw_gid, grent.gr_name)
log.info(
            'User %s (%s) already exists in the task\'s filesystem, no need to create.' % (
self._user, pwent.pw_uid))
else:
raise self.CreationError('Failed to create user in sandbox for task image: %s' % e)
def _mount_paths(self):
def do_mount(source, destination):
log.info('Mounting %s into task filesystem at %s.' % (source, destination))
# If we're mounting a file into the task filesystem, the mount call will fail if the mount
# point doesn't exist. In that case we'll create an empty file to mount over.
if os.path.isfile(source) and not os.path.exists(destination):
safe_mkdir(os.path.dirname(destination))
touch(destination)
else:
safe_mkdir(destination)
# This mount call is meant to mimic what mesos does when mounting into the container. C.f.
# https://github.com/apache/mesos/blob/c3228f3c3d1a1b2c145d1377185cfe22da6079eb/src/slave/containerizer/mesos/isolators/filesystem/linux.cpp#L521-L528
subprocess.check_call([
'mount',
'-n',
'--rbind',
source,
destination])
if self._mounted_volume_paths is not None:
for container_path in self._mounted_volume_paths:
if container_path != TASK_FILESYSTEM_MOUNT_POINT:
target = container_path.lstrip('/')
do_mount(container_path, os.path.join(self._task_fs_root, target))
do_mount(self.root, os.path.join(self._task_fs_root, self._sandbox_mount_point.lstrip('/')))
def _copy_files(self):
def copy_if_exists(source, destination):
if os.path.exists(source):
shutil.copy(source, destination)
log.info('Copying %s into task filesystem at %s.' % (source, destination))
# TODO(jpinkul): In Mesos the network/cni isolator is responsible for copying these network
# files but this logic is being bypassed at the moment due to shelling out to
# mesos-containerizer. Once this is no longer necessary this copy should be removed.
copy_if_exists('/etc/resolv.conf', os.path.join(self._task_fs_root, 'etc/resolv.conf'))
copy_if_exists('/etc/hosts', os.path.join(self._task_fs_root, 'etc/hosts'))
copy_if_exists('/etc/hostname', os.path.join(self._task_fs_root, 'etc/hostname'))
@property
def container_root(self):
return self._sandbox_mount_point
@property
def is_filesystem_image(self):
return True
def create(self):
if not self._no_create_user:
self._create_user_and_group_in_taskfs()
super(FileSystemImageSandbox, self).create()
self._mount_paths()
self._copy_files()
|
medallia/aurora
|
src/main/python/apache/aurora/executor/common/sandbox.py
|
Python
|
apache-2.0
| 12,730 | 0.012412 |
"""
Test models.
"""
from django.db.models import Model, CharField, BooleanField, DateTimeField
#raise IOError("A")
class Thing(Model):
name = CharField(max_length=32)
#value = BooleanField(default=False)
|
doctormo/django-autotest
|
testapp/models.py
|
Python
|
agpl-3.0
| 217 | 0.018433 |
# -*- coding: utf-8 -*-
# Copyright (c) 2016, Germán Fuentes Capella <development@fuentescapella.com>
# BSD 3-Clause License
#
# Copyright (c) 2017, Germán Fuentes Capella
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import ruamel.yaml
from clint.textui import puts, columns
from clint.textui.cols import console_width
def get_headers(dl):
headers = set()
for d in dl:
for key in d.keys():
headers.add(key)
headers = list(headers)
headers.sort()
return headers
def column_size(headers, dl):
csize = {}
for header in headers:
# initialize to the length of the key (header)
length = len(header)
for d in dl:
item_length = len(str(d.get(header, '')))
if item_length > length:
length = item_length
csize[header] = length
return csize
def _trim(value, length):
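    # e.g. _trim('abcdefgh', 5) -> 'ab...'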
value = str(value)
if len(value) > length:
value = value[0:length]
value = value[0:-3] + '...'
return value
def display_yaml(a_dict):
puts(ruamel.yaml.dump(a_dict, Dumper=ruamel.yaml.RoundTripDumper))
def display(dl):
"""
Displays a list of dicts (dl) that contain same keys
"""
headers = get_headers(dl)
csize = column_size(headers, dl)
cons_width = console_width({})
values = csize.values()
content_width = sum(values)
if content_width > cons_width:
# if content is bigger than console, switch to yaml format
output = {}
for d in dl:
key = d.get('label') or d.get('SUBID') or d.get('SCRIPTID')
output[key] = d
puts(ruamel.yaml.dump(output, Dumper=ruamel.yaml.RoundTripDumper))
else:
# otherwise, print a table
row = [[header, csize.get(header, '')] for header in headers]
puts(columns(*row))
for d in dl:
row = [[_trim(d.get(h, ''), csize[h]), csize[h]] for h in headers]
puts(columns(*row))
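# Illustrative usage (hypothetical data, not part of the original module):
#
#   display([{'label': 'web-1', 'ram': '1024 MB'},
#            {'label': 'web-2', 'ram': '2048 MB'}])
#
# prints a two-column table, or falls back to YAML keyed by 'label' (or
# 'SUBID'/'SCRIPTID') when the content is wider than the console.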
|
germfue/vps-tools
|
vps/console.py
|
Python
|
bsd-3-clause
| 3,431 | 0.000292 |
"""empty message
Revision ID: 36e92a9c018b
Revises: 4758ea467345
Create Date: 2015-05-06 02:42:39.064090
"""
# revision identifiers, used by Alembic.
revision = '36e92a9c018b'
down_revision = '4758ea467345'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.add_column(sa.Column('last_emailed', sa.DateTime(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.drop_column('last_emailed')
### end Alembic commands ###
|
alexgerstein/dartmouth-roommates
|
migrations/versions/36e92a9c018b_.py
|
Python
|
gpl-2.0
| 748 | 0.009358 |
# -*- coding: utf-8 -*-
# -- cmd -- pip install PyUserInput
import time
import win32gui
import win32con
import PIL.ImageGrab
from pymouse import PyMouse
PIECE_X = 44
PIECE_Y = 40
NUM_X = 14
NUM_Y = 10
def getOrigin():
cwllk = '宠物连连看'.decode('utf8')
hwnd = win32gui.FindWindow("#32770", cwllk)
print hwnd
#win32gui.ShowWindow(hwnd, win32con.SW_SHOWMINIMIZED)
win32gui.ShowWindow(hwnd, win32con.SW_SHOWNORMAL)
win32gui.SetForegroundWindow(hwnd)
rect = win32gui.GetWindowRect(hwnd)
time.sleep(0.5)
#print rect
#newRect = (rect[0] + 58, rect[1] + 104, rect[0] + 674, rect[1] + 504)
return rect
def getPic(RECT=None):
    """
    RECT = (x1,y1,x2,y2); defaults to the window rect from getOrigin()
    """
    pic = PIL.ImageGrab.grab(RECT or getOrigin())
    return pic
def pause():
    m = PyMouse()
    rect = getOrigin()
    m.click(rect[0] - 58 + 307, rect[1] - 104 + 62)
    time.sleep(0.5)
def click(pos):
'''
pos: (x, y) # (0, 0) for top left piece
'''
m = PyMouse()
m.click(pos[0],pos[1])
# only for test
if __name__ == '__main__':
pic = getPic()
pic.save("screenshot" + ".png")
click((0,0))
click((0,9))
click((13,9))
click((13,0))
pause()
|
TarnumG95/PictureMatchCheater
|
merge/UI.py
|
Python
|
mit
| 1,196 | 0.016863 |
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from flask import redirect, flash, session, request
from sqlalchemy.orm import joinedload
from werkzeug.exceptions import Forbidden
from indico.core.db import db
from indico.modules.auth.util import redirect_to_login
from indico.modules.events.models.events import EventType
from indico.modules.events.surveys.models.items import SurveySection
from indico.modules.events.surveys.models.submissions import SurveyAnswer, SurveySubmission
from indico.modules.events.surveys.models.surveys import Survey, SurveyState
from indico.modules.events.surveys.util import make_survey_form, was_survey_submitted, save_submitted_survey_to_session
from indico.modules.events.surveys.views import (WPDisplaySurveyConference, WPDisplaySurveyMeeting,
WPDisplaySurveyLecture)
from indico.util.i18n import _
from indico.web.flask.util import url_for
from MaKaC.webinterface.rh.conferenceDisplay import RHConferenceBaseDisplay
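# The survey list page is skipped entirely when there is exactly one visible,
# active survey that the current user has not yet submitted (see the helper
# below).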
def _can_redirect_to_single_survey(surveys):
return len(surveys) == 1 and surveys[0].is_active and not was_survey_submitted(surveys[0])
class RHSurveyBaseDisplay(RHConferenceBaseDisplay):
def _checkParams(self, params):
RHConferenceBaseDisplay._checkParams(self, params)
self.event = self._conf
@property
def view_class(self):
mapping = {EventType.conference: WPDisplaySurveyConference,
EventType.meeting: WPDisplaySurveyMeeting,
EventType.lecture: WPDisplaySurveyLecture}
return mapping[self.event_new.type_]
class RHSurveyList(RHSurveyBaseDisplay):
def _process(self):
surveys = Survey.find_all(Survey.is_visible, Survey.event_id == int(self.event.id),
_eager=(Survey.questions, Survey.submissions))
if _can_redirect_to_single_survey(surveys):
return redirect(url_for('.display_survey_form', surveys[0]))
return self.view_class.render_template('display/survey_list.html', self.event, surveys=surveys,
event=self.event, states=SurveyState,
was_survey_submitted=was_survey_submitted)
class RHSubmitSurvey(RHSurveyBaseDisplay):
CSRF_ENABLED = True
normalize_url_spec = {
'locators': {
lambda self: self.survey
}
}
def _checkProtection(self):
RHSurveyBaseDisplay._checkProtection(self)
if self.survey.require_user and not session.user:
raise Forbidden(response=redirect_to_login(reason=_('You are trying to answer a survey '
'that requires you to be logged in')))
def _checkParams(self, params):
RHSurveyBaseDisplay._checkParams(self, params)
self.survey = (Survey
.find(Survey.id == request.view_args['survey_id'], Survey.is_visible)
.options(joinedload(Survey.submissions))
.options(joinedload(Survey.sections).joinedload(SurveySection.children))
.one())
if not self.survey.is_active:
flash(_('This survey is not active'), 'error')
return redirect(url_for('.display_survey_list', self.event))
elif was_survey_submitted(self.survey):
flash(_('You have already answered this survey'), 'error')
return redirect(url_for('.display_survey_list', self.event))
def _process(self):
form = make_survey_form(self.survey)()
if form.validate_on_submit():
submission = self._save_answers(form)
save_submitted_survey_to_session(submission)
self.survey.send_submission_notification(submission)
            flash(_('Your answers have been saved'), 'success')
return redirect(url_for('.display_survey_list', self.event))
surveys = Survey.find_all(Survey.is_visible, Survey.event_id == int(self.event.id))
if not _can_redirect_to_single_survey(surveys):
back_button_endpoint = '.display_survey_list'
elif self.event.getType() != 'conference':
back_button_endpoint = 'event.conferenceDisplay'
else:
back_button_endpoint = None
return self.view_class.render_template('display/survey_questionnaire.html', self.event, form=form,
event=self.event, survey=self.survey,
back_button_endpoint=back_button_endpoint)
def _save_answers(self, form):
survey = self.survey
submission = SurveySubmission(survey=survey)
if not survey.anonymous:
submission.user = session.user
for question in survey.questions:
answer = SurveyAnswer(question=question, data=getattr(form, 'question_{}'.format(question.id)).data)
submission.answers.append(answer)
db.session.flush()
return submission
|
DavidAndreev/indico
|
indico/modules/events/surveys/controllers/display.py
|
Python
|
gpl-3.0
| 5,780 | 0.003114 |
# benchmark_writer.py ---
#
# Filename: benchmark_writer.py
# Description:
# Author: Subhasis Ray
# Maintainer:
# Created: Wed Sep 3 10:22:50 2014 (+0530)
# Version:
# Last-Updated:
# By:
# Update #: 0
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
"""This script benchmarks the nsdf writer using randomly generated
data.
Note that we violate the unique source id requirement here.
"""
import sys
import argparse
from collections import defaultdict
import numpy as np
from numpy import testing as nptest
import h5py as h5
from datetime import datetime
import unittest
import os
import socket
sys.path.append('..')
import nsdf
DATADIR = '/data/subha/nsdf_samples/benchmark'
HOSTNAME = socket.gethostname()
PID = os.getpid()
TIMESTAMP = datetime.now().strftime('%Y%m%d_%H%M%S')
np.random.seed(1) # For reproducibility
def get_poisson_times(npoints, rate):
"""Return `npoints` time points from a Poisson event with rate
`rate`"""
scale = 1.0/rate
return np.cumsum(np.random.exponential(scale=scale, size=npoints))
def create_uniform_data(name, num_sources, num_cols):
"""Create data for m=`num_sources`, each n=`num_cols` long."""
data = nsdf.UniformData(name, field='Vm', unit='V', dt=1e-5, tunit='s', dtype=np.float32)
for ii in range(num_sources):
data.put_data('src_{}'.format(ii), np.random.rand(num_cols))
return data
def create_nonuniform_data(name, num_sources, mincol, maxcol):
"""Create nonuniform data for m=`num_sources`, the number of sampling
points n for each source is randomly chosen between `mincol` and
`maxcol`
"""
data = nsdf.NonuniformData(name, unit='V', tunit='s', dtype=np.float32, ttype=np.float32)
if mincol < maxcol:
ncols = np.random.randint(low=mincol, high=maxcol, size=num_sources)
else:
        ncols = np.ones(num_sources, dtype=int) * maxcol
for ii in range(num_sources):
value = np.random.rand(ncols[ii])
time = get_poisson_times(ncols[ii], 10)
data.put_data('src_{}'.format(ii), (value, time))
return data
def create_event_data(name, num_sources, mincol, maxcol):
"""Create event data for m=`num_sources`, the number of sampling
points for each source is randomly chosen between `mincol` and
`maxcol`
"""
data = nsdf.EventData(name, unit='s', dtype=np.float32)
ncols = np.random.randint(low=mincol, high=maxcol, size=num_sources)
for ii in range(num_sources):
data.put_data('src_{}'.format(ii), get_poisson_times(ncols[ii], 10))
return data
def create_uniform_vars(num_vars, num_sources, num_cols, prefix='var'):
"""Note that they all share the same sources."""
ret = []
for ii in range(num_vars):
ret.append(create_uniform_data('{}_{}'.format(prefix, ii),
num_sources,
num_cols))
return ret
def create_nonuniform_vars(num_vars, num_sources, mincol, maxcol, prefix='var'):
"""Note that they all share the same sources."""
ret = []
for ii in range(num_vars):
ret.append(create_nonuniform_data('{}_{}'.format(prefix, ii),
num_sources,
mincol, maxcol))
return ret
def create_event_vars(num_vars, num_sources, mincol, maxcol, prefix='var'):
"""Note that they all share the same sources."""
ret = []
for ii in range(num_vars):
ret.append(create_event_data('{}_{}'.format(prefix, ii),
num_sources, mincol, maxcol))
return ret
def create_datasets(args):
uvar_list = []
nvar_list = []
evar_list = []
if args.sampling:
if args.sampling.startswith('u'):
uvar_list = create_uniform_vars(args.variables,
args.sources,
(args.maxcol + args.mincol) / 2,
prefix='uniform')
elif args.sampling.startswith('n'):
nvar_list = create_nonuniform_vars(args.variables,
args.sources,
args.mincol,
args.maxcol,
prefix='nonuniform')
elif args.sampling.startswith('e'):
evar_list = create_event_vars(args.variables,
args.sources,
args.mincol,
args.maxcol,
prefix='event')
else:
uvar_list = create_uniform_vars(args.variables,
args.sources,
(args.maxcol + args.mincol) / 2,
prefix='uniform')
nvar_list = create_nonuniform_vars(args.variables,
args.sources,
args.mincol,
args.maxcol,
prefix='nonuniform')
evar_list = create_event_vars(args.variables,
args.sources,
args.mincol,
args.maxcol,
prefix='event')
return {'uniform': uvar_list,
'nonuniform': nvar_list,
'event': evar_list}
def write_incremental(writer, source_ds, data, step, maxcol, dialect):
for ii in range(0, maxcol + step - 1, step):
if isinstance(data, nsdf.UniformData):
tmp = nsdf.UniformData(data.name, unit=data.unit,
dt=data.dt, tunit=data.tunit, dtype=np.float32)
for src, value in data.get_source_data_dict().items():
tmp.put_data(src, value[ii: ii + step])
writer.add_uniform_data(source_ds, tmp)
elif isinstance(data, nsdf.NonuniformData):
tmp = nsdf.NonuniformData(data.name, unit=data.unit,
tunit=data.tunit, dtype=np.float32, ttype=np.float32)
for src, (value, time) in data.get_source_data_dict().items():
value_chunk = value[ii: ii+step]
time_chunk = time[ii: ii+step]
tmp.put_data(src, (value_chunk, time_chunk))
if dialect == nsdf.dialect.ONED:
writer.add_nonuniform_1d(source_ds, tmp)
elif dialect == nsdf.dialect.VLEN:
writer.add_nonuniform_vlen(source_ds, tmp)
else:
writer.add_nonuniform_nan(source_ds, tmp)
elif isinstance(data, nsdf.EventData):
tmp = nsdf.EventData(data.name, unit=data.unit, dtype=np.float32)
for src, value in data.get_source_data_dict().items():
value_chunk = value[ii: ii+step]
tmp.put_data(src, value_chunk)
if dialect == nsdf.dialect.ONED:
writer.add_event_1d(source_ds, tmp)
elif dialect == nsdf.dialect.VLEN:
writer.add_event_vlen(source_ds, tmp)
else:
writer.add_event_nan(source_ds, tmp)
def write_data(args, var_dict):
"""Write data from `var_dict` to benchmark files.
file names are of the form:
benchmark_out_{dialect}_{incremental}_{compression}.h5
where dialect is oned, vlen or nan,
    incremental is incr or fixed,
compression is compressed or uncompressed.
"""
if args.dialect == 'vlen':
dialect = nsdf.dialect.VLEN
elif args.dialect == 'nan':
dialect = nsdf.dialect.NANPADDED
else:
dialect = nsdf.dialect.ONED
filename = args.out
if not filename:
filename = 'benchmark_out_{0}_{1}_{2}_{3}_{4}_{5}_{6}.h5'.format(
dialect, args.sampling,
'incr' if (args.increment > 0) else 'fixed',
'compressed' if args.compress else 'uncompressed',
HOSTNAME,
PID, TIMESTAMP)
filepath = os.path.join(DATADIR, filename)
if args.compress:
writer = nsdf.NSDFWriter(filepath, dialect=dialect, mode='w',
compression='gzip',
compression_opts=6, fletcher32=True,
shuffle=True)
else:
writer = nsdf.NSDFWriter(filepath, dialect=dialect,
mode='w')
uvar_list = var_dict.get('uniform', [])
for uvar in uvar_list:
source_ds = writer.add_uniform_ds(uvar.name, uvar.get_sources())
if args.increment <= 0:
writer.add_uniform_data(source_ds, uvar, fixed=True)
else:
write_incremental(writer, source_ds, uvar,
args.increment, args.maxcol,
dialect)
nuvar_list = var_dict.get('nonuniform', [])
for nuvar in nuvar_list:
if dialect == nsdf.dialect.ONED:
source_ds = writer.add_nonuniform_ds_1d(nuvar.name,
nuvar.name,
nuvar.get_sources())
else:
source_ds = writer.add_nonuniform_ds(nuvar.name,
nuvar.get_sources())
if args.increment <= 0:
if dialect == nsdf.dialect.ONED:
writer.add_nonuniform_1d(source_ds, nuvar, fixed=True)
elif dialect == nsdf.dialect.VLEN:
writer.add_nonuniform_vlen(source_ds, nuvar, fixed=True)
if dialect == nsdf.dialect.NANPADDED:
writer.add_nonuniform_nan(source_ds, nuvar, fixed=True)
else:
write_incremental(writer, source_ds, nuvar,
args.increment, args.maxcol,
dialect)
evar_list = var_dict.get('event', [])
for evar in evar_list:
if dialect == nsdf.dialect.ONED:
source_ds = writer.add_event_ds_1d(evar.name,
evar.name,
evar.get_sources())
else:
source_ds = writer.add_event_ds(evar.name,
evar.get_sources())
if args.increment <= 0:
if dialect == nsdf.dialect.ONED:
writer.add_event_1d(source_ds, evar, fixed=True)
elif dialect == nsdf.dialect.VLEN:
writer.add_event_vlen(source_ds, evar, fixed=True)
if dialect == nsdf.dialect.NANPADDED:
writer.add_event_nan(source_ds, evar, fixed=True)
else:
write_incremental(writer, source_ds, evar,
args.increment, args.maxcol,
dialect)
print 'Saved data in', filepath
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Benchmark nsdf writer.')
parser.add_argument('-d', '--dialect',
help='dialect to use: oned, vlen or nan' )
parser.add_argument('-i', '--increment', type=int, default=0,
help='benchmark incremental writing with'
' specified step. 0 means fixed.')
parser.add_argument('-s', '--sampling', help='sampling type:'
' uniform/u, nonuniform/n, event/e' )
parser.add_argument('-m', '--mincol', type=int, default=1024,
help='minimum number of columns')
parser.add_argument('-n', '--maxcol', type=int, default=4096,
help='maximum number of columns')
parser.add_argument('-x', '--sources', type=int, default=1024,
help='number of sources')
parser.add_argument('-v', '--variables', type=int, default=1,
help='number of variables recorded')
parser.add_argument('-c', '--compress',
help='enable gzip compression with level=6',
action='store_true')
parser.add_argument('-o', '--out',
help='output data file')
args = parser.parse_args()
print args
data = create_datasets(args)
write_data(args, data)
#
# benchmark_writer.py ends here
|
nsdf/nsdf
|
benchmark/benchmark_writer.py
|
Python
|
gpl-3.0
| 13,434 | 0.00402 |
"""
compressible-specific boundary conditions. Here, in particular, we
implement an HSE BC in the vertical direction.
Note: the pyro BC routines operate on a single variable at a time, so
some work will necessarily be repeated.
Also note: we may come in here with the aux_data (source terms), so
we'll do a special case for them
"""
import compressible.eos as eos
from util import msg
import math
import numpy as np
def user(bc_name, bc_edge, variable, ccdata):
"""
A hydrostatic boundary. This integrates the equation of HSE into
the ghost cells to get the pressure and density under the assumption
that the specific internal energy is constant.
Upon exit, the ghost cells for the input variable will be set
Parameters
----------
bc_name : {'hse'}
The descriptive name for the boundary condition -- this allows
for pyro to have multiple types of user-supplied boundary
conditions. For this module, it needs to be 'hse'.
bc_edge : {'ylb', 'yrb'}
The boundary to update: ylb = lower y boundary; yrb = upper y
boundary.
variable : {'density', 'x-momentum', 'y-momentum', 'energy'}
The variable whose ghost cells we are filling
ccdata : CellCenterData2d object
The data object
"""
myg = ccdata.grid
if bc_name == "hse":
if bc_edge == "ylb":
# lower y boundary
# we will take the density to be constant, the velocity to
# be outflow, and the pressure to be in HSE
if variable in ["density", "x-momentum", "y-momentum", "ymom_src", "E_src", "fuel", "ash"]:
v = ccdata.get_var(variable)
j = myg.jlo-1
while j >= 0:
v[:, j] = v[:, myg.jlo]
j -= 1
elif variable == "energy":
dens = ccdata.get_var("density")
xmom = ccdata.get_var("x-momentum")
ymom = ccdata.get_var("y-momentum")
ener = ccdata.get_var("energy")
grav = ccdata.get_aux("grav")
gamma = ccdata.get_aux("gamma")
dens_base = dens[:, myg.jlo]
ke_base = 0.5*(xmom[:, myg.jlo]**2 + ymom[:, myg.jlo]**2) / \
dens[:, myg.jlo]
eint_base = (ener[:, myg.jlo] - ke_base)/dens[:, myg.jlo]
pres_base = eos.pres(gamma, dens_base, eint_base)
# we are assuming that the density is constant in this
# formulation of HSE, so the pressure comes simply from
# differencing the HSE equation
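                # Discretely, dP/dy = rho*g gives
                #   P_below = P_base - rho*g*dy
                # for each step toward the lower boundary (grav is signed, so a
                # negative grav makes the pressure grow downward).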
j = myg.jlo-1
while j >= 0:
pres_below = pres_base - grav*dens_base*myg.dy
rhoe = eos.rhoe(gamma, pres_below)
ener[:, j] = rhoe + ke_base
pres_base = pres_below.copy()
j -= 1
else:
raise NotImplementedError("variable not defined")
elif bc_edge == "yrb":
# upper y boundary
# we will take the density to be constant, the velocity to
# be outflow, and the pressure to be in HSE
if variable in ["density", "x-momentum", "y-momentum", "ymom_src", "E_src", "fuel", "ash"]:
v = ccdata.get_var(variable)
for j in range(myg.jhi+1, myg.jhi+myg.ng+1):
v[:, j] = v[:, myg.jhi]
elif variable == "energy":
dens = ccdata.get_var("density")
xmom = ccdata.get_var("x-momentum")
ymom = ccdata.get_var("y-momentum")
ener = ccdata.get_var("energy")
grav = ccdata.get_aux("grav")
gamma = ccdata.get_aux("gamma")
dens_base = dens[:, myg.jhi]
ke_base = 0.5*(xmom[:, myg.jhi]**2 + ymom[:, myg.jhi]**2) / \
dens[:, myg.jhi]
eint_base = (ener[:, myg.jhi] - ke_base)/dens[:, myg.jhi]
pres_base = eos.pres(gamma, dens_base, eint_base)
# we are assuming that the density is constant in this
# formulation of HSE, so the pressure comes simply from
# differencing the HSE equation
for j in range(myg.jhi+1, myg.jhi+myg.ng+1):
pres_above = pres_base + grav*dens_base*myg.dy
rhoe = eos.rhoe(gamma, pres_above)
ener[:, j] = rhoe + ke_base
pres_base = pres_above.copy()
else:
raise NotImplementedError("variable not defined")
else:
msg.fail("error: hse BC not supported for xlb or xrb")
elif bc_name == "ramp":
# Boundary conditions for double Mach reflection problem
gamma = ccdata.get_aux("gamma")
if bc_edge == "xlb":
# lower x boundary
# inflow condition with post shock setup
v = ccdata.get_var(variable)
i = myg.ilo - 1
if variable in ["density", "x-momentum", "y-momentum", "energy"]:
val = inflow_post_bc(variable, gamma)
while i >= 0:
v[i, :] = val
i = i - 1
else:
v[:, :] = 0.0 # no source term
elif bc_edge == "ylb":
# lower y boundary
# for x > 1./6., reflective boundary
# for x < 1./6., inflow with post shock setup
if variable in ["density", "x-momentum", "y-momentum", "energy"]:
v = ccdata.get_var(variable)
j = myg.jlo - 1
jj = 0
while j >= 0:
xcen_l = myg.x < 1.0/6.0
xcen_r = myg.x >= 1.0/6.0
v[xcen_l, j] = inflow_post_bc(variable, gamma)
if variable == "y-momentum":
v[xcen_r, j] = -1.0*v[xcen_r, myg.jlo+jj]
else:
v[xcen_r, j] = v[xcen_r, myg.jlo+jj]
j = j - 1
jj = jj + 1
else:
v = ccdata.get_var(variable)
v[:, :] = 0.0 # no source term
elif bc_edge == "yrb":
# upper y boundary
            # time-dependent boundary: the shock front moves at Mach 10, forming a
            # 30-degree (clockwise) angle with the x-axis.
            # The x coordinate of each cell is used to judge whether it lies in the
            # pure post-shock region, the pure pre-shock region, or the mixed region.
if variable in ["density", "x-momentum", "y-momentum", "energy"]:
v = ccdata.get_var(variable)
for j in range(myg.jhi+1, myg.jhi+myg.ng+1):
shockfront_up = 1.0/6.0 + (myg.y[j] + 0.5*myg.dy*math.sqrt(3))/math.tan(math.pi/3.0) \
+ (10.0/math.sin(math.pi/3.0))*ccdata.t
shockfront_down = 1.0/6.0 + (myg.y[j] - 0.5*myg.dy*math.sqrt(3))/math.tan(math.pi/3.0) \
+ (10.0/math.sin(math.pi/3.0))*ccdata.t
shockfront = np.array([shockfront_down, shockfront_up])
for i in range(myg.ihi+myg.ng+1):
v[i, j] = 0.0
cx_down = myg.x[i] - 0.5*myg.dx*math.sqrt(3)
cx_up = myg.x[i] + 0.5*myg.dx*math.sqrt(3)
cx = np.array([cx_down, cx_up])
for sf in shockfront:
for x in cx:
if x < sf:
v[i, j] = v[i, j] + 0.25*inflow_post_bc(variable, gamma)
else:
v[i, j] = v[i, j] + 0.25*inflow_pre_bc(variable, gamma)
else:
v = ccdata.get_var(variable)
v[:, :] = 0.0 # no source term
else:
msg.fail("error: bc type %s not supported" % (bc_name))
def inflow_post_bc(var, g):
# inflow boundary condition with post shock setup
r_l = 8.0
u_l = 7.1447096
v_l = -4.125
p_l = 116.5
if var == "density":
vl = r_l
elif var == "x-momentum":
vl = r_l*u_l
elif var == "y-momentum":
vl = r_l*v_l
elif var == "energy":
vl = p_l/(g - 1.0) + 0.5*r_l*(u_l*u_l + v_l*v_l)
else:
vl = 0.0
return vl
def inflow_pre_bc(var, g):
# pre shock setup
r_r = 1.4
u_r = 0.0
v_r = 0.0
p_r = 1.0
if var == "density":
vl = r_r
elif var == "x-momentum":
vl = r_r*u_r
elif var == "y-momentum":
vl = r_r*v_r
elif var == "energy":
vl = p_r/(g - 1.0) + 0.5*r_r*(u_r*u_r + v_r*v_r)
else:
vl = 0.0
return vl
|
zingale/pyro2
|
compressible/BC.py
|
Python
|
bsd-3-clause
| 8,919 | 0.001009 |
# -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2009 Renato Lima - Akretion #
# Copyright (C) 2011 Vinicius Dittgen - PROGE, Leonardo Santagada - PROGE #
# #
#This program is free software: you can redistribute it and/or modify #
#it under the terms of the GNU Affero General Public License as published by #
#the Free Software Foundation, either version 3 of the License, or #
#(at your option) any later version. #
# #
#This program is distributed in the hope that it will be useful, #
#but WITHOUT ANY WARRANTY; without even the implied warranty of #
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
#GNU Affero General Public License for more details. #
# #
#You should have received a copy of the GNU Affero General Public License #
#along with this program. If not, see <http://www.gnu.org/licenses/>. #
###############################################################################
import time
import base64
from openerp.osv import orm, fields
from openerp.tools.translate import _
class L10n_brAccountNfeExportInvoice(orm.TransientModel):
""" Export fiscal eletronic file from invoice"""
_name = 'l10n_br_account_product.nfe_export_invoice'
_description = 'Export eletronic invoice for Emissor de NFe SEFAZ SP'
_columns = {
'name': fields.char('Nome', size=255),
'file': fields.binary('Arquivo', readonly=True),
'file_type': fields.selection(
[('xml', 'XML'), ('txt', ' TXT')], 'Tipo do Arquivo'),
'state': fields.selection(
[('init', 'init'), ('done', 'done')], 'state', readonly=True),
'nfe_environment': fields.selection(
[('1', u'Produção'), ('2', u'Homologação')], 'Ambiente'),
'sign_xml': fields.boolean('Assinar XML'),
'nfe_export_result': fields.one2many(
'l10n_br_account_product.nfe_export_invoice_result', 'wizard_id',
'NFe Export Result'),
'export_folder': fields.boolean(u'Salvar na Pasta de Exportação'),
}
def _default_file_type(self, cr, uid, context):
file_type = False
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
company = self.pool.get('res.company').browse(
cr, uid, user.company_id.id, context=context)
file_type = company.file_type
return file_type
def _default_nfe_environment(self, cr, uid, context):
nfe_environment = False
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
company = self.pool.get('res.company').browse(
cr, uid, user.company_id.id, context=context)
nfe_environment = company.nfe_environment
return nfe_environment
def _default_sign_xml(self, cr, uid, context):
sign_xml = False
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
company = self.pool.get('res.company').browse(
cr, uid, user.company_id.id, context=context)
sign_xml = company.sign_xml
return sign_xml
def _default_export_folder(self, cr, uid, context):
export_folder = False
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
company = self.pool.get('res.company').browse(
cr, uid, user.company_id.id, context=context)
export_folder = company.export_folder
return export_folder
_defaults = {
'state': 'init',
'file_type': _default_file_type,
'nfe_environment': _default_nfe_environment,
'sign_xml': _default_sign_xml,
'export_folder': _default_export_folder,
}
def _get_invoice_ids(self, cr, uid, data, context=None):
if not context:
context = {}
return context.get('active_ids', [])
def nfe_export(self, cr, uid, ids, context=None):
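        # Gather the invoices selected in the active context, check that they
        # are in the 'sefaz_export' state and belong to a single company, then
        # hand them to the txt/xml serializer chosen on the wizard form.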
data = self.read(cr, uid, ids, [], context=context)[0]
inv_obj = self.pool.get('account.invoice')
active_ids = self._get_invoice_ids(cr, uid, data, context)
export_inv_ids = []
export_inv_numbers = []
company_ids = []
err_msg = ''
if not active_ids:
err_msg = u'Não existe nenhum documento fiscal para ser exportado!'
for inv in inv_obj.browse(cr, uid, active_ids, context=context):
if inv.state not in ('sefaz_export'):
err_msg += u"O Documento Fiscal %s não esta definida para ser \
exportação para a SEFAZ.\n" % inv.internal_number
elif not inv.issuer == '0':
err_msg += u"O Documento Fiscal %s é do tipo externa e não \
pode ser exportada para a receita.\n" % inv.internal_number
else:
inv_obj.write(cr, uid, [inv.id], {'nfe_export_date': False,
'nfe_access_key': False,
'nfe_status': False,
'nfe_date': False})
message = "O Documento Fiscal %s foi \
exportado." % inv.internal_number
inv_obj.log(cr, uid, inv.id, message)
export_inv_ids.append(inv.id)
company_ids.append(inv.company_id.id)
export_inv_numbers.append(inv.internal_number)
if len(set(company_ids)) > 1:
err_msg += u'Não é permitido exportar Documentos \
Fiscais de mais de uma empresa, por favor selecione Documentos \
Fiscais da mesma empresa.'
if export_inv_ids:
if len(export_inv_numbers) > 1:
name = 'nfes%s-%s.%s' % (
time.strftime('%d-%m-%Y'),
self.pool.get('ir.sequence').get(cr, uid, 'nfe.export'),
data['file_type'])
else:
name = 'nfe%s.%s' % (export_inv_numbers[0], data['file_type'])
mod_serializer = __import__(
'l10n_br_account_product.sped.nfe.serializer.' +
data['file_type'], globals(), locals(), data['file_type'])
func = getattr(mod_serializer, 'nfe_export')
company_pool = self.pool.get('res.company')
company = company_pool.browse(cr, uid, inv.company_id.id)
str_nfe_version = inv.nfe_version
nfes = func(
cr, uid, export_inv_ids, data['nfe_environment'],
str_nfe_version, context)
for nfe in nfes:
#if nfe['message']:
#status = 'error'
#else:
#status = 'success'
#self.pool.get(self._name + '_result').create(
#cr, uid, {'document': nfe['key'],
#'message': nfe['message'],
#'status': status,
#'wizard_id': data['id']})
nfe_file = nfe['nfe'].encode('utf8')
self.write(
cr, uid, ids, {'file': base64.b64encode(nfe_file),
'state': 'done', 'name': name}, context=context)
if err_msg:
raise orm.except_orm(_('Error!'), _("'%s'") % _(err_msg, ))
mod_obj = self.pool.get('ir.model.data')
model_data_ids = mod_obj.search(
cr, uid, [('model', '=', 'ir.ui.view'),
('name', '=', 'l10n_br_account_product_nfe_export_invoice_form')],
context=context)
resource_id = mod_obj.read(
cr, uid, model_data_ids,
fields=['res_id'], context=context)[0]['res_id']
return {
'type': 'ir.actions.act_window',
'res_model': self._name,
'view_mode': 'form',
'view_type': 'form',
'res_id': data['id'],
'views': [(resource_id, 'form')],
'target': 'new',
}
class L10n_brAccountNfeExportInvoiceResult(orm.TransientModel):
_name = 'l10n_br_account_product.nfe_export_invoice_result'
_columns = {
'wizard_id': fields.many2one(
'l10n_br_account_product.nfe_export_invoice', 'Wizard ID',
ondelete='cascade', select=True),
'document': fields.char('Documento', size=255),
'status': fields.selection(
[('success', 'Sucesso'), ('error', 'Erro')], 'Status'),
'message': fields.char('Mensagem', size=255),
}
|
rodrigoasmacedo/l10n-brazil
|
__unported__/l10n_br_account_product/wizard/l10n_br_account_nfe_export_invoice.py
|
Python
|
agpl-3.0
| 8,955 | 0.00302 |
from datetime import datetime
import click
import crayons
import tweakers
from tabulate import tabulate
from . import __version__, config, utils
def format_date(dt):
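    # e.g. a timestamp from earlier today renders as "14:05", one from earlier
    # this year as "19-11", and anything older as "19-11-2007".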
if dt.date() == datetime.today().date():
return dt.strftime("%H:%M")
elif dt.year == datetime.today().year:
return dt.strftime("%d-%m")
else:
return dt.strftime("%d-%m-%Y")
def confirm_overwrite_existing_login():
if utils.cookies_exist():
confirmed = click.confirm(
"You are already logged in. Would you like to login to a different account?"
)
if confirmed:
config.stored_cookies_path.unlink()
click.echo("Existing login deleted.")
else:
raise SystemExit
def print_comment(comment):
"""Pretty print a comment"""
print(
crayons.yellow((comment.date.strftime("%H:%M"))),
crayons.green(comment.user.name),
crayons.blue(comment.url),
)
print(comment.text, "\n")
def choose_topic(topics):
"""Return chosen topic from a printed list of topics
Args:
topics (list): List of Topic objects
Returns:
topic (Topic): Chosen topic
"""
table = []
for i, t in enumerate(topics):
row = [i + 1, t.title, format_date(t.last_reply)]
table.append(row)
print("\n", tabulate(table, headers=["#", "Titel", "Laatste reactie"]))
choice = click.prompt(f"\nChoose a topic to stream (1-{len(topics)})", type=int)
return topics[choice - 1]
@click.group()
@click.version_option(version=__version__)
@click.option("--last", default=3, help="Number of previous comments to show.")
@click.pass_context
def cli(ctx, last):
ctx.ensure_object(dict)
ctx.obj["last"] = last
try:
utils.load_persistent_cookies()
except FileNotFoundError:
pass
@cli.command(name="stream", help="Stream from a specific url.")
@click.argument("url")
@click.pass_context
def stream(ctx, url):
topic = tweakers.gathering.Topic(url=url)
for comment in topic.comment_stream(last=ctx.obj["last"]):
print_comment(comment)
@cli.command(name="list", help="Choose from a list of active topics.")
@click.option("-n", default=20, help="Number of topics to show.")
@click.pass_context
def list_active(ctx, n):
topics = tweakers.gathering.active_topics()[:n]
topic = choose_topic(topics)
for comment in topic.comment_stream(last=ctx.obj["last"]):
print_comment(comment)
@cli.command(name="search", help="Search for a specific topic.")
@click.argument("query", nargs=-1)
@click.option("-n", default=10, help="Number of results to show.")
@click.pass_context
def search(ctx, query, n):
query = " ".join(query)
topics = tweakers.gathering.search(query)
if len(topics) == 0:
click.echo("No topics found!")
raise SystemExit
topic = choose_topic(topics)
for comment in topic.comment_stream(last=ctx.obj["last"]):
print_comment(comment)
@cli.command(name="login", help="Login to tweakers.net.")
def login():
confirm_overwrite_existing_login()
username = click.prompt("Username")
password = click.prompt("Password", hide_input=True)
tweakers.utils.login(username=username, password=password)
utils.store_persistent_cookies()
click.echo("Login successful!")
@cli.command(name="bookmarks", help="Choose from a list of bookmarks.")
@click.pass_context
def bookmarks(ctx):
topics = tweakers.gathering.bookmarks()
if len(topics) == 0:
click.echo("No topics found!")
raise SystemExit
topic = choose_topic(topics)
for comment in topic.comment_stream(last=ctx.obj["last"]):
print_comment(comment)
if __name__ == "__main__":
cli(obj={})
|
timotk/tweakstream
|
tweakstream/cli.py
|
Python
|
mit
| 3,757 | 0.000532 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from selenium.common import exceptions
from selenium.webdriver.common import by
from openstack_dashboard.test.integration_tests.pages import basepage
from openstack_dashboard.test.integration_tests.pages.project.network.\
routerinterfacespage import RouterInterfacesPage
from openstack_dashboard.test.integration_tests.pages.project.network\
.routeroverviewpage import RouterOverviewPage
from openstack_dashboard.test.integration_tests.regions import forms
from openstack_dashboard.test.integration_tests.regions import tables
class RoutersTable(tables.TableRegion):
name = "routers"
CREATE_ROUTER_FORM_FIELDS = ("name", "admin_state_up",
"external_network")
SET_GATEWAY_FORM_FIELDS = ("network_id", "router_name",
"router_id")
@tables.bind_table_action('create')
def create_router(self, create_button):
create_button.click()
return forms.FormRegion(self.driver, self.conf,
field_mappings=self.CREATE_ROUTER_FORM_FIELDS)
@tables.bind_table_action('delete')
def delete_router(self, delete_button):
delete_button.click()
return forms.BaseFormRegion(self.driver, self.conf)
@tables.bind_row_action('cleargateway')
def clear_gateway(self, clear_gateway_button, row):
clear_gateway_button.click()
return forms.BaseFormRegion(self.driver, self.conf)
@tables.bind_row_action('setgateway')
def set_gateway(self, set_gateway_button, row):
set_gateway_button.click()
return forms.FormRegion(self.driver, self.conf,
field_mappings=self.SET_GATEWAY_FORM_FIELDS)
class RoutersPage(basepage.BaseNavigationPage):
DEFAULT_ADMIN_STATE_UP = 'True'
DEFAULT_EXTERNAL_NETWORK = 'admin_floating_net'
ROUTERS_TABLE_NAME_COLUMN = 'name'
ROUTERS_TABLE_STATUS_COLUMN = 'status'
ROUTERS_TABLE_NETWORK_COLUMN = 'ext_net'
_interfaces_tab_locator = (by.By.CSS_SELECTOR,
'a[href*="tab=router_details__interfaces"]')
def __init__(self, driver, conf):
super(RoutersPage, self).__init__(driver, conf)
self._page_title = "Routers"
def _get_row_with_router_name(self, name):
return self.routers_table.get_row(
self.ROUTERS_TABLE_NAME_COLUMN, name)
@property
def routers_table(self):
return RoutersTable(self.driver, self.conf)
def create_router(self, name, admin_state_up=DEFAULT_ADMIN_STATE_UP,
external_network=DEFAULT_EXTERNAL_NETWORK):
create_router_form = self.routers_table.create_router()
create_router_form.name.text = name
if admin_state_up:
create_router_form.admin_state_up.value = admin_state_up
if external_network:
create_router_form.external_network.text = external_network
create_router_form.submit()
def set_gateway(self, router_id,
network_name=DEFAULT_EXTERNAL_NETWORK):
row = self._get_row_with_router_name(router_id)
set_gateway_form = self.routers_table.set_gateway(row)
set_gateway_form.network_id.text = network_name
set_gateway_form.submit()
def clear_gateway(self, name):
row = self._get_row_with_router_name(name)
confirm_clear_gateway_form = self.routers_table.clear_gateway(row)
confirm_clear_gateway_form.submit()
def delete_router(self, name):
row = self._get_row_with_router_name(name)
row.mark()
confirm_delete_routers_form = self.routers_table.delete_router()
confirm_delete_routers_form.submit()
def is_router_present(self, name):
return bool(self._get_row_with_router_name(name))
def is_router_active(self, name):
def cell_getter():
row = self._get_row_with_router_name(name)
return row and row.cells[self.ROUTERS_TABLE_STATUS_COLUMN]
return self.routers_table.wait_cell_status(cell_getter, 'Active')
def is_gateway_cleared(self, name):
row = self._get_row_with_router_name(name)
def cell_getter():
return row.cells[self.ROUTERS_TABLE_NETWORK_COLUMN]
try:
self._wait_till_text_present_in_element(cell_getter, '-')
except exceptions.TimeoutException:
return False
return True
def is_gateway_set(self, name, network_name=DEFAULT_EXTERNAL_NETWORK):
row = self._get_row_with_router_name(name)
def cell_getter():
return row.cells[self.ROUTERS_TABLE_NETWORK_COLUMN]
try:
self._wait_till_text_present_in_element(cell_getter, network_name)
except exceptions.TimeoutException:
return False
return True
def go_to_interfaces_page(self, name):
self._get_element(by.By.LINK_TEXT, name).click()
self._get_element(*self._interfaces_tab_locator).click()
return RouterInterfacesPage(self.driver, self.conf, name)
def go_to_overview_page(self, name):
self._get_element(by.By.LINK_TEXT, name).click()
return RouterOverviewPage(self.driver, self.conf, name)
|
Mirantis/mos-horizon
|
openstack_dashboard/test/integration_tests/pages/project/network/routerspage.py
|
Python
|
apache-2.0
| 5,775 | 0 |
from itertools import filterfalse
from typing import (
Callable,
Iterable,
Iterator,
Optional,
Set,
TypeVar,
Union,
)
# Type and type variable definitions
_T = TypeVar('_T')
_U = TypeVar('_U')
def unique_everseen(
iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None
) -> Iterator[_T]:
"List unique elements, preserving order. Remember all elements ever seen."
# unique_everseen('AAAABBBCCDAABBB') --> A B C D
# unique_everseen('ABBCcAD', str.lower) --> A B C D
seen: Set[Union[_T, _U]] = set()
seen_add = seen.add
if key is None:
for element in filterfalse(seen.__contains__, iterable):
seen_add(element)
yield element
else:
for element in iterable:
k = key(element)
if k not in seen:
seen_add(k)
yield element
|
pybuilder/pybuilder
|
src/main/python/pybuilder/_vendor/pkg_resources/_vendor/importlib_resources/_itertools.py
|
Python
|
apache-2.0
| 884 | 0 |
_base_ = './retinanet_r50_fpn_2x_coco.py'
model = dict(
backbone=dict(
depth=101,
init_cfg=dict(type='Pretrained',
checkpoint='torchvision://resnet101')))
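# This variant only swaps the backbone for ResNet-101 (with its torchvision
# checkpoint); everything else is inherited from the _base_ config, so it is
# launched the same way, e.g. (illustrative):
#   python tools/train.py configs/retinanet/retinanet_r101_fpn_2x_coco.py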
|
open-mmlab/mmdetection
|
configs/retinanet/retinanet_r101_fpn_2x_coco.py
|
Python
|
apache-2.0
| 197 | 0 |
#!/usr/bin/env python3
__revision__ = "$Rev$"
try:
import configparser
except ImportError:
import ConfigParser as configparser
import pickle
import datetime
import time
import os.path
import sys
from optparse import OptionParser
import lj
"""
journal backup dictionary structure:
{ 'last_entry': timestamp of the last journal entry sync'd,
'last_comment': id of the last comment sync'd,
'login': the dictionary returned by the last login (useful information such as friend groups),
'comment_posters': { [posterid]: [postername] }
'entries': { [entryid]: {
eventtime: timestamp,
security: 'private' or 'usemask',
allowmask: bitmask of usergroups allowed to see post,
subject: subject,
event: event text (url-encoded),
poster: user who posted the entry (if different from logged-in user),
props: dictionary of properties,
[other undocumented keys returned in a pseudo-arbitrary fashion by LJ],
} }
comments: { [commentid]: {
'posterid': poster id (map to username with comment_posters),
'jitemid': entry id,
'parentid': id of parent comment (0 if top-level),
'body': text of comment,
'date': date comment posted,
'subject': subject of comment,
        [other undocumented keys returned in a pseudo-arbitrary fashion by LJ],
} }
}
"""
DEFAULT_JOURNAL = {
'last_entry': None,
'last_comment': '0',
'last_comment_meta': None,
'entries': {},
'comments': {},
'comment_posters': {},
}
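# A populated journal might look like (illustrative values only):
#   {'last_entry': '2007-11-19 12:24:01', 'last_comment': '42',
#    'entries': {123: {'eventtime': '2007-11-19 12:24:01', 'subject': '...'}},
#    'comments': {'42': {'jitemid': 123, 'parentid': 0, 'body': '...'}},
#    'comment_posters': {'7': 'someuser'}}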
def datetime_from_string(s):
"""This assumes input in the form '2007-11-19 12:24:01' because that's all I care about"""
return datetime.datetime.strptime(s, "%Y-%m-%d %H:%M:%S")
def days_ago(s):
return (datetime.datetime.today() - datetime_from_string(s)).days
def one_second_before(s):
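    # e.g. one_second_before('2007-11-19 12:24:01') -> '2007-11-19 12:24:00'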
return str(datetime_from_string(s[:19]) - datetime.timedelta(seconds=1))
def backup(user, password, journal):
server = lj.LJServer('lj.py+backup; kemayo@gmail.com', 'Python-lj.py/0.0.1')
try:
login = server.login(user, password, getpickws=True, getpickwurls=True)
except lj.LJException as e:
sys.exit(e)
# Load already-cached entries
journal['login'] = login
# Sync entries from the server
print("Downloading journal entries")
nj = update_journal_entries(server, journal)
# Sync comments from the server
print("Downloading comments")
nc = update_journal_comments(server, journal)
print(("Updated %d entries and %d comments" % (nj, nc)))
def backup_to_file(user, password, f):
journal = load_journal(f)
backup(user, password, journal)
save_journal(f, journal)
def load_journal(f):
# f should be a string referring to a file
if os.path.exists(f):
try:
j = pickle.load(open(f, 'rb'))
return j
except EOFError:
return DEFAULT_JOURNAL.copy()
return DEFAULT_JOURNAL.copy()
def save_journal(f, journal):
pickle.dump(journal, open(f, 'wb'))
def update_journal_entries(server, journal):
syncitems = built_syncitems_list(server, journal)
howmany = len(syncitems)
print(howmany, "entries to download")
while len(syncitems) > 0:
print("getting entries starting at", syncitems[0][1])
sync = server.getevents_syncitems(one_second_before(syncitems[0][1]))
for entry in sync['events']:
if hasattr(entry, 'data'):
entry = entry.data
journal['entries'][entry['itemid']] = entry
del(syncitems[0])
return howmany
def built_syncitems_list(server, journal):
all = []
count = 0
total = None
while count != total:
sync = server.syncitems(journal.get('last_entry'))
count = sync['count']
total = sync['total']
journalitems = [(int(e['item'][2:]), e['time']) for e in sync['syncitems'] if e['item'].startswith('L-')]
if journalitems:
all.extend(journalitems)
journal['last_entry'] = all[-1][1]
return all
def update_journal_comments(server, journal):
session = server.sessiongenerate()
initial_meta = get_meta_since(journal['last_comment'], server, session)
journal['comment_posters'].update(initial_meta['usermaps'])
    if int(initial_meta['maxid']) > int(journal['last_comment']):
bodies = get_bodies_since(journal['last_comment'], initial_meta['maxid'], server, session)
journal['comments'].update(bodies)
if len(journal['comments']) == 0 or days_ago(journal['last_comment_meta']) > 30:
# update metadata every 30 days
all_meta = get_meta_since('0', server, session)
journal['comment_posters'].update(all_meta['usermaps'])
if len(journal['comments']) > 0:
for id, data in list(all_meta['comments'].items()):
journal['comments'][id]['posterid'] = data[0]
journal['comments'][id]['state'] = data[1]
journal['last_comment_meta'] = str(datetime.datetime.today())
howmany = int(initial_meta['maxid']) - int(journal['last_comment'])
journal['last_comment'] = initial_meta['maxid']
server.sessionexpire(session)
return howmany
def get_meta_since(highest, server, session):
all = {'comments': {}, 'usermaps': {}}
maxid = str(int(highest) + 1)
    # Compare numerically; the ids arrive as strings, and a lexical comparison
    # would stop early (e.g. '99' < '123' is False).
    while int(highest) < int(maxid):
meta = server.fetch_comment_meta(highest, session)
maxid = meta['maxid']
for id, data in list(meta['comments'].items()):
if int(id) > int(highest):
highest = id
all['comments'][id] = data
all['usermaps'].update(meta['usermaps'])
all['maxid'] = maxid
return all
def get_bodies_since(highest, maxid, server, session):
all = {}
    while int(highest) < int(maxid):
meta = server.fetch_comment_bodies(highest, session)
for id, data in list(meta.items()):
if int(id) > int(highest):
highest = id
all[id] = data
if maxid in meta:
break
print("Downloaded %d comments so far" % len(all))
return all
def __dispatch():
parser = OptionParser(version="%%prog %s" % __revision__, usage="usage: %prog -u Username -p Password -f backup.pkl")
parser.add_option('-u', dest='user', help="Username")
parser.add_option('-p', dest='password', help="Password")
parser.add_option('-f', dest='file', help="Backup filename")
parser.add_option('-c', dest='config', help="Config file")
options, args = parser.parse_args(sys.argv[1:])
if options.config:
cp = configparser.ConfigParser()
cp.read(options.config)
username = cp.get("login", "username")
password = cp.get("login", "password")
filename = cp.get("login", "file")
backup_to_file(username, password, filename)
elif options.user and options.password and options.file:
backup_to_file(options.user, options.password, options.file)
else:
parser.error("If a config file is not being used, -u, -p, and -f must all be present.")
if __name__ == "__main__":
__dispatch()
|
zetasyanthis/myarchive
|
src/myarchive/libs/livejournal/backup.py
|
Python
|
mit
| 7,193 | 0.001529 |
"""cshsms URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
urlpatterns = [
url(r'^management/', include('management.urls')),
url(r'^admin/', admin.site.urls),
url(r'^$', RedirectView.as_view(url='/management'))
]
|
charityscience/csh-sms
|
cshsms/urls.py
|
Python
|
gpl-3.0
| 925 | 0.005405 |
from wxPython.wx import *
from twisted.internet import reactor
class MyApp(wxApp):
def OnInit(self):
# Twisted Reactor Code
reactor.startRunning()
EVT_TIMER(self,999999,self.OnTimer)
self.timer=wxTimer(self,999999)
self.timer.Start(250,False)
# End Twisted Code
# Do whatever you need to do here
return True
def OnTimer(self,event):
reactor.runUntilCurrent()
reactor.doIteration(0)
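# Launch sketch (editor's addition; the recipe itself stops at the class). With
# old-style wxPython the app would typically be driven like this:
#   if __name__ == '__main__':
#       app = MyApp(0)    # OnInit starts the reactor and the 250ms timer
#       app.MainLoop()    # wx event loop; OnTimer pumps Twisted each tick
#       reactor.stop()    # assumption: stop the reactor once the GUI exits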
|
ActiveState/code
|
recipes/Python/181780_Using_wxPythTwisted/recipe-181780.py
|
Python
|
mit
| 465 | 0.021505 |
# Copyright 2016 Randall Nortman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
from collections import deque
from concurrent.futures import FIRST_COMPLETED
import functools
import itertools
import struct
import io
import threading
import os
import socket
import sys
import logging
LOG = logging.getLogger(__name__)
from master import MsgHeader
from master import MSG_FLAGS_COMMITTED
class Follower:
def __init__(self, reader, writer, loop):
self.read_task = asyncio.async(self.reader(reader, writer, loop))
return
@asyncio.coroutine
def reader(self, reader, writer, loop):
#print('reader')
try:
for i in itertools.count():
#print('read header', i, MsgHeader.size)
header = yield from reader.readexactly(MsgHeader.size)
msg_len, flags, seqno = MsgHeader.unpack(header)
if flags & MSG_FLAGS_COMMITTED:
assert msg_len == MsgHeader.size
else:
payload = yield from reader.readexactly(msg_len - MsgHeader.size)
#loop.call_later(0.5, self.send_ack, writer, seqno)
self.send_ack(writer, seqno)
except:
LOG.exception('Exception occured in Follower task')
writer.close()
raise
def send_ack(self, writer, seqno):
#print('follower ack', seqno)
writer.write(MsgHeader.pack(MsgHeader.size, 0, seqno))
return
@classmethod
@asyncio.coroutine
def connect_and_run(cls, loop, host='127.0.0.1', port=8889):
reader, writer = yield from asyncio.open_connection(host, port,
loop=loop)
follower = cls(reader, writer, loop)
return (yield from follower.read_task)
def main():
#logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
#loop.set_debug(True)
task = asyncio.async(Follower.connect_and_run(loop))
print('main task', task)
loop.run_until_complete(task)
print('All done!', task.result())
loop.close()
return
if __name__ == '__main__':
main()
|
rnortman/boomscooter
|
boomscooter/follower.py
|
Python
|
apache-2.0
| 2,694 | 0.00631 |
#!/usr/bin/env python
# Author: Angela Chapman
# Date: 8/6/2014
#
# This file contains code to accompany the Kaggle tutorial
# "Deep learning goes to the movies". The code in this file
# is for Part 1 of the tutorial on Natural Language Processing.
#
# *************************************** #
import os
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.ensemble import RandomForestClassifier
from KaggleWord2VecUtility import KaggleWord2VecUtility
import pandas as pd
import numpy as np
if __name__ == '__main__':
train = pd.read_csv(os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')), 'data', 'labeledTrainData.tsv'), header=0, \
delimiter="\t", quoting=3)
test = pd.read_csv(os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')), 'data', 'testData.tsv'), header=0, delimiter="\t", \
quoting=3 )
print 'The first review is:'
print train["review"][0]
raw_input("Press Enter to continue...")
#print 'Download text data sets. If you already have NLTK datasets downloaded, just close the Python download window...'
#nltk.download() # Download text data sets, including stop words
# Initialize an empty list to hold the clean reviews
clean_train_reviews = []
# Loop over each review; create an index i that goes from 0 to the length
# of the movie review list
print "Cleaning and parsing the training set movie reviews...\n"
for i in xrange( 0, len(train["review"])):
clean_train_reviews.append(" ".join(KaggleWord2VecUtility.review_to_wordlist(train["review"][i], True)))
# ****** Create a bag of words from the training set
#
print "Creating the bag of words...\n"
# Initialize the "CountVectorizer" object, which is scikit-learn's
# bag of words tool.
vectorizer = CountVectorizer(analyzer = "word", \
tokenizer = None, \
preprocessor = None, \
stop_words = None, \
max_features = 5000)
# fit_transform() does two functions: First, it fits the model
# and learns the vocabulary; second, it transforms our training data
# into feature vectors. The input to fit_transform should be a list of
# strings.
train_data_features = vectorizer.fit_transform(clean_train_reviews)
# Numpy arrays are easy to work with, so convert the result to an
# array
train_data_features = train_data_features.toarray()
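    # For orientation (editor's note; toy strings with the default analyzer,
    # not the tutorial data): on a corpus like ["the cat sat", "the cat ran"]
    # the learned vocabulary would be ['cat', 'ran', 'sat', 'the'] and the array
    #     [[1, 0, 1, 1],
    #      [1, 1, 0, 1]]
    # i.e. one row per review, one column per word, each cell a raw count.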
# ******* Train a random forest using the bag of words
#
print "Training the random forest (this may take a while)..."
# Initialize a Random Forest classifier with 100 trees
forest = RandomForestClassifier(n_estimators = 100)
# Fit the forest to the training set, using the bag of words as
# features and the sentiment labels as the response variable
#
# This may take a few minutes to run
forest = forest.fit( train_data_features, train["sentiment"] )
# Create an empty list and append the clean reviews one by one
clean_test_reviews = []
print "Cleaning and parsing the test set movie reviews...\n"
for i in xrange(0,len(test["review"])):
clean_test_reviews.append(" ".join(KaggleWord2VecUtility.review_to_wordlist(test["review"][i], True)))
# Get a bag of words for the test set, and convert to a numpy array
test_data_features = vectorizer.transform(clean_test_reviews)
test_data_features = test_data_features.toarray()
# Use the random forest to make sentiment label predictions
print "Predicting test labels...\n"
result = forest.predict_proba(test_data_features)[:,1]
# Copy the results to a pandas dataframe with an "id" column and
# a "sentiment" column
output = pd.DataFrame( data={"id":test["id"], "sentiment":result} )
# Use pandas to write the comma-separated output file
output.to_csv(os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')), 'data', 'Bag_of_Words_test_results.csv'), index=False, quoting=3)
print "Wrote results to Bag_of_Words_test_results.csv"
|
weiwang/popcorn
|
analysis/BagOfWords.py
|
Python
|
mit
| 4,190 | 0.011456 |
from __future__ import absolute_import
from django.conf import settings
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.utils.http import urlencode
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext
from django.utils.translation import ugettext_lazy as _
import sendfile
from acls.models import AccessEntry
from common.utils import encapsulate
from documents.conf.settings import THUMBNAIL_SIZE
from documents.exceptions import NewDocumentVersionNotAllowed
from documents.forms import DocumentTypeSelectForm
from documents.models import DocumentType, Document
from documents.permissions import (PERMISSION_DOCUMENT_CREATE,
PERMISSION_DOCUMENT_NEW_VERSION)
from metadata.api import decode_metadata_from_url, metadata_repr_as_list
from metadata.forms import MetadataSelectionForm, MetadataFormSet
from permissions.models import Permission
from .forms import (SourceTransformationForm, SourceTransformationForm_create,
WebFormSetupForm, StagingFolderSetupForm, StagingDocumentForm, WebFormForm,
WatchFolderSetupForm)
from .literals import (SOURCE_CHOICE_WEB_FORM, SOURCE_CHOICE_STAGING,
SOURCE_CHOICE_WATCH, SOURCE_UNCOMPRESS_CHOICE_Y, SOURCE_UNCOMPRESS_CHOICE_ASK)
from .models import (WebForm, StagingFolder, SourceTransformation,
WatchFolder)
from .permissions import (PERMISSION_SOURCES_SETUP_VIEW,
PERMISSION_SOURCES_SETUP_EDIT, PERMISSION_SOURCES_SETUP_DELETE,
PERMISSION_SOURCES_SETUP_CREATE)
from .staging import create_staging_file_class
from .wizards import DocumentCreateWizard
def document_create(request):
Permission.objects.check_permissions(request.user, [PERMISSION_DOCUMENT_CREATE])
wizard = DocumentCreateWizard(form_list=[DocumentTypeSelectForm, MetadataSelectionForm, MetadataFormSet])
return wizard(request)
def document_create_siblings(request, document_id):
Permission.objects.check_permissions(request.user, [PERMISSION_DOCUMENT_CREATE])
document = get_object_or_404(Document, pk=document_id)
query_dict = {}
for pk, metadata in enumerate(document.documentmetadata_set.all()):
query_dict['metadata%s_id' % pk] = metadata.metadata_type_id
query_dict['metadata%s_value' % pk] = metadata.value
if document.document_type_id:
query_dict['document_type_id'] = document.document_type_id
url = reverse('upload_interactive')
return HttpResponseRedirect('%s?%s' % (url, urlencode(query_dict)))
def return_function(obj):
return lambda context: context['source'].source_type == obj.source_type and context['source'].pk == obj.pk
def get_tab_link_for_source(source, document=None):
if document:
view = u'upload_version'
args = [document.pk, u'"%s"' % source.source_type, source.pk]
else:
view = u'upload_interactive'
args = [u'"%s"' % source.source_type, source.pk]
return {
'text': source.title,
'view': view,
'args': args,
'famfam': source.icon,
'keep_query': True,
'conditional_highlight': return_function(source),
}
def get_active_tab_links(document=None):
tab_links = []
web_forms = WebForm.objects.filter(enabled=True)
for web_form in web_forms:
tab_links.append(get_tab_link_for_source(web_form, document))
staging_folders = StagingFolder.objects.filter(enabled=True)
for staging_folder in staging_folders:
tab_links.append(get_tab_link_for_source(staging_folder, document))
return {
'tab_links': tab_links,
SOURCE_CHOICE_WEB_FORM: web_forms,
SOURCE_CHOICE_STAGING: staging_folders
}
def upload_interactive(request, source_type=None, source_id=None, document_pk=None):
subtemplates_list = []
if document_pk:
document = get_object_or_404(Document, pk=document_pk)
try:
Permission.objects.check_permissions(request.user, [PERMISSION_DOCUMENT_NEW_VERSION])
except PermissionDenied:
AccessEntry.objects.check_access(PERMISSION_DOCUMENT_NEW_VERSION, request.user, document)
results = get_active_tab_links(document)
else:
Permission.objects.check_permissions(request.user, [PERMISSION_DOCUMENT_CREATE])
document = None
results = get_active_tab_links()
context = {}
if results[SOURCE_CHOICE_WEB_FORM].count() == 0 and results[SOURCE_CHOICE_STAGING].count() == 0:
source_setup_link = mark_safe('<a href="%s">%s</a>' % (reverse('setup_web_form_list'), ugettext(u'here')))
subtemplates_list.append(
{
'name': 'generic_subtemplate.html',
'context': {
'title': _(u'Upload sources'),
'paragraphs': [
_(u'No interactive document sources have been defined or none have been enabled.'),
_(u'Click %(setup_link)s to add or enable some document sources.') % {
'setup_link': source_setup_link
}
],
}
})
document_type_id = request.GET.get('document_type_id', None)
if document_type_id:
document_type = get_object_or_404(DocumentType, pk=document_type_id)
else:
document_type = None
if source_type is None and source_id is None:
if results[SOURCE_CHOICE_WEB_FORM].count():
source_type = results[SOURCE_CHOICE_WEB_FORM][0].source_type
source_id = results[SOURCE_CHOICE_WEB_FORM][0].pk
elif results[SOURCE_CHOICE_STAGING].count():
source_type = results[SOURCE_CHOICE_STAGING][0].source_type
source_id = results[SOURCE_CHOICE_STAGING][0].pk
if source_type and source_id:
if source_type == SOURCE_CHOICE_WEB_FORM:
web_form = get_object_or_404(WebForm, pk=source_id)
context['source'] = web_form
if request.method == 'POST':
form = WebFormForm(request.POST, request.FILES,
document_type=document_type,
show_expand=(web_form.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK) and not document,
source=web_form,
instance=document
)
if form.is_valid():
try:
if document:
expand = False
else:
if web_form.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK:
expand = form.cleaned_data.get('expand')
else:
if web_form.uncompress == SOURCE_UNCOMPRESS_CHOICE_Y:
expand = True
else:
expand = False
new_filename = get_form_filename(form)
result = web_form.upload_file(request.FILES['file'],
new_filename, use_file_name=form.cleaned_data.get('use_file_name', False),
document_type=document_type,
expand=expand,
metadata_dict_list=decode_metadata_from_url(request.GET),
user=request.user,
document=document,
new_version_data=form.cleaned_data.get('new_version_data')
)
if document:
messages.success(request, _(u'New document version uploaded successfully.'))
return HttpResponseRedirect(reverse('document_view_simple', args=[document.pk]))
else:
if result['is_compressed'] == None:
messages.success(request, _(u'File uploaded successfully.'))
if result['is_compressed'] == True:
messages.success(request, _(u'File uncompressed successfully and uploaded as individual files.'))
if result['is_compressed'] == False:
messages.warning(request, _(u'File was not a compressed file, uploaded as it was.'))
return HttpResponseRedirect(request.get_full_path())
except NewDocumentVersionNotAllowed:
messages.error(request, _(u'New version uploads are not allowed for this document.'))
except Exception, e:
if settings.DEBUG:
raise
messages.error(request, _(u'Unhandled exception: %s') % e)
else:
form = WebFormForm(
show_expand=(web_form.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK) and not document,
document_type=document_type,
source=web_form,
instance=document
)
if document:
title = _(u'upload a new version from source: %s') % web_form.title
else:
title = _(u'upload a local document from source: %s') % web_form.title
subtemplates_list.append({
'name': 'generic_form_subtemplate.html',
'context': {
'form': form,
'title': title,
},
})
elif source_type == SOURCE_CHOICE_STAGING:
staging_folder = get_object_or_404(StagingFolder, pk=source_id)
context['source'] = staging_folder
StagingFile = create_staging_file_class(request, staging_folder.folder_path, source=staging_folder)
if request.method == 'POST':
form = StagingDocumentForm(request.POST, request.FILES,
cls=StagingFile, document_type=document_type,
show_expand=(staging_folder.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK) and not document,
source=staging_folder,
instance=document
)
if form.is_valid():
try:
staging_file = StagingFile.get(form.cleaned_data['staging_file_id'])
if document:
expand = False
else:
if staging_folder.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK:
expand = form.cleaned_data.get('expand')
else:
if staging_folder.uncompress == SOURCE_UNCOMPRESS_CHOICE_Y:
expand = True
else:
expand = False
new_filename = get_form_filename(form)
result = staging_folder.upload_file(staging_file.upload(),
new_filename, use_file_name=form.cleaned_data.get('use_file_name', False),
document_type=document_type,
expand=expand,
metadata_dict_list=decode_metadata_from_url(request.GET),
user=request.user,
document=document,
new_version_data=form.cleaned_data.get('new_version_data')
)
if document:
messages.success(request, _(u'Document version from staging file: %s, uploaded successfully.') % staging_file.filename)
else:
if result['is_compressed'] == None:
messages.success(request, _(u'Staging file: %s, uploaded successfully.') % staging_file.filename)
if result['is_compressed'] == True:
messages.success(request, _(u'Staging file: %s, uncompressed successfully and uploaded as individual files.') % staging_file.filename)
if result['is_compressed'] == False:
messages.warning(request, _(u'Staging file: %s, was not compressed, uploaded as a single file.') % staging_file.filename)
if staging_folder.delete_after_upload:
transformations, errors = staging_folder.get_transformation_list()
staging_file.delete(preview_size=staging_folder.get_preview_size(), transformations=transformations)
messages.success(request, _(u'Staging file: %s, deleted successfully.') % staging_file.filename)
if document:
return HttpResponseRedirect(reverse('document_view_simple', args=[document.pk]))
else:
return HttpResponseRedirect(request.get_full_path())
except NewDocumentVersionNotAllowed:
messages.error(request, _(u'New version uploads are not allowed for this document.'))
except Exception, e:
if settings.DEBUG:
raise
messages.error(request, _(u'Unhandled exception: %s') % e)
else:
form = StagingDocumentForm(cls=StagingFile,
document_type=document_type,
show_expand=(staging_folder.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK) and not document,
source=staging_folder,
instance=document
)
try:
staging_filelist = StagingFile.get_all()
except Exception, e:
messages.error(request, e)
staging_filelist = []
finally:
if document:
title = _(u'upload a new version from staging source: %s') % staging_folder.title
else:
title = _(u'upload a document from staging source: %s') % staging_folder.title
subtemplates_list = [
{
'name': 'generic_form_subtemplate.html',
'context': {
'form': form,
'title': title,
}
},
{
'name': 'generic_list_subtemplate.html',
'context': {
'title': _(u'files in staging path'),
'object_list': staging_filelist,
'hide_link': True,
}
},
]
if document:
context['object'] = document
context.update({
'document_type_id': document_type_id,
'subtemplates_list': subtemplates_list,
'temporary_navigation_links': {
'form_header': {
'upload_version': {
'links': results['tab_links']
},
'upload_interactive': {
'links': results['tab_links']
}
}
},
})
if not document:
context.update(
{
'sidebar_subtemplates_list': [
{
'name': 'generic_subtemplate.html',
'context': {
'title': _(u'Current document type'),
'paragraphs': [document_type if document_type else _(u'None')],
'side_bar': True,
}
},
{
'name': 'generic_subtemplate.html',
'context': {
'title': _(u'Current metadata'),
'paragraphs': metadata_repr_as_list(decode_metadata_from_url(request.GET)),
'side_bar': True,
}
}
],
}
)
return render_to_response('generic_form.html', context,
context_instance=RequestContext(request))
def get_form_filename(form):
filename = None
if form:
if form.cleaned_data['new_filename']:
return form.cleaned_data['new_filename']
if form and 'document_type_available_filenames' in form.cleaned_data:
if form.cleaned_data['document_type_available_filenames']:
return form.cleaned_data['document_type_available_filenames'].filename
return filename
def staging_file_preview(request, source_type, source_id, staging_file_id):
Permission.objects.check_permissions(request.user, [PERMISSION_DOCUMENT_CREATE, PERMISSION_DOCUMENT_NEW_VERSION])
staging_folder = get_object_or_404(StagingFolder, pk=source_id)
StagingFile = create_staging_file_class(request, staging_folder.folder_path)
transformations, errors = SourceTransformation.transformations.get_for_object_as_list(staging_folder)
output_file = StagingFile.get(staging_file_id).get_image(
size=staging_folder.get_preview_size(),
transformations=transformations
)
if errors and (request.user.is_staff or request.user.is_superuser):
for error in errors:
messages.warning(request, _(u'Staging file transformation error: %(error)s') % {
'error': error
})
return sendfile.sendfile(request, output_file)
def staging_file_thumbnail(request, source_id, staging_file_id):
Permission.objects.check_permissions(request.user, [PERMISSION_DOCUMENT_CREATE, PERMISSION_DOCUMENT_NEW_VERSION])
staging_folder = get_object_or_404(StagingFolder, pk=source_id)
StagingFile = create_staging_file_class(request, staging_folder.folder_path, source=staging_folder)
transformations, errors = SourceTransformation.transformations.get_for_object_as_list(staging_folder)
output_file = StagingFile.get(staging_file_id).get_image(
size=THUMBNAIL_SIZE,
transformations=transformations
)
if errors and (request.user.is_staff or request.user.is_superuser):
for error in errors:
messages.warning(request, _(u'Staging file transformation error: %(error)s') % {
'error': error
})
return sendfile.sendfile(request, output_file)
def staging_file_delete(request, source_type, source_id, staging_file_id):
Permission.objects.check_permissions(request.user, [PERMISSION_DOCUMENT_CREATE, PERMISSION_DOCUMENT_NEW_VERSION])
staging_folder = get_object_or_404(StagingFolder, pk=source_id)
StagingFile = create_staging_file_class(request, staging_folder.folder_path)
staging_file = StagingFile.get(staging_file_id)
next = request.POST.get('next', request.GET.get('next', request.META.get('HTTP_REFERER', '/')))
previous = request.POST.get('previous', request.GET.get('previous', request.META.get('HTTP_REFERER', '/')))
if request.method == 'POST':
try:
transformations, errors = SourceTransformation.transformations.get_for_object_as_list(staging_folder)
staging_file.delete(
preview_size=staging_folder.get_preview_size(),
transformations=transformations
)
            messages.success(request, _(u'Staging file deleted successfully.'))
except Exception, e:
messages.error(request, _(u'Staging file delete error; %s.') % e)
return HttpResponseRedirect(next)
results = get_active_tab_links()
return render_to_response('generic_confirm.html', {
'source': staging_folder,
'delete_view': True,
'object': staging_file,
'next': next,
'previous': previous,
'form_icon': u'delete.png',
'temporary_navigation_links': {'form_header': {'staging_file_delete': {'links': results['tab_links']}}},
}, context_instance=RequestContext(request))
# Setup views
def setup_source_list(request, source_type):
Permission.objects.check_permissions(request.user, [PERMISSION_SOURCES_SETUP_VIEW])
if source_type == SOURCE_CHOICE_WEB_FORM:
cls = WebForm
elif source_type == SOURCE_CHOICE_STAGING:
cls = StagingFolder
elif source_type == SOURCE_CHOICE_WATCH:
cls = WatchFolder
context = {
'object_list': cls.objects.all(),
'title': cls.class_fullname_plural(),
'hide_link': True,
'list_object_variable_name': 'source',
'source_type': source_type,
}
return render_to_response('generic_list.html', context,
context_instance=RequestContext(request))
def setup_source_edit(request, source_type, source_id):
Permission.objects.check_permissions(request.user, [PERMISSION_SOURCES_SETUP_EDIT])
if source_type == SOURCE_CHOICE_WEB_FORM:
cls = WebForm
form_class = WebFormSetupForm
elif source_type == SOURCE_CHOICE_STAGING:
cls = StagingFolder
form_class = StagingFolderSetupForm
elif source_type == SOURCE_CHOICE_WATCH:
cls = WatchFolder
form_class = WatchFolderSetupForm
source = get_object_or_404(cls, pk=source_id)
next = request.POST.get('next', request.GET.get('next', request.META.get('HTTP_REFERER', '/')))
if request.method == 'POST':
form = form_class(instance=source, data=request.POST)
if form.is_valid():
try:
form.save()
messages.success(request, _(u'Source edited successfully'))
return HttpResponseRedirect(next)
except Exception, e:
messages.error(request, _(u'Error editing source; %s') % e)
else:
form = form_class(instance=source)
return render_to_response('generic_form.html', {
'title': _(u'edit source: %s') % source.fullname(),
'form': form,
'source': source,
'navigation_object_name': 'source',
'next': next,
'object_name': _(u'source'),
'source_type': source_type,
},
context_instance=RequestContext(request))
def setup_source_delete(request, source_type, source_id):
Permission.objects.check_permissions(request.user, [PERMISSION_SOURCES_SETUP_DELETE])
if source_type == SOURCE_CHOICE_WEB_FORM:
cls = WebForm
form_icon = u'application_form_delete.png'
redirect_view = 'setup_web_form_list'
elif source_type == SOURCE_CHOICE_STAGING:
cls = StagingFolder
form_icon = u'folder_delete.png'
redirect_view = 'setup_staging_folder_list'
elif source_type == SOURCE_CHOICE_WATCH:
cls = WatchFolder
form_icon = u'folder_delete.png'
redirect_view = 'setup_watch_folder_list'
redirect_view = reverse('setup_source_list', args=[source_type])
previous = request.POST.get('previous', request.GET.get('previous', request.META.get('HTTP_REFERER', redirect_view)))
source = get_object_or_404(cls, pk=source_id)
if request.method == 'POST':
try:
source.delete()
messages.success(request, _(u'Source "%s" deleted successfully.') % source)
except Exception, e:
messages.error(request, _(u'Error deleting source "%(source)s": %(error)s') % {
'source': source, 'error': e
})
return HttpResponseRedirect(redirect_view)
context = {
'title': _(u'Are you sure you wish to delete the source: %s?') % source.fullname(),
'source': source,
'object_name': _(u'source'),
'navigation_object_name': 'source',
'delete_view': True,
'previous': previous,
'form_icon': form_icon,
'source_type': source_type,
}
return render_to_response('generic_confirm.html', context,
context_instance=RequestContext(request))
def setup_source_create(request, source_type):
Permission.objects.check_permissions(request.user, [PERMISSION_SOURCES_SETUP_CREATE])
if source_type == SOURCE_CHOICE_WEB_FORM:
cls = WebForm
form_class = WebFormSetupForm
elif source_type == SOURCE_CHOICE_STAGING:
cls = StagingFolder
form_class = StagingFolderSetupForm
elif source_type == SOURCE_CHOICE_WATCH:
cls = WatchFolder
form_class = WatchFolderSetupForm
if request.method == 'POST':
form = form_class(data=request.POST)
if form.is_valid():
try:
form.save()
messages.success(request, _(u'Source created successfully'))
return HttpResponseRedirect(reverse('setup_web_form_list'))
except Exception, e:
messages.error(request, _(u'Error creating source; %s') % e)
else:
form = form_class()
return render_to_response('generic_form.html', {
'title': _(u'Create new source of type: %s') % cls.class_fullname(),
'form': form,
'source_type': source_type,
'navigation_object_name': 'source',
},
context_instance=RequestContext(request))
def setup_source_transformation_list(request, source_type, source_id):
Permission.objects.check_permissions(request.user, [PERMISSION_SOURCES_SETUP_EDIT])
if source_type == SOURCE_CHOICE_WEB_FORM:
cls = WebForm
elif source_type == SOURCE_CHOICE_STAGING:
cls = StagingFolder
elif source_type == SOURCE_CHOICE_WATCH:
cls = WatchFolder
source = get_object_or_404(cls, pk=source_id)
context = {
'object_list': SourceTransformation.transformations.get_for_object(source),
'title': _(u'transformations for: %s') % source.fullname(),
'source': source,
'object_name': _(u'source'),
'navigation_object_name': 'source',
'list_object_variable_name': 'transformation',
'extra_columns': [
{'name': _(u'order'), 'attribute': 'order'},
{'name': _(u'transformation'), 'attribute': encapsulate(lambda x: x.get_transformation_display())},
{'name': _(u'arguments'), 'attribute': 'arguments'}
],
'hide_link': True,
'hide_object': True,
}
return render_to_response('generic_list.html', context,
context_instance=RequestContext(request))
def setup_source_transformation_edit(request, transformation_id):
Permission.objects.check_permissions(request.user, [PERMISSION_SOURCES_SETUP_EDIT])
source_transformation = get_object_or_404(SourceTransformation, pk=transformation_id)
redirect_view = reverse('setup_source_transformation_list', args=[source_transformation.content_object.source_type, source_transformation.content_object.pk])
next = request.POST.get('next', request.GET.get('next', request.META.get('HTTP_REFERER', redirect_view)))
if request.method == 'POST':
form = SourceTransformationForm(instance=source_transformation, data=request.POST)
if form.is_valid():
try:
form.save()
messages.success(request, _(u'Source transformation edited successfully'))
return HttpResponseRedirect(next)
except Exception, e:
messages.error(request, _(u'Error editing source transformation; %s') % e)
else:
form = SourceTransformationForm(instance=source_transformation)
return render_to_response('generic_form.html', {
'title': _(u'Edit transformation: %s') % source_transformation,
'form': form,
'source': source_transformation.content_object,
'transformation': source_transformation,
'navigation_object_list': [
{'object': 'source', 'name': _(u'source')},
{'object': 'transformation', 'name': _(u'transformation')}
],
'next': next,
},
context_instance=RequestContext(request))
def setup_source_transformation_delete(request, transformation_id):
Permission.objects.check_permissions(request.user, [PERMISSION_SOURCES_SETUP_EDIT])
source_transformation = get_object_or_404(SourceTransformation, pk=transformation_id)
redirect_view = reverse('setup_source_transformation_list', args=[source_transformation.content_object.source_type, source_transformation.content_object.pk])
previous = request.POST.get('previous', request.GET.get('previous', request.META.get('HTTP_REFERER', redirect_view)))
if request.method == 'POST':
try:
source_transformation.delete()
messages.success(request, _(u'Source transformation deleted successfully.'))
except Exception, e:
messages.error(request, _(u'Error deleting source transformation; %(error)s') % {
'error': e}
)
return HttpResponseRedirect(redirect_view)
return render_to_response('generic_confirm.html', {
'delete_view': True,
'transformation': source_transformation,
'source': source_transformation.content_object,
'navigation_object_list': [
{'object': 'source', 'name': _(u'source')},
{'object': 'transformation', 'name': _(u'transformation')}
],
        'title': _(u'Are you sure you wish to delete source transformation "%(transformation)s"?') % {
'transformation': source_transformation.get_transformation_display(),
},
'previous': previous,
'form_icon': u'shape_square_delete.png',
},
context_instance=RequestContext(request))
def setup_source_transformation_create(request, source_type, source_id):
Permission.objects.check_permissions(request.user, [PERMISSION_SOURCES_SETUP_EDIT])
if source_type == SOURCE_CHOICE_WEB_FORM:
cls = WebForm
elif source_type == SOURCE_CHOICE_STAGING:
cls = StagingFolder
elif source_type == SOURCE_CHOICE_WATCH:
cls = WatchFolder
source = get_object_or_404(cls, pk=source_id)
redirect_view = reverse('setup_source_transformation_list', args=[source.source_type, source.pk])
if request.method == 'POST':
form = SourceTransformationForm_create(request.POST)
if form.is_valid():
try:
                source_transformation = form.save(commit=False)
                source_transformation.content_object = source
                source_transformation.save()
messages.success(request, _(u'Source transformation created successfully'))
return HttpResponseRedirect(redirect_view)
except Exception, e:
messages.error(request, _(u'Error creating source transformation; %s') % e)
else:
form = SourceTransformationForm_create()
return render_to_response('generic_form.html', {
'form': form,
'source': source,
'object_name': _(u'source'),
'navigation_object_name': 'source',
'title': _(u'Create new transformation for source: %s') % source,
}, context_instance=RequestContext(request))
|
rosarior/mayan
|
apps/sources/views.py
|
Python
|
gpl-3.0
| 31,364 | 0.004591 |
from collections import defaultdict
class Solution:
def longestDecomposition(self, text: str) -> int:
        # Greedy two-pointer scan: grow a prefix window from the left and a
        # suffix window from the right one character at a time, tracking each
        # window's character counts.
        num = 0
        L = len(text)
        l, r = 0, L - 1
        mp1 = defaultdict(int)
        mp2 = defaultdict(int)
        while l < r:
            mp1[text[l]] += 1
            mp2[text[r]] += 1
            if mp1 == mp2:
                # Both windows hold the same multiset of characters, so cut
                # them off as a matching chunk pair and start fresh windows.
                num += 2
                mp1 = defaultdict(int)
                mp2 = defaultdict(int)
            l += 1
            r -= 1
        # Anything left over (unmatched windows or a single middle character)
        # forms one final middle chunk.
        if not mp1 and not mp2 and l > r:
            pass
        else:
            num += 1
        return num
if __name__ == '__main__':
assert Solution().longestDecomposition("ghiabcdefhelloadamhelloabcdefghi") == 7
assert Solution().longestDecomposition("merchant") == 1
assert Solution().longestDecomposition("antaprezatepzapreanta") == 11
assert Solution().longestDecomposition("aaa") == 3
|
lmmsoft/LeetCode
|
LeetCode-Algorithm/1147. Longest Chunked Palindrome Decomposition/1147.py
|
Python
|
gpl-2.0
| 896 | 0.002232 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 OpenStack Foundation
# Copyright 2012 Grid Dynamics
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base utilities to build API operation managers and objects on top of.
"""
# E1102: %s is not callable
# pylint: disable=E1102
import abc
import urllib
from cinderclient.openstack.common.apiclient import exceptions
from cinderclient.openstack.common import strutils
def getid(obj):
"""Return id if argument is a Resource.
Abstracts the common pattern of allowing both an object or an object's ID
(UUID) as a parameter when dealing with relationships.
"""
try:
if obj.uuid:
return obj.uuid
except AttributeError:
pass
try:
return obj.id
except AttributeError:
return obj
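# Illustrative behaviour (editor's sketch with a made-up resource class):
#   class _Volume(object):
#       id = 'vol-123'
#   getid(_Volume())  -> 'vol-123'
#   getid('vol-123')  -> 'vol-123'   (plain IDs fall through unchanged)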
# TODO(aababilov): call run_hooks() in HookableMixin's child classes
class HookableMixin(object):
"""Mixin so classes can register and run hooks."""
_hooks_map = {}
@classmethod
def add_hook(cls, hook_type, hook_func):
"""Add a new hook of specified type.
:param cls: class that registers hooks
:param hook_type: hook type, e.g., '__pre_parse_args__'
:param hook_func: hook function
"""
if hook_type not in cls._hooks_map:
cls._hooks_map[hook_type] = []
cls._hooks_map[hook_type].append(hook_func)
@classmethod
def run_hooks(cls, hook_type, *args, **kwargs):
"""Run all hooks of specified type.
:param cls: class that registers hooks
:param hook_type: hook type, e.g., '__pre_parse_args__'
        :param *args: args to be passed to every hook function
:param **kwargs: kwargs to be passed to every hook function
"""
hook_funcs = cls._hooks_map.get(hook_type) or []
for hook_func in hook_funcs:
hook_func(*args, **kwargs)
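    # Illustrative usage (editor's sketch; '__pre_parse_args__' is one of the
    # conventional hook types, the manager class and function are made up):
    #   def announce(argv):
    #       print('about to parse %s' % argv)
    #   SomeManager.add_hook('__pre_parse_args__', announce)
    #   SomeManager.run_hooks('__pre_parse_args__', ['--debug'])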
class BaseManager(HookableMixin):
"""Basic manager type providing common operations.
Managers interact with a particular type of API (servers, flavors, images,
etc.) and provide CRUD operations for them.
"""
resource_class = None
def __init__(self, client):
"""Initializes BaseManager with `client`.
:param client: instance of BaseClient descendant for HTTP requests
"""
super(BaseManager, self).__init__()
self.client = client
def _list(self, url, response_key, obj_class=None, json=None):
"""List the collection.
:param url: a partial URL, e.g., '/servers'
:param response_key: the key to be looked up in response dictionary,
e.g., 'servers'
:param obj_class: class for constructing the returned objects
(self.resource_class will be used by default)
:param json: data that will be encoded as JSON and passed in POST
request (GET will be sent by default)
"""
if json:
body = self.client.post(url, json=json).json()
else:
body = self.client.get(url).json()
if obj_class is None:
obj_class = self.resource_class
data = body[response_key]
# NOTE(ja): keystone returns values as list as {'values': [ ... ]}
# unlike other services which just return the list...
try:
data = data['values']
except (KeyError, TypeError):
pass
return [obj_class(self, res, loaded=True) for res in data if res]
def _get(self, url, response_key):
"""Get an object from collection.
:param url: a partial URL, e.g., '/servers'
:param response_key: the key to be looked up in response dictionary,
e.g., 'server'
"""
body = self.client.get(url).json()
return self.resource_class(self, body[response_key], loaded=True)
def _head(self, url):
"""Retrieve request headers for an object.
:param url: a partial URL, e.g., '/servers'
"""
resp = self.client.head(url)
return resp.status_code == 204
def _post(self, url, json, response_key, return_raw=False):
"""Create an object.
:param url: a partial URL, e.g., '/servers'
:param json: data that will be encoded as JSON and passed in POST
request (GET will be sent by default)
:param response_key: the key to be looked up in response dictionary,
e.g., 'servers'
:param return_raw: flag to force returning raw JSON instead of
Python object of self.resource_class
"""
body = self.client.post(url, json=json).json()
if return_raw:
return body[response_key]
return self.resource_class(self, body[response_key])
def _put(self, url, json=None, response_key=None):
"""Update an object with PUT method.
:param url: a partial URL, e.g., '/servers'
:param json: data that will be encoded as JSON and passed in POST
request (GET will be sent by default)
:param response_key: the key to be looked up in response dictionary,
e.g., 'servers'
"""
resp = self.client.put(url, json=json)
# PUT requests may not return a body
if resp.content:
body = resp.json()
if response_key is not None:
return self.resource_class(self, body[response_key])
else:
return self.resource_class(self, body)
def _patch(self, url, json=None, response_key=None):
"""Update an object with PATCH method.
:param url: a partial URL, e.g., '/servers'
:param json: data that will be encoded as JSON and passed in POST
request (GET will be sent by default)
:param response_key: the key to be looked up in response dictionary,
e.g., 'servers'
"""
body = self.client.patch(url, json=json).json()
if response_key is not None:
return self.resource_class(self, body[response_key])
else:
return self.resource_class(self, body)
def _delete(self, url):
"""Delete an object.
:param url: a partial URL, e.g., '/servers/my-server'
"""
return self.client.delete(url)
class ManagerWithFind(BaseManager):
"""Manager with additional `find()`/`findall()` methods."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def list(self):
pass
def find(self, **kwargs):
"""Find a single item with attributes matching ``**kwargs``.
This isn't very efficient: it loads the entire list then filters on
the Python side.
"""
matches = self.findall(**kwargs)
num_matches = len(matches)
if num_matches == 0:
msg = "No %s matching %s." % (self.resource_class.__name__, kwargs)
raise exceptions.NotFound(msg)
elif num_matches > 1:
raise exceptions.NoUniqueMatch()
else:
return matches[0]
def findall(self, **kwargs):
"""Find all items with attributes matching ``**kwargs``.
This isn't very efficient: it loads the entire list then filters on
the Python side.
"""
found = []
searches = kwargs.items()
for obj in self.list():
try:
if all(getattr(obj, attr) == value
for (attr, value) in searches):
found.append(obj)
except AttributeError:
continue
return found
class CrudManager(BaseManager):
"""Base manager class for manipulating entities.
Children of this class are expected to define a `collection_key` and `key`.
- `collection_key`: Usually a plural noun by convention (e.g. `entities`);
used to refer collections in both URL's (e.g. `/v3/entities`) and JSON
objects containing a list of member resources (e.g. `{'entities': [{},
{}, {}]}`).
- `key`: Usually a singular noun by convention (e.g. `entity`); used to
refer to an individual member of the collection.
"""
collection_key = None
key = None
def build_url(self, base_url=None, **kwargs):
"""Builds a resource URL for the given kwargs.
Given an example collection where `collection_key = 'entities'` and
`key = 'entity'`, the following URL's could be generated.
By default, the URL will represent a collection of entities, e.g.::
/entities
If kwargs contains an `entity_id`, then the URL will represent a
specific member, e.g.::
/entities/{entity_id}
:param base_url: if provided, the generated URL will be appended to it
"""
url = base_url if base_url is not None else ''
url += '/%s' % self.collection_key
# do we have a specific entity?
entity_id = kwargs.get('%s_id' % self.key)
if entity_id is not None:
url += '/%s' % entity_id
return url
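    # Illustrative mapping (editor's sketch; 'entities' mirrors the docstring
    # example, the subclass and client are hypothetical):
    #   class EntityManager(CrudManager):
    #       collection_key = 'entities'
    #       key = 'entity'
    #   EntityManager(client).build_url()                -> '/entities'
    #   EntityManager(client).build_url(entity_id='abc') -> '/entities/abc'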
def _filter_kwargs(self, kwargs):
"""Drop null values and handle ids."""
for key, ref in kwargs.copy().iteritems():
if ref is None:
kwargs.pop(key)
else:
if isinstance(ref, Resource):
kwargs.pop(key)
kwargs['%s_id' % key] = getid(ref)
return kwargs
def create(self, **kwargs):
kwargs = self._filter_kwargs(kwargs)
return self._post(
self.build_url(**kwargs),
{self.key: kwargs},
self.key)
def get(self, **kwargs):
kwargs = self._filter_kwargs(kwargs)
return self._get(
self.build_url(**kwargs),
self.key)
def head(self, **kwargs):
kwargs = self._filter_kwargs(kwargs)
return self._head(self.build_url(**kwargs))
def list(self, base_url=None, **kwargs):
"""List the collection.
:param base_url: if provided, the generated URL will be appended to it
"""
kwargs = self._filter_kwargs(kwargs)
return self._list(
'%(base_url)s%(query)s' % {
'base_url': self.build_url(base_url=base_url, **kwargs),
'query': '?%s' % urllib.urlencode(kwargs) if kwargs else '',
},
self.collection_key)
def put(self, base_url=None, **kwargs):
"""Update an element.
:param base_url: if provided, the generated URL will be appended to it
"""
kwargs = self._filter_kwargs(kwargs)
return self._put(self.build_url(base_url=base_url, **kwargs))
def update(self, **kwargs):
kwargs = self._filter_kwargs(kwargs)
params = kwargs.copy()
params.pop('%s_id' % self.key)
return self._patch(
self.build_url(**kwargs),
{self.key: params},
self.key)
def delete(self, **kwargs):
kwargs = self._filter_kwargs(kwargs)
return self._delete(
self.build_url(**kwargs))
def find(self, base_url=None, **kwargs):
"""Find a single item with attributes matching ``**kwargs``.
:param base_url: if provided, the generated URL will be appended to it
"""
kwargs = self._filter_kwargs(kwargs)
rl = self._list(
'%(base_url)s%(query)s' % {
'base_url': self.build_url(base_url=base_url, **kwargs),
'query': '?%s' % urllib.urlencode(kwargs) if kwargs else '',
},
self.collection_key)
num = len(rl)
if num == 0:
msg = "No %s matching %s." % (self.resource_class.__name__, kwargs)
raise exceptions.NotFound(404, msg)
elif num > 1:
raise exceptions.NoUniqueMatch
else:
return rl[0]
class Extension(HookableMixin):
"""Extension descriptor."""
SUPPORTED_HOOKS = ('__pre_parse_args__', '__post_parse_args__')
manager_class = None
def __init__(self, name, module):
super(Extension, self).__init__()
self.name = name
self.module = module
self._parse_extension_module()
def _parse_extension_module(self):
self.manager_class = None
for attr_name, attr_value in self.module.__dict__.items():
if attr_name in self.SUPPORTED_HOOKS:
self.add_hook(attr_name, attr_value)
else:
try:
if issubclass(attr_value, BaseManager):
self.manager_class = attr_value
except TypeError:
pass
def __repr__(self):
return "<Extension '%s'>" % self.name
class Resource(object):
"""Base class for OpenStack resources (tenant, user, etc.).
This is pretty much just a bag for attributes.
"""
HUMAN_ID = False
NAME_ATTR = 'name'
def __init__(self, manager, info, loaded=False):
"""Populate and bind to a manager.
:param manager: BaseManager object
:param info: dictionary representing resource attributes
:param loaded: prevent lazy-loading if set to True
"""
self.manager = manager
self._info = info
self._add_details(info)
self._loaded = loaded
def __repr__(self):
reprkeys = sorted(k
for k in self.__dict__.keys()
if k[0] != '_' and k != 'manager')
info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
return "<%s %s>" % (self.__class__.__name__, info)
@property
def human_id(self):
"""Human-readable ID which can be used for bash completion.
"""
if self.NAME_ATTR in self.__dict__ and self.HUMAN_ID:
return strutils.to_slug(getattr(self, self.NAME_ATTR))
return None
def _add_details(self, info):
for (k, v) in info.iteritems():
try:
setattr(self, k, v)
self._info[k] = v
except AttributeError:
# In this case we already defined the attribute on the class
pass
def __getattr__(self, k):
if k not in self.__dict__:
#NOTE(bcwaldon): disallow lazy-loading if already loaded once
if not self.is_loaded():
self.get()
return self.__getattr__(k)
raise AttributeError(k)
else:
return self.__dict__[k]
def get(self):
# set_loaded() first ... so if we have to bail, we know we tried.
self.set_loaded(True)
if not hasattr(self.manager, 'get'):
return
new = self.manager.get(self.id)
if new:
self._add_details(new._info)
def __eq__(self, other):
if not isinstance(other, Resource):
return NotImplemented
# two resources of different types are not equal
if not isinstance(other, self.__class__):
return False
if hasattr(self, 'id') and hasattr(other, 'id'):
return self.id == other.id
return self._info == other._info
def is_loaded(self):
return self._loaded
def set_loaded(self, val):
self._loaded = val
|
ntt-sic/python-cinderclient
|
cinderclient/openstack/common/apiclient/base.py
|
Python
|
apache-2.0
| 15,937 | 0.000063 |
import ecj
import scipy
import numpy
import operator
import networkx as nx
#from progressbar import ProgressBar, Percentage
numpy.random.RandomState()
import bfutils as bfu
import numpy as np
import gmpy as gmp
def num2CG(num,n):
"""num2CG - converts a number whose binary representaion encodes edge
presence/absence into a compressed graph representaion
"""
n2 = n*n
G = {'%i'%(i+1):{} for i in xrange(n)}
if num == 0: return G
bl = gmp.bit_length(num)
idx = [n2-i-1 for i in xrange(bl) if num & (1<<i)]
idx = np.unravel_index(idx,(n,n))
x = idx[0]+1
y = idx[1]+1
for i in xrange(len(x)):
G['%i' % x[i]]['%i' % y[i]] = set([(0,1)])
return G
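# Worked example (editor's addition): for n=2 the number is read as the 4-bit
# adjacency matrix in row-major order, most significant bit first, so
#   num2CG(0b1000, 2) -> {'1': {'1': set([(0, 1)])}, '2': {}}   # edge 1->1
#   num2CG(0b0001, 2) -> {'1': {}, '2': {'2': set([(0, 1)])}}   # edge 2->2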
def hasSelfLoops(G):
for u in G:
if G[u].has_key(u):
return True
return False
def randSCC(n):
G = num2CG(scipy.random.randint(2**(n**2)),n)
while (len(ecj.scc(G)) > 1) or gcd4scc(G)>1:
G = num2CG(scipy.random.randint(2**(n**2)),n)
return G
def SM_fixed(Gstar,G, iter=5):
compat = []
for j in range(0,iter):
if Gstar == ecj.undersample(G,j):
compat.append(j)
return compat
def SM_converging(Gstar,G):
"""Gstar is the undersampled reference graph, while G is the starting
graph. The code searches over all undersampled version of G to
find all matches with Gstar
"""
compat = []
GG = G
Gprev = G
if G == Gstar: return [0]
j = 1
G = ecj.undersample(GG,j)
while not (G == Gprev):
if Gstar == G: compat.append(j)
j += 1
Gprev = G
G = ecj.undersample(GG,j)
return compat
def searchMatch(Gstar,G, iter=5):
if gcd4scc(G) >1: return SM_fixed(Gstar, G, iter=iter)
return SM_converging(Gstar, G)
def hasSink(G):
return not reduce(operator.and_, [bool(G[n]) for n in G], True)
def hasRoot(G): return hasSink(ecj.tr(G))
def isSclique(G):
n = len(G)
for v in G:
if sum([(0,1) in G[v][w] for w in G[v]]) < n: return False
if sum([(2,0) in G[v][w] for w in G[v]]) < n-1: return False
return True
def graph2nx(G):
g = nx.DiGraph()
for v in G:
g.add_edges_from([(v,x) for x in G[v] if (0,1) in G[v][x]])
return g
def nx2graph(G):
g = {str(n+1):{} for n in G}
for n in G:
g['%i' % (n+1)] = {'%i' % (x+1):set([(0,1)]) for x in G[n]}
return g
def gcd4scc(SCC):
g = graph2nx(SCC)
return ecj.listgcd(map(lambda x: len(x)-1, nx.simple_cycles(g)))
def compatibleAtU(uGstar):
compat = []
n = len(uGstar)
numG = 2**(n**2)
#pbar = Percentage()
for i in range(1,numG):
G = num2CG(i,n)
#pbar.update(i+1)
if len(ecj.scc(G)) > 1: continue
l = searchMatch(uGstar,G, iter = 5)
if l: compat.append((l,G))
#pbar.finish()
return compat
|
pliz/gunfolds
|
tools/comparison.py
|
Python
|
gpl-3.0
| 2,830 | 0.021555 |
# Licensed under a 3-clause BSD style license - see PYFITS.rst
import ctypes
import gc
import itertools
import math
import re
import time
import warnings
from contextlib import suppress
import numpy as np
from .base import DELAYED, ExtensionHDU, BITPIX2DTYPE, DTYPE2BITPIX
from .image import ImageHDU
from .table import BinTableHDU
from astropy.io.fits import conf
from astropy.io.fits.card import Card
from astropy.io.fits.column import Column, ColDefs, TDEF_RE
from astropy.io.fits.column import KEYWORD_NAMES as TABLE_KEYWORD_NAMES
from astropy.io.fits.fitsrec import FITS_rec
from astropy.io.fits.header import Header
from astropy.io.fits.util import (_is_pseudo_unsigned, _unsigned_zero, _is_int,
_get_array_mmap)
from astropy.utils import lazyproperty
from astropy.utils.exceptions import AstropyUserWarning
try:
from astropy.io.fits import compression
COMPRESSION_SUPPORTED = COMPRESSION_ENABLED = True
except ImportError:
COMPRESSION_SUPPORTED = COMPRESSION_ENABLED = False
# Quantization dithering method constants; these are right out of fitsio.h
NO_DITHER = -1
SUBTRACTIVE_DITHER_1 = 1
SUBTRACTIVE_DITHER_2 = 2
QUANTIZE_METHOD_NAMES = {
NO_DITHER: 'NO_DITHER',
SUBTRACTIVE_DITHER_1: 'SUBTRACTIVE_DITHER_1',
SUBTRACTIVE_DITHER_2: 'SUBTRACTIVE_DITHER_2'
}
DITHER_SEED_CLOCK = 0
DITHER_SEED_CHECKSUM = -1
COMPRESSION_TYPES = ('RICE_1', 'GZIP_1', 'GZIP_2', 'PLIO_1', 'HCOMPRESS_1')
# Default compression parameter values
DEFAULT_COMPRESSION_TYPE = 'RICE_1'
DEFAULT_QUANTIZE_LEVEL = 16.
DEFAULT_QUANTIZE_METHOD = NO_DITHER
DEFAULT_DITHER_SEED = DITHER_SEED_CLOCK
DEFAULT_HCOMP_SCALE = 0
DEFAULT_HCOMP_SMOOTH = 0
DEFAULT_BLOCK_SIZE = 32
DEFAULT_BYTE_PIX = 4
CMTYPE_ALIASES = {}
# CFITSIO version-specific features
if COMPRESSION_SUPPORTED:
try:
CFITSIO_SUPPORTS_GZIPDATA = compression.CFITSIO_VERSION >= 3.28
CFITSIO_SUPPORTS_Q_FORMAT = compression.CFITSIO_VERSION >= 3.35
if compression.CFITSIO_VERSION >= 3.35:
CMTYPE_ALIASES['RICE_ONE'] = 'RICE_1'
except AttributeError:
# This generally shouldn't happen unless running pip in an
# environment where an old build of pyfits exists
CFITSIO_SUPPORTS_GZIPDATA = True
CFITSIO_SUPPORTS_Q_FORMAT = True
COMPRESSION_KEYWORDS = {'ZIMAGE', 'ZCMPTYPE', 'ZBITPIX', 'ZNAXIS', 'ZMASKCMP',
'ZSIMPLE', 'ZTENSION', 'ZEXTEND'}
class CompImageHeader(Header):
"""
Header object for compressed image HDUs designed to keep the compression
header and the underlying image header properly synchronized.
This essentially wraps the image header, so that all values are read from
and written to the image header. However, updates to the image header will
also update the table header where appropriate.
Note that if no image header is passed in, the code will instantiate a
regular `~astropy.io.fits.Header`.
"""
# TODO: The difficulty of implementing this screams a need to rewrite this
# module
_keyword_remaps = {
'SIMPLE': 'ZSIMPLE', 'XTENSION': 'ZTENSION', 'BITPIX': 'ZBITPIX',
'NAXIS': 'ZNAXIS', 'EXTEND': 'ZEXTEND', 'BLOCKED': 'ZBLOCKED',
'PCOUNT': 'ZPCOUNT', 'GCOUNT': 'ZGCOUNT', 'CHECKSUM': 'ZHECKSUM',
'DATASUM': 'ZDATASUM'
}
_zdef_re = re.compile(r'(?P<label>^[Zz][a-zA-Z]*)(?P<num>[1-9][0-9 ]*$)?')
_compression_keywords = set(_keyword_remaps.values()).union(
['ZIMAGE', 'ZCMPTYPE', 'ZMASKCMP', 'ZQUANTIZ', 'ZDITHER0'])
_indexed_compression_keywords = {'ZNAXIS', 'ZTILE', 'ZNAME', 'ZVAL'}
    # TODO: Once in place it should be possible to manage some of this through
# the schema system, but it's not quite ready for that yet. Also it still
# makes more sense to change CompImageHDU to subclass ImageHDU :/
def __new__(cls, table_header, image_header=None):
# 2019-09-14 (MHvK): No point wrapping anything if no image_header is
# given. This happens if __getitem__ and copy are called - our super
# class will aim to initialize a new, possibly partially filled
# header, but we cannot usefully deal with that.
# TODO: the above suggests strongly we should *not* subclass from
# Header. See also comment above about the need for reorganization.
if image_header is None:
return Header(table_header)
else:
return super().__new__(cls)
def __init__(self, table_header, image_header):
self._cards = image_header._cards
self._keyword_indices = image_header._keyword_indices
self._rvkc_indices = image_header._rvkc_indices
self._modified = image_header._modified
self._table_header = table_header
# We need to override and Header methods that can modify the header, and
# ensure that they sync with the underlying _table_header
def __setitem__(self, key, value):
# This isn't pretty, but if the `key` is either an int or a tuple we
# need to figure out what keyword name that maps to before doing
# anything else; these checks will be repeated later in the
# super().__setitem__ call but I don't see another way around it
# without some major refactoring
if self._set_slice(key, value, self):
return
if isinstance(key, int):
keyword, index = self._keyword_from_index(key)
elif isinstance(key, tuple):
keyword, index = key
else:
            # We don't want to specify an index otherwise, because that will
# break the behavior for new keywords and for commentary keywords
keyword, index = key, None
if self._is_reserved_keyword(keyword):
return
super().__setitem__(key, value)
if index is not None:
remapped_keyword = self._remap_keyword(keyword)
self._table_header[remapped_keyword, index] = value
# Else this will pass through to ._update
def __delitem__(self, key):
if isinstance(key, slice) or self._haswildcard(key):
# If given a slice pass that on to the superclass and bail out
# early; we only want to make updates to _table_header when given
# a key specifying a single keyword
return super().__delitem__(key)
if isinstance(key, int):
keyword, index = self._keyword_from_index(key)
elif isinstance(key, tuple):
keyword, index = key
else:
keyword, index = key, None
if key not in self:
raise KeyError(f"Keyword {key!r} not found.")
super().__delitem__(key)
remapped_keyword = self._remap_keyword(keyword)
if remapped_keyword in self._table_header:
if index is not None:
del self._table_header[(remapped_keyword, index)]
else:
del self._table_header[remapped_keyword]
def append(self, card=None, useblanks=True, bottom=False, end=False):
# This logic unfortunately needs to be duplicated from the base class
# in order to determine the keyword
if isinstance(card, str):
card = Card(card)
elif isinstance(card, tuple):
card = Card(*card)
elif card is None:
card = Card()
elif not isinstance(card, Card):
raise ValueError(
'The value appended to a Header must be either a keyword or '
'(keyword, value, [comment]) tuple; got: {!r}'.format(card))
if self._is_reserved_keyword(card.keyword):
return
super().append(card=card, useblanks=useblanks, bottom=bottom, end=end)
remapped_keyword = self._remap_keyword(card.keyword)
# card.keyword strips the HIERARCH if present so this must be added
# back to avoid a warning.
if str(card).startswith("HIERARCH ") and not remapped_keyword.startswith("HIERARCH "):
remapped_keyword = "HIERARCH " + remapped_keyword
card = Card(remapped_keyword, card.value, card.comment)
# Here we disable the use of blank cards, because the call above to
# Header.append may have already deleted a blank card in the table
# header, thanks to inheritance: Header.append calls 'del self[-1]'
        # to delete a blank card, which calls CompImageHeader.__delitem__,
# which deletes the blank card both in the image and the table headers!
self._table_header.append(card=card, useblanks=False,
bottom=bottom, end=end)
def insert(self, key, card, useblanks=True, after=False):
if isinstance(key, int):
# Determine condition to pass through to append
if after:
if key == -1:
key = len(self._cards)
else:
key += 1
if key >= len(self._cards):
self.append(card, end=True)
return
if isinstance(card, str):
card = Card(card)
elif isinstance(card, tuple):
card = Card(*card)
elif not isinstance(card, Card):
raise ValueError(
'The value inserted into a Header must be either a keyword or '
'(keyword, value, [comment]) tuple; got: {!r}'.format(card))
if self._is_reserved_keyword(card.keyword):
return
# Now the tricky part is to determine where to insert in the table
# header. If given a numerical index we need to map that to the
# corresponding index in the table header. Although rare, there may be
# cases where there is no mapping in which case we just try the same
# index
# NOTE: It is crucial that remapped_index in particular is figured out
# before the image header is modified
remapped_index = self._remap_index(key)
remapped_keyword = self._remap_keyword(card.keyword)
super().insert(key, card, useblanks=useblanks, after=after)
card = Card(remapped_keyword, card.value, card.comment)
# Here we disable the use of blank cards, because the call above to
# Header.insert may have already deleted a blank card in the table
# header, thanks to inheritance: Header.insert calls 'del self[-1]'
# to delete a blank card, which calls CompImageHeader.__delitem__,
# which deletes the blank card both in the image and the table headers!
self._table_header.insert(remapped_index, card, useblanks=False,
after=after)
def _update(self, card):
keyword = card[0]
if self._is_reserved_keyword(keyword):
return
super()._update(card)
if keyword in Card._commentary_keywords:
# Otherwise this will result in a duplicate insertion
return
remapped_keyword = self._remap_keyword(keyword)
self._table_header._update((remapped_keyword,) + card[1:])
# Last piece needed (I think) for synchronizing with the real header
# This one is tricky since _relativeinsert calls insert
def _relativeinsert(self, card, before=None, after=None, replace=False):
keyword = card[0]
if self._is_reserved_keyword(keyword):
return
# Now we have to figure out how to remap 'before' and 'after'
if before is None:
if isinstance(after, int):
remapped_after = self._remap_index(after)
else:
remapped_after = self._remap_keyword(after)
remapped_before = None
else:
if isinstance(before, int):
remapped_before = self._remap_index(before)
else:
remapped_before = self._remap_keyword(before)
remapped_after = None
super()._relativeinsert(card, before=before, after=after,
replace=replace)
remapped_keyword = self._remap_keyword(keyword)
card = Card(remapped_keyword, card[1], card[2])
self._table_header._relativeinsert(card, before=remapped_before,
after=remapped_after,
replace=replace)
@classmethod
def _is_reserved_keyword(cls, keyword, warn=True):
msg = ('Keyword {!r} is reserved for use by the FITS Tiled Image '
'Convention and will not be stored in the header for the '
'image being compressed.'.format(keyword))
if keyword == 'TFIELDS':
if warn:
warnings.warn(msg)
return True
m = TDEF_RE.match(keyword)
if m and m.group('label').upper() in TABLE_KEYWORD_NAMES:
if warn:
warnings.warn(msg)
return True
m = cls._zdef_re.match(keyword)
if m:
label = m.group('label').upper()
num = m.group('num')
if num is not None and label in cls._indexed_compression_keywords:
if warn:
warnings.warn(msg)
return True
elif label in cls._compression_keywords:
if warn:
warnings.warn(msg)
return True
return False
@classmethod
def _remap_keyword(cls, keyword):
# Given a keyword that one might set on an image, remap that keyword to
# the name used for it in the COMPRESSED HDU header
# This is mostly just a lookup in _keyword_remaps, but needs handling
# for NAXISn keywords
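        # For example, 'BITPIX' maps to 'ZBITPIX' and 'NAXIS2' maps to
        # 'ZNAXIS2', while a keyword with no reserved counterpart (e.g.
        # 'OBJECT') is returned unchanged.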
is_naxisn = False
if keyword[:5] == 'NAXIS':
with suppress(ValueError):
index = int(keyword[5:])
is_naxisn = index > 0
if is_naxisn:
return f'ZNAXIS{index}'
# If the keyword does not need to be remapped then just return the
# original keyword
return cls._keyword_remaps.get(keyword, keyword)
def _remap_index(self, idx):
# Given an integer index into this header, map that to the index in the
# table header for the same card. If the card doesn't exist in the
# table header (generally should *not* be the case) this will just
# return the same index
# This *does* also accept a keyword or (keyword, repeat) tuple and
# obtains the associated numerical index with self._cardindex
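        # For example, if 'BITPIX' were the 2nd card of the image header and
        # 'ZBITPIX' the 9th card of the table header, an index of 1 here
        # would be remapped to 8 (both positions are purely illustrative).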
if not isinstance(idx, int):
idx = self._cardindex(idx)
keyword, repeat = self._keyword_from_index(idx)
remapped_insert_keyword = self._remap_keyword(keyword)
with suppress(IndexError, KeyError):
idx = self._table_header._cardindex((remapped_insert_keyword,
repeat))
return idx
# TODO: Fix this class so that it doesn't actually inherit from BinTableHDU,
# but instead has an internal BinTableHDU reference
class CompImageHDU(BinTableHDU):
"""
Compressed Image HDU class.
"""
_manages_own_heap = True
"""
The calls to CFITSIO lay out the heap data in memory, and we write it out
the same way CFITSIO organizes it. In principle this would break if a user
manually changes the underlying compressed data by hand, but there is no
reason they would want to do that (and if they do that's their
responsibility).
"""
_default_name = "COMPRESSED_IMAGE"
def __init__(self, data=None, header=None, name=None,
compression_type=DEFAULT_COMPRESSION_TYPE,
tile_size=None,
hcomp_scale=DEFAULT_HCOMP_SCALE,
hcomp_smooth=DEFAULT_HCOMP_SMOOTH,
quantize_level=DEFAULT_QUANTIZE_LEVEL,
quantize_method=DEFAULT_QUANTIZE_METHOD,
dither_seed=DEFAULT_DITHER_SEED,
do_not_scale_image_data=False,
uint=False, scale_back=False, **kwargs):
"""
Parameters
----------
data : array, optional
Uncompressed image data
header : `~astropy.io.fits.Header`, optional
Header to be associated with the image; when reading the HDU from a
file (data=DELAYED), the header read from the file
name : str, optional
The ``EXTNAME`` value; if this value is `None`, then the name from
the input image header will be used; if there is no name in the
input image header then the default name ``COMPRESSED_IMAGE`` is
used.
compression_type : str, optional
Compression algorithm: one of
``'RICE_1'``, ``'RICE_ONE'``, ``'PLIO_1'``, ``'GZIP_1'``,
``'GZIP_2'``, ``'HCOMPRESS_1'``
        tile_size : sequence of int, optional
Compression tile sizes. Default treats each row of image as a
tile.
hcomp_scale : float, optional
HCOMPRESS scale parameter
hcomp_smooth : float, optional
HCOMPRESS smooth parameter
quantize_level : float, optional
Floating point quantization level; see note below
quantize_method : int, optional
Floating point quantization dithering method; can be either
``NO_DITHER`` (-1; default), ``SUBTRACTIVE_DITHER_1`` (1), or
``SUBTRACTIVE_DITHER_2`` (2); see note below
dither_seed : int, optional
Random seed to use for dithering; can be either an integer in the
            range 1 to 10000 (inclusive), ``DITHER_SEED_CLOCK`` (0; default), or
``DITHER_SEED_CHECKSUM`` (-1); see note below
Notes
-----
The astropy.io.fits package supports 2 methods of image compression:
1) The entire FITS file may be externally compressed with the gzip
or pkzip utility programs, producing a ``*.gz`` or ``*.zip``
file, respectively. When reading compressed files of this type,
Astropy first uncompresses the entire file into a temporary file
before performing the requested read operations. The
astropy.io.fits package does not support writing to these types
of compressed files. This type of compression is supported in
the ``_File`` class, not in the `CompImageHDU` class. The file
compression type is recognized by the ``.gz`` or ``.zip`` file
name extension.
2) The `CompImageHDU` class supports the FITS tiled image
compression convention in which the image is subdivided into a
grid of rectangular tiles, and each tile of pixels is
individually compressed. The details of this FITS compression
convention are described at the `FITS Support Office web site
<https://fits.gsfc.nasa.gov/registry/tilecompression.html>`_.
Basically, the compressed image tiles are stored in rows of a
variable length array column in a FITS binary table. The
astropy.io.fits recognizes that this binary table extension
contains an image and treats it as if it were an image
extension. Under this tile-compression format, FITS header
keywords remain uncompressed. At this time, Astropy does not
support the ability to extract and uncompress sections of the
image without having to uncompress the entire image.
The astropy.io.fits package supports 3 general-purpose compression
algorithms plus one other special-purpose compression technique that is
designed for data masks with positive integer pixel values. The 3
general purpose algorithms are GZIP, Rice, and HCOMPRESS, and the
special-purpose technique is the IRAF pixel list compression technique
(PLIO). The ``compression_type`` parameter defines the compression
algorithm to be used.
The FITS image can be subdivided into any desired rectangular grid of
compression tiles. With the GZIP, Rice, and PLIO algorithms, the
default is to take each row of the image as a tile. The HCOMPRESS
algorithm is inherently 2-dimensional in nature, so the default in this
case is to take 16 rows of the image per tile. In most cases, it makes
little difference what tiling pattern is used, so the default tiles are
usually adequate. In the case of very small images, it could be more
efficient to compress the whole image as a single tile. Note that the
image dimensions are not required to be an integer multiple of the tile
dimensions; if not, then the tiles at the edges of the image will be
smaller than the other tiles. The ``tile_size`` parameter may be
provided as a list of tile sizes, one for each dimension in the image.
For example a ``tile_size`` value of ``[100,100]`` would divide a 300 X
300 image into 9 100 X 100 tiles.
The 4 supported image compression algorithms are all 'lossless' when
applied to integer FITS images; the pixel values are preserved exactly
with no loss of information during the compression and uncompression
process. In addition, the HCOMPRESS algorithm supports a 'lossy'
        compression mode that will produce a larger amount of image compression.
This is achieved by specifying a non-zero value for the ``hcomp_scale``
parameter. Since the amount of compression that is achieved depends
directly on the RMS noise in the image, it is usually more convenient
to specify the ``hcomp_scale`` factor relative to the RMS noise.
Setting ``hcomp_scale = 2.5`` means use a scale factor that is 2.5
times the calculated RMS noise in the image tile. In some cases it may
be desirable to specify the exact scaling to be used, instead of
specifying it relative to the calculated noise value. This may be done
by specifying the negative of the desired scale value (typically in the
range -2 to -100).
Very high compression factors (of 100 or more) can be achieved by using
large ``hcomp_scale`` values, however, this can produce undesirable
'blocky' artifacts in the compressed image. A variation of the
HCOMPRESS algorithm (called HSCOMPRESS) can be used in this case to
apply a small amount of smoothing of the image when it is uncompressed
to help cover up these artifacts. This smoothing is purely cosmetic
and does not cause any significant change to the image pixel values.
Setting the ``hcomp_smooth`` parameter to 1 will engage the smoothing
algorithm.
Floating point FITS images (which have ``BITPIX`` = -32 or -64) usually
contain too much 'noise' in the least significant bits of the mantissa
of the pixel values to be effectively compressed with any lossless
algorithm. Consequently, floating point images are first quantized
into scaled integer pixel values (and thus throwing away much of the
noise) before being compressed with the specified algorithm (either
GZIP, RICE, or HCOMPRESS). This technique produces much higher
compression factors than simply using the GZIP utility to externally
compress the whole FITS file, but it also means that the original
        floating point pixel values are not exactly preserved. When done
properly, this integer scaling technique will only discard the
insignificant noise while still preserving all the real information in
the image. The amount of precision that is retained in the pixel
values is controlled by the ``quantize_level`` parameter. Larger
values will result in compressed images whose pixels more closely match
the floating point pixel values, but at the same time the amount of
compression that is achieved will be reduced. Users should experiment
with different values for this parameter to determine the optimal value
that preserves all the useful information in the image, without
needlessly preserving all the 'noise' which will hurt the compression
efficiency.
The default value for the ``quantize_level`` scale factor is 16, which
means that scaled integer pixel values will be quantized such that the
difference between adjacent integer values will be 1/16th of the noise
level in the image background. An optimized algorithm is used to
accurately estimate the noise in the image. As an example, if the RMS
noise in the background pixels of an image = 32.0, then the spacing
between adjacent scaled integer pixel values will equal 2.0 by default.
Note that the RMS noise is independently calculated for each tile of
the image, so the resulting integer scaling factor may fluctuate
slightly for each tile. In some cases, it may be desirable to specify
the exact quantization level to be used, instead of specifying it
relative to the calculated noise value. This may be done by specifying
        the negative of the desired quantization level for the value of
``quantize_level``. In the previous example, one could specify
``quantize_level = -2.0`` so that the quantized integer levels differ
        by 2.0. Larger negative values for ``quantize_level`` mean that the
        levels are more coarsely spaced, and will produce higher compression
factors.
The quantization algorithm can also apply one of two random dithering
methods in order to reduce bias in the measured intensity of background
        regions. The first method, specified with the constant
        ``SUBTRACTIVE_DITHER_1``, adds dithering to the zero-point of the
        quantization array itself rather than adding noise to the actual image.
        The random noise is added on a pixel-by-pixel basis, so in order to
        restore each pixel from its integer value to its floating point value
it is necessary to replay the same sequence of random numbers for each
pixel (see below). The other method, ``SUBTRACTIVE_DITHER_2``, is
exactly like the first except that before dithering any pixel with a
floating point value of ``0.0`` is replaced with the special integer
value ``-2147483647``. When the image is uncompressed, pixels with
this value are restored back to ``0.0`` exactly. Finally, a value of
``NO_DITHER`` disables dithering entirely.
As mentioned above, when using the subtractive dithering algorithm it
is necessary to be able to generate a (pseudo-)random sequence of noise
for each pixel, and replay that same sequence upon decompressing. To
facilitate this, a random seed between 1 and 10000 (inclusive) is used
to seed a random number generator, and that seed is stored in the
``ZDITHER0`` keyword in the header of the compressed HDU. In order to
use that seed to generate the same sequence of random numbers the same
random number generator must be used at compression and decompression
time; for that reason the tiled image convention provides an
implementation of a very simple pseudo-random number generator. The
seed itself can be provided in one of three ways, controllable by the
``dither_seed`` argument: It may be specified manually, or it may be
generated arbitrarily based on the system's clock
(``DITHER_SEED_CLOCK``) or based on a checksum of the pixels in the
image's first tile (``DITHER_SEED_CHECKSUM``). The clock-based method
is the default, and is sufficient to ensure that the value is
reasonably "arbitrary" and that the same seed is unlikely to be
generated sequentially. The checksum method, on the other hand,
ensures that the same seed is used every time for a specific image.
This is particularly useful for software testing as it ensures that the
same image will always use the same seed.
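
        Examples
        --------
        A minimal sketch (the array values and output file name are purely
        illustrative) of creating a Rice-compressed HDU from a floating point
        image and writing it to disk::

            >>> import numpy as np
            >>> from astropy.io import fits
            >>> data = np.arange(100, dtype=np.float32).reshape(10, 10)
            >>> hdu = fits.CompImageHDU(data, compression_type='RICE_1',
            ...                         quantize_level=16.0)  # doctest: +SKIP
            >>> hdu.writeto('compressed.fits')  # doctest: +SKIP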
"""
if not COMPRESSION_SUPPORTED:
# TODO: Raise a more specific Exception type
raise Exception('The astropy.io.fits.compression module is not '
'available. Creation of compressed image HDUs is '
'disabled.')
compression_type = CMTYPE_ALIASES.get(compression_type, compression_type)
if data is DELAYED:
# Reading the HDU from a file
super().__init__(data=data, header=header)
else:
# Create at least a skeleton HDU that matches the input
# header and data (if any were input)
super().__init__(data=None, header=header)
# Store the input image data
self.data = data
# Update the table header (_header) to the compressed
# image format and to match the input data (if any);
# Create the image header (_image_header) from the input
# image header (if any) and ensure it matches the input
# data; Create the initially empty table data array to
# hold the compressed data.
self._update_header_data(header, name,
compression_type=compression_type,
tile_size=tile_size,
hcomp_scale=hcomp_scale,
hcomp_smooth=hcomp_smooth,
quantize_level=quantize_level,
quantize_method=quantize_method,
dither_seed=dither_seed)
        # TODO: A lot of this should be passed on to an internal image HDU or
# something like that, see ticket #88
self._do_not_scale_image_data = do_not_scale_image_data
self._uint = uint
self._scale_back = scale_back
self._axes = [self._header.get('ZNAXIS' + str(axis + 1), 0)
for axis in range(self._header.get('ZNAXIS', 0))]
# store any scale factors from the table header
if do_not_scale_image_data:
self._bzero = 0
self._bscale = 1
else:
self._bzero = self._header.get('BZERO', 0)
self._bscale = self._header.get('BSCALE', 1)
self._bitpix = self._header['ZBITPIX']
self._orig_bzero = self._bzero
self._orig_bscale = self._bscale
self._orig_bitpix = self._bitpix
def _remove_unnecessary_default_extnames(self, header):
"""Remove default EXTNAME values if they are unnecessary.
Some data files (eg from CFHT) can have the default EXTNAME and
an explicit value. This method removes the default if a more
specific header exists. It also removes any duplicate default
values.
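        For example, a header carrying both the default
        ``EXTNAME = 'COMPRESSED_IMAGE'`` and a more specific value such as
        ``EXTNAME = 'SCI'`` (an illustrative name) is left with only the
        specific value.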
"""
if 'EXTNAME' in header:
indices = header._keyword_indices['EXTNAME']
# Only continue if there is more than one found
n_extname = len(indices)
if n_extname > 1:
extnames_to_remove = [index for index in indices
if header[index] == self._default_name]
if len(extnames_to_remove) == n_extname:
# Keep the first (they are all the same)
extnames_to_remove.pop(0)
# Remove them all in reverse order to keep the index unchanged.
for index in reversed(sorted(extnames_to_remove)):
del header[index]
@property
def name(self):
# Convert the value to a string to be flexible in some pathological
# cases (see ticket #96)
# Similar to base class but uses .header rather than ._header
return str(self.header.get('EXTNAME', self._default_name))
@name.setter
def name(self, value):
# This is a copy of the base class but using .header instead
# of ._header to ensure that the name stays in sync.
if not isinstance(value, str):
raise TypeError("'name' attribute must be a string")
if not conf.extension_name_case_sensitive:
value = value.upper()
if 'EXTNAME' in self.header:
self.header['EXTNAME'] = value
else:
self.header['EXTNAME'] = (value, 'extension name')
@classmethod
def match_header(cls, header):
card = header.cards[0]
if card.keyword != 'XTENSION':
return False
xtension = card.value
if isinstance(xtension, str):
xtension = xtension.rstrip()
if xtension not in ('BINTABLE', 'A3DTABLE'):
return False
if 'ZIMAGE' not in header or not header['ZIMAGE']:
return False
if COMPRESSION_SUPPORTED and COMPRESSION_ENABLED:
return True
elif not COMPRESSION_SUPPORTED:
warnings.warn('Failure matching header to a compressed image '
'HDU: The compression module is not available.\n'
'The HDU will be treated as a Binary Table HDU.',
AstropyUserWarning)
return False
else:
# Compression is supported but disabled; just pass silently (#92)
return False
def _update_header_data(self, image_header,
name=None,
compression_type=None,
tile_size=None,
hcomp_scale=None,
hcomp_smooth=None,
quantize_level=None,
quantize_method=None,
dither_seed=None):
"""
Update the table header (`_header`) to the compressed
image format and to match the input data (if any). Create
the image header (`_image_header`) from the input image
header (if any) and ensure it matches the input
data. Create the initially-empty table data array to hold
the compressed data.
This method is mainly called internally, but a user may wish to
call this method after assigning new data to the `CompImageHDU`
object that is of a different type.
Parameters
----------
image_header : `~astropy.io.fits.Header`
header to be associated with the image
name : str, optional
the ``EXTNAME`` value; if this value is `None`, then the name from
the input image header will be used; if there is no name in the
input image header then the default name 'COMPRESSED_IMAGE' is used
compression_type : str, optional
compression algorithm 'RICE_1', 'PLIO_1', 'GZIP_1', 'GZIP_2',
'HCOMPRESS_1'; if this value is `None`, use value already in the
header; if no value already in the header, use 'RICE_1'
tile_size : sequence of int, optional
compression tile sizes as a list; if this value is `None`, use
value already in the header; if no value already in the header,
treat each row of image as a tile
hcomp_scale : float, optional
HCOMPRESS scale parameter; if this value is `None`, use the value
            already in the header; if no value already in the header, use 0
hcomp_smooth : float, optional
HCOMPRESS smooth parameter; if this value is `None`, use the value
already in the header; if no value already in the header, use 0
quantize_level : float, optional
floating point quantization level; if this value is `None`, use the
value already in the header; if no value already in header, use 16
quantize_method : int, optional
floating point quantization dithering method; can be either
            NO_DITHER (-1; default), SUBTRACTIVE_DITHER_1 (1), or
SUBTRACTIVE_DITHER_2 (2)
dither_seed : int, optional
random seed to use for dithering; can be either an integer in the
            range 1 to 10000 (inclusive), DITHER_SEED_CLOCK (0; default), or
DITHER_SEED_CHECKSUM (-1)
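
        Examples
        --------
        A hedged sketch (assuming ``hdu`` is an existing `CompImageHDU` and
        ``numpy`` is imported as ``np``) of re-synchronizing the headers after
        assigning data of a different type::

            >>> hdu.data = hdu.data.astype(np.float32)  # doctest: +SKIP
            >>> hdu._update_header_data(hdu.header)     # doctest: +SKIP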
"""
# Clean up EXTNAME duplicates
self._remove_unnecessary_default_extnames(self._header)
image_hdu = ImageHDU(data=self.data, header=self._header)
self._image_header = CompImageHeader(self._header, image_hdu.header)
self._axes = image_hdu._axes
del image_hdu
# Determine based on the size of the input data whether to use the Q
# column format to store compressed data or the P format.
# The Q format is used only if the uncompressed data is larger than
        # 4 GB. This is not a perfect heuristic, as one can contrive an input
        # array for which the entire binary table representing the compressed
        # data is larger than 4 GB. That said, this is the same
# heuristic used by CFITSIO, so this should give consistent results.
# And the cases where this heuristic is insufficient are extreme and
# almost entirely contrived corner cases, so it will do for now
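        # For example, a 40000 x 40000 float32 image occupies 6.4e9 bytes
        # (> 2 ** 32), so its compressed table would use the Q column format.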
if self._has_data:
huge_hdu = self.data.nbytes > 2 ** 32
if huge_hdu and not CFITSIO_SUPPORTS_Q_FORMAT:
raise OSError(
"Astropy cannot compress images greater than 4 GB in size "
"({} is {} bytes) without CFITSIO >= 3.35".format(
(self.name, self.ver), self.data.nbytes))
else:
huge_hdu = False
# Update the extension name in the table header
if not name and 'EXTNAME' not in self._header:
# Do not sync this with the image header since the default
# name is specific to the table header.
self._header.set('EXTNAME', self._default_name,
'name of this binary table extension',
after='TFIELDS')
elif name:
# Force the name into table and image headers.
self.name = name
# Set the compression type in the table header.
if compression_type:
if compression_type not in COMPRESSION_TYPES:
warnings.warn(
'Unknown compression type provided (supported are {}). '
'Default ({}) compression will be used.'
.format(', '.join(map(repr, COMPRESSION_TYPES)),
DEFAULT_COMPRESSION_TYPE),
AstropyUserWarning)
compression_type = DEFAULT_COMPRESSION_TYPE
self._header.set('ZCMPTYPE', compression_type,
'compression algorithm', after='TFIELDS')
else:
compression_type = self._header.get('ZCMPTYPE',
DEFAULT_COMPRESSION_TYPE)
compression_type = CMTYPE_ALIASES.get(compression_type,
compression_type)
# If the input image header had BSCALE/BZERO cards, then insert
# them in the table header.
if image_header:
bzero = image_header.get('BZERO', 0.0)
bscale = image_header.get('BSCALE', 1.0)
after_keyword = 'EXTNAME'
if bscale != 1.0:
self._header.set('BSCALE', bscale, after=after_keyword)
after_keyword = 'BSCALE'
if bzero != 0.0:
self._header.set('BZERO', bzero, after=after_keyword)
bitpix_comment = image_header.comments['BITPIX']
naxis_comment = image_header.comments['NAXIS']
else:
bitpix_comment = 'data type of original image'
naxis_comment = 'dimension of original image'
# Set the label for the first column in the table
self._header.set('TTYPE1', 'COMPRESSED_DATA', 'label for field 1',
after='TFIELDS')
# Set the data format for the first column. It is dependent
# on the requested compression type.
if compression_type == 'PLIO_1':
tform1 = '1QI' if huge_hdu else '1PI'
else:
tform1 = '1QB' if huge_hdu else '1PB'
self._header.set('TFORM1', tform1,
'data format of field: variable length array',
after='TTYPE1')
# Create the first column for the table. This column holds the
# compressed data.
col1 = Column(name=self._header['TTYPE1'], format=tform1)
# Create the additional columns required for floating point
# data and calculate the width of the output table.
zbitpix = self._image_header['BITPIX']
if zbitpix < 0 and quantize_level != 0.0:
# floating point image has 'COMPRESSED_DATA',
# 'UNCOMPRESSED_DATA', 'ZSCALE', and 'ZZERO' columns (unless using
# lossless compression, per CFITSIO)
ncols = 4
# CFITSIO 3.28 and up automatically use the GZIP_COMPRESSED_DATA
            # column to store floating point data that couldn't be quantized,
            # instead of the UNCOMPRESSED_DATA column. There's no way to control
# this behavior so the only way to determine which behavior will
# be employed is via the CFITSIO version
if CFITSIO_SUPPORTS_GZIPDATA:
ttype2 = 'GZIP_COMPRESSED_DATA'
# The required format for the GZIP_COMPRESSED_DATA is actually
# missing from the standard docs, but CFITSIO suggests it
# should be 1PB, which is logical.
tform2 = '1QB' if huge_hdu else '1PB'
else:
# Q format is not supported for UNCOMPRESSED_DATA columns.
ttype2 = 'UNCOMPRESSED_DATA'
if zbitpix == 8:
tform2 = '1QB' if huge_hdu else '1PB'
elif zbitpix == 16:
tform2 = '1QI' if huge_hdu else '1PI'
elif zbitpix == 32:
tform2 = '1QJ' if huge_hdu else '1PJ'
elif zbitpix == -32:
tform2 = '1QE' if huge_hdu else '1PE'
else:
tform2 = '1QD' if huge_hdu else '1PD'
# Set up the second column for the table that will hold any
            # uncompressible data.
self._header.set('TTYPE2', ttype2, 'label for field 2',
after='TFORM1')
self._header.set('TFORM2', tform2,
'data format of field: variable length array',
after='TTYPE2')
col2 = Column(name=ttype2, format=tform2)
# Set up the third column for the table that will hold
# the scale values for quantized data.
self._header.set('TTYPE3', 'ZSCALE', 'label for field 3',
after='TFORM2')
self._header.set('TFORM3', '1D',
'data format of field: 8-byte DOUBLE',
after='TTYPE3')
col3 = Column(name=self._header['TTYPE3'],
format=self._header['TFORM3'])
# Set up the fourth column for the table that will hold
# the zero values for the quantized data.
self._header.set('TTYPE4', 'ZZERO', 'label for field 4',
after='TFORM3')
self._header.set('TFORM4', '1D',
'data format of field: 8-byte DOUBLE',
after='TTYPE4')
after = 'TFORM4'
col4 = Column(name=self._header['TTYPE4'],
format=self._header['TFORM4'])
# Create the ColDefs object for the table
cols = ColDefs([col1, col2, col3, col4])
else:
# default table has just one 'COMPRESSED_DATA' column
ncols = 1
after = 'TFORM1'
# remove any header cards for the additional columns that
# may be left over from the previous data
to_remove = ['TTYPE2', 'TFORM2', 'TTYPE3', 'TFORM3', 'TTYPE4',
'TFORM4']
for k in to_remove:
try:
del self._header[k]
except KeyError:
pass
# Create the ColDefs object for the table
cols = ColDefs([col1])
# Update the table header with the width of the table, the
# number of fields in the table, the indicator for a compressed
# image HDU, the data type of the image data and the number of
# dimensions in the image data array.
self._header.set('NAXIS1', cols.dtype.itemsize,
'width of table in bytes')
self._header.set('TFIELDS', ncols, 'number of fields in each row',
after='GCOUNT')
self._header.set('ZIMAGE', True, 'extension contains compressed image',
after=after)
self._header.set('ZBITPIX', zbitpix,
bitpix_comment, after='ZIMAGE')
self._header.set('ZNAXIS', self._image_header['NAXIS'], naxis_comment,
after='ZBITPIX')
        # Strip the table header of all the ZNAXISn and ZTILEn keywords
# that may be left over from the previous data
for idx in itertools.count(1):
try:
del self._header['ZNAXIS' + str(idx)]
del self._header['ZTILE' + str(idx)]
except KeyError:
break
# Verify that any input tile size parameter is the appropriate
# size to match the HDU's data.
naxis = self._image_header['NAXIS']
if not tile_size:
tile_size = []
elif len(tile_size) != naxis:
warnings.warn('Provided tile size not appropriate for the data. '
'Default tile size will be used.', AstropyUserWarning)
tile_size = []
# Set default tile dimensions for HCOMPRESS_1
if compression_type == 'HCOMPRESS_1':
if (self._image_header['NAXIS1'] < 4 or
self._image_header['NAXIS2'] < 4):
raise ValueError('Hcompress minimum image dimension is '
'4 pixels')
elif tile_size:
if tile_size[0] < 4 or tile_size[1] < 4:
# user specified tile size is too small
raise ValueError('Hcompress minimum tile dimension is '
'4 pixels')
major_dims = len([ts for ts in tile_size if ts > 1])
if major_dims > 2:
raise ValueError(
                        'HCOMPRESS can only support 2-dimensional tile '
                        'sizes. All but two of the tile_size dimensions must '
                        'be set to 1.')
if tile_size and (tile_size[0] == 0 and tile_size[1] == 0):
# compress the whole image as a single tile
tile_size[0] = self._image_header['NAXIS1']
tile_size[1] = self._image_header['NAXIS2']
for i in range(2, naxis):
# set all higher tile dimensions = 1
tile_size[i] = 1
elif not tile_size:
# The Hcompress algorithm is inherently 2D in nature, so the
# row by row tiling that is used for other compression
# algorithms is not appropriate. If the image has less than 30
# rows, then the entire image will be compressed as a single
# tile. Otherwise the tiles will consist of 16 rows of the
# image. This keeps the tiles to a reasonable size, and it
# also includes enough rows to allow good compression
                # efficiency. If the last tile of the image happens to contain
# less than 4 rows, then find another tile size with between 14
# and 30 rows (preferably even), so that the last tile has at
# least 4 rows.
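                # For example, a 1000 x 1000 image gets 1000 x 16 tiles (the
                # final tile holding the remaining 8 rows), while a 1000 x 20
                # image is compressed as a single tile.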
# 1st tile dimension is the row length of the image
tile_size.append(self._image_header['NAXIS1'])
                if self._image_header['NAXIS2'] <= 30:
                    # use the entire image as a single tile
                    tile_size.append(self._image_header['NAXIS2'])
else:
# look for another good tile dimension
naxis2 = self._image_header['NAXIS2']
for dim in [16, 24, 20, 30, 28, 26, 22, 18, 14]:
if naxis2 % dim == 0 or naxis2 % dim > 3:
tile_size.append(dim)
break
else:
tile_size.append(17)
for i in range(2, naxis):
# set all higher tile dimensions = 1
tile_size.append(1)
# check if requested tile size causes the last tile to have
# less than 4 pixels
remain = self._image_header['NAXIS1'] % tile_size[0] # 1st dimen
if remain > 0 and remain < 4:
tile_size[0] += 1 # try increasing tile size by 1
remain = self._image_header['NAXIS1'] % tile_size[0]
if remain > 0 and remain < 4:
raise ValueError('Last tile along 1st dimension has '
'less than 4 pixels')
remain = self._image_header['NAXIS2'] % tile_size[1] # 2nd dimen
if remain > 0 and remain < 4:
tile_size[1] += 1 # try increasing tile size by 1
remain = self._image_header['NAXIS2'] % tile_size[1]
if remain > 0 and remain < 4:
raise ValueError('Last tile along 2nd dimension has '
'less than 4 pixels')
# Set up locations for writing the next cards in the header.
last_znaxis = 'ZNAXIS'
if self._image_header['NAXIS'] > 0:
after1 = 'ZNAXIS1'
else:
after1 = 'ZNAXIS'
# Calculate the number of rows in the output table and
# write the ZNAXISn and ZTILEn cards to the table header.
nrows = 0
for idx, axis in enumerate(self._axes):
naxis = 'NAXIS' + str(idx + 1)
znaxis = 'ZNAXIS' + str(idx + 1)
ztile = 'ZTILE' + str(idx + 1)
if tile_size and len(tile_size) >= idx + 1:
ts = tile_size[idx]
else:
if ztile not in self._header:
# Default tile size
if not idx:
ts = self._image_header['NAXIS1']
else:
ts = 1
else:
ts = self._header[ztile]
tile_size.append(ts)
if not nrows:
nrows = (axis - 1) // ts + 1
else:
nrows *= ((axis - 1) // ts + 1)
if image_header and naxis in image_header:
self._header.set(znaxis, axis, image_header.comments[naxis],
after=last_znaxis)
else:
self._header.set(znaxis, axis,
'length of original image axis',
after=last_znaxis)
self._header.set(ztile, ts, 'size of tiles to be compressed',
after=after1)
last_znaxis = znaxis
after1 = ztile
# Set the NAXIS2 header card in the table hdu to the number of
# rows in the table.
self._header.set('NAXIS2', nrows, 'number of rows in table')
self.columns = cols
# Set the compression parameters in the table header.
# First, setup the values to be used for the compression parameters
# in case none were passed in. This will be either the value
# already in the table header for that parameter or the default
# value.
for idx in itertools.count(1):
zname = 'ZNAME' + str(idx)
if zname not in self._header:
break
zval = 'ZVAL' + str(idx)
if self._header[zname] == 'NOISEBIT':
if quantize_level is None:
quantize_level = self._header[zval]
if self._header[zname] == 'SCALE ':
if hcomp_scale is None:
hcomp_scale = self._header[zval]
if self._header[zname] == 'SMOOTH ':
if hcomp_smooth is None:
hcomp_smooth = self._header[zval]
if quantize_level is None:
quantize_level = DEFAULT_QUANTIZE_LEVEL
if hcomp_scale is None:
hcomp_scale = DEFAULT_HCOMP_SCALE
if hcomp_smooth is None:
            hcomp_smooth = DEFAULT_HCOMP_SMOOTH
# Next, strip the table header of all the ZNAMEn and ZVALn keywords
# that may be left over from the previous data
for idx in itertools.count(1):
zname = 'ZNAME' + str(idx)
if zname not in self._header:
break
zval = 'ZVAL' + str(idx)
del self._header[zname]
del self._header[zval]
# Finally, put the appropriate keywords back based on the
# compression type.
after_keyword = 'ZCMPTYPE'
idx = 1
if compression_type == 'RICE_1':
self._header.set('ZNAME1', 'BLOCKSIZE', 'compression block size',
after=after_keyword)
self._header.set('ZVAL1', DEFAULT_BLOCK_SIZE, 'pixels per block',
after='ZNAME1')
self._header.set('ZNAME2', 'BYTEPIX',
'bytes per pixel (1, 2, 4, or 8)', after='ZVAL1')
if self._header['ZBITPIX'] == 8:
bytepix = 1
elif self._header['ZBITPIX'] == 16:
bytepix = 2
else:
bytepix = DEFAULT_BYTE_PIX
self._header.set('ZVAL2', bytepix,
'bytes per pixel (1, 2, 4, or 8)',
after='ZNAME2')
after_keyword = 'ZVAL2'
idx = 3
elif compression_type == 'HCOMPRESS_1':
self._header.set('ZNAME1', 'SCALE', 'HCOMPRESS scale factor',
after=after_keyword)
self._header.set('ZVAL1', hcomp_scale, 'HCOMPRESS scale factor',
after='ZNAME1')
self._header.set('ZNAME2', 'SMOOTH', 'HCOMPRESS smooth option',
after='ZVAL1')
self._header.set('ZVAL2', hcomp_smooth, 'HCOMPRESS smooth option',
after='ZNAME2')
after_keyword = 'ZVAL2'
idx = 3
if self._image_header['BITPIX'] < 0: # floating point image
self._header.set('ZNAME' + str(idx), 'NOISEBIT',
'floating point quantization level',
after=after_keyword)
self._header.set('ZVAL' + str(idx), quantize_level,
'floating point quantization level',
after='ZNAME' + str(idx))
# Add the dither method and seed
if quantize_method:
if quantize_method not in [NO_DITHER, SUBTRACTIVE_DITHER_1,
SUBTRACTIVE_DITHER_2]:
name = QUANTIZE_METHOD_NAMES[DEFAULT_QUANTIZE_METHOD]
warnings.warn('Unknown quantization method provided. '
'Default method ({}) used.'.format(name))
quantize_method = DEFAULT_QUANTIZE_METHOD
if quantize_method == NO_DITHER:
zquantiz_comment = 'No dithering during quantization'
else:
zquantiz_comment = 'Pixel Quantization Algorithm'
self._header.set('ZQUANTIZ',
QUANTIZE_METHOD_NAMES[quantize_method],
zquantiz_comment,
after='ZVAL' + str(idx))
else:
# If the ZQUANTIZ keyword is missing the default is to assume
# no dithering, rather than whatever DEFAULT_QUANTIZE_METHOD
# is set to
quantize_method = self._header.get('ZQUANTIZ', NO_DITHER)
if isinstance(quantize_method, str):
for k, v in QUANTIZE_METHOD_NAMES.items():
if v.upper() == quantize_method:
quantize_method = k
break
else:
quantize_method = NO_DITHER
if quantize_method == NO_DITHER:
if 'ZDITHER0' in self._header:
# If dithering isn't being used then there's no reason to
# keep the ZDITHER0 keyword
del self._header['ZDITHER0']
else:
if dither_seed:
dither_seed = self._generate_dither_seed(dither_seed)
elif 'ZDITHER0' in self._header:
dither_seed = self._header['ZDITHER0']
else:
dither_seed = self._generate_dither_seed(
DEFAULT_DITHER_SEED)
self._header.set('ZDITHER0', dither_seed,
'dithering offset when quantizing floats',
after='ZQUANTIZ')
if image_header:
# Move SIMPLE card from the image header to the
# table header as ZSIMPLE card.
if 'SIMPLE' in image_header:
self._header.set('ZSIMPLE', image_header['SIMPLE'],
image_header.comments['SIMPLE'],
before='ZBITPIX')
# Move EXTEND card from the image header to the
# table header as ZEXTEND card.
if 'EXTEND' in image_header:
self._header.set('ZEXTEND', image_header['EXTEND'],
image_header.comments['EXTEND'])
# Move BLOCKED card from the image header to the
# table header as ZBLOCKED card.
if 'BLOCKED' in image_header:
self._header.set('ZBLOCKED', image_header['BLOCKED'],
image_header.comments['BLOCKED'])
# Move XTENSION card from the image header to the
# table header as ZTENSION card.
# Since we only handle compressed IMAGEs, ZTENSION should
# always be IMAGE, even if the caller has passed in a header
# for some other type of extension.
if 'XTENSION' in image_header:
self._header.set('ZTENSION', 'IMAGE',
image_header.comments['XTENSION'],
before='ZBITPIX')
# Move PCOUNT and GCOUNT cards from image header to the table
# header as ZPCOUNT and ZGCOUNT cards.
if 'PCOUNT' in image_header:
self._header.set('ZPCOUNT', image_header['PCOUNT'],
image_header.comments['PCOUNT'],
after=last_znaxis)
if 'GCOUNT' in image_header:
self._header.set('ZGCOUNT', image_header['GCOUNT'],
image_header.comments['GCOUNT'],
after='ZPCOUNT')
# Move CHECKSUM and DATASUM cards from the image header to the
            # table header as ZHECKSUM and ZDATASUM cards.
if 'CHECKSUM' in image_header:
self._header.set('ZHECKSUM', image_header['CHECKSUM'],
image_header.comments['CHECKSUM'])
if 'DATASUM' in image_header:
self._header.set('ZDATASUM', image_header['DATASUM'],
image_header.comments['DATASUM'])
else:
# Move XTENSION card from the image header to the
# table header as ZTENSION card.
# Since we only handle compressed IMAGEs, ZTENSION should
# always be IMAGE, even if the caller has passed in a header
# for some other type of extension.
if 'XTENSION' in self._image_header:
self._header.set('ZTENSION', 'IMAGE',
self._image_header.comments['XTENSION'],
before='ZBITPIX')
# Move PCOUNT and GCOUNT cards from image header to the table
# header as ZPCOUNT and ZGCOUNT cards.
if 'PCOUNT' in self._image_header:
self._header.set('ZPCOUNT', self._image_header['PCOUNT'],
self._image_header.comments['PCOUNT'],
after=last_znaxis)
if 'GCOUNT' in self._image_header:
self._header.set('ZGCOUNT', self._image_header['GCOUNT'],
self._image_header.comments['GCOUNT'],
after='ZPCOUNT')
# When we have an image checksum we need to ensure that the same
# number of blank cards exist in the table header as there were in
# the image header. This allows those blank cards to be carried
# over to the image header when the hdu is uncompressed.
if 'ZHECKSUM' in self._header:
required_blanks = image_header._countblanks()
image_blanks = self._image_header._countblanks()
table_blanks = self._header._countblanks()
for _ in range(required_blanks - image_blanks):
self._image_header.append()
table_blanks += 1
for _ in range(required_blanks - table_blanks):
self._header.append()
@lazyproperty
def data(self):
# The data attribute is the image data (not the table data).
data = compression.decompress_hdu(self)
if data is None:
return data
# Scale the data if necessary
if (self._orig_bzero != 0 or self._orig_bscale != 1):
new_dtype = self._dtype_for_bitpix()
data = np.array(data, dtype=new_dtype)
zblank = None
if 'ZBLANK' in self.compressed_data.columns.names:
zblank = self.compressed_data['ZBLANK']
else:
if 'ZBLANK' in self._header:
zblank = np.array(self._header['ZBLANK'], dtype='int32')
elif 'BLANK' in self._header:
zblank = np.array(self._header['BLANK'], dtype='int32')
if zblank is not None:
blanks = (data == zblank)
if self._bscale != 1:
np.multiply(data, self._bscale, data)
if self._bzero != 0:
# We have to explicitly cast self._bzero to prevent numpy from
# raising an error when doing self.data += self._bzero, and we
# do this instead of self.data = self.data + self._bzero to
# avoid doubling memory usage.
np.add(data, self._bzero, out=data, casting='unsafe')
if zblank is not None:
data = np.where(blanks, np.nan, data)
# Right out of _ImageBaseHDU.data
self._update_header_scale_info(data.dtype)
return data
@data.setter
def data(self, data):
if (data is not None) and (not isinstance(data, np.ndarray) or
data.dtype.fields is not None):
raise TypeError('CompImageHDU data has incorrect type:{}; '
'dtype.fields = {}'.format(
type(data), data.dtype.fields))
@lazyproperty
def compressed_data(self):
# First we will get the table data (the compressed
# data) from the file, if there is any.
compressed_data = super().data
if isinstance(compressed_data, np.rec.recarray):
# Make sure not to use 'del self.data' so we don't accidentally
# go through the self.data.fdel and close the mmap underlying
# the compressed_data array
del self.__dict__['data']
return compressed_data
else:
# This will actually set self.compressed_data with the
# pre-allocated space for the compression data; this is something I
# might do away with in the future
self._update_compressed_data()
return self.compressed_data
@compressed_data.deleter
def compressed_data(self):
# Deleting the compressed_data attribute has to be handled
# with a little care to prevent a reference leak
# First delete the ._coldefs attributes under it to break a possible
# reference cycle
if 'compressed_data' in self.__dict__:
del self.__dict__['compressed_data']._coldefs
# Now go ahead and delete from self.__dict__; normally
        # lazyproperty.__delete__ does this for us, but we can preempt it to
# do some additional cleanup
del self.__dict__['compressed_data']
# If this file was mmap'd, numpy.memmap will hold open a file
# handle until the underlying mmap object is garbage-collected;
# since this reference leak can sometimes hang around longer than
# welcome go ahead and force a garbage collection
gc.collect()
@property
def shape(self):
"""
Shape of the image array--should be equivalent to ``self.data.shape``.
"""
# Determine from the values read from the header
return tuple(reversed(self._axes))
@lazyproperty
def header(self):
# The header attribute is the header for the image data. It
# is not actually stored in the object dictionary. Instead,
# the _image_header is stored. If the _image_header attribute
# has already been defined we just return it. If not, we must
# create it from the table header (the _header attribute).
if hasattr(self, '_image_header'):
return self._image_header
# Clean up any possible doubled EXTNAME keywords that use
# the default. Do this on the original header to ensure
# duplicates are removed cleanly.
self._remove_unnecessary_default_extnames(self._header)
# Start with a copy of the table header.
image_header = self._header.copy()
# Delete cards that are related to the table. And move
# the values of those cards that relate to the image from
# their corresponding table cards. These include
# ZBITPIX -> BITPIX, ZNAXIS -> NAXIS, and ZNAXISn -> NAXISn.
        # Note: a set is used here instead of a list in case there are any
        # duplicate keywords, which there may be in some pathological cases;
        # see https://github.com/astropy/astropy/issues/2750
for keyword in set(image_header):
if CompImageHeader._is_reserved_keyword(keyword, warn=False):
del image_header[keyword]
if 'ZSIMPLE' in self._header:
image_header.set('SIMPLE', self._header['ZSIMPLE'],
self._header.comments['ZSIMPLE'], before=0)
elif 'ZTENSION' in self._header:
if self._header['ZTENSION'] != 'IMAGE':
warnings.warn("ZTENSION keyword in compressed "
"extension != 'IMAGE'", AstropyUserWarning)
image_header.set('XTENSION', 'IMAGE',
self._header.comments['ZTENSION'], before=0)
else:
image_header.set('XTENSION', 'IMAGE', before=0)
image_header.set('BITPIX', self._header['ZBITPIX'],
self._header.comments['ZBITPIX'], before=1)
image_header.set('NAXIS', self._header['ZNAXIS'],
self._header.comments['ZNAXIS'], before=2)
last_naxis = 'NAXIS'
for idx in range(image_header['NAXIS']):
znaxis = 'ZNAXIS' + str(idx + 1)
naxis = znaxis[1:]
image_header.set(naxis, self._header[znaxis],
self._header.comments[znaxis],
after=last_naxis)
last_naxis = naxis
# Delete any other spurious NAXISn keywords:
naxis = image_header['NAXIS']
for keyword in list(image_header['NAXIS?*']):
try:
n = int(keyword[5:])
except Exception:
continue
if n > naxis:
del image_header[keyword]
# Although PCOUNT and GCOUNT are considered mandatory for IMAGE HDUs,
# ZPCOUNT and ZGCOUNT are optional, probably because for IMAGE HDUs
# their values are always 0 and 1 respectively
if 'ZPCOUNT' in self._header:
image_header.set('PCOUNT', self._header['ZPCOUNT'],
self._header.comments['ZPCOUNT'],
after=last_naxis)
else:
image_header.set('PCOUNT', 0, after=last_naxis)
if 'ZGCOUNT' in self._header:
image_header.set('GCOUNT', self._header['ZGCOUNT'],
self._header.comments['ZGCOUNT'],
after='PCOUNT')
else:
image_header.set('GCOUNT', 1, after='PCOUNT')
if 'ZEXTEND' in self._header:
image_header.set('EXTEND', self._header['ZEXTEND'],
self._header.comments['ZEXTEND'])
if 'ZBLOCKED' in self._header:
image_header.set('BLOCKED', self._header['ZBLOCKED'],
self._header.comments['ZBLOCKED'])
# Move the ZHECKSUM and ZDATASUM cards to the image header
# as CHECKSUM and DATASUM
if 'ZHECKSUM' in self._header:
image_header.set('CHECKSUM', self._header['ZHECKSUM'],
self._header.comments['ZHECKSUM'])
if 'ZDATASUM' in self._header:
image_header.set('DATASUM', self._header['ZDATASUM'],
self._header.comments['ZDATASUM'])
# Remove the EXTNAME card if the value in the table header
# is the default value of COMPRESSED_IMAGE.
if ('EXTNAME' in image_header and
image_header['EXTNAME'] == self._default_name):
del image_header['EXTNAME']
# Look to see if there are any blank cards in the table
# header. If there are, there should be the same number
# of blank cards in the image header. Add blank cards to
# the image header to make it so.
table_blanks = self._header._countblanks()
image_blanks = image_header._countblanks()
for _ in range(table_blanks - image_blanks):
image_header.append()
# Create the CompImageHeader that syncs with the table header, and save
# it off to self._image_header so it can be referenced later
# unambiguously
self._image_header = CompImageHeader(self._header, image_header)
return self._image_header
def _summary(self):
"""
Summarize the HDU: name, dimensions, and formats.
"""
class_name = self.__class__.__name__
# if data is touched, use data info.
if self._data_loaded:
if self.data is None:
_shape, _format = (), ''
else:
# the shape will be in the order of NAXIS's which is the
# reverse of the numarray shape
_shape = list(self.data.shape)
_format = self.data.dtype.name
_shape.reverse()
_shape = tuple(_shape)
_format = _format[_format.rfind('.') + 1:]
# if data is not touched yet, use header info.
else:
_shape = ()
for idx in range(self.header['NAXIS']):
_shape += (self.header['NAXIS' + str(idx + 1)],)
_format = BITPIX2DTYPE[self.header['BITPIX']]
return (self.name, self.ver, class_name, len(self.header), _shape,
_format)
def _update_compressed_data(self):
"""
Compress the image data so that it may be written to a file.
"""
# Check to see that the image_header matches the image data
image_bitpix = DTYPE2BITPIX[self.data.dtype.name]
if image_bitpix != self._orig_bitpix or self.data.shape != self.shape:
self._update_header_data(self.header)
# TODO: This is copied right out of _ImageBaseHDU._writedata_internal;
# it would be cool if we could use an internal ImageHDU and use that to
# write to a buffer for compression or something. See ticket #88
# deal with unsigned integer 16, 32 and 64 data
old_data = self.data
if _is_pseudo_unsigned(self.data.dtype):
# Convert the unsigned array to signed
self.data = np.array(
self.data - _unsigned_zero(self.data.dtype),
dtype=f'=i{self.data.dtype.itemsize}')
should_swap = False
else:
should_swap = not self.data.dtype.isnative
if should_swap:
if self.data.flags.writeable:
self.data.byteswap(True)
else:
# For read-only arrays, there is no way around making
# a byteswapped copy of the data.
self.data = self.data.byteswap(False)
try:
nrows = self._header['NAXIS2']
tbsize = self._header['NAXIS1'] * nrows
self._header['PCOUNT'] = 0
if 'THEAP' in self._header:
del self._header['THEAP']
self._theap = tbsize
# First delete the original compressed data, if it exists
del self.compressed_data
# Make sure that the data is contiguous otherwise CFITSIO
# will not write the expected data
self.data = np.ascontiguousarray(self.data)
# Compress the data.
# The current implementation of compress_hdu assumes the empty
# compressed data table has already been initialized in
# self.compressed_data, and writes directly to it
# compress_hdu returns the size of the heap for the written
# compressed image table
heapsize, self.compressed_data = compression.compress_hdu(self)
finally:
# if data was byteswapped return it to its original order
if should_swap:
self.data.byteswap(True)
self.data = old_data
# CFITSIO will write the compressed data in big-endian order
dtype = self.columns.dtype.newbyteorder('>')
buf = self.compressed_data
compressed_data = buf[:self._theap].view(dtype=dtype,
type=np.rec.recarray)
self.compressed_data = compressed_data.view(FITS_rec)
self.compressed_data._coldefs = self.columns
self.compressed_data._heapoffset = self._theap
self.compressed_data._heapsize = heapsize
def scale(self, type=None, option='old', bscale=1, bzero=0):
"""
Scale image data by using ``BSCALE`` and ``BZERO``.
Calling this method will scale ``self.data`` and update the keywords of
``BSCALE`` and ``BZERO`` in ``self._header`` and ``self._image_header``.
This method should only be used right before writing to the output
file, as the data will be scaled and is therefore not very usable after
the call.
Parameters
----------
type : str, optional
destination data type, use a string representing a numpy dtype
            name, (e.g. ``'uint8'``, ``'int16'``, ``'float32'`` etc.). If it
            is `None`, use the current data type.
option : str, optional
how to scale the data: if ``"old"``, use the original ``BSCALE``
and ``BZERO`` values when the data was read/created. If
``"minmax"``, use the minimum and maximum of the data to scale.
The option will be overwritten by any user-specified bscale/bzero
values.
bscale, bzero : int, optional
user specified ``BSCALE`` and ``BZERO`` values.
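
        Examples
        --------
        A minimal sketch (the array values and file name are purely
        illustrative) of scaling floating point data to 16-bit integers just
        before writing::

            >>> import numpy as np
            >>> from astropy.io import fits
            >>> data = np.arange(100.0).reshape(10, 10)
            >>> hdu = fits.CompImageHDU(data)  # doctest: +SKIP
            >>> hdu.scale('int16', bscale=1, bzero=32768)  # doctest: +SKIP
            >>> hdu.writeto('scaled.fits')  # doctest: +SKIP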
"""
if self.data is None:
return
# Determine the destination (numpy) data type
if type is None:
type = BITPIX2DTYPE[self._bitpix]
_type = getattr(np, type)
# Determine how to scale the data
        # bscale and bzero take priority
if (bscale != 1 or bzero != 0):
_scale = bscale
_zero = bzero
else:
if option == 'old':
_scale = self._orig_bscale
_zero = self._orig_bzero
elif option == 'minmax':
                if issubclass(_type, np.floating):
_scale = 1
_zero = 0
else:
_min = np.minimum.reduce(self.data.flat)
_max = np.maximum.reduce(self.data.flat)
if _type == np.uint8: # uint8 case
_zero = _min
_scale = (_max - _min) / (2. ** 8 - 1)
else:
_zero = (_max + _min) / 2.
# throw away -2^N
                        nbytes = np.dtype(_type).itemsize
                        _scale = (_max - _min) / (2. ** (8 * nbytes) - 2)
# Do the scaling
if _zero != 0:
            # We have to use an unsafe cast here to prevent numpy from
# raising an error when doing self.data -= _zero, and we
# do this instead of self.data = self.data - _zero to
# avoid doubling memory usage.
np.subtract(self.data, _zero, out=self.data, casting='unsafe')
self.header['BZERO'] = _zero
else:
# Delete from both headers
for header in (self.header, self._header):
with suppress(KeyError):
del header['BZERO']
if _scale != 1:
self.data /= _scale
self.header['BSCALE'] = _scale
else:
for header in (self.header, self._header):
with suppress(KeyError):
del header['BSCALE']
if self.data.dtype.type != _type:
self.data = np.array(np.around(self.data), dtype=_type) # 0.7.7.1
# Update the BITPIX Card to match the data
self._bitpix = DTYPE2BITPIX[self.data.dtype.name]
self._bzero = self.header.get('BZERO', 0)
self._bscale = self.header.get('BSCALE', 1)
# Update BITPIX for the image header specifically
# TODO: Make this more clear by using self._image_header, but only once
# this has been fixed so that the _image_header attribute is guaranteed
# to be valid
self.header['BITPIX'] = self._bitpix
# Update the table header to match the scaled data
self._update_header_data(self.header)
# Since the image has been manually scaled, the current
# bitpix/bzero/bscale now serve as the 'original' scaling of the image,
# as though the original image has been completely replaced
self._orig_bitpix = self._bitpix
self._orig_bzero = self._bzero
self._orig_bscale = self._bscale
def _prewriteto(self, checksum=False, inplace=False):
if self._scale_back:
self.scale(BITPIX2DTYPE[self._orig_bitpix])
if self._has_data:
self._update_compressed_data()
# Use methods in the superclass to update the header with
# scale/checksum keywords based on the data type of the image data
self._update_uint_scale_keywords()
# Shove the image header and data into a new ImageHDU and use that
# to compute the image checksum
image_hdu = ImageHDU(data=self.data, header=self.header)
image_hdu._update_checksum(checksum)
if 'CHECKSUM' in image_hdu.header:
# This will also pass through to the ZHECKSUM keyword and
# ZDATASUM keyword
self._image_header.set('CHECKSUM',
image_hdu.header['CHECKSUM'],
image_hdu.header.comments['CHECKSUM'])
if 'DATASUM' in image_hdu.header:
self._image_header.set('DATASUM', image_hdu.header['DATASUM'],
image_hdu.header.comments['DATASUM'])
# Store a temporary backup of self.data in a different attribute;
# see below
self._imagedata = self.data
# Now we need to perform an ugly hack to set the compressed data as
# the .data attribute on the HDU so that the call to _writedata
# handles it properly
self.__dict__['data'] = self.compressed_data
return super()._prewriteto(checksum=checksum, inplace=inplace)
def _writeheader(self, fileobj):
"""
Bypasses `BinTableHDU._writeheader()` which updates the header with
metadata about the data that is meaningless here; another reason
why this class maybe shouldn't inherit directly from BinTableHDU...
"""
return ExtensionHDU._writeheader(self, fileobj)
def _writedata(self, fileobj):
"""
Wrap the basic ``_writedata`` method to restore the ``.data``
attribute to the uncompressed image data in the case of an exception.
"""
try:
return super()._writedata(fileobj)
finally:
# Restore the .data attribute to its rightful value (if any)
if hasattr(self, '_imagedata'):
self.__dict__['data'] = self._imagedata
del self._imagedata
else:
del self.data
def _close(self, closed=True):
super()._close(closed=closed)
# Also make sure to close access to the compressed data mmaps
if (closed and self._data_loaded and
_get_array_mmap(self.compressed_data) is not None):
del self.compressed_data
# TODO: This was copied right out of _ImageBaseHDU; get rid of it once we
# find a way to rewrite this class as either a subclass or wrapper for an
# ImageHDU
def _dtype_for_bitpix(self):
"""
Determine the dtype that the data should be converted to depending on
the BITPIX value in the header, and possibly on the BSCALE value as
well. Returns None if there should not be any change.
"""
bitpix = self._orig_bitpix
# Handle possible conversion to uints if enabled
if self._uint and self._orig_bscale == 1:
for bits, dtype in ((16, np.dtype('uint16')),
(32, np.dtype('uint32')),
(64, np.dtype('uint64'))):
if bitpix == bits and self._orig_bzero == 1 << (bits - 1):
return dtype
if bitpix > 16: # scale integers to Float64
return np.dtype('float64')
elif bitpix > 0: # scale integers to Float32
return np.dtype('float32')
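    # Worked example (following the logic above): data stored with BITPIX=16,
    # BSCALE=1 and BZERO=32768 (i.e. 1 << 15) maps to uint16, while BITPIX=32
    # with a non-trivial BSCALE is promoted to float64.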
def _update_header_scale_info(self, dtype=None):
if (not self._do_not_scale_image_data and
not (self._orig_bzero == 0 and self._orig_bscale == 1)):
for keyword in ['BSCALE', 'BZERO']:
# Make sure to delete from both the image header and the table
# header; later this will be streamlined
for header in (self.header, self._header):
with suppress(KeyError):
del header[keyword]
# Since _update_header_scale_info can, currently, be
# called *after* _prewriteto(), replace these with
# blank cards so the header size doesn't change
header.append()
if dtype is None:
dtype = self._dtype_for_bitpix()
if dtype is not None:
self.header['BITPIX'] = DTYPE2BITPIX[dtype.name]
self._bzero = 0
self._bscale = 1
self._bitpix = self.header['BITPIX']
def _generate_dither_seed(self, seed):
if not _is_int(seed):
raise TypeError("Seed must be an integer")
if not -1 <= seed <= 10000:
raise ValueError(
"Seed for random dithering must be either between 1 and "
"10000 inclusive, 0 for autogeneration from the system "
"clock, or -1 for autogeneration from a checksum of the first "
"image tile (got {})".format(seed))
if seed == DITHER_SEED_CHECKSUM:
# Determine the tile dimensions from the ZTILEn keywords
naxis = self._header['ZNAXIS']
tile_dims = [self._header[f'ZTILE{idx + 1}']
for idx in range(naxis)]
tile_dims.reverse()
# Get the first tile by using the tile dimensions as the end
# indices of slices (starting from 0)
first_tile = self.data[tuple(slice(d) for d in tile_dims)]
# The checksum algorithm used is literally just the sum of the bytes
# of the tile data (not its actual floating point values). Integer
# overflow is irrelevant.
csum = first_tile.view(dtype='uint8').sum()
# Since CFITSIO uses an unsigned long (which may be different on
# different platforms) go ahead and truncate the sum to its
# unsigned long value and take the result modulo 10000
return (ctypes.c_ulong(csum).value % 10000) + 1
elif seed == DITHER_SEED_CLOCK:
# This isn't exactly the same algorithm as CFITSIO, but that's okay
# since the result is meant to be arbitrary. The primary difference
# is that CFITSIO incorporates the HDU number into the result in
# the hopes of heading off the possibility of the same seed being
# generated for two HDUs at the same time. Here instead we just
# add in the HDU object's id
return ((sum(int(x) for x in math.modf(time.time())) + id(self)) %
10000) + 1
else:
return seed
|
aleksandr-bakanov/astropy
|
astropy/io/fits/hdu/compressed.py
|
Python
|
bsd-3-clause
| 87,677 | 0.00008 |
"""Nile River Flows."""
import pandas as pd
from statsmodels.datasets import utils as du
__docformat__ = 'restructuredtext'
COPYRIGHT = """This is public domain."""
TITLE = """Nile River flows at Ashwan 1871-1970"""
SOURCE = """
This data is first analyzed in:
Cobb, G. W. 1978. "The Problem of the Nile: Conditional Solution to a
Changepoint Problem." *Biometrika*. 65.2, 243-51.
"""
DESCRSHORT = """This dataset contains measurements on the annual flow of
the Nile as measured at Aswan for 100 years from 1871-1970."""
DESCRLONG = DESCRSHORT + " There is an apparent changepoint near 1898."
#suggested notes
NOTE = """::
Number of observations: 100
Number of variables: 2
Variable name definitions:
year - the year of the observations
        volume - the discharge at Aswan in 10^8 m^3
"""
def load():
"""
Load the Nile data and return a Dataset class instance.
Returns
-------
Dataset
See DATASET_PROPOSAL.txt for more information.
"""
return load_pandas()
def load_pandas():
data = _get_data()
# TODO: time series
endog = pd.Series(data['volume'], index=data['year'].astype(int))
dataset = du.Dataset(data=data, names=list(data.columns), endog=endog, endog_name='volume')
return dataset
def _get_data():
return du.load_csv(__file__, 'nile.csv').astype(float)
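# Usage sketch (assumes statsmodels is installed):
#     from statsmodels.datasets import nile
#     data = nile.load_pandas()
#     data.endog.head()   # annual volumes indexed by year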
|
statsmodels/statsmodels
|
statsmodels/datasets/nile/data.py
|
Python
|
bsd-3-clause
| 1,398 | 0.005722 |
# Copyright (c) 2015 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib._i18n import _
from neutron_lib import exceptions
class VlanTransparencyDriverError(exceptions.NeutronException):
"""Vlan Transparency not supported by all mechanism drivers."""
message = _("Backend does not support VLAN Transparency.")
|
openstack/neutron-lib
|
neutron_lib/exceptions/vlantransparent.py
|
Python
|
apache-2.0
| 902 | 0 |
import glob
import json
import os
import struct
import unittest
from nose.tools import assert_almost_equal, assert_equals, assert_true, assert_raises
from numpy import allclose, arange, array, array_equal
from numpy import dtype as dtypeFunc
from thunder.rdds.fileio.seriesloader import SeriesLoader
from thunder.utils.common import smallestFloatType
from test_utils import PySparkTestCase, PySparkTestCaseWithOutputDir
_have_image = False
try:
from PIL import Image
_have_image = True
except ImportError:
# PIL not available; skip tests that require it
Image = None
class SeriesBinaryTestData(object):
"""
Data object for SeriesLoader binary test.
"""
__slots__ = ('keys', 'vals', 'keyDtype', 'valDtype')
def __init__(self, keys, vals, keyDtype, valDtype):
"""
Constructor, intended to be called from fromArrays class factory method.
Expects m x n and m x p data for keys and vals.
Parameters
----------
keys: two dimensional array or sequence
vals: two dimensional array or sequence
keydtype: object castable to numpy dtype
data type of keys
valdtype: object castable to numpy dtype
data type of values
Returns
-------
self: new instance of SeriesBinaryTestData
"""
self.keys = keys
self.vals = vals
self.keyDtype = keyDtype
self.valDtype = valDtype
@property
def keyStructFormat(self):
return self.keyDtype.char * self.nkeys
@property
def valStructFormat(self):
return self.valDtype.char * self.nvals
@property
def data(self):
return zip(self.keys, self.vals)
@property
def nkeys(self):
return len(self.keys[0])
@property
def nvals(self):
return len(self.vals[0])
def writeToFile(self, f):
"""
Writes own key, value data to passed file handle in binary format
Parameters
----------
f: file handle, open for writing
f will remain open after this call
"""
for keys, vals in self.data:
f.write(struct.pack(self.keyStructFormat, *keys))
f.write(struct.pack(self.valStructFormat, *vals))
@staticmethod
def _validateLengths(dat):
l = len(dat[0])
for d in dat:
assert len(d) == l, "Data of unequal lengths, %d and %d" % (l, len(d))
@staticmethod
def _normalizeDType(dtypeInstance, data):
if dtypeInstance is None:
return data.dtype
return dtypeFunc(dtypeInstance)
@classmethod
def fromArrays(cls, keys, vals, keyDtype=None, valDtype=None):
"""
Factory method for SeriesBinaryTestData. Validates input before calling class __init__ method.
Expects m x n and m x p data for keys and vals.
Parameters
----------
keys: two dimensional array or sequence
vals: two dimensional array or sequence
keydtype: object castable to numpy dtype
data type of keys
valdtype: object castable to numpy dtype
data type of values
Returns
-------
self: new instance of SeriesBinaryTestData
"""
keyDtype = cls._normalizeDType(keyDtype, keys)
valDtype = cls._normalizeDType(valDtype, vals)
assert len(keys) == len(vals), "Unequal numbers of keys and values, %d and %d" % (len(keys), len(vals))
cls._validateLengths(keys)
cls._validateLengths(vals)
return cls(keys, vals, keyDtype, valDtype)
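    # Example sketch (hypothetical values): three records with 2-element int16
    # keys and 3-element float32 values, written to a binary file:
    #     td = SeriesBinaryTestData.fromArrays(
    #         [[0, 0], [0, 1], [1, 0]], [[1.0, 2.0, 3.0]] * 3, 'int16', 'float32')
    #     with open('series.bin', 'wb') as f:
    #         td.writeToFile(f)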
class TestSeriesLoader(PySparkTestCase):
@staticmethod
def _findTestResourcesDir(resourcesDirName="resources"):
testDirPath = os.path.dirname(os.path.realpath(__file__))
testResourcesDirPath = os.path.join(testDirPath, resourcesDirName)
if not os.path.isdir(testResourcesDirPath):
raise IOError("Test resources directory "+testResourcesDirPath+" not found")
return testResourcesDirPath
@staticmethod
def _findSourceTreeDir(dirName="utils/data"):
testDirPath = os.path.dirname(os.path.realpath(__file__))
testResourcesDirPath = os.path.join(testDirPath, "..", "thunder", dirName)
if not os.path.isdir(testResourcesDirPath):
raise IOError("Directory "+testResourcesDirPath+" not found")
return testResourcesDirPath
def test_fromArrays(self):
ary = arange(8, dtype=dtypeFunc('int16')).reshape((2, 4))
series = SeriesLoader(self.sc).fromArraysAsImages(ary)
seriesVals = series.collect()
seriesAry = series.pack()
# check ordering of keys
assert_equals((0, 0), seriesVals[0][0]) # first key
assert_equals((1, 0), seriesVals[1][0]) # second key
assert_equals((2, 0), seriesVals[2][0])
assert_equals((3, 0), seriesVals[3][0])
assert_equals((0, 1), seriesVals[4][0])
assert_equals((1, 1), seriesVals[5][0])
assert_equals((2, 1), seriesVals[6][0])
assert_equals((3, 1), seriesVals[7][0])
# check dimensions tuple is reversed from numpy shape
assert_equals(ary.shape[::-1], series.dims.count)
# check that values are in original order
collectedVals = array([kv[1] for kv in seriesVals], dtype=dtypeFunc('int16')).ravel()
assert_true(array_equal(ary.ravel(), collectedVals))
# check that packing returns transpose of original array
assert_true(array_equal(ary.T, seriesAry))
def test_fromMultipleArrays(self):
ary = arange(8, dtype=dtypeFunc('int16')).reshape((2, 4))
ary2 = arange(8, 16, dtype=dtypeFunc('int16')).reshape((2, 4))
series = SeriesLoader(self.sc).fromArraysAsImages([ary, ary2])
seriesVals = series.collect()
seriesAry = series.pack()
# check ordering of keys
assert_equals((0, 0), seriesVals[0][0]) # first key
assert_equals((1, 0), seriesVals[1][0]) # second key
assert_equals((3, 0), seriesVals[3][0])
assert_equals((0, 1), seriesVals[4][0])
assert_equals((3, 1), seriesVals[7][0])
# check dimensions tuple is reversed from numpy shape
assert_equals(ary.shape[::-1], series.dims.count)
# check that values are in original order, with subsequent point concatenated in values
collectedVals = array([kv[1] for kv in seriesVals], dtype=dtypeFunc('int16'))
assert_true(array_equal(ary.ravel(), collectedVals[:, 0]))
assert_true(array_equal(ary2.ravel(), collectedVals[:, 1]))
# check that packing returns concatenation of input arrays, with time as first dimension
assert_true(array_equal(ary.T, seriesAry[0]))
assert_true(array_equal(ary2.T, seriesAry[1]))
class TestSeriesBinaryLoader(PySparkTestCaseWithOutputDir):
def _run_tst_fromBinary(self, useConfJson=False):
# run this as a single big test so as to avoid repeated setUp and tearDown of the spark context
# data will be a sequence of test data
# all keys and all values in a test data item must be of the same length
# keys get converted to ints regardless of raw input format
DATA = [
SeriesBinaryTestData.fromArrays([[1, 2, 3]], [[11, 12, 13]], 'int16', 'int16'),
SeriesBinaryTestData.fromArrays([[1, 2, 3], [5, 6, 7]], [[11], [12]], 'int16', 'int16'),
SeriesBinaryTestData.fromArrays([[1, 2, 3]], [[11, 12, 13]], 'int16', 'int32'),
SeriesBinaryTestData.fromArrays([[1, 2, 3]], [[11, 12, 13]], 'int32', 'int16'),
SeriesBinaryTestData.fromArrays([[1, 2, 3]], [[11.0, 12.0, 13.0]], 'int16', 'float32'),
SeriesBinaryTestData.fromArrays([[1, 2, 3]], [[11.0, 12.0, 13.0]], 'float32', 'float32'),
SeriesBinaryTestData.fromArrays([[2, 3, 4]], [[11.0, 12.0, 13.0]], 'float32', 'float32'),
]
for itemidx, item in enumerate(DATA):
outSubdir = os.path.join(self.outputdir, 'input%d' % itemidx)
os.mkdir(outSubdir)
fname = os.path.join(outSubdir, 'inputfile%d.bin' % itemidx)
with open(fname, 'wb') as f:
item.writeToFile(f)
loader = SeriesLoader(self.sc)
if not useConfJson:
series = loader.fromBinary(outSubdir, nkeys=item.nkeys, nvalues=item.nvals, keyType=str(item.keyDtype),
valueType=str(item.valDtype))
else:
# write configuration file
conf = {'input': outSubdir,
'nkeys': item.nkeys, 'nvalues': item.nvals,
'valuetype': str(item.valDtype), 'keytype': str(item.keyDtype)}
with open(os.path.join(outSubdir, "conf.json"), 'wb') as f:
json.dump(conf, f, indent=2)
series = loader.fromBinary(outSubdir)
seriesData = series.rdd.collect()
expectedData = item.data
assert_equals(len(expectedData), len(seriesData),
"Differing numbers of k/v pairs in item %d; expected %d, got %d" %
(itemidx, len(expectedData), len(seriesData)))
for expected, actual in zip(expectedData, seriesData):
expectedKeys = tuple(expected[0])
expectedType = smallestFloatType(item.valDtype)
expectedVals = array(expected[1], dtype=expectedType)
assert_equals(expectedKeys, actual[0],
"Key mismatch in item %d; expected %s, got %s" %
(itemidx, str(expectedKeys), str(actual[0])))
assert_true(allclose(expectedVals, actual[1]),
"Value mismatch in item %d; expected %s, got %s" %
(itemidx, str(expectedVals), str(actual[1])))
assert_equals(expectedType, str(actual[1].dtype),
"Value type mismatch in item %d; expected %s, got %s" %
(itemidx, expectedType, str(actual[1].dtype)))
def test_fromBinary(self):
self._run_tst_fromBinary()
def test_fromBinaryWithConfFile(self):
self._run_tst_fromBinary(True)
|
pearsonlab/thunder
|
test/test_seriesloader.py
|
Python
|
apache-2.0
| 10,333 | 0.002419 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Experiments(Model):
"""Routing rules in production experiments.
:param ramp_up_rules: List of ramp-up rules.
:type ramp_up_rules: list of :class:`RampUpRule
<azure.mgmt.web.models.RampUpRule>`
"""
_attribute_map = {
'ramp_up_rules': {'key': 'rampUpRules', 'type': '[RampUpRule]'},
}
def __init__(self, ramp_up_rules=None):
self.ramp_up_rules = ramp_up_rules
|
SUSE/azure-sdk-for-python
|
azure-mgmt-web/azure/mgmt/web/models/experiments.py
|
Python
|
mit
| 931 | 0 |
# The MIT License (MIT)
# Copyright (c) 2015 kupiakos
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import logging
import re
import html
from urllib.parse import urlsplit
import traceback
import requests
import mimeparse
import praw
class GifscomPlugin:
"""
Mirrors gifs.com images.
Created by /u/EliteMasterEric
"""
def __init__(self, useragent: str, **options):
"""Initialize the puush importer.
:param useragent: The useragent to use for querying gifs.com.
:param options: Other options in the configuration. Ignored.
"""
self.log = logging.getLogger('lapis.gifscom')
self.headers = {'User-Agent': useragent}
self.regex = re.compile(r'gifs\.com$')
def import_submission(self, submission: praw.objects.Submission) -> dict:
"""Import a submission from gifs.com.
Because this downloads the page and tries to scrape the HTML,
we are at significant risk of the image ID on the DOM changing.
Therefore, this plugin is liable to break.
This function will define the following values in its return data:
- author: simply "an anonymous user on gifs.com"
- source: The url of the submission
- importer_display/header
- import_urls
:param submission: A reddit submission to parse.
"""
try:
url = html.unescape(submission.url)
if not self.regex.match(urlsplit(url).netloc):
return None
data = {'author': 'a gifscom user',
'source': url,
'importer_display':
{'header': 'Mirrored gifscom image:\n\n'}}
r = requests.head(url, headers=self.headers)
mime_text = r.headers.get('Content-Type')
mime = mimeparse.parse_mime_type(mime_text)
if mime[0] == 'image':
image_url = url
else:
self.log.warning('gifs.com URL posted that is not an image: %s', submission.url)
return None
data['import_urls'] = [image_url]
return data
except Exception:
self.log.error('Could not import gifs.com URL %s (%s)',
submission.url, traceback.format_exc())
return None
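    # Shape of the data returned on success (URL is illustrative):
    #     {'author': 'a gifscom user',
    #      'source': 'https://gifs.com/gif/example',
    #      'importer_display': {'header': 'Mirrored gifscom image:\n\n'},
    #      'import_urls': ['https://gifs.com/gif/example']}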
__plugin__ = GifscomPlugin
# END OF LINE.
|
Shugabuga/LapisMirror
|
plugins/gifscom.py
|
Python
|
mit
| 3,379 | 0.000296 |
p<caret>rint 1
print 3
|
idea4bsd/idea4bsd
|
python/testData/copyPaste/LineToBegin.dst.py
|
Python
|
apache-2.0
| 23 | 0.086957 |
# do not import all apis into this module because that uses a lot of memory and stack frames
# if you need the ability to import all apis from one package, import them with
# from openapi_client.apis import BalancesApi
|
coinapi/coinapi-sdk
|
oeml-sdk/python/openapi_client/api/__init__.py
|
Python
|
mit
| 219 | 0.004566 |
#
# Copyright (c) 2015 Juniper Networks, Inc.
#
import json
import logging
import requests
VROUTER_AGENT_PORT = 9091
class ContrailVRouterApi(object):
def __init__(self):
pass
def add_port(self, instanceId, nicId, sysIfName, macAddress, **kwargs):
data = {
"id": nicId,
"instance-id": instanceId,
"system-name": sysIfName,
"mac-address": macAddress,
"vn-id": "00000000-0000-0000-0000-000000000001",
"vm-project-id": "00000000-0000-0000-0000-000000000001",
"ip-address": "0.0.0.0",
"ip6-address": "0::0",
"rx-vlan-id": 0,
"tx-vlan-id": 0,
"type": 0
}
if 'display_name' in kwargs:
data['display-name'] = kwargs['display_name']
if 'port_type' in kwargs:
if kwargs['port_type'] == "NovaVMPort":
data['type'] = 0
if kwargs['port_type'] == "NameSpacePort":
data['type'] = 1
json_data = json.dumps(data)
url = "http://localhost:%d/port" % (VROUTER_AGENT_PORT)
headers = {'content-type': 'application/json'}
r = requests.post(url, data=json_data, headers=headers)
if r.status_code != requests.codes.ok:
logging.error("%s: %s", url, r.text)
def delete_port(self, nicId):
url = "http://localhost:%d/port/%s" % (VROUTER_AGENT_PORT, nicId)
headers = {'content-type': 'application/json'}
r = requests.delete(url, data=None, headers=headers)
if r.status_code != requests.codes.ok:
logging.error("%s: %s", url, r.headers['status'])
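# Usage sketch (UUIDs and interface names are illustrative placeholders):
#     api = ContrailVRouterApi()
#     api.add_port('11111111-2222-3333-4444-555555555555',   # instance UUID
#                  '66666666-7777-8888-9999-000000000000',   # nic/port UUID
#                  'veth0', '02:42:ac:11:00:02',
#                  port_type='NameSpacePort', display_name='pod-nic')
#     api.delete_port('66666666-7777-8888-9999-000000000000')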
|
pupapaik/contrail-kubernetes
|
scripts/opencontrail-kubelet/opencontrail_kubelet/vrouter_api.py
|
Python
|
apache-2.0
| 1,675 | 0 |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from airflow import models
from airflow.api.common.experimental.mark_tasks import set_state, _create_dagruns
from airflow.settings import Session
from airflow.utils.dates import days_ago
from airflow.utils.state import State
DEV_NULL = "/dev/null"
class TestMarkTasks(unittest.TestCase):
def setUp(self):
self.dagbag = models.DagBag(include_examples=True)
self.dag1 = self.dagbag.dags['test_example_bash_operator']
self.dag2 = self.dagbag.dags['example_subdag_operator']
self.execution_dates = [days_ago(2), days_ago(1)]
drs = _create_dagruns(self.dag1, self.execution_dates,
state=State.RUNNING,
run_id_template="scheduled__{}")
for dr in drs:
dr.dag = self.dag1
dr.verify_integrity()
drs = _create_dagruns(self.dag2,
[self.dag2.default_args['start_date']],
state=State.RUNNING,
run_id_template="scheduled__{}")
for dr in drs:
dr.dag = self.dag2
dr.verify_integrity()
self.session = Session()
def snapshot_state(self, dag, execution_dates):
TI = models.TaskInstance
tis = self.session.query(TI).filter(
TI.dag_id==dag.dag_id,
TI.execution_date.in_(execution_dates)
).all()
self.session.expunge_all()
return tis
def verify_state(self, dag, task_ids, execution_dates, state, old_tis):
TI = models.TaskInstance
tis = self.session.query(TI).filter(
TI.dag_id==dag.dag_id,
TI.execution_date.in_(execution_dates)
).all()
self.assertTrue(len(tis) > 0)
for ti in tis:
if ti.task_id in task_ids and ti.execution_date in execution_dates:
self.assertEqual(ti.state, state)
else:
for old_ti in old_tis:
if (old_ti.task_id == ti.task_id
and old_ti.execution_date == ti.execution_date):
self.assertEqual(ti.state, old_ti.state)
def test_mark_tasks_now(self):
# set one task to success but do not commit
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=False,
past=False, state=State.SUCCESS, commit=False)
self.assertEqual(len(altered), 1)
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
None, snapshot)
# set one and only one task to success
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 1)
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
State.SUCCESS, snapshot)
# set no tasks
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 0)
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
State.SUCCESS, snapshot)
# set task to other than success
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=False,
past=False, state=State.FAILED, commit=True)
self.assertEqual(len(altered), 1)
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
State.FAILED, snapshot)
        # don't alter other tasks
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_0")
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 1)
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
State.SUCCESS, snapshot)
def test_mark_downstream(self):
# test downstream
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
relatives = task.get_flat_relatives(upstream=False)
task_ids = [t.task_id for t in relatives]
task_ids.append(task.task_id)
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=True, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 3)
self.verify_state(self.dag1, task_ids, [self.execution_dates[0]],
State.SUCCESS, snapshot)
def test_mark_upstream(self):
# test upstream
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("run_after_loop")
relatives = task.get_flat_relatives(upstream=True)
task_ids = [t.task_id for t in relatives]
task_ids.append(task.task_id)
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=True, downstream=False, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 4)
self.verify_state(self.dag1, task_ids, [self.execution_dates[0]],
State.SUCCESS, snapshot)
def test_mark_tasks_future(self):
# set one task to success towards end of scheduled dag runs
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=True,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 2)
self.verify_state(self.dag1, [task.task_id], self.execution_dates,
State.SUCCESS, snapshot)
def test_mark_tasks_past(self):
# set one task to success towards end of scheduled dag runs
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
altered = set_state(task=task, execution_date=self.execution_dates[1],
upstream=False, downstream=False, future=False,
past=True, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 2)
self.verify_state(self.dag1, [task.task_id], self.execution_dates,
State.SUCCESS, snapshot)
def test_mark_tasks_subdag(self):
# set one task to success towards end of scheduled dag runs
task = self.dag2.get_task("section-1")
relatives = task.get_flat_relatives(upstream=False)
task_ids = [t.task_id for t in relatives]
task_ids.append(task.task_id)
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=True, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 14)
        # cannot use snapshot here as that would require drilling down the
        # sub dag tree, essentially recreating the same code as in the
        # tested logic.
self.verify_state(self.dag2, task_ids, [self.execution_dates[0]],
State.SUCCESS, [])
def tearDown(self):
self.dag1.clear()
self.dag2.clear()
# just to make sure we are fully cleaned up
self.session.query(models.DagRun).delete()
self.session.query(models.TaskInstance).delete()
self.session.commit()
self.session.close()
if __name__ == '__main__':
unittest.main()
|
zodiac/incubator-airflow
|
tests/api/common/mark_tasks.py
|
Python
|
apache-2.0
| 9,129 | 0.000548 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('bookmarks', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='bookmark',
name='valid_url',
field=models.BooleanField(default=False),
),
]
|
RossBrunton/BMAT
|
bookmarks/migrations/0002_bookmark_valid_url.py
|
Python
|
mit
| 399 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-06-20 12:32
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('stats', '0008_scenario_mds_allow_partial'),
]
operations = [
migrations.AlterField(
model_name='scenario',
name='owner',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='scenarios', to=settings.AUTH_USER_MODEL),
),
]
|
UUDigitalHumanitieslab/timealign
|
stats/migrations/0009_auto_20180620_1232.py
|
Python
|
mit
| 583 | 0.001715 |
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import math
import os
import platform
import re
import socket
import sys
import time
from contextlib import contextmanager
from functools import wraps
from glob import glob
import click
import requests
from platformio import __apiurl__, __version__, exception
from platformio.commands import PlatformioCLI
from platformio.compat import PY2, WINDOWS
from platformio.fs import cd # pylint: disable=unused-import
from platformio.fs import load_json # pylint: disable=unused-import
from platformio.fs import rmtree as rmtree_ # pylint: disable=unused-import
from platformio.proc import exec_command # pylint: disable=unused-import
from platformio.proc import is_ci # pylint: disable=unused-import
# KEEP unused imports for backward compatibility with PIO Core 3.0 API
class memoized(object):
def __init__(self, expire=0):
expire = str(expire)
if expire.isdigit():
expire = "%ss" % int((int(expire) / 1000))
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
assert expire.endswith(tuple(tdmap))
self.expire = int(tdmap[expire[-1]] * int(expire[:-1]))
self.cache = {}
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in self.cache or (
self.expire > 0 and self.cache[key][0] < time.time() - self.expire
):
self.cache[key] = (time.time(), func(*args, **kwargs))
return self.cache[key][1]
wrapper.reset = self._reset
return wrapper
def _reset(self):
self.cache.clear()
class throttle(object):
def __init__(self, threshhold):
self.threshhold = threshhold # milliseconds
self.last = 0
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kwargs):
diff = int(round((time.time() - self.last) * 1000))
if diff < self.threshhold:
time.sleep((self.threshhold - diff) * 0.001)
self.last = time.time()
return func(*args, **kwargs)
return wrapper
def singleton(cls):
""" From PEP-318 http://www.python.org/dev/peps/pep-0318/#examples """
_instances = {}
def get_instance(*args, **kwargs):
if cls not in _instances:
_instances[cls] = cls(*args, **kwargs)
return _instances[cls]
return get_instance
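# Usage sketch for the decorators above (functions are hypothetical):
#     @memoized(expire="1m")
#     def fetch_manifest(url):
#         return requests.get(url).json()   # result cached for one minute per URL
#
#     @throttle(500)
#     def poll_device():
#         ...                               # at most one call every 500 ms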
@contextmanager
def capture_std_streams(stdout, stderr=None):
_stdout = sys.stdout
_stderr = sys.stderr
sys.stdout = stdout
sys.stderr = stderr or stdout
yield
sys.stdout = _stdout
sys.stderr = _stderr
def get_systype():
type_ = platform.system().lower()
arch = platform.machine().lower()
if type_ == "windows":
arch = "amd64" if platform.architecture()[0] == "64bit" else "x86"
return "%s_%s" % (type_, arch) if arch else type_
def pioversion_to_intstr():
vermatch = re.match(r"^([\d\.]+)", __version__)
assert vermatch
return [int(i) for i in vermatch.group(1).split(".")[:3]]
def change_filemtime(path, mtime):
os.utime(path, (mtime, mtime))
def get_serial_ports(filter_hwid=False):
try:
# pylint: disable=import-outside-toplevel
from serial.tools.list_ports import comports
except ImportError:
raise exception.GetSerialPortsError(os.name)
result = []
for p, d, h in comports():
if not p:
continue
if WINDOWS and PY2:
try:
# pylint: disable=undefined-variable
d = unicode(d, errors="ignore")
except TypeError:
pass
if not filter_hwid or "VID:PID" in h:
result.append({"port": p, "description": d, "hwid": h})
if filter_hwid:
return result
# fix for PySerial
if not result and "darwin" in get_systype():
for p in glob("/dev/tty.*"):
result.append({"port": p, "description": "n/a", "hwid": "n/a"})
return result
# Backward compatibility for PIO Core <3.5
get_serialports = get_serial_ports
def get_logical_devices():
items = []
if WINDOWS:
try:
result = exec_command(
["wmic", "logicaldisk", "get", "name,VolumeName"]
).get("out", "")
devicenamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?")
for line in result.split("\n"):
match = devicenamere.match(line.strip())
if not match:
continue
items.append({"path": match.group(1) + "\\", "name": match.group(2)})
return items
except WindowsError: # pylint: disable=undefined-variable
pass
# try "fsutil"
result = exec_command(["fsutil", "fsinfo", "drives"]).get("out", "")
for device in re.findall(r"[A-Z]:\\", result):
items.append({"path": device, "name": None})
return items
result = exec_command(["df"]).get("out")
devicenamere = re.compile(r"^/.+\d+\%\s+([a-z\d\-_/]+)$", flags=re.I)
for line in result.split("\n"):
match = devicenamere.match(line.strip())
if not match:
continue
items.append({"path": match.group(1), "name": os.path.basename(match.group(1))})
return items
def get_mdns_services():
# pylint: disable=import-outside-toplevel
try:
import zeroconf
except ImportError:
from site import addsitedir
from platformio.managers.core import get_core_package_dir
contrib_pysite_dir = get_core_package_dir("contrib-pysite")
addsitedir(contrib_pysite_dir)
sys.path.insert(0, contrib_pysite_dir)
import zeroconf # pylint: disable=import-outside-toplevel
class mDNSListener(object):
def __init__(self):
self._zc = zeroconf.Zeroconf(interfaces=zeroconf.InterfaceChoice.All)
self._found_types = []
self._found_services = []
def __enter__(self):
zeroconf.ServiceBrowser(self._zc, "_services._dns-sd._udp.local.", self)
return self
def __exit__(self, etype, value, traceback):
self._zc.close()
def remove_service(self, zc, type_, name):
pass
def add_service(self, zc, type_, name):
try:
assert zeroconf.service_type_name(name)
assert str(name)
except (AssertionError, UnicodeError, zeroconf.BadTypeInNameException):
return
if name not in self._found_types:
self._found_types.append(name)
zeroconf.ServiceBrowser(self._zc, name, self)
if type_ in self._found_types:
s = zc.get_service_info(type_, name)
if s:
self._found_services.append(s)
def get_services(self):
return self._found_services
items = []
with mDNSListener() as mdns:
time.sleep(3)
for service in mdns.get_services():
properties = None
if service.properties:
try:
properties = {
k.decode("utf8"): v.decode("utf8")
if isinstance(v, bytes)
else v
for k, v in service.properties.items()
}
json.dumps(properties)
except UnicodeDecodeError:
properties = None
items.append(
{
"type": service.type,
"name": service.name,
"ip": ".".join(
[
str(c if isinstance(c, int) else ord(c))
for c in service.address
]
),
"port": service.port,
"properties": properties,
}
)
return items
@memoized(expire="60s")
def _api_request_session():
return requests.Session()
@throttle(500)
def _get_api_result(
url, params=None, data=None, auth=None # pylint: disable=too-many-branches
):
# pylint: disable=import-outside-toplevel
from platformio.app import get_user_agent, get_setting
result = {}
r = None
verify_ssl = sys.version_info >= (2, 7, 9)
if not url.startswith("http"):
url = __apiurl__ + url
if not get_setting("strict_ssl"):
url = url.replace("https://", "http://")
headers = {"User-Agent": get_user_agent()}
try:
if data:
r = _api_request_session().post(
url,
params=params,
data=data,
headers=headers,
auth=auth,
verify=verify_ssl,
)
else:
r = _api_request_session().get(
url, params=params, headers=headers, auth=auth, verify=verify_ssl
)
result = r.json()
r.raise_for_status()
return r.text
except requests.exceptions.HTTPError as e:
if result and "message" in result:
raise exception.APIRequestError(result["message"])
if result and "errors" in result:
raise exception.APIRequestError(result["errors"][0]["title"])
raise exception.APIRequestError(e)
except ValueError:
raise exception.APIRequestError("Invalid response: %s" % r.text.encode("utf-8"))
finally:
if r:
r.close()
return None
def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
from platformio.app import ContentCache # pylint: disable=import-outside-toplevel
total = 0
max_retries = 5
cache_key = (
ContentCache.key_from_args(url, params, data, auth) if cache_valid else None
)
while total < max_retries:
try:
with ContentCache() as cc:
if cache_key:
result = cc.get(cache_key)
if result is not None:
return json.loads(result)
            # check connectivity first so we fail fast instead of waiting out the 60-second request timeout
internet_on(raise_exception=True)
result = _get_api_result(url, params, data)
if cache_valid:
with ContentCache() as cc:
cc.set(cache_key, result, cache_valid)
return json.loads(result)
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
total += 1
if not PlatformioCLI.in_silence():
click.secho(
"[API] ConnectionError: {0} (incremented retry: max={1}, "
"total={2})".format(e, max_retries, total),
fg="yellow",
)
time.sleep(2 * total)
raise exception.APIRequestError(
"Could not connect to PlatformIO API Service. Please try later."
)
PING_REMOTE_HOSTS = [
"140.82.118.3", # Github.com
"35.231.145.151", # Gitlab.com
"88.198.170.159", # platformio.org
"github.com",
"platformio.org",
]
@memoized(expire="5s")
def _internet_on():
timeout = 2
socket.setdefaulttimeout(timeout)
for host in PING_REMOTE_HOSTS:
try:
if os.getenv("HTTP_PROXY", os.getenv("HTTPS_PROXY")):
requests.get("http://%s" % host, allow_redirects=False, timeout=timeout)
else:
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, 80))
return True
except: # pylint: disable=bare-except
pass
return False
def internet_on(raise_exception=False):
result = _internet_on()
if raise_exception and not result:
raise exception.InternetIsOffline()
return result
def pepver_to_semver(pepver):
return re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, 1)
def items_to_list(items):
if isinstance(items, list):
return items
return [i.strip() for i in items.split(",") if i.strip()]
def items_in_list(needle, haystack):
needle = items_to_list(needle)
haystack = items_to_list(haystack)
if "*" in needle or "*" in haystack:
return True
return set(needle) & set(haystack)
def parse_date(datestr):
if "T" in datestr and "Z" in datestr:
return time.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")
return time.strptime(datestr)
def merge_dicts(d1, d2, path=None):
if path is None:
path = []
for key in d2:
if key in d1 and isinstance(d1[key], dict) and isinstance(d2[key], dict):
merge_dicts(d1[key], d2[key], path + [str(key)])
else:
d1[key] = d2[key]
return d1
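# Worked example (derived from the logic above): d1 is updated in place and returned.
#     merge_dicts({'a': {'x': 1}}, {'a': {'y': 2}, 'b': 3})
#     -> {'a': {'x': 1, 'y': 2}, 'b': 3}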
def print_labeled_bar(label, is_error=False, fg=None):
terminal_width, _ = click.get_terminal_size()
width = len(click.unstyle(label))
half_line = "=" * int((terminal_width - width - 2) / 2)
click.secho("%s %s %s" % (half_line, label, half_line), fg=fg, err=is_error)
def humanize_duration_time(duration):
if duration is None:
return duration
duration = duration * 1000
tokens = []
for multiplier in (3600000, 60000, 1000, 1):
fraction = math.floor(duration / multiplier)
tokens.append(int(round(duration) if multiplier == 1 else fraction))
duration -= fraction * multiplier
return "{:02d}:{:02d}:{:02d}.{:03d}".format(*tokens)
def get_original_version(version):
if version.count(".") != 2:
return None
_, raw = version.split(".")[:2]
if int(raw) <= 99:
return None
if int(raw) <= 9999:
return "%s.%s" % (raw[:-2], int(raw[-2:]))
return "%s.%s.%s" % (raw[:-4], int(raw[-4:-2]), int(raw[-2:]))
|
platformio/platformio
|
platformio/util.py
|
Python
|
apache-2.0
| 14,440 | 0.001108 |
#!/usr/bin/env python
#
# Appcelerator Titanium Module Packager
#
#
import os, subprocess, sys, glob, string
import zipfile
from datetime import date
cwd = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
os.chdir(cwd)
required_module_keys = ['name','version','moduleid','description','copyright','license','copyright','platform','minsdk']
module_defaults = {
'description':'My module',
'author': 'Your Name',
'license' : 'Specify your license',
'copyright' : 'Copyright (c) %s by Your Company' % str(date.today().year),
}
module_license_default = "TODO: place your license here and we'll include it in the module distribution"
def find_sdk(config):
sdk = config['TITANIUM_SDK']
return os.path.expandvars(os.path.expanduser(sdk))
def replace_vars(config,token):
idx = token.find('$(')
while idx != -1:
idx2 = token.find(')',idx+2)
if idx2 == -1: break
key = token[idx+2:idx2]
if not config.has_key(key): break
token = token.replace('$(%s)' % key, config[key])
idx = token.find('$(')
return token
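# Example sketch (hypothetical config): with config = {'TITANIUM_SDK': '/opt/titanium'},
# replace_vars(config, '$(TITANIUM_SDK)/iphone') returns '/opt/titanium/iphone'.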
def read_ti_xcconfig():
contents = open(os.path.join(cwd,'titanium.xcconfig')).read()
config = {}
for line in contents.splitlines(False):
line = line.strip()
if line[0:2]=='//': continue
idx = line.find('=')
if idx > 0:
key = line[0:idx].strip()
value = line[idx+1:].strip()
config[key] = replace_vars(config,value)
return config
def generate_doc(config):
docdir = os.path.join(cwd,'documentation')
if not os.path.exists(docdir):
print "Couldn't find documentation file at: %s" % docdir
return None
try:
import markdown2 as markdown
except ImportError:
import markdown
documentation = []
for file in os.listdir(docdir):
if file in ignoreFiles or os.path.isdir(os.path.join(docdir, file)):
continue
md = open(os.path.join(docdir,file)).read()
html = markdown.markdown(md)
documentation.append({file:html});
return documentation
def compile_js(manifest,config):
js_file = os.path.join(cwd,'assets','dk.napp.social.js')
if not os.path.exists(js_file): return
from compiler import Compiler
try:
import json
except:
import simplejson as json
compiler = Compiler(cwd, manifest['moduleid'], manifest['name'], 'commonjs')
root_asset, module_assets = compiler.compile_module()
root_asset_content = """
%s
return filterDataInRange([NSData dataWithBytesNoCopy:data length:sizeof(data) freeWhenDone:NO], ranges[0]);
""" % root_asset
module_asset_content = """
%s
NSNumber *index = [map objectForKey:path];
if (index == nil) {
return nil;
}
return filterDataInRange([NSData dataWithBytesNoCopy:data length:sizeof(data) freeWhenDone:NO], ranges[index.integerValue]);
""" % module_assets
from tools import splice_code
assets_router = os.path.join(cwd,'Classes','DkNappDrawerModuleAssets.m')
splice_code(assets_router, 'asset', root_asset_content)
splice_code(assets_router, 'resolve_asset', module_asset_content)
# Generate the exports after crawling all of the available JS source
exports = open('metadata.json','w')
json.dump({'exports':compiler.exports }, exports)
exports.close()
def die(msg):
print msg
sys.exit(1)
def warn(msg):
print "[WARN] %s" % msg
def validate_license():
c = open(os.path.join(cwd,'LICENSE')).read()
if c.find(module_license_default)!=-1:
warn('please update the LICENSE file with your license text before distributing')
def validate_manifest():
path = os.path.join(cwd,'manifest')
f = open(path)
if not os.path.exists(path): die("missing %s" % path)
manifest = {}
for line in f.readlines():
line = line.strip()
if line[0:1]=='#': continue
if line.find(':') < 0: continue
key,value = line.split(':')
manifest[key.strip()]=value.strip()
for key in required_module_keys:
if not manifest.has_key(key): die("missing required manifest key '%s'" % key)
if module_defaults.has_key(key):
defvalue = module_defaults[key]
curvalue = manifest[key]
if curvalue==defvalue: warn("please update the manifest key: '%s' to a non-default value" % key)
return manifest,path
ignoreFiles = ['.DS_Store','.gitignore','libTitanium.a','titanium.jar','README']
ignoreDirs = ['.DS_Store','.svn','.git','CVSROOT']
def zip_dir(zf,dir,basepath,ignore=[]):
for root, dirs, files in os.walk(dir):
for name in ignoreDirs:
if name in dirs:
dirs.remove(name) # don't visit ignored directories
for file in files:
if file in ignoreFiles: continue
e = os.path.splitext(file)
if len(e) == 2 and e[1] == '.pyc': continue
if len(e) == 2 and e[1] == '.js': continue
from_ = os.path.join(root, file)
to_ = from_.replace(dir, basepath, 1)
zf.write(from_, to_)
def glob_libfiles():
files = []
for libfile in glob.glob('build/**/*.a'):
if libfile.find('Release-')!=-1:
files.append(libfile)
return files
def build_module(manifest,config):
from tools import ensure_dev_path
ensure_dev_path()
rc = os.system("xcodebuild -sdk iphoneos -configuration Release")
if rc != 0:
die("xcodebuild failed")
rc = os.system("xcodebuild -sdk iphonesimulator -configuration Release")
if rc != 0:
die("xcodebuild failed")
# build the merged library using lipo
moduleid = manifest['moduleid']
libpaths = ''
for libfile in glob_libfiles():
libpaths+='%s ' % libfile
os.system("lipo %s -create -output build/lib%s.a" %(libpaths,moduleid))
def package_module(manifest,mf,config):
name = manifest['name'].lower()
moduleid = manifest['moduleid'].lower()
version = manifest['version']
modulezip = '%s-iphone-%s.zip' % (moduleid,version)
if os.path.exists(modulezip): os.remove(modulezip)
zf = zipfile.ZipFile(modulezip, 'w', zipfile.ZIP_DEFLATED)
modulepath = 'modules/iphone/%s/%s' % (moduleid,version)
zf.write(mf,'%s/manifest' % modulepath)
libname = 'lib%s.a' % moduleid
zf.write('build/%s' % libname, '%s/%s' % (modulepath,libname))
docs = generate_doc(config)
if docs!=None:
for doc in docs:
for file, html in doc.iteritems():
filename = string.replace(file,'.md','.html')
zf.writestr('%s/documentation/%s'%(modulepath,filename),html)
for dn in ('assets','example','platform'):
if os.path.exists(dn):
zip_dir(zf,dn,'%s/%s' % (modulepath,dn),['README'])
zf.write('LICENSE','%s/LICENSE' % modulepath)
zf.write('module.xcconfig','%s/module.xcconfig' % modulepath)
exports_file = 'metadata.json'
if os.path.exists(exports_file):
zf.write(exports_file, '%s/%s' % (modulepath, exports_file))
zf.close()
if __name__ == '__main__':
manifest,mf = validate_manifest()
validate_license()
config = read_ti_xcconfig()
sdk = find_sdk(config)
sys.path.insert(0,os.path.join(sdk,'iphone'))
sys.path.append(os.path.join(sdk, "common"))
compile_js(manifest,config)
build_module(manifest,config)
package_module(manifest,mf,config)
sys.exit(0)
|
rborn/TiSocial.Framework
|
build.py
|
Python
|
mit
| 6,767 | 0.041377 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "orchids.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
igrowing/Orchids
|
manage.py
|
Python
|
mit
| 805 | 0 |
# Notes on classes
class Sample():
def __init__(self, name, number):
self.name = name
self.number = number
def print_values(self):
print(f"name: {self.name}")
print(f"number: {self.number}")
class SampleWithProperties():
def __init__(self, name, number):
self.name = name
self.number = number
@property
def name(self):
        # the double-underscore prefix signals that this attribute should not be accessed directly
return self.__name
@property
def double_name(self):
# Can return calculated or other values besides fields
return 2 * self.__name
@property
def number(self):
return self.__number
@name.setter
def name(self, value):
# Often has some sort of validation or transformation code
self.__name = value
@number.setter
def number(self, value):
# Often has some sort of validation or transformation code
self.__number = value % 2
class SuperClass():
def __init__(self, name):
self.name = name
def speak(self):
print(f"Hey, ho {self.name}")
class SubClass(SuperClass):
def __init__(self, name, location):
super().__init__(name)
self.location = location
def shout_out(self):
print(f"{self.location} is where it's at")
def speak(self):
        # Overriding replaces the parent method. To also run the parent's
        # version, call super().speak() inside this method; if it is never
        # called, only the code below runs.
print(f"{self.location}, let's go! ")
if __name__ == "__main__":
'''
# Demo Sample()
instance = Sample("fred", 3)
instance.print_values()
print(f"Access name field directly: {instance.name}")
instance.number += 100
print(f"Access number field directly: {instance.number}")
'''
'''
# Demo SampleWithProperties()
instance_with_props = SampleWithProperties("fred", 3)
# Directly accessing values
# Next line fails
# print(f"Access name field, direct: {instance_with_props.__name}")
# Python rewrites value names with intial __ to protect namespace
# not really a private value, but less likely to be accessed
print(f"Access name field, direct: {instance_with_props._SampleWithProperties__name}")
# Using getter to access values, looks like direct access but isn't
# name field
print(f"Access name field, getter: {instance_with_props.name}")
print(f"Access name field, getter: {instance_with_props.double_name}")
instance_with_props.name = "Barney"
print(f"Access name field, after setter: {instance_with_props.name}")
# number field
print(f"Access number field, before setter: {instance_with_props.number}")
instance_with_props.number = 4
print(f"Access number field, after setter: {instance_with_props.number}")
instance_with_props.number = 3
print(f"Access number field, after setter: {instance_with_props.number}")
'''
# Demo inheritance
# Show super class functions
instance_super = SuperClass("Johnny")
print(f"Name, super: {instance_super.name}")
print("")
# Show sub inherits name, methods
instance_sub = SubClass("Joey", "Lower East Side")
print(f"Name, super: {instance_sub.name}")
print(f"Method from super: ", end="")
    super(SubClass, instance_sub).speak()
print("")
# Show sub can override parent
print(f"Overide from super: ", end="")
instance_sub.speak()
    # Note: super(SubClass, instance) or SuperClass.speak(instance) calls the parent method from an instance outside the class body
|
daveinnyc/various
|
python-practice/class_demos.py
|
Python
|
mit
| 3,833 | 0.00574 |
#! /usr/bin/env python
import rospy
import actionlib
from move_base_msgs.msg import MoveBaseActionGoal
from actionlib_msgs.msg import GoalID
class ForceCancel(object):
def __init__(self, nodename="force_cancel", is_newnode=True, repetition=10):
self.repetition = rospy.get_param("~repetition", repetition)
if is_newnode:
rospy.init_node(name=nodename, anonymous=False)
rospy.on_shutdown(self.shutdown)
pub = rospy.Publisher("move_base/cancel", GoalID, queue_size=1)
sub = rospy.Subscriber("move_base/goal", MoveBaseActionGoal, self.callback, queue_size=1)
rospy.wait_for_message("move_base/goal", MoveBaseActionGoal, 60)
r = rospy.Rate(1)
counter = 0
while not rospy.is_shutdown() and (counter < self.repetition):
msg = GoalID()
msg.id = self.id
pub.publish(msg)
r.sleep()
counter += 1
def callback(self, msg):
self.id = msg.goal_id.id
def shutdown(self):
rospy.loginfo("cancel job finished")
rospy.sleep(1)
pass
if __name__ == "__main__":
fc = ForceCancel('force_cancel', False, 5)
|
ron1818/Singaboat_RobotX2016
|
robotx_nav/nodes/move_base_force_cancel.py
|
Python
|
gpl-3.0
| 1,191 | 0.008396 |
#!/usr/bin/env python
# ======================================================================
import pangloss
import sys,getopt,cPickle,numpy
import scipy.stats as stats
# ======================================================================
def Calibrate(argv):
"""
NAME
Calibrate.py
PURPOSE
Transform the results of the lightcone reconstruction process,
Pr(kappah|D), into our target PDF, Pr(kappa|D).
COMMENTS
All PDF input is provided as a list of samples. There are two
modes of operation:
1) The Pr(kappah|C) for an ensemble of calibration lightcones are
compressed into a single number (currently the
median), and then combined with the true kappa values to make
Pr(kappa,kappah|C). This is written out as a 2D sample list.
2) The Pr(kappah|D) for a single observed lightcone is compressed
into a single number (currently the median). This is then used
to take a slice from Pr(kappa,kappah|C) to make Pr(kappa|D,C).
Both 1 and 2 can be carried out in series if desired (Mode=3).
FLAGS
-h Print this message [0]
INPUTS
configfile Plain text file containing Pangloss configuration
OPTIONAL INPUTS
--mode Operating mode 1,2 or 3. See COMMENTS above.
OUTPUTS
stdout Useful information
samples From 1) Pr(kappa,kappah|C) or 2) Pr(kappa|D,C)
EXAMPLE
Calibrate.py example.config
BUGS
AUTHORS
This file is part of the Pangloss project, distributed under the
GPL v2, by Tom Collett (IoA) and Phil Marshall (Oxford).
Please cite: Collett et al 2013, http://arxiv.org/abs/1303.6564
HISTORY
2013-03-21 started Collett & Marshall (Oxford)
"""
# --------------------------------------------------------------------
try:
opts, args = getopt.getopt(argv,"hm:",["help","mode"])
except getopt.GetoptError, err:
print str(err) # will print something like "option -a not recognized"
print Calibrate.__doc__ # will print the big comment above.
return
Mode=3
for o,a in opts:
if o in ("-h", "--help"):
print Calibrate.__doc__
return
elif o in ("-m", "--mode"):
Mode = int(a)
assert Mode < 4 and Mode >0, "unhandled Mode"
else:
assert False, "unhandled option"
# Check for setup file in array args:
if len(args) == 1:
configfile = args[0]
print pangloss.doubledashedline
print pangloss.hello
print pangloss.doubledashedline
print "Calibrate: transforming Pr(kappah|D) to Pr(kappa|D)"
print "Calibrate: taking instructions from",configfile
else:
print Calibrate.__doc__
return
# --------------------------------------------------------------------
# Read in configuration, and extract the ones we need:
experiment = pangloss.Configuration(configfile)
EXP_NAME = experiment.parameters['ExperimentName']
Nc = experiment.parameters['NCalibrationLightcones']
comparator=experiment.parameters['Comparator']
comparatorType=experiment.parameters['ComparatorType']
comparatorWidth=experiment.parameters['ComparatorWidth']
# Figure out which mode is required:
ModeName = experiment.parameters['CalibrateMode']
if ModeName=='Joint': Mode = 1
if ModeName=='Slice': Mode = 2
if ModeName=='JointAndSlice': Mode = 3
CALIB_DIR = experiment.parameters['CalibrationFolder'][0]
jointdistfile= CALIB_DIR+'/'+comparator+'_'+comparatorType+'.pickle'
jointdistasPDFfile= CALIB_DIR+'/'+comparator+'_'+comparatorType+'_asPDF.pickle'
# Final result is PDF for kappa:
x = experiment.parameters['ObservedCatalog'][0]
resultfile = x.split('.')[0]+"_"+EXP_NAME+"_PofKappa.pickle"
# --------------------------------------------------------------------
# Mode 1: generate a joint distribution, eg Pr(kappah,kappa)
# from the calibration dataset:
if Mode==1 or Mode==3:
print pangloss.dashedline
# First find the calibration pdfs for kappa_h:
calpickles = []
for i in range(Nc):
calpickles.append(experiment.getLightconePickleName('simulated',pointing=i))
calresultpickles=[]
if comparator=="Kappah" and comparatorType=="median":
for i in range(Nc):
x = calpickles[i]
pfile = x.split('.')[0].split("_lightcone")[0]+"_"+EXP_NAME+"_KappaHilbert_Kappah_median.pickle"
calresultpickles.append(pfile)
elif comparator=="Kappah" and comparatorType!="median":
for i in range(Nc):
x = calpickles[i]
pfile = x.split('.')[0].split("_lightcone")[0]+"_"+EXP_NAME+"_KappaHilbert_Kappah_"+comparatorType+".pickle"
calresultpickles.append(pfile)
else:
print "Calibrate: Unrecognised comparator "+Comparator
print "Calibrate: If you want to use a comparator other than kappa_h, "
print "Calibrate: you'll need to code it up!"
print "Calibrate: (This should be easy, but you can ask tcollett@ast.cam.uk for help)."
exit()
# Now calculate comparators:
callist=numpy.empty((Nc,2))
jd=pangloss.PDF(["kappa_ext",comparator+'_'+comparatorType])
for i in range(Nc):
C = calresultpickles[i]
pdf = pangloss.readPickle(C)
if comparator=="Kappah":
if comparatorType=="median":
# Recall that we created a special file for this
# choice of comparator and comparator type, in
# Reconstruct. You could also use the
# comparatortype=="mean" code, swapping mean for median.
callist[i,0]=pdf[0]
callist[i,1]=pdf[1][0]
elif comparatorType=="mean":
callist[i,0] = pdf.truth[0]
callist[i,1] = numpy.mean(pdf.samples)
else:
print "Calibrate: Unrecognised comparatorType "+comparatorType
print "Calibrate: If you want to use a comparatorType other than median "
print "Calibrate: or mean, you'll need to code it up!"
print "Calibrate: (This should be easy, but you can ask tcollett@ast.cam.uk for help)."
exit()
jd.append(callist[i])
pangloss.writePickle(callist,jointdistfile)
# Also store the joint dist as a pangloss pdf:
pangloss.writePickle(jd,jointdistasPDFfile)
# Plot:
plotfile = jointdistasPDFfile.split('.')[0]+'.png'
jd.plot("Kappah_median","kappa_ext",weight=None,output=plotfile,title="The joint distribution of $\kappa_{\mathrm{ext}}$ and calibrator \n\n (more correlated means a better calibrator!)")
print "Calibrate: calibration joint PDF saved in:"
print "Calibrate: "+jointdistfile
print "Calibrate: and "+jointdistasPDFfile
print "Calibrate: you can view this PDF in "+plotfile
# --------------------------------------------------------------------
    # Mode 2: calibrate a real line of sight's Pr(kappah|D) using the
    # calibration joint distribution Pr(kappa,kappah|C):
if Mode==2 or Mode==3:
print pangloss.dashedline
callibguide = pangloss.readPickle(jointdistfile)
obspickle = experiment.getLightconePickleName('real')
pfile = obspickle.split('.')[0].split("_lightcone")[0]+'_'+EXP_NAME+"_PofKappah.pickle"
pdf=pangloss.readPickle(pfile)
if comparator=="Kappah":
if comparatorType=="median":# note we created a special file for this choice of comparator and comparator type. You could also use the comparatortype=="mean" code swapping mean for median.
RealComparator=numpy.median(pdf.samples)
elif comparatorType=="mean":
RealComparator=numpy.mean(pdf.samples)
else:
print "I don't know that comparatorType. exiting"
exit()
pdf = pangloss.PDF(["kappa_ext","weight"])
#print RealComparator
#print numpy.median(callibguide[:,1]),numpy.std(callibguide[:,1])
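        # Top-hat weighting: each calibration sample gets weight 1 if its
        # comparator value lies within comparatorWidth of the observed
        # comparator, and 0 otherwise; the weights are then normalised so the
        # weighted kappa_ext samples form Pr(kappa|D,C).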
dif=(callibguide[:,1]-RealComparator)
weights=dif*0.0
weights[numpy.abs(dif)<comparatorWidth]=1.
weights/=numpy.sum(weights)
samples=callibguide[:,0]
samplesandweights=callibguide.copy()
samplesandweights[:,1]=weights
pdf.samples=(samplesandweights)
plotfile = resultfile.split('.')[0]+".png"
pdf.plot('kappa_ext',weight='weight',output=plotfile)
average = numpy.average(samples, weights=weights)
variance = numpy.dot(weights, (samples-average)**2)/weights.sum()
average,std=average, variance**.5
        # If the weights are a step function, we can calculate the 68% confidence interval easily:
included=samples[weights>0]
onesigconfidence=numpy.abs(\
stats.scoreatpercentile(included,84)-
stats.scoreatpercentile(included,16)\
)/2.
pangloss.writePickle(pdf,resultfile)
print "Calibrate: your reconstructed lightcone has been calibrated,"
print "Calibrate: suggesting it has a kappa_ext of",\
"%.3f +\- %.3f"%(average,onesigconfidence)
print "Calibrate: the PDF for kappa_ext has been output to "+resultfile
print "Calibrate: in the form of sample kappa_ext values, and their weights."
print "Calibrate: you can view this PDF in "+plotfile
print
print "Calibrate: To read and process this file, try:"
print
print " import pangloss"
print " pdf = pangloss.readPickle(\"%s\")"%resultfile
print " kappa_samples = pdf.getParameter(\"kappa_ext\")"
print " kappa_weights = pdf.getParameter(\"weight\")"
# --------------------------------------------------------------------
print
print pangloss.doubledashedline
return resultfile,jointdistasPDFfile
# ======================================================================
if __name__ == '__main__':
Calibrate(sys.argv[1:])
# ======================================================================
|
enoordeh/Pangloss
|
Calibrate.py
|
Python
|
gpl-2.0
| 10,551 | 0.013079 |
import json
from wptserve.utils import isomorphic_decode
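# wptserve handler: echoes the Test-Header-Injection and Sec-Required-CSP
# request headers back to the embedding document via postMessage, and can
# optionally nest a second-level iframe (with or without its own csp
# attribute) depending on the query parameters handled below.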
def main(request, response):
message = {}
header = request.headers.get(b"Test-Header-Injection");
message[u'test_header_injection'] = isomorphic_decode(header) if header else None
header = request.headers.get(b"Sec-Required-CSP");
message[u'required_csp'] = isomorphic_decode(header) if header else None
header = request.headers.get(b"Sec-Required-CSP");
message[u'required_csp'] = isomorphic_decode(header) if header else None
second_level_iframe_code = u""
if b"include_second_level_iframe" in request.GET:
if b"second_level_iframe_csp" in request.GET and request.GET[b"second_level_iframe_csp"] != b"":
second_level_iframe_code = u'''<script>
var i2 = document.createElement('iframe');
i2.src = 'echo-required-csp.py';
i2.csp = "{0}";
document.body.appendChild(i2);
</script>'''.format(isomorphic_decode(request.GET[b"second_level_iframe_csp"]))
else:
second_level_iframe_code = u'''<script>
var i2 = document.createElement('iframe');
i2.src = 'echo-required-csp.py';
document.body.appendChild(i2);
</script>'''
return [(b"Content-Type", b"text/html"), (b"Allow-CSP-From", b"*")], u'''
<!DOCTYPE html>
<html>
<head>
<!--{2}-->
<script>
window.addEventListener('message', function(e) {{
window.parent.postMessage(e.data, '*');
}});
window.parent.postMessage({0}, '*');
</script>
</head>
<body>
{1}
</body>
</html>
'''.format(json.dumps(message), second_level_iframe_code, str(request.headers))
|
asajeffrey/servo
|
tests/wpt/web-platform-tests/content-security-policy/embedded-enforcement/support/echo-required-csp.py
|
Python
|
mpl-2.0
| 1,679 | 0.006552 |
# -*- coding: utf-8 -*-
import logging
import os
import re
import uuid
from django.conf import settings
from django.contrib.auth.hashers import check_password
import pexpect
from ..users.models import User
logger = logging.getLogger(__name__)
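# Both backends below rely on a handful of Django settings; a hedged sketch of
# what they might look like (the values are illustrative, not the real ones):
#
#     CSL_REALM = "EXAMPLE.ORG"              # primary Kerberos realm, tried first
#     AD_REALM = "AD.EXAMPLE.ORG"            # Active Directory realm, tried second
#     KINIT_TIMEOUT = 15                     # seconds before kinit is abandoned
#     MASTER_PASSWORD = "pbkdf2_sha256$..."  # hash checked via check_password()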
class KerberosAuthenticationBackend(object):
"""Authenticate using Kerberos.
This is the default authentication backend.
"""
@staticmethod
def kinit_timeout_handle(username, realm):
"""Check if the user exists before we throw an error.
If the user does not exist in LDAP, only throw a warning.
"""
try:
User.get_user(username=username)
except User.DoesNotExist:
logger.warning("kinit timed out for {}@{} (invalid user)".format(username, realm))
return
logger.critical("kinit timed out for {}@{}".format(username, realm))
@staticmethod
def get_kerberos_ticket(username, password):
"""Attempts to create a Kerberos ticket for a user.
Args:
username
The username.
password
The password.
Returns:
Boolean indicating success or failure of ticket creation
"""
cache = "/tmp/ion-%s" % uuid.uuid4()
logger.debug("Setting KRB5CCNAME to 'FILE:{}'".format(cache))
os.environ["KRB5CCNAME"] = "FILE:" + cache
try:
realm = settings.CSL_REALM
kinit = pexpect.spawnu("/usr/bin/kinit {}@{}".format(username, realm), timeout=settings.KINIT_TIMEOUT)
kinit.expect(":")
kinit.sendline(password)
kinit.expect(pexpect.EOF)
kinit.close()
exitstatus = kinit.exitstatus
except pexpect.TIMEOUT:
KerberosAuthenticationBackend.kinit_timeout_handle(username, realm)
exitstatus = 1
if exitstatus != 0:
realm = settings.AD_REALM
try:
kinit = pexpect.spawnu("/usr/bin/kinit {}@{}".format(username, realm), timeout=settings.KINIT_TIMEOUT)
kinit.expect(":", timeout=5)
kinit.sendline(password)
kinit.expect(pexpect.EOF)
kinit.close()
exitstatus = kinit.exitstatus
except pexpect.TIMEOUT:
KerberosAuthenticationBackend.kinit_timeout_handle(username, realm)
exitstatus = 1
if exitstatus == 0:
logger.debug("Kerberos authorized {}@{}".format(username, realm))
return True
else:
logger.debug("Kerberos failed to authorize {}".format(username))
if "KRB5CCNAME" in os.environ:
del os.environ["KRB5CCNAME"]
return False
def authenticate(self, username=None, password=None):
"""Authenticate a username-password pair.
Creates a new user if one is not already in the database.
Args:
username
The username of the `User` to authenticate.
password
The password of the `User` to authenticate.
Returns:
`User`
NOTE: None is returned when the user account does not exist. However,
if the account exists but does not exist in LDAP, which is the case for
former and future students who do not have Intranet access, a dummy user
is returned that has the flag is_active=False. (The is_active property in
the User class returns False when the username starts with "INVALID_USER".)
"""
# remove all non-alphanumerics
        username = re.sub(r'\W', '', username)
krb_ticket = self.get_kerberos_ticket(username, password)
if not krb_ticket:
return None
else:
logger.debug("Authentication successful")
try:
user = User.get_user(username=username)
except User.DoesNotExist:
# Shouldn't happen
logger.error("User {} successfully authenticated but not found " "in LDAP.".format(username))
user, status = User.objects.get_or_create(username="INVALID_USER", id=99999)
return user
def get_user(self, user_id):
"""Returns a user, given his or her user id. Required for a custom authentication backend.
Args:
user_id
The user id of the user to fetch.
Returns:
User or None
"""
try:
return User.get_user(id=user_id)
except User.DoesNotExist:
return None
class MasterPasswordAuthenticationBackend(object):
"""Authenticate as any user against a master password whose hash is in secret.py.
Forces a simple LDAP bind.
"""
def authenticate(self, username=None, password=None):
"""Authenticate a username-password pair.
Creates a new user if one is not already in the database.
Args:
username
The username of the `User` to authenticate.
password
The master password.
Returns:
`User`
"""
if check_password(password, settings.MASTER_PASSWORD):
try:
user = User.get_user(username=username)
except User.DoesNotExist:
logger.debug("Master password correct, user does not exist")
return None
logger.debug("Authentication with master password successful")
return user
logger.debug("Master password authentication failed")
return None
def get_user(self, user_id):
"""Returns a user, given his or her user id. Required for a custom authentication backend.
Args:
user_id
The user id of the user to fetch.
Returns:
User or None
"""
try:
return User.get_user(id=user_id)
except User.DoesNotExist:
return None
|
jacobajit/ion
|
intranet/apps/auth/backends.py
|
Python
|
gpl-2.0
| 6,021 | 0.002159 |
import sys
import subprocess
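# Descriptive sketch of what follows: grep the working tree for identifiers
# matching is_[A-Z]\w* (F* discriminators), keep only the hits that come from
# .fst files, and for each hit build and run a sed command that rewrites
# "X." into "X?." in the file containing it, printing each command first.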
result=subprocess.check_output("grep -or 'is_[A-Z]\w*' .", shell=True)
lines=[ l for l in str(result).splitlines() if l.find('.fst') != -1]
for l in lines:
content = l.split(':')
constr=content[1].strip()[0:-1]
print("sed -i -e 's/%s[.]/%s?./g' %s" % (constr, constr, content[0]))
subprocess.call("sed -i -e 's/%s[.]/%s?./g' %s" % (constr, constr, content[0]), shell=True)
|
A-Manning/FStar
|
src/tools/updateDiscriminators.py
|
Python
|
apache-2.0
| 419 | 0.019093 |
#
#
# BBBBBBBBBBBBBBBBB OOOOOOOOO XXXXXXX XXXXXXX
# B::::::::::::::::B OO:::::::::OO X:::::X X:::::X
# B::::::BBBBBB:::::B OO:::::::::::::OO X:::::X X:::::X
# BB:::::B B:::::BO:::::::OOO:::::::OX::::::X X::::::X
# B::::B B:::::BO::::::O O::::::OXXX:::::X X:::::XXX
# B::::B B:::::BO:::::O O:::::O X:::::X X:::::X
# B::::BBBBBB:::::B O:::::O O:::::O X:::::X:::::X
# B:::::::::::::BB O:::::O O:::::O X:::::::::X
# B::::BBBBBB:::::B O:::::O O:::::O X:::::::::X
# B::::B B:::::BO:::::O O:::::O X:::::X:::::X
# B::::B B:::::BO:::::O O:::::O X:::::X X:::::X
# B::::B B:::::BO::::::O O::::::OXXX:::::X X:::::XXX
# BB:::::BBBBBB::::::BO:::::::OOO:::::::OX::::::X X::::::X
# B:::::::::::::::::B OO:::::::::::::OO X:::::X X:::::X
# B::::::::::::::::B OO:::::::::OO X:::::X X:::::X
# BBBBBBBBBBBBBBBBB OOOOOOOOO XXXXXXX XXXXXXX
#
#
# Assetto Corsa framework created by Marco 'Marocco2' Mollace
#
# version 0.2
#
# Usage of this library is under LGPLv3. Be careful :)
#
#
import ac
import traceback
import os
import sys
import platform
try:
import ctypes
except:
ac.log('BOX: error loading ctypes: ' + traceback.format_exc())
raise
# TODO: read from config file for filters | IMPORTS
from os.path import dirname, realpath
# import configparser
import functools
import threading
import zipfile
import time
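# Decorator: run the wrapped function in a background daemon thread and return
# the Thread object, so long-running work (downloads, playback) does not block
# the caller. Note that "async" became a reserved keyword in Python 3.7, so
# this definition only parses on older interpreters.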
def async(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
t = threading.Thread(target=func, args=args, kwargs=kwargs)
t.daemon = True
t.start()
return t
return wrapper
if platform.architecture()[0] == "64bit":
dllfolder = "stdlib64"
dllfolder = os.path.join(os.path.dirname(__file__), dllfolder)
fmodex = "fmodex64.dll"
else:
dllfolder = "stdlib"
dllfolder = os.path.join(os.path.dirname(__file__), dllfolder)
fmodex = "fmodex.dll"
sys.path.insert(0, dllfolder)
os.environ['PATH'] = os.environ['PATH'] + ";."
ctypes.windll[os.path.join(dllfolder, fmodex)]
box_lib_folder = os.path.join(os.path.dirname(__file__), 'box_lib')
sys.path.insert(0, box_lib_folder)
try:
import pyfmodex
except Exception as e:
ac.log('BOX: error loading pyfmodex: ' + traceback.format_exc())
raise
try:
import requests
except Exception as e:
ac.log('BOX: error loading requests: ' + traceback.format_exc())
raise
# A useful push notification via Telegram if I need to send some news
def notification(telegram_bot_oauth):
try:
telegram_api_url = "https://api.telegram.org/bot" + telegram_bot_oauth + "/getUpdates"
r = requests.get(telegram_api_url)
message = r.json()
if message["ok"]:
var_notify = message["result"][-1]["message"]["text"]
ac.log('BOX: Notification from Telegram: ' + var_notify)
return var_notify
else:
var_notify = "No Telegram connection"
ac.log('BOX: ' + var_notify)
except:
ac.log('BOX: No Internet connection')
var_notify = ""
return var_notify
# It downloads a zip file and extracts it into a folder
def get_zipfile(download_link, dir_path='', absolute_path=False):
try:
local_filename = download_link.split('/')[-1]
# NOTE the stream=True parameter
r = requests.get(download_link, stream=True)
log_getZipFile = "Download of " + local_filename + " completed"
where_is_zip = os.path.join(os.path.dirname(__file__), local_filename)
ac.log("BOX: " + log_getZipFile)
with open(local_filename, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
# f.flush() commented by recommendation from J.F.Sebastian
ac.log("BOX: " + where_is_zip)
try:
with zipfile.ZipFile(local_filename, "r") as z:
if dir_path == "" and not absolute_path:
z.extractall(os.path.dirname(__file__)) # Extracting files
elif absolute_path:
z.extractall(dir_path) # Extracting files
else:
z.extractall(os.path.join(os.path.dirname(__file__), dir_path)) # Extracting files
# os.remove(local_filename)
log_getZipFile = "Files extracted"
return log_getZipFile
except:
log_getZipFile = "Error extracting files"
return log_getZipFile
except:
log_getZipFile = "Error downloading zip file"
ac.log('BOX: error downloading zip file: ' + traceback.format_exc())
return log_getZipFile
# A new function to automate app updates for AC
# WORK IN PROGRESS
# TODO: add logic to reorder the extracted files
def newupdate(version, check_link, download_link, dir_path=''):
try:
r = requests.get(check_link)
if r.json() != version: # Check if server version and client version is the same
update_status = get_zipfile(download_link, dir_path)
return update_status
else:
update_status = "No new update"
ac.log('BOX: ' + update_status)
return update_status
except:
update_status = "Error checking new update"
ac.log('BOX: error checking new update: ' + traceback.format_exc())
return update_status
# Uses GitHub to check for updates
# WORK IN PROGRESS
# TODO: add logic to reorder the extracted files
def github_newupdate(git_repo, branch='master', sha='', dir_path=''):
try:
check_link = "https://api.github.com/repos/" + git_repo + "/commits/" + branch
headers = {'Accept': 'application/vnd.github.VERSION.sha'}
r = requests.get(check_link, headers=headers)
if sha == "":
try:
with open("apps\\python\\" + git_repo.split('/')[-1] + "\sha.txt", 'r') as g:
sha = g.read()
g.close()
except:
update_status = "No SHA available"
ac.log('BOX: ' + update_status)
return update_status
if r.text != sha: # Check if server version and client version is the same
download_link = "https://github.com/" + git_repo + "/archive/" + branch + ".zip"
update_status = get_zipfile(download_link, dir_path)
with open("apps\\python\\" + git_repo.split('/')[-1] + "\sha.txt", 'w') as j:
j.write(r.text)
j.close()
return update_status
else:
update_status = "No new update"
ac.log('BOX: ' + update_status)
return update_status
except:
update_status = "Error checking new update"
ac.log('BOX: error checking new update: ' + traceback.format_exc())
return update_status
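# A hedged usage sketch (branch and target folder are assumptions):
#
#     status = github_newupdate("Marocco2/EpicRace", branch="master",
#                               dir_path="apps/python/EpicRace")
#     ac.log("BOX: " + status)
#
# The commit SHA of the last update is cached in apps/python/<repo>/sha.txt,
# so a download is only triggered when the branch head has moved.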
from threading import Thread, Event
class SoundPlayer(object):
def __init__(self, player):
self._play_event = Event()
self.player = player
self.playbackpos = [0.0, 0.0, 0.0]
self.playbackvol = 1.0
self.EQ = []
self.initEq()
self.sound_mode = pyfmodex.constants.FMOD_CREATECOMPRESSEDSAMPLE
self.speaker_mix = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
for i in self.EQ:
self.player.add_dsp(i)
self.channel = self.player.get_channel(0)
self.queue = []
self.thread = Thread(target=self._worker)
self.thread.daemon = True
self.thread.start()
def initEq(self):
freq = [16.0, 31.5, 63.0, 125.0, 250.0, 500.0, 1000.0, 2000.0, 4000.0, 8000.0, 16000.0]
for i in freq:
dsp = self.player.create_dsp_by_type(pyfmodex.constants.FMOD_DSP_TYPE_PARAMEQ)
dsp.set_param(pyfmodex.constants.FMOD_DSP_PARAMEQ_GAIN, 1.0)
dsp.set_param(pyfmodex.constants.FMOD_DSP_PARAMEQ_BANDWIDTH, 1.0)
dsp.set_param(pyfmodex.constants.FMOD_DSP_PARAMEQ_CENTER, i)
self.EQ.append(dsp)
def set_volume(self, volume):
self.playbackvol = volume
def set_sound_mode(self, sound_mode):
self.sound_mode = sound_mode
def set_position(self, position):
self.playbackpos = position
def set_gain(self, gain):
if self.sound_mode == pyfmodex.constants.FMOD_3D:
for i in self.EQ:
i.set_param(pyfmodex.constants.FMOD_DSP_PARAMEQ_GAIN, gain)
elif self.sound_mode == pyfmodex.constants.FMOD_2D:
volume = gain
self.speaker_mix = [volume, volume, volume, 1.0, volume, volume, volume, volume]
@async
def stop(self):
try:
self.channel.paused = 1
# self.queue.pop(0)
except:
ac.log('BOX: stop() error ' + traceback.format_exc())
@async
def queueSong(self, filename=None):
try:
if filename is not None:
if os.path.isfile(filename):
sound = self.player.create_sound(bytes(filename, encoding='utf-8'), self.sound_mode)
self.queue.append({'sound': sound, 'mode': self.sound_mode})
state = self._play_event.is_set()
if not state:
self._play_event.set()
return 1 # mp3 loaded
else:
ac.log('BOX: File not found : %s' % filename)
except:
ac.log('BOX: queueSong() error ' + traceback.format_exc())
def lenQueue(self):
leng = self.queue.__len__()
return leng
def _worker(self):
while True:
self._play_event.wait()
queue_len = len(self.queue)
while queue_len > 0:
self.player.play_sound(self.queue[0]['sound'], False, 0)
self.channel.spectrum_mix = self.speaker_mix
self.channel.volume = self.playbackvol
self.player.update()
while self.channel.paused == 0 and self.channel.is_playing == 1:
time.sleep(0.1)
self.queue[0]['sound'].release()
self.queue.pop(0)
queue_len = len(self.queue)
self._play_event.clear()
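# A hedged usage sketch (file path and volume are illustrative; the FMOD
# system below would normally be initialised before playback):
#
#     player = SoundPlayer(FModSystem)
#     player.set_volume(0.8)
#     player.queueSong("apps/python/EpicRace/sounds/box.mp3")
#
# queueSong() loads the file with FMOD and wakes the worker thread, which
# plays queued sounds in order, applying the configured volume and speaker mix.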
FModSystem = pyfmodex.System()
|
Marocco2/EpicRace
|
BOX/box.py
|
Python
|
lgpl-3.0
| 10,406 | 0.002787 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('pipedrive', '0004_auto_20170502_1701'),
]
operations = [
migrations.AlterField(
model_name='activity',
name='subject',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='activity',
name='type',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='deal',
name='lost_reason',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='deal',
name='title',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='dealfield',
name='field_type',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='dealfield',
name='key',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='dealfield',
name='name',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='organization',
name='address',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='organization',
name='name',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='organizationfield',
name='field_type',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='organizationfield',
name='key',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='organizationfield',
name='name',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='person',
name='email',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='person',
name='name',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='person',
name='phone',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='personfield',
name='field_type',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='personfield',
name='key',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='personfield',
name='name',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='pipeline',
name='name',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='pipeline',
name='url_title',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='stage',
name='name',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='stage',
name='pipeline_name',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='user',
name='email',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='user',
name='name',
field=models.CharField(max_length=500, null=True, blank=True),
),
migrations.AlterField(
model_name='user',
name='phone',
field=models.CharField(max_length=500, null=True, blank=True),
),
]
|
MasAval/django_pipedrive
|
pipedrive/migrations/0005_auto_20170510_1253.py
|
Python
|
bsd-3-clause
| 4,508 | 0 |
'''
A simpler setup version just to compile the speedup module.
It should be used as:
python setup_cython build_ext --inplace
Note: the .c file and other generated files are regenerated from
the .pyx file by running "python build_tools/build.py"
'''
import os
import sys
from setuptools import setup
os.chdir(os.path.dirname(os.path.abspath(__file__)))
IS_PY36_OR_GREATER = sys.version_info > (3, 6)
IS_PY39_OR_GREATER = sys.version_info > (3, 9)
def process_args():
extension_folder = None
target_pydevd_name = None
target_frame_eval = None
force_cython = False
for i, arg in enumerate(sys.argv[:]):
if arg == '--build-lib':
extension_folder = sys.argv[i + 1]
            # It shouldn't be removed from sys.argv (along with --build-temp) because they're passed further to setup()
if arg.startswith('--target-pyd-name='):
sys.argv.remove(arg)
target_pydevd_name = arg[len('--target-pyd-name='):]
if arg.startswith('--target-pyd-frame-eval='):
sys.argv.remove(arg)
target_frame_eval = arg[len('--target-pyd-frame-eval='):]
if arg == '--force-cython':
sys.argv.remove(arg)
force_cython = True
return extension_folder, target_pydevd_name, target_frame_eval, force_cython
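# A hedged invocation sketch (the target name is illustrative): the custom
# flags below are consumed by process_args() before the remaining arguments
# are handed to setuptools.
#
#     python setup_cython.py build_ext --inplace --force-cython \
#         --target-pyd-name=pydevd_cython_custom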
def build_extension(dir_name, extension_name, target_pydevd_name, force_cython, extended=False, has_pxd=False):
pyx_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pyx" % (extension_name,))
if target_pydevd_name != extension_name:
# It MUST be there in this case!
# (otherwise we'll have unresolved externals because the .c file had another name initially).
import shutil
# We must force cython in this case (but only in this case -- for the regular setup in the user machine, we
# should always compile the .c file).
force_cython = True
new_pyx_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pyx" % (target_pydevd_name,))
new_c_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.c" % (target_pydevd_name,))
shutil.copy(pyx_file, new_pyx_file)
pyx_file = new_pyx_file
if has_pxd:
pxd_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pxd" % (extension_name,))
new_pxd_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pxd" % (target_pydevd_name,))
shutil.copy(pxd_file, new_pxd_file)
assert os.path.exists(pyx_file)
try:
if force_cython:
from Cython.Build import cythonize # @UnusedImport
ext_modules = cythonize([
"%s/%s.pyx" % (dir_name, target_pydevd_name,),
], force=True)
else:
# Always compile the .c (and not the .pyx) file (which we should keep up-to-date by running build_tools/build.py).
from distutils.extension import Extension
ext_modules = [Extension("%s%s.%s" % (dir_name, "_ext" if extended else "", target_pydevd_name,),
[os.path.join(dir_name, "%s.c" % target_pydevd_name), ],
# uncomment to generate pdbs for visual studio.
# extra_compile_args=["-Zi", "/Od"],
# extra_link_args=["-debug"],
)]
setup(
name='Cythonize',
ext_modules=ext_modules
)
finally:
if target_pydevd_name != extension_name:
try:
os.remove(new_pyx_file)
except:
import traceback
traceback.print_exc()
try:
os.remove(new_c_file)
except:
import traceback
traceback.print_exc()
if has_pxd:
try:
os.remove(new_pxd_file)
except:
import traceback
traceback.print_exc()
extension_folder, target_pydevd_name, target_frame_eval, force_cython = process_args()
extension_name = "pydevd_cython"
if target_pydevd_name is None:
target_pydevd_name = extension_name
build_extension("_pydevd_bundle", extension_name, target_pydevd_name, force_cython, extension_folder, True)
if IS_PY36_OR_GREATER:
extension_name = "pydevd_frame_evaluator"
frame_eval_dir_name = "_pydevd_frame_eval"
target_frame_eval_common = "%s_%s" % (extension_name, "common")
build_extension(frame_eval_dir_name, target_frame_eval_common, target_frame_eval_common, force_cython, extension_folder,
True)
if IS_PY39_OR_GREATER:
extension_name += "_py39_and_above"
if target_frame_eval is None:
target_frame_eval = extension_name
build_extension(frame_eval_dir_name, extension_name, target_frame_eval, force_cython, extension_folder, True)
if extension_folder:
os.chdir(extension_folder)
for folder in [file for file in os.listdir(extension_folder) if
file != 'build' and os.path.isdir(os.path.join(extension_folder, file))]:
file = os.path.join(folder, "__init__.py")
if not os.path.exists(file):
open(file, 'a').close()
|
smmribeiro/intellij-community
|
python/helpers/pydev/setup_cython.py
|
Python
|
apache-2.0
| 5,308 | 0.004145 |
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
This file implements the CLA Classifier region. See the comments in the class
definition of CLAClassifierRegion for a description.
"""
from PyRegion import PyRegion
from nupic.algorithms.cla_classifier_factory import CLAClassifierFactory
###############################################################################
class CLAClassifierRegion(PyRegion):
"""
CLAClassifierRegion implements a CLA specific classifier that accepts a binary
input from the level below (the "activationPattern") and information from the
sensor and encoders (the "classification") describing the input to the system
at that time step.
  When learning, for every bit in the activation pattern, it records a history of the
classification each time that bit was active. The history is bounded by a
maximum allowed age so that old entries are thrown away.
For inference, it takes an ensemble approach. For every active bit in the
activationPattern, it looks up the most likely classification(s) from the
history stored for that bit and then votes across these to get the resulting
classification(s).
The caller can choose to tell the region that the classifications for
iteration N+K should be aligned with the activationPattern for iteration N.
This results in the classifier producing predictions for K steps in advance.
Any number of different K's can be specified, allowing the classifier to learn
and infer multi-step predictions for a number of steps in advance.
"""
###############################################################################
@classmethod
def getSpec(cls):
ns = dict(
description=CLAClassifierRegion.__doc__,
singleNodeOnly=True,
# The inputs and outputs are not used in this region because they are
# either sparse vectors or dictionaries and hence don't fit the "vector
# of real" input/output pattern.
# There is a custom compute() function provided that accepts the
# inputs and outputs.
inputs=dict(
categoryIn=dict(
description='Category of the input sample',
dataType='Real32',
count=1,
required=True,
regionLevel=True,
isDefaultInput=False,
requireSplitterMap=False),
bottomUpIn=dict(
description='Belief values over children\'s groups',
dataType='Real32',
count=0,
required=True,
regionLevel=False,
isDefaultInput=True,
requireSplitterMap=False),
),
outputs=dict(),
parameters=dict(
learningMode=dict(
description='Boolean (0/1) indicating whether or not a region '
'is in learning mode.',
dataType='UInt32',
count=1,
constraints='bool',
defaultValue=1,
accessMode='ReadWrite'),
inferenceMode=dict(
description='Boolean (0/1) indicating whether or not a region '
'is in inference mode.',
dataType='UInt32',
count=1,
constraints='bool',
defaultValue=0,
accessMode='ReadWrite'),
steps=dict(
description='Comma separated list of the desired steps of '
'prediction that the classifier should learn',
dataType="Byte",
count=0,
constraints='',
defaultValue='1',
accessMode='Create'),
alpha=dict(
description='The alpha used to compute running averages of the '
'bucket duty cycles for each activation pattern bit. A lower '
'alpha results in longer term memory',
dataType="Real32",
count=1,
constraints='',
defaultValue=0.001,
accessMode='Create'),
implementation=dict(
description='The classifier implementation to use.',
accessMode='ReadWrite',
dataType='Byte',
count=0,
constraints='enum: py, cpp'),
clVerbosity=dict(
description='An integer that controls the verbosity level, '
'0 means no verbose output, increasing integers '
'provide more verbosity.',
dataType='UInt32',
count=1,
constraints='',
defaultValue=0 ,
accessMode='ReadWrite'),
),
commands=dict()
)
return ns
###############################################################################
def __init__(self,
steps='1',
alpha=0.001,
clVerbosity=0,
implementation=None,
):
# Convert the steps designation to a list
self.steps = steps
self.stepsList = eval("[%s]" % (steps))
self.alpha = alpha
self.verbosity = clVerbosity
# Initialize internal structures
self._claClassifier = CLAClassifierFactory.create(
steps=self.stepsList,
alpha=self.alpha,
verbosity=self.verbosity,
implementation=implementation,
)
self.learningMode = True
self.inferenceMode = False
self._initEphemerals()
###############################################################################
def _initEphemerals(self):
pass
###############################################################################
def initialize(self, dims, splitterMaps):
pass
###############################################################################
def clear(self):
self._claClassifier.clear()
###############################################################################
def getParameter(self, name, index=-1):
"""
Get the value of the parameter.
@param name -- the name of the parameter to retrieve, as defined
by the Node Spec.
"""
# If any spec parameter name is the same as an attribute, this call
# will get it automatically, e.g. self.learningMode
return PyRegion.getParameter(self, name, index)
###############################################################################
def setParameter(self, name, index, value):
"""
Set the value of the parameter.
@param name -- the name of the parameter to update, as defined
by the Node Spec.
@param value -- the value to which the parameter is to be set.
"""
if name == "learningMode":
self.learningMode = bool(int(value))
elif name == "inferenceMode":
self.inferenceMode = bool(int(value))
else:
return PyRegion.setParameter(self, name, index, value)
###############################################################################
def reset(self):
pass
###############################################################################
def compute(self, inputs, outputs):
"""
Process one input sample.
This method is called by the runtime engine.
We don't use this method in this region because the inputs and outputs don't
fit the standard "vector of reals" used by the engine. Instead, call
the customCompute() method directly
"""
pass
###############################################################################
def customCompute(self, recordNum, patternNZ, classification):
"""
Process one input sample.
This method is called by outer loop code outside the nupic-engine. We
use this instead of the nupic engine compute() because our inputs and
outputs aren't fixed size vectors of reals.
Parameters:
--------------------------------------------------------------------
patternNZ: list of the active indices from the output below
classification: dict of the classification information:
bucketIdx: index of the encoder bucket
actValue: actual value going into the encoder
retval: dict containing inference results, one entry for each step in
self.steps. The key is the number of steps, the value is an
array containing the relative likelihood for each bucketIdx
starting from bucketIdx 0.
for example:
{1 : [0.1, 0.3, 0.2, 0.7]
4 : [0.2, 0.4, 0.3, 0.5]}
"""
return self._claClassifier.compute( recordNum=recordNum,
patternNZ=patternNZ,
classification=classification,
learn = self.learningMode,
infer = self.inferenceMode)
###############################################################################
if __name__=='__main__':
from nupic.engine import Network
n = Network()
classifier = n.addRegion(
'classifier',
'py.CLAClassifierRegion',
'{ steps: "1,2", maxAge: 1000}'
)
|
tomsilver/nupic
|
nupic/regions/CLAClassifierRegion.py
|
Python
|
gpl-3.0
| 9,996 | 0.006803 |
import logging
import time
from collections import OrderedDict, defaultdict
from datetime import datetime, timedelta
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
from django.conf import settings
from django.db import connection
from django.db.models import F
from psycopg2.sql import SQL, Composable, Identifier, Literal
from analytics.models import (
BaseCount,
FillState,
InstallationCount,
RealmCount,
StreamCount,
UserCount,
installation_epoch,
last_successful_fill,
)
from zerver.lib.logging_util import log_to_file
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, floor_to_hour, verify_UTC
from zerver.models import (
Message,
Realm,
RealmAuditLog,
Stream,
UserActivityInterval,
UserProfile,
models,
)
## Logging setup ##
logger = logging.getLogger('zulip.management')
log_to_file(logger, settings.ANALYTICS_LOG_PATH)
# You can't subtract timedelta.max from a datetime, so use this instead
TIMEDELTA_MAX = timedelta(days=365*1000)
## Class definitions ##
class CountStat:
HOUR = 'hour'
DAY = 'day'
FREQUENCIES = frozenset([HOUR, DAY])
def __init__(self, property: str, data_collector: 'DataCollector', frequency: str,
interval: Optional[timedelta]=None) -> None:
self.property = property
self.data_collector = data_collector
# might have to do something different for bitfields
if frequency not in self.FREQUENCIES:
raise AssertionError(f"Unknown frequency: {frequency}")
self.frequency = frequency
if interval is not None:
self.interval = interval
elif frequency == CountStat.HOUR:
self.interval = timedelta(hours=1)
else: # frequency == CountStat.DAY
self.interval = timedelta(days=1)
def __str__(self) -> str:
return f"<CountStat: {self.property}>"
class LoggingCountStat(CountStat):
def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None:
CountStat.__init__(self, property, DataCollector(output_table, None), frequency)
class DependentCountStat(CountStat):
def __init__(self, property: str, data_collector: 'DataCollector', frequency: str,
interval: Optional[timedelta] = None, dependencies: Sequence[str] = []) -> None:
CountStat.__init__(self, property, data_collector, frequency, interval=interval)
self.dependencies = dependencies
class DataCollector:
def __init__(self, output_table: Type[BaseCount],
pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]]) -> None:
self.output_table = output_table
self.pull_function = pull_function
## CountStat-level operations ##
def process_count_stat(stat: CountStat, fill_to_time: datetime,
realm: Optional[Realm]=None) -> None:
# TODO: The realm argument is not yet supported, in that we don't
# have a solution for how to update FillState if it is passed. It
# exists solely as partial plumbing for when we do fully implement
# doing single-realm analytics runs for use cases like data import.
#
# Also, note that for the realm argument to be properly supported,
# the CountStat object passed in needs to have come from
# E.g. get_count_stats(realm), i.e. have the realm_id already
# entered into the SQL query defined by the CountState object.
if stat.frequency == CountStat.HOUR:
time_increment = timedelta(hours=1)
elif stat.frequency == CountStat.DAY:
time_increment = timedelta(days=1)
else:
raise AssertionError(f"Unknown frequency: {stat.frequency}")
verify_UTC(fill_to_time)
if floor_to_hour(fill_to_time) != fill_to_time:
raise ValueError(f"fill_to_time must be on an hour boundary: {fill_to_time}")
fill_state = FillState.objects.filter(property=stat.property).first()
if fill_state is None:
currently_filled = installation_epoch()
fill_state = FillState.objects.create(property=stat.property,
end_time=currently_filled,
state=FillState.DONE)
logger.info("INITIALIZED %s %s", stat.property, currently_filled)
elif fill_state.state == FillState.STARTED:
logger.info("UNDO START %s %s", stat.property, fill_state.end_time)
do_delete_counts_at_hour(stat, fill_state.end_time)
currently_filled = fill_state.end_time - time_increment
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
logger.info("UNDO DONE %s", stat.property)
elif fill_state.state == FillState.DONE:
currently_filled = fill_state.end_time
else:
raise AssertionError(f"Unknown value for FillState.state: {fill_state.state}.")
if isinstance(stat, DependentCountStat):
for dependency in stat.dependencies:
dependency_fill_time = last_successful_fill(dependency)
if dependency_fill_time is None:
logger.warning("DependentCountStat %s run before dependency %s.",
stat.property, dependency)
return
fill_to_time = min(fill_to_time, dependency_fill_time)
currently_filled = currently_filled + time_increment
while currently_filled <= fill_to_time:
logger.info("START %s %s", stat.property, currently_filled)
start = time.time()
do_update_fill_state(fill_state, currently_filled, FillState.STARTED)
do_fill_count_stat_at_hour(stat, currently_filled, realm)
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
end = time.time()
currently_filled = currently_filled + time_increment
logger.info("DONE %s (%dms)", stat.property, (end-start)*1000)
def do_update_fill_state(fill_state: FillState, end_time: datetime, state: int) -> None:
fill_state.end_time = end_time
fill_state.state = state
fill_state.save()
# We assume end_time is valid (e.g. is on a day or hour boundary as appropriate)
# and is timezone aware. It is the caller's responsibility to enforce this!
def do_fill_count_stat_at_hour(stat: CountStat, end_time: datetime, realm: Optional[Realm]=None) -> None:
start_time = end_time - stat.interval
if not isinstance(stat, LoggingCountStat):
timer = time.time()
assert(stat.data_collector.pull_function is not None)
rows_added = stat.data_collector.pull_function(stat.property, start_time, end_time, realm)
logger.info("%s run pull_function (%dms/%sr)",
stat.property, (time.time()-timer)*1000, rows_added)
do_aggregate_to_summary_table(stat, end_time, realm)
def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
if isinstance(stat, LoggingCountStat):
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
if stat.data_collector.output_table in [UserCount, StreamCount]:
RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
else:
UserCount.objects.filter(property=stat.property, end_time=end_time).delete()
StreamCount.objects.filter(property=stat.property, end_time=end_time).delete()
RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
def do_aggregate_to_summary_table(stat: CountStat, end_time: datetime,
realm: Optional[Realm]=None) -> None:
cursor = connection.cursor()
# Aggregate into RealmCount
output_table = stat.data_collector.output_table
if realm is not None:
realm_clause = SQL("AND zerver_realm.id = {}").format(Literal(realm.id))
else:
realm_clause = SQL("")
if output_table in (UserCount, StreamCount):
realmcount_query = SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
zerver_realm.id, COALESCE(sum({output_table}.value), 0), %(property)s,
{output_table}.subgroup, %(end_time)s
FROM zerver_realm
JOIN {output_table}
ON
zerver_realm.id = {output_table}.realm_id
WHERE
{output_table}.property = %(property)s AND
{output_table}.end_time = %(end_time)s
{realm_clause}
GROUP BY zerver_realm.id, {output_table}.subgroup
""").format(
output_table=Identifier(output_table._meta.db_table),
realm_clause=realm_clause,
)
start = time.time()
cursor.execute(realmcount_query, {
'property': stat.property,
'end_time': end_time,
})
end = time.time()
logger.info(
"%s RealmCount aggregation (%dms/%sr)",
stat.property, (end - start) * 1000, cursor.rowcount,
)
if realm is None:
# Aggregate into InstallationCount. Only run if we just
# processed counts for all realms.
#
# TODO: Add support for updating installation data after
# changing an individual realm's values.
installationcount_query = SQL("""
INSERT INTO analytics_installationcount
(value, property, subgroup, end_time)
SELECT
sum(value), %(property)s, analytics_realmcount.subgroup, %(end_time)s
FROM analytics_realmcount
WHERE
property = %(property)s AND
end_time = %(end_time)s
GROUP BY analytics_realmcount.subgroup
""")
start = time.time()
cursor.execute(installationcount_query, {
'property': stat.property,
'end_time': end_time,
})
end = time.time()
logger.info(
"%s InstallationCount aggregation (%dms/%sr)",
stat.property, (end - start) * 1000, cursor.rowcount,
)
cursor.close()
## Utility functions called from outside counts.py ##
# called from zerver/lib/actions.py; should not throw any errors
def do_increment_logging_stat(zerver_object: Union[Realm, UserProfile, Stream], stat: CountStat,
subgroup: Optional[Union[str, int, bool]], event_time: datetime,
increment: int=1) -> None:
if not increment:
return
table = stat.data_collector.output_table
if table == RealmCount:
id_args = {'realm': zerver_object}
elif table == UserCount:
id_args = {'realm': zerver_object.realm, 'user': zerver_object}
else: # StreamCount
id_args = {'realm': zerver_object.realm, 'stream': zerver_object}
if stat.frequency == CountStat.DAY:
end_time = ceiling_to_day(event_time)
else: # CountStat.HOUR:
end_time = ceiling_to_hour(event_time)
row, created = table.objects.get_or_create(
property=stat.property, subgroup=subgroup, end_time=end_time,
defaults={'value': increment}, **id_args)
if not created:
row.value = F('value') + increment
row.save(update_fields=['value'])
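# A hedged usage sketch (the stat, object and increment are illustrative):
# bump a LoggingCountStat for a user from request-handling code.
#
#     stat = get_count_stats()['messages_read::hour']
#     do_increment_logging_stat(user_profile, stat, None, event_time, increment=5)
#
# event_time should be a timezone-aware datetime; the matching UserCount row
# for that hour is created on first use and incremented thereafter.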
def do_drop_all_analytics_tables() -> None:
UserCount.objects.all().delete()
StreamCount.objects.all().delete()
RealmCount.objects.all().delete()
InstallationCount.objects.all().delete()
FillState.objects.all().delete()
def do_drop_single_stat(property: str) -> None:
UserCount.objects.filter(property=property).delete()
StreamCount.objects.filter(property=property).delete()
RealmCount.objects.filter(property=property).delete()
InstallationCount.objects.filter(property=property).delete()
FillState.objects.filter(property=property).delete()
## DataCollector-level operations ##
QueryFn = Callable[[Dict[str, Composable]], Composable]
def do_pull_by_sql_query(
property: str,
start_time: datetime,
end_time: datetime,
query: QueryFn,
group_by: Optional[Tuple[models.Model, str]],
) -> int:
if group_by is None:
subgroup = SQL('NULL')
group_by_clause = SQL('')
else:
subgroup = Identifier(group_by[0]._meta.db_table, group_by[1])
group_by_clause = SQL(', {}').format(subgroup)
# We do string replacement here because cursor.execute will reject a
# group_by_clause given as a param.
# We pass in the datetimes as params to cursor.execute so that we don't have to
# think about how to convert python datetimes to SQL datetimes.
query_ = query({
'subgroup': subgroup,
'group_by_clause': group_by_clause,
})
cursor = connection.cursor()
cursor.execute(query_, {
'property': property,
'time_start': start_time,
'time_end': end_time,
})
rowcount = cursor.rowcount
cursor.close()
return rowcount
def sql_data_collector(
output_table: Type[BaseCount],
query: QueryFn,
group_by: Optional[Tuple[models.Model, str]],
) -> DataCollector:
def pull_function(property: str, start_time: datetime, end_time: datetime,
realm: Optional[Realm] = None) -> int:
# The pull function type needs to accept a Realm argument
# because the 'minutes_active::day' CountStat uses
# DataCollector directly for do_pull_minutes_active, which
# requires the realm argument. We ignore it here, because the
# realm should have been already encoded in the `query` we're
# passed.
return do_pull_by_sql_query(property, start_time, end_time, query, group_by)
return DataCollector(output_table, pull_function)
def do_pull_minutes_active(property: str, start_time: datetime, end_time: datetime,
realm: Optional[Realm] = None) -> int:
user_activity_intervals = UserActivityInterval.objects.filter(
end__gt=start_time, start__lt=end_time,
).select_related(
'user_profile',
).values_list(
'user_profile_id', 'user_profile__realm_id', 'start', 'end')
seconds_active: Dict[Tuple[int, int], float] = defaultdict(float)
for user_id, realm_id, interval_start, interval_end in user_activity_intervals:
if realm is None or realm.id == realm_id:
start = max(start_time, interval_start)
end = min(end_time, interval_end)
seconds_active[(user_id, realm_id)] += (end - start).total_seconds()
rows = [UserCount(user_id=ids[0], realm_id=ids[1], property=property,
end_time=end_time, value=int(seconds // 60))
for ids, seconds in seconds_active.items() if seconds >= 60]
UserCount.objects.bulk_create(rows)
return len(rows)
def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
zerver_userprofile.id, zerver_userprofile.realm_id, count(*),
%(property)s, {subgroup}, %(time_end)s
FROM zerver_userprofile
JOIN zerver_message
ON
zerver_userprofile.id = zerver_message.sender_id
WHERE
zerver_userprofile.date_joined < %(time_end)s AND
zerver_message.date_sent >= %(time_start)s AND
{realm_clause}
zerver_message.date_sent < %(time_end)s
GROUP BY zerver_userprofile.id {group_by_clause}
""").format(**kwargs, realm_clause=realm_clause)
# Note: ignores the group_by / group_by_clause.
def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(realm_id, user_id, value, property, subgroup, end_time)
SELECT realm_id, id, SUM(count) AS value, %(property)s, message_type, %(time_end)s
FROM
(
SELECT zerver_userprofile.realm_id, zerver_userprofile.id, count(*),
CASE WHEN
zerver_recipient.type = 1 THEN 'private_message'
WHEN
zerver_recipient.type = 3 THEN 'huddle_message'
WHEN
zerver_stream.invite_only = TRUE THEN 'private_stream'
ELSE 'public_stream'
END
message_type
FROM zerver_userprofile
JOIN zerver_message
ON
zerver_userprofile.id = zerver_message.sender_id AND
zerver_message.date_sent >= %(time_start)s AND
{realm_clause}
zerver_message.date_sent < %(time_end)s
JOIN zerver_recipient
ON
zerver_message.recipient_id = zerver_recipient.id
LEFT JOIN zerver_stream
ON
zerver_recipient.type_id = zerver_stream.id
GROUP BY
zerver_userprofile.realm_id, zerver_userprofile.id,
zerver_recipient.type, zerver_stream.invite_only
) AS subquery
GROUP BY realm_id, id, message_type
""").format(**kwargs, realm_clause=realm_clause)
# This query joins to the UserProfile table since all current queries that
# use this also subgroup on UserProfile.is_bot. If in the future there is a
# stat that counts messages by stream and doesn't need the UserProfile
# table, consider writing a new query for efficiency.
def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_stream.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_streamcount
(stream_id, realm_id, value, property, subgroup, end_time)
SELECT
zerver_stream.id, zerver_stream.realm_id, count(*), %(property)s, {subgroup}, %(time_end)s
FROM zerver_stream
JOIN zerver_recipient
ON
zerver_stream.id = zerver_recipient.type_id
JOIN zerver_message
ON
zerver_recipient.id = zerver_message.recipient_id
JOIN zerver_userprofile
ON
zerver_message.sender_id = zerver_userprofile.id
WHERE
zerver_stream.date_created < %(time_end)s AND
zerver_recipient.type = 2 AND
zerver_message.date_sent >= %(time_start)s AND
{realm_clause}
zerver_message.date_sent < %(time_end)s
GROUP BY zerver_stream.id {group_by_clause}
""").format(**kwargs, realm_clause=realm_clause)
# Hardcodes the query needed by active_users:is_bot:day, since that is
# currently the only stat that uses this.
def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
zerver_realm.id, count(*), %(property)s, {subgroup}, %(time_end)s
FROM zerver_realm
JOIN zerver_userprofile
ON
zerver_realm.id = zerver_userprofile.realm_id
WHERE
zerver_realm.date_created < %(time_end)s AND
zerver_userprofile.date_joined >= %(time_start)s AND
zerver_userprofile.date_joined < %(time_end)s AND
{realm_clause}
zerver_userprofile.is_active = TRUE
GROUP BY zerver_realm.id {group_by_clause}
""").format(**kwargs, realm_clause=realm_clause)
# Currently hardcodes the query needed for active_users_audit:is_bot:day.
# Assumes that a user cannot have two RealmAuditLog entries with the same event_time and
# event_type in [RealmAuditLog.USER_CREATED, USER_DEACTIVATED, etc].
# In particular, it's important to ensure that migrations don't cause that to happen.
def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
ral1.modified_user_id, ral1.realm_id, 1, %(property)s, {subgroup}, %(time_end)s
FROM zerver_realmauditlog ral1
JOIN (
SELECT modified_user_id, max(event_time) AS max_event_time
FROM zerver_realmauditlog
WHERE
event_type in ({user_created}, {user_activated}, {user_deactivated}, {user_reactivated}) AND
{realm_clause}
event_time < %(time_end)s
GROUP BY modified_user_id
) ral2
ON
ral1.event_time = max_event_time AND
ral1.modified_user_id = ral2.modified_user_id
JOIN zerver_userprofile
ON
ral1.modified_user_id = zerver_userprofile.id
WHERE
ral1.event_type in ({user_created}, {user_activated}, {user_reactivated})
""").format(
**kwargs,
user_created=Literal(RealmAuditLog.USER_CREATED),
user_activated=Literal(RealmAuditLog.USER_ACTIVATED),
user_deactivated=Literal(RealmAuditLog.USER_DEACTIVATED),
user_reactivated=Literal(RealmAuditLog.USER_REACTIVATED),
realm_clause=realm_clause,
)
def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
zerver_userprofile.id, zerver_userprofile.realm_id, 1, %(property)s, {subgroup}, %(time_end)s
FROM zerver_userprofile
JOIN zerver_useractivityinterval
ON
zerver_userprofile.id = zerver_useractivityinterval.user_profile_id
WHERE
zerver_useractivityinterval.end >= %(time_start)s AND
{realm_clause}
zerver_useractivityinterval.start < %(time_end)s
GROUP BY zerver_userprofile.id {group_by_clause}
""").format(**kwargs, realm_clause=realm_clause)
def count_realm_active_humans_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
usercount1.realm_id, count(*), %(property)s, NULL, %(time_end)s
FROM (
SELECT realm_id, user_id
FROM analytics_usercount
WHERE
property = 'active_users_audit:is_bot:day' AND
subgroup = 'false' AND
{realm_clause}
end_time = %(time_end)s
) usercount1
JOIN (
SELECT realm_id, user_id
FROM analytics_usercount
WHERE
property = '15day_actives::day' AND
{realm_clause}
end_time = %(time_end)s
) usercount2
ON
usercount1.user_id = usercount2.user_id
GROUP BY usercount1.realm_id
""").format(**kwargs, realm_clause=realm_clause)
# Currently unused and untested
count_stream_by_realm_query = lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
zerver_realm.id, count(*), %(property)s, {subgroup}, %(time_end)s
FROM zerver_realm
JOIN zerver_stream
ON
zerver_realm.id = zerver_stream.realm_id AND
WHERE
zerver_realm.date_created < %(time_end)s AND
zerver_stream.date_created >= %(time_start)s AND
zerver_stream.date_created < %(time_end)s
GROUP BY zerver_realm.id {group_by_clause}
""").format(**kwargs)
def get_count_stats(realm: Optional[Realm]=None) -> Dict[str, CountStat]:
## CountStat declarations ##
count_stats_ = [
# Messages Sent stats
# Stats that count the number of messages sent in various ways.
# These are also the set of stats that read from the Message table.
CountStat('messages_sent:is_bot:hour',
sql_data_collector(UserCount, count_message_by_user_query(
realm), (UserProfile, 'is_bot')),
CountStat.HOUR),
CountStat('messages_sent:message_type:day',
sql_data_collector(
UserCount, count_message_type_by_user_query(realm), None),
CountStat.DAY),
CountStat('messages_sent:client:day',
sql_data_collector(UserCount, count_message_by_user_query(realm),
(Message, 'sending_client_id')), CountStat.DAY),
CountStat('messages_in_stream:is_bot:day',
sql_data_collector(StreamCount, count_message_by_stream_query(realm),
(UserProfile, 'is_bot')), CountStat.DAY),
# Number of Users stats
# Stats that count the number of active users in the UserProfile.is_active sense.
# 'active_users_audit:is_bot:day' is the canonical record of which users were
# active on which days (in the UserProfile.is_active sense).
# Important that this stay a daily stat, so that 'realm_active_humans::day' works as expected.
CountStat('active_users_audit:is_bot:day',
sql_data_collector(UserCount, check_realmauditlog_by_user_query(
realm), (UserProfile, 'is_bot')),
CountStat.DAY),
# Important note: LoggingCountStat objects aren't passed the
# Realm argument, because by nature they have a logging
# structure, not a pull-from-database structure, so there's no
# way to compute them for a single realm after the fact (the
# use case for passing a Realm argument).
        # Sanity check on 'active_users_audit:is_bot:day', and an archetype for future LoggingCountStats.
# In RealmCount, 'active_users_audit:is_bot:day' should be the partial
# sum sequence of 'active_users_log:is_bot:day', for any realm that
# started after the latter stat was introduced.
LoggingCountStat('active_users_log:is_bot:day',
RealmCount, CountStat.DAY),
        # Another sanity check on 'active_users_audit:is_bot:day'. It is only an
        # approximation; e.g. if a user is deactivated between the end of the
        # day and when this stat is run, they won't be counted. However, it is
        # the simplest of the three to inspect by hand.
CountStat('active_users:is_bot:day',
sql_data_collector(RealmCount, count_user_by_realm_query(realm), (UserProfile, 'is_bot')),
CountStat.DAY, interval=TIMEDELTA_MAX),
# Messages read stats. messages_read::hour is the total
# number of messages read, whereas
# messages_read_interactions::hour tries to count the total
# number of UI interactions resulting in messages being marked
# as read (imperfect because of batching of some request
# types, but less likely to be overwhelmed by a single bulk
# operation).
LoggingCountStat('messages_read::hour', UserCount, CountStat.HOUR),
LoggingCountStat('messages_read_interactions::hour', UserCount, CountStat.HOUR),
# User Activity stats
# Stats that measure user activity in the UserActivityInterval sense.
CountStat('1day_actives::day',
sql_data_collector(
UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY, interval=timedelta(days=1)-UserActivityInterval.MIN_INTERVAL_LENGTH),
CountStat('15day_actives::day',
sql_data_collector(
UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY, interval=timedelta(days=15)-UserActivityInterval.MIN_INTERVAL_LENGTH),
CountStat('minutes_active::day', DataCollector(
UserCount, do_pull_minutes_active), CountStat.DAY),
# Rate limiting stats
# Used to limit the number of invitation emails sent by a realm
LoggingCountStat('invites_sent::day', RealmCount, CountStat.DAY),
# Dependent stats
# Must come after their dependencies.
# Canonical account of the number of active humans in a realm on each day.
DependentCountStat('realm_active_humans::day',
sql_data_collector(
RealmCount, count_realm_active_humans_query(realm), None),
CountStat.DAY,
dependencies=['active_users_audit:is_bot:day', '15day_actives::day']),
]
return OrderedDict([(stat.property, stat) for stat in count_stats_])
# To avoid refactoring, for now COUNT_STATS can be used as before
COUNT_STATS = get_count_stats()
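# Illustrative usage (not part of the original module): a minimal sketch of how
# the registry returned by get_count_stats() might be inspected, assuming the
# module is imported in a configured Django environment. The 'frequency'
# attribute name is an assumption based on the CountStat constructor arguments
# used above.
if __name__ == '__main__':
    for property_name, stat in COUNT_STATS.items():
        # Each key is a property string such as 'messages_sent:is_bot:hour';
        # each value is the CountStat responsible for collecting and
        # aggregating that property.
        print(property_name, stat.frequency)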
| shubhamdhama/zulip | analytics/lib/counts.py | Python | apache-2.0 | 29,311 | 0.003685 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'G:\WorkDir\gas-sensing_resistors\SC_spectrum\SC_main.ui'
#
# Created: Wed Jan 20 20:49:15 2016
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
from Rt_mplCanvas import Rt_CanvasWidget
from SC_mplCanvas import SC_CanvasWidget
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_SC_APP(object):
def setupUi(self, SC_APP):
SC_APP.setObjectName(_fromUtf8("SC_APP"))
SC_APP.resize(800, 600)
SC_APP.setMinimumSize(QtCore.QSize(800, 600))
SC_APP.setMaximumSize(QtCore.QSize(800, 600))
font = QtGui.QFont()
font.setPointSize(12)
SC_APP.setFont(font)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icon/icons/lmd.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
SC_APP.setWindowIcon(icon)
self.verticalLayout_13 = QtGui.QVBoxLayout(SC_APP)
self.verticalLayout_13.setObjectName(_fromUtf8("verticalLayout_13"))
self.verticalLayout_12 = QtGui.QVBoxLayout()
self.verticalLayout_12.setObjectName(_fromUtf8("verticalLayout_12"))
self.horizontalLayout_15 = QtGui.QHBoxLayout()
self.horizontalLayout_15.setObjectName(_fromUtf8("horizontalLayout_15"))
self.verticalLayout_10 = QtGui.QVBoxLayout()
self.verticalLayout_10.setObjectName(_fromUtf8("verticalLayout_10"))
self.SC_MPLS = QtGui.QStackedWidget(SC_APP)
self.SC_MPLS.setMinimumSize(QtCore.QSize(480, 320))
self.SC_MPLS.setMaximumSize(QtCore.QSize(480, 320))
font = QtGui.QFont()
font.setPointSize(12)
self.SC_MPLS.setFont(font)
self.SC_MPLS.setObjectName(_fromUtf8("SC_MPLS"))
self.Rt_MPL = Rt_CanvasWidget()
self.Rt_MPL.setObjectName(_fromUtf8("Rt_MPL"))
self.SC_MPLS.addWidget(self.Rt_MPL)
self.SC_MPL = SC_CanvasWidget()
self.SC_MPL.setObjectName(_fromUtf8("SC_MPL"))
self.SC_MPLS.addWidget(self.SC_MPL)
self.verticalLayout_10.addWidget(self.SC_MPLS)
self.log_state = QtGui.QCheckBox(SC_APP)
self.log_state.setObjectName(_fromUtf8("log_state"))
self.verticalLayout_10.addWidget(self.log_state)
self.groupBox_5 = QtGui.QGroupBox(SC_APP)
self.groupBox_5.setObjectName(_fromUtf8("groupBox_5"))
self.verticalLayout_8 = QtGui.QVBoxLayout(self.groupBox_5)
self.verticalLayout_8.setObjectName(_fromUtf8("verticalLayout_8"))
self.verticalLayout_7 = QtGui.QVBoxLayout()
self.verticalLayout_7.setObjectName(_fromUtf8("verticalLayout_7"))
self.horizontalLayout_19 = QtGui.QHBoxLayout()
self.horizontalLayout_19.setObjectName(_fromUtf8("horizontalLayout_19"))
self.horizontalLayout_12 = QtGui.QHBoxLayout()
self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12"))
self.label_18 = QtGui.QLabel(self.groupBox_5)
self.label_18.setMinimumSize(QtCore.QSize(64, 32))
self.label_18.setMaximumSize(QtCore.QSize(64, 32))
self.label_18.setObjectName(_fromUtf8("label_18"))
self.horizontalLayout_12.addWidget(self.label_18)
self.run_time = QtGui.QLineEdit(self.groupBox_5)
self.run_time.setMinimumSize(QtCore.QSize(113, 22))
self.run_time.setMaximumSize(QtCore.QSize(113, 22))
self.run_time.setReadOnly(True)
self.run_time.setObjectName(_fromUtf8("run_time"))
self.horizontalLayout_12.addWidget(self.run_time)
self.label_5 = QtGui.QLabel(self.groupBox_5)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.horizontalLayout_12.addWidget(self.label_5)
self.horizontalLayout_19.addLayout(self.horizontalLayout_12)
self.horizontalLayout_18 = QtGui.QHBoxLayout()
self.horizontalLayout_18.setObjectName(_fromUtf8("horizontalLayout_18"))
self.label_19 = QtGui.QLabel(self.groupBox_5)
self.label_19.setMinimumSize(QtCore.QSize(56, 32))
self.label_19.setMaximumSize(QtCore.QSize(56, 32))
self.label_19.setObjectName(_fromUtf8("label_19"))
self.horizontalLayout_18.addWidget(self.label_19)
self.flow1 = QtGui.QLineEdit(self.groupBox_5)
self.flow1.setMinimumSize(QtCore.QSize(113, 22))
self.flow1.setMaximumSize(QtCore.QSize(113, 22))
# self.flow1.setReadOnly(True)
self.flow1.setObjectName(_fromUtf8("flow1"))
self.horizontalLayout_18.addWidget(self.flow1)
self.label_7 = QtGui.QLabel(self.groupBox_5)
self.label_7.setMinimumSize(QtCore.QSize(48, 32))
self.label_7.setMaximumSize(QtCore.QSize(48, 32))
self.label_7.setObjectName(_fromUtf8("label_7"))
self.horizontalLayout_18.addWidget(self.label_7)
self.f1_open = QtGui.QCheckBox(self.groupBox_5)
self.f1_open.setText(_fromUtf8(""))
self.f1_open.setObjectName(_fromUtf8("f1_open"))
self.horizontalLayout_18.addWidget(self.f1_open)
self.horizontalLayout_19.addLayout(self.horizontalLayout_18)
self.verticalLayout_7.addLayout(self.horizontalLayout_19)
self.horizontalLayout_20 = QtGui.QHBoxLayout()
self.horizontalLayout_20.setObjectName(_fromUtf8("horizontalLayout_20"))
self.horizontalLayout_13 = QtGui.QHBoxLayout()
self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13"))
self.label_20 = QtGui.QLabel(self.groupBox_5)
self.label_20.setMinimumSize(QtCore.QSize(64, 32))
self.label_20.setMaximumSize(QtCore.QSize(64, 32))
self.label_20.setObjectName(_fromUtf8("label_20"))
self.horizontalLayout_13.addWidget(self.label_20)
self.now_R = QtGui.QLineEdit(self.groupBox_5)
self.now_R.setMinimumSize(QtCore.QSize(113, 22))
self.now_R.setMaximumSize(QtCore.QSize(113, 22))
self.now_R.setReadOnly(True)
self.now_R.setObjectName(_fromUtf8("now_R"))
self.horizontalLayout_13.addWidget(self.now_R)
self.label_6 = QtGui.QLabel(self.groupBox_5)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.horizontalLayout_13.addWidget(self.label_6)
self.horizontalLayout_20.addLayout(self.horizontalLayout_13)
self.horizontalLayout_17 = QtGui.QHBoxLayout()
self.horizontalLayout_17.setObjectName(_fromUtf8("horizontalLayout_17"))
self.label_26 = QtGui.QLabel(self.groupBox_5)
self.label_26.setMinimumSize(QtCore.QSize(56, 32))
self.label_26.setMaximumSize(QtCore.QSize(56, 32))
self.label_26.setObjectName(_fromUtf8("label_26"))
self.horizontalLayout_17.addWidget(self.label_26)
self.flow2 = QtGui.QLineEdit(self.groupBox_5)
self.flow2.setMinimumSize(QtCore.QSize(113, 22))
self.flow2.setMaximumSize(QtCore.QSize(113, 22))
# self.flow2.setReadOnly(True)
self.flow2.setObjectName(_fromUtf8("flow2"))
self.horizontalLayout_17.addWidget(self.flow2)
self.label_8 = QtGui.QLabel(self.groupBox_5)
self.label_8.setMinimumSize(QtCore.QSize(48, 32))
self.label_8.setMaximumSize(QtCore.QSize(48, 32))
self.label_8.setObjectName(_fromUtf8("label_8"))
self.horizontalLayout_17.addWidget(self.label_8)
self.f2_open = QtGui.QCheckBox(self.groupBox_5)
self.f2_open.setText(_fromUtf8(""))
self.f2_open.setObjectName(_fromUtf8("f2_open"))
self.horizontalLayout_17.addWidget(self.f2_open)
self.horizontalLayout_20.addLayout(self.horizontalLayout_17)
self.verticalLayout_7.addLayout(self.horizontalLayout_20)
self.horizontalLayout_21 = QtGui.QHBoxLayout()
self.horizontalLayout_21.setObjectName(_fromUtf8("horizontalLayout_21"))
self.horizontalLayout_14 = QtGui.QHBoxLayout()
self.horizontalLayout_14.setObjectName(_fromUtf8("horizontalLayout_14"))
self.label_27 = QtGui.QLabel(self.groupBox_5)
self.label_27.setMinimumSize(QtCore.QSize(64, 32))
self.label_27.setMaximumSize(QtCore.QSize(64, 32))
self.label_27.setObjectName(_fromUtf8("label_27"))
self.horizontalLayout_14.addWidget(self.label_27)
self.now_T = QtGui.QLineEdit(self.groupBox_5)
self.now_T.setMinimumSize(QtCore.QSize(113, 22))
self.now_T.setMaximumSize(QtCore.QSize(113, 22))
self.now_T.setReadOnly(True)
self.now_T.setObjectName(_fromUtf8("now_T"))
self.horizontalLayout_14.addWidget(self.now_T)
self.label_4 = QtGui.QLabel(self.groupBox_5)
self.label_4.setMinimumSize(QtCore.QSize(0, 16))
self.label_4.setMaximumSize(QtCore.QSize(32, 16))
self.label_4.setObjectName(_fromUtf8("label_4"))
self.horizontalLayout_14.addWidget(self.label_4)
self.horizontalLayout_21.addLayout(self.horizontalLayout_14)
self.horizontalLayout_16 = QtGui.QHBoxLayout()
self.horizontalLayout_16.setObjectName(_fromUtf8("horizontalLayout_16"))
self.label_28 = QtGui.QLabel(self.groupBox_5)
self.label_28.setMinimumSize(QtCore.QSize(56, 32))
self.label_28.setMaximumSize(QtCore.QSize(56, 32))
self.label_28.setObjectName(_fromUtf8("label_28"))
self.horizontalLayout_16.addWidget(self.label_28)
self.flow3 = QtGui.QLineEdit(self.groupBox_5)
self.flow3.setMinimumSize(QtCore.QSize(113, 22))
self.flow3.setMaximumSize(QtCore.QSize(113, 22))
# self.flow3.setReadOnly(True)
self.flow3.setObjectName(_fromUtf8("flow3"))
self.horizontalLayout_16.addWidget(self.flow3)
self.label_9 = QtGui.QLabel(self.groupBox_5)
self.label_9.setMinimumSize(QtCore.QSize(48, 32))
self.label_9.setMaximumSize(QtCore.QSize(48, 32))
self.label_9.setObjectName(_fromUtf8("label_9"))
self.horizontalLayout_16.addWidget(self.label_9)
self.f3_open = QtGui.QCheckBox(self.groupBox_5)
self.f3_open.setText(_fromUtf8(""))
self.f3_open.setObjectName(_fromUtf8("f3_open"))
self.horizontalLayout_16.addWidget(self.f3_open)
self.horizontalLayout_21.addLayout(self.horizontalLayout_16)
self.verticalLayout_7.addLayout(self.horizontalLayout_21)
self.verticalLayout_8.addLayout(self.verticalLayout_7)
self.verticalLayout_10.addWidget(self.groupBox_5)
self.horizontalLayout_15.addLayout(self.verticalLayout_10)
self.verticalLayout_5 = QtGui.QVBoxLayout()
self.verticalLayout_5.setSpacing(20)
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.groupBox_15 = QtGui.QGroupBox(SC_APP)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_15.sizePolicy().hasHeightForWidth())
self.groupBox_15.setSizePolicy(sizePolicy)
self.groupBox_15.setMinimumSize(QtCore.QSize(281, 120))
self.groupBox_15.setMaximumSize(QtCore.QSize(281, 120))
font = QtGui.QFont()
font.setPointSize(12)
self.groupBox_15.setFont(font)
self.groupBox_15.setObjectName(_fromUtf8("groupBox_15"))
self.verticalLayout_9 = QtGui.QVBoxLayout(self.groupBox_15)
self.verticalLayout_9.setSpacing(10)
self.verticalLayout_9.setContentsMargins(10, 0, 10, 0)
self.verticalLayout_9.setObjectName(_fromUtf8("verticalLayout_9"))
self.verticalLayout_4 = QtGui.QVBoxLayout()
self.verticalLayout_4.setSpacing(10)
self.verticalLayout_4.setMargin(0)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.horizontalLayout_32 = QtGui.QHBoxLayout()
self.horizontalLayout_32.setObjectName(_fromUtf8("horizontalLayout_32"))
self.label_16 = QtGui.QLabel(self.groupBox_15)
self.label_16.setObjectName(_fromUtf8("label_16"))
self.horizontalLayout_32.addWidget(self.label_16)
self.sample_id = QtGui.QLineEdit(self.groupBox_15)
self.sample_id.setObjectName(_fromUtf8("sample_id"))
self.horizontalLayout_32.addWidget(self.sample_id)
self.verticalLayout_4.addLayout(self.horizontalLayout_32)
self.horizontalLayout_33 = QtGui.QHBoxLayout()
self.horizontalLayout_33.setObjectName(_fromUtf8("horizontalLayout_33"))
self.label_21 = QtGui.QLabel(self.groupBox_15)
self.label_21.setObjectName(_fromUtf8("label_21"))
self.horizontalLayout_33.addWidget(self.label_21)
self.save_path = QtGui.QLineEdit(self.groupBox_15)
self.save_path.setObjectName(_fromUtf8("save_path"))
self.horizontalLayout_33.addWidget(self.save_path)
self.btn_savepath = QtGui.QPushButton(self.groupBox_15)
self.btn_savepath.setText(_fromUtf8(""))
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/icon/icons/folder.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btn_savepath.setIcon(icon1)
self.btn_savepath.setIconSize(QtCore.QSize(16, 16))
self.btn_savepath.setObjectName(_fromUtf8("btn_savepath"))
self.horizontalLayout_33.addWidget(self.btn_savepath)
self.verticalLayout_4.addLayout(self.horizontalLayout_33)
self.horizontalLayout_8 = QtGui.QHBoxLayout()
self.horizontalLayout_8.setSpacing(10)
self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8"))
self.horizontalLayout_35 = QtGui.QHBoxLayout()
self.horizontalLayout_35.setObjectName(_fromUtf8("horizontalLayout_35"))
self.label_24 = QtGui.QLabel(self.groupBox_15)
self.label_24.setMinimumSize(QtCore.QSize(36, 24))
self.label_24.setMaximumSize(QtCore.QSize(36, 24))
self.label_24.setObjectName(_fromUtf8("label_24"))
self.horizontalLayout_35.addWidget(self.label_24)
self.sample_area = QtGui.QLineEdit(self.groupBox_15)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.sample_area.sizePolicy().hasHeightForWidth())
self.sample_area.setSizePolicy(sizePolicy)
self.sample_area.setMinimumSize(QtCore.QSize(40, 22))
self.sample_area.setMaximumSize(QtCore.QSize(40, 22))
self.sample_area.setText(_fromUtf8(""))
self.sample_area.setObjectName(_fromUtf8("sample_area"))
self.horizontalLayout_35.addWidget(self.sample_area)
self.label_25 = QtGui.QLabel(self.groupBox_15)
self.label_25.setMinimumSize(QtCore.QSize(32, 29))
self.label_25.setMaximumSize(QtCore.QSize(32, 29))
font = QtGui.QFont()
font.setPointSize(12)
self.label_25.setFont(font)
self.label_25.setObjectName(_fromUtf8("label_25"))
self.horizontalLayout_35.addWidget(self.label_25)
self.horizontalLayout_8.addLayout(self.horizontalLayout_35)
self.horizontalLayout_34 = QtGui.QHBoxLayout()
self.horizontalLayout_34.setObjectName(_fromUtf8("horizontalLayout_34"))
self.label_22 = QtGui.QLabel(self.groupBox_15)
self.label_22.setMinimumSize(QtCore.QSize(36, 29))
self.label_22.setMaximumSize(QtCore.QSize(36, 29))
self.label_22.setObjectName(_fromUtf8("label_22"))
self.horizontalLayout_34.addWidget(self.label_22)
self.sample_height = QtGui.QLineEdit(self.groupBox_15)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.sample_height.sizePolicy().hasHeightForWidth())
self.sample_height.setSizePolicy(sizePolicy)
self.sample_height.setMinimumSize(QtCore.QSize(40, 22))
self.sample_height.setMaximumSize(QtCore.QSize(40, 22))
self.sample_height.setText(_fromUtf8(""))
self.sample_height.setObjectName(_fromUtf8("sample_height"))
self.horizontalLayout_34.addWidget(self.sample_height)
self.label_23 = QtGui.QLabel(self.groupBox_15)
self.label_23.setMinimumSize(QtCore.QSize(23, 29))
self.label_23.setMaximumSize(QtCore.QSize(23, 29))
font = QtGui.QFont()
font.setPointSize(12)
self.label_23.setFont(font)
self.label_23.setObjectName(_fromUtf8("label_23"))
self.horizontalLayout_34.addWidget(self.label_23)
self.horizontalLayout_8.addLayout(self.horizontalLayout_34)
self.verticalLayout_4.addLayout(self.horizontalLayout_8)
self.verticalLayout_9.addLayout(self.verticalLayout_4)
self.verticalLayout_5.addWidget(self.groupBox_15)
self.groupBox_2 = QtGui.QGroupBox(SC_APP)
self.groupBox_2.setMinimumSize(QtCore.QSize(281, 131))
self.groupBox_2.setMaximumSize(QtCore.QSize(281, 131))
font = QtGui.QFont()
font.setPointSize(12)
self.groupBox_2.setFont(font)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.verticalLayout_11 = QtGui.QVBoxLayout(self.groupBox_2)
self.verticalLayout_11.setSpacing(10)
self.verticalLayout_11.setMargin(10)
self.verticalLayout_11.setObjectName(_fromUtf8("verticalLayout_11"))
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setSpacing(20)
self.verticalLayout.setContentsMargins(0, 10, 0, 10)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setSpacing(20)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.INST_SET = QtGui.QPushButton(self.groupBox_2)
self.INST_SET.setObjectName(_fromUtf8("INST_SET"))
self.horizontalLayout.addWidget(self.INST_SET)
self.AI518P_SET = QtGui.QPushButton(self.groupBox_2)
self.AI518P_SET.setObjectName(_fromUtf8("AI518P_SET"))
self.horizontalLayout.addWidget(self.AI518P_SET)
self.verticalLayout.addLayout(self.horizontalLayout)
self.horizontalLayout_9 = QtGui.QHBoxLayout()
self.horizontalLayout_9.setSpacing(20)
self.horizontalLayout_9.setObjectName(_fromUtf8("horizontalLayout_9"))
self.GAS_SET = QtGui.QPushButton(self.groupBox_2)
self.GAS_SET.setObjectName(_fromUtf8("GAS_SET"))
self.horizontalLayout_9.addWidget(self.GAS_SET)
self.COORD_SET = QtGui.QPushButton(self.groupBox_2)
self.COORD_SET.setObjectName(_fromUtf8("COORD_SET"))
self.horizontalLayout_9.addWidget(self.COORD_SET)
self.verticalLayout.addLayout(self.horizontalLayout_9)
self.verticalLayout_11.addLayout(self.verticalLayout)
self.verticalLayout_5.addWidget(self.groupBox_2)
self.groupBox_4 = QtGui.QGroupBox(SC_APP)
self.groupBox_4.setMinimumSize(QtCore.QSize(281, 111))
self.groupBox_4.setMaximumSize(QtCore.QSize(281, 111))
font = QtGui.QFont()
font.setPointSize(12)
self.groupBox_4.setFont(font)
self.groupBox_4.setObjectName(_fromUtf8("groupBox_4"))
self.verticalLayout_6 = QtGui.QVBoxLayout(self.groupBox_4)
self.verticalLayout_6.setSpacing(0)
self.verticalLayout_6.setMargin(10)
self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
self.verticalLayout_3 = QtGui.QVBoxLayout()
self.verticalLayout_3.setSpacing(10)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.Rt_Curve = QtGui.QRadioButton(self.groupBox_4)
self.Rt_Curve.setChecked(True)
self.Rt_Curve.setObjectName(_fromUtf8("Rt_Curve"))
self.verticalLayout_3.addWidget(self.Rt_Curve)
self.SC_Curve = QtGui.QRadioButton(self.groupBox_4)
self.SC_Curve.setObjectName(_fromUtf8("SC_Curve"))
self.verticalLayout_3.addWidget(self.SC_Curve)
self.verticalLayout_6.addLayout(self.verticalLayout_3)
self.verticalLayout_5.addWidget(self.groupBox_4)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.SC_start = QtGui.QPushButton(SC_APP)
font = QtGui.QFont()
font.setPointSize(12)
self.SC_start.setFont(font)
self.SC_start.setObjectName(_fromUtf8("SC_start"))
self.horizontalLayout_2.addWidget(self.SC_start)
self.SC_stop = QtGui.QPushButton(SC_APP)
font = QtGui.QFont()
font.setPointSize(12)
self.SC_stop.setFont(font)
self.SC_stop.setObjectName(_fromUtf8("SC_stop"))
self.horizontalLayout_2.addWidget(self.SC_stop)
self.SC_save = QtGui.QPushButton(SC_APP)
font = QtGui.QFont()
font.setPointSize(12)
self.SC_save.setFont(font)
self.SC_save.setObjectName(_fromUtf8("SC_save"))
self.horizontalLayout_2.addWidget(self.SC_save)
self.verticalLayout_5.addLayout(self.horizontalLayout_2)
self.horizontalLayout_15.addLayout(self.verticalLayout_5)
self.verticalLayout_12.addLayout(self.horizontalLayout_15)
self.groupBox_3 = QtGui.QGroupBox(SC_APP)
self.groupBox_3.setMinimumSize(QtCore.QSize(780, 61))
self.groupBox_3.setMaximumSize(QtCore.QSize(780, 61))
font = QtGui.QFont()
font.setPointSize(12)
self.groupBox_3.setFont(font)
self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.groupBox_3)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.horizontalLayout_11 = QtGui.QHBoxLayout()
self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setSpacing(0)
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.inst_sta = QtGui.QLabel(self.groupBox_3)
self.inst_sta.setText(_fromUtf8(""))
self.inst_sta.setPixmap(QtGui.QPixmap(_fromUtf8(":/icon/icons/noyb.png")))
self.inst_sta.setObjectName(_fromUtf8("inst_sta"))
self.horizontalLayout_4.addWidget(self.inst_sta)
self.pcb_sta = QtGui.QLabel(self.groupBox_3)
self.pcb_sta.setText(_fromUtf8(""))
self.pcb_sta.setPixmap(QtGui.QPixmap(_fromUtf8(":/icon/icons/nodlb.png")))
self.pcb_sta.setObjectName(_fromUtf8("pcb_sta"))
self.horizontalLayout_4.addWidget(self.pcb_sta)
self.ai518_sta = QtGui.QLabel(self.groupBox_3)
self.ai518_sta.setText(_fromUtf8(""))
self.ai518_sta.setPixmap(QtGui.QPixmap(_fromUtf8(":/icon/icons/nowky.png")))
self.ai518_sta.setObjectName(_fromUtf8("ai518_sta"))
self.horizontalLayout_4.addWidget(self.ai518_sta)
self.horizontalLayout_11.addLayout(self.horizontalLayout_4)
self.sys_state = QtGui.QLineEdit(self.groupBox_3)
self.sys_state.setEnabled(False)
self.sys_state.setObjectName(_fromUtf8("sys_state"))
self.horizontalLayout_11.addWidget(self.sys_state)
self.horizontalLayout_10 = QtGui.QHBoxLayout()
self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10"))
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setSpacing(0)
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.label_14 = QtGui.QLabel(self.groupBox_3)
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_14.setFont(font)
self.label_14.setObjectName(_fromUtf8("label_14"))
self.horizontalLayout_5.addWidget(self.label_14)
self.valve1_sta = QtGui.QLabel(self.groupBox_3)
self.valve1_sta.setText(_fromUtf8(""))
self.valve1_sta.setPixmap(QtGui.QPixmap(_fromUtf8(":/icon/icons/guan.png")))
self.valve1_sta.setObjectName(_fromUtf8("valve1_sta"))
self.horizontalLayout_5.addWidget(self.valve1_sta)
self.horizontalLayout_10.addLayout(self.horizontalLayout_5)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setSpacing(0)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.label_13 = QtGui.QLabel(self.groupBox_3)
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_13.setFont(font)
self.label_13.setObjectName(_fromUtf8("label_13"))
self.horizontalLayout_3.addWidget(self.label_13)
self.valve2_sta = QtGui.QLabel(self.groupBox_3)
self.valve2_sta.setText(_fromUtf8(""))
self.valve2_sta.setPixmap(QtGui.QPixmap(_fromUtf8(":/icon/icons/guan.png")))
self.valve2_sta.setObjectName(_fromUtf8("valve2_sta"))
self.horizontalLayout_3.addWidget(self.valve2_sta)
self.horizontalLayout_10.addLayout(self.horizontalLayout_3)
self.horizontalLayout_6 = QtGui.QHBoxLayout()
self.horizontalLayout_6.setSpacing(0)
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
self.label_15 = QtGui.QLabel(self.groupBox_3)
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_15.setFont(font)
self.label_15.setObjectName(_fromUtf8("label_15"))
self.horizontalLayout_6.addWidget(self.label_15)
self.valve3_sta = QtGui.QLabel(self.groupBox_3)
self.valve3_sta.setText(_fromUtf8(""))
self.valve3_sta.setPixmap(QtGui.QPixmap(_fromUtf8(":/icon/icons/guan.png")))
self.valve3_sta.setObjectName(_fromUtf8("valve3_sta"))
self.horizontalLayout_6.addWidget(self.valve3_sta)
self.horizontalLayout_10.addLayout(self.horizontalLayout_6)
self.horizontalLayout_7 = QtGui.QHBoxLayout()
self.horizontalLayout_7.setSpacing(0)
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.label_17 = QtGui.QLabel(self.groupBox_3)
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_17.setFont(font)
self.label_17.setObjectName(_fromUtf8("label_17"))
self.horizontalLayout_7.addWidget(self.label_17)
self.clean_sta = QtGui.QLabel(self.groupBox_3)
self.clean_sta.setText(_fromUtf8(""))
self.clean_sta.setPixmap(QtGui.QPixmap(_fromUtf8(":/icon/icons/guan.png")))
self.clean_sta.setObjectName(_fromUtf8("clean_sta"))
self.horizontalLayout_7.addWidget(self.clean_sta)
self.horizontalLayout_10.addLayout(self.horizontalLayout_7)
self.horizontalLayout_11.addLayout(self.horizontalLayout_10)
self.label = QtGui.QLabel(self.groupBox_3)
self.label.setText(_fromUtf8(""))
self.label.setPixmap(QtGui.QPixmap(_fromUtf8(":/icon/icons/partulab.png")))
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout_11.addWidget(self.label)
self.verticalLayout_2.addLayout(self.horizontalLayout_11)
self.verticalLayout_12.addWidget(self.groupBox_3)
self.verticalLayout_13.addLayout(self.verticalLayout_12)
self.AI518P_SET.setEnabled(False)
self.retranslateUi(SC_APP)
self.SC_MPLS.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(SC_APP)
def retranslateUi(self, SC_APP):
        SC_APP.setWindowTitle(_translate("SC_APP", "灵敏度-浓度谱", None))  # "Sensitivity-concentration spectrum"
        self.log_state.setText(_translate("SC_APP", "log", None))
        self.groupBox_5.setTitle(_translate("SC_APP", "测量参数", None))  # "Measurement parameters"
        self.label_18.setText(_translate("SC_APP", "测量时间", None))  # "Measurement time"
        self.label_5.setText(_translate("SC_APP", "S", None))
        self.label_19.setText(_translate("SC_APP", "流量计1", None))  # "Flow meter 1"
        self.label_7.setText(_translate("SC_APP", "mL/min", None))
        self.label_20.setText(_translate("SC_APP", "当前阻值", None))  # "Current resistance"
        self.label_6.setText(_translate("SC_APP", "Ω", None))
        self.label_26.setText(_translate("SC_APP", "流量计2", None))  # "Flow meter 2"
        self.label_8.setText(_translate("SC_APP", "mL/min", None))
        self.label_27.setText(_translate("SC_APP", "当前温度", None))  # "Current temperature"
        self.label_4.setText(_translate("SC_APP", "℃", None))
        self.label_28.setText(_translate("SC_APP", "流量计3", None))  # "Flow meter 3"
        self.label_9.setText(_translate("SC_APP", "mL/min", None))
        self.groupBox_15.setTitle(_translate("SC_APP", "样品信息", None))  # "Sample information"
        self.label_16.setText(_translate("SC_APP", "样品标识", None))  # "Sample ID"
        self.sample_id.setText(_translate("SC_APP", "SC_test", None))
        self.label_21.setText(_translate("SC_APP", "保存路径", None))  # "Save path"
        self.save_path.setText(_translate("SC_APP", "D:/", None))
        self.label_24.setText(_translate("SC_APP", "面积", None))  # "Area"
        self.label_25.setText(_translate("SC_APP", "mm^2", None))
        self.label_22.setText(_translate("SC_APP", "厚度", None))  # "Thickness"
        self.label_23.setText(_translate("SC_APP", "mm", None))
        self.groupBox_2.setTitle(_translate("SC_APP", "参数设置", None))  # "Parameter settings"
        self.INST_SET.setText(_translate("SC_APP", "仪器设置", None))  # "Instrument setup"
        self.AI518P_SET.setText(_translate("SC_APP", "温度设置", None))  # "Temperature setup"
        self.GAS_SET.setText(_translate("SC_APP", "气压控制", None))  # "Gas pressure control"
        self.COORD_SET.setText(_translate("SC_APP", "XY坐标设置", None))  # "XY coordinate setup"
        self.groupBox_4.setTitle(_translate("SC_APP", "曲线选择", None))  # "Curve selection"
        self.Rt_Curve.setText(_translate("SC_APP", "R-t曲线", None))  # "R-t curve"
        self.SC_Curve.setText(_translate("SC_APP", "S-C曲线", None))  # "S-C curve"
        self.SC_start.setText(_translate("SC_APP", "开始测量", None))  # "Start measurement"
        self.SC_stop.setText(_translate("SC_APP", "停止测量", None))  # "Stop measurement"
        self.SC_save.setText(_translate("SC_APP", "保存数据", None))  # "Save data"
        self.groupBox_3.setTitle(_translate("SC_APP", "当前状态", None))  # "Current status"
        self.label_14.setText(_translate("SC_APP", "阀门1", None))  # "Valve 1"
        self.label_13.setText(_translate("SC_APP", "阀门2", None))  # "Valve 2"
        self.label_15.setText(_translate("SC_APP", "阀门3", None))  # "Valve 3"
        self.label_17.setText(_translate("SC_APP", "清洗阀", None))  # "Purge valve"
import mypic_rc
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
SC_APP = QtGui.QDialog()
ui = Ui_SC_APP()
ui.setupUi(SC_APP)
SC_APP.show()
sys.exit(app.exec_())
| cygnushan/measurement | SC_spectrum/Ui_SC_main.py | Python | mit | 31,084 | 0.001263 |
# -*- coding: utf-8 -*-
"""Redundancy.
---
layout: post
source: David Foster Wallace
source_url: http://bit.ly/1c85lgR
title: Redundancy
date: 2014-06-10 12:31:19
categories: writing
---
Points out use of redundant phrases.
"""
from proselint.tools import memoize, preferred_forms_check
@memoize
def check(text):
"""Suggest the preferred forms."""
err = "wallace.redundancy"
msg = "Redundancy. Use '{}' instead of '{}'."
redundancies = [
["rectangular", ["rectangular in shape"]],
["audible", ["audible to the ear"]],
]
return preferred_forms_check(text, redundancies, err, msg)
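# Illustrative usage (not part of the original check module): a minimal sketch,
# assuming proselint is installed; check() simply returns whatever
# preferred_forms_check() reports for the sample text, one entry per
# redundancy found.
if __name__ == "__main__":
    sample = "The room was rectangular in shape and the hum was audible to the ear."
    for error in check(sample):
        print(error)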
| jstewmon/proselint | proselint/checks/wallace/redundancy.py | Python | bsd-3-clause | 659 | 0 |
from pandac.PandaModules import *
from toontown.toonbase.ToonBaseGlobal import *
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import StateData
from direct.showbase.PythonUtil import PriorityCallbacks
from toontown.safezone import PublicWalk
from toontown.launcher import DownloadForceAcknowledge
import TrialerForceAcknowledge
import ZoneUtil
from toontown.friends import FriendsListManager
from toontown.toonbase import ToontownGlobals
from toontown.toon.Toon import teleportDebug
from toontown.estate import HouseGlobals
from toontown.toonbase import TTLocalizer
from otp.otpbase import OTPLocalizer
from otp.avatar import Emote
from otp.avatar.Avatar import teleportNotify
from direct.task import Task
import QuietZoneState
from toontown.distributed import ToontownDistrictStats
class Place(StateData.StateData, FriendsListManager.FriendsListManager):
notify = DirectNotifyGlobal.directNotify.newCategory('Place')
def __init__(self, loader, doneEvent):
StateData.StateData.__init__(self, doneEvent)
FriendsListManager.FriendsListManager.__init__(self)
self.loader = loader
self.dfaDoneEvent = 'dfaDoneEvent'
self.trialerFADoneEvent = 'trialerFADoneEvent'
self.zoneId = None
self.trialerFA = None
self._tiToken = None
self._leftQuietZoneLocalCallbacks = PriorityCallbacks()
self._leftQuietZoneSubframeCall = None
self._setZoneCompleteLocalCallbacks = PriorityCallbacks()
self._setZoneCompleteSubframeCall = None
return
def load(self):
StateData.StateData.load(self)
FriendsListManager.FriendsListManager.load(self)
self.walkDoneEvent = 'walkDone'
self.walkStateData = PublicWalk.PublicWalk(self.fsm, self.walkDoneEvent)
self.walkStateData.load()
self._tempFSM = self.fsm
def unload(self):
StateData.StateData.unload(self)
FriendsListManager.FriendsListManager.unload(self)
self.notify.info('Unloading Place (%s). Fsm in %s' % (self.zoneId, self._tempFSM.getCurrentState().getName()))
if self._leftQuietZoneSubframeCall:
self._leftQuietZoneSubframeCall.cleanup()
self._leftQuietZoneSubframeCall = None
if self._setZoneCompleteSubframeCall:
self._setZoneCompleteSubframeCall.cleanup()
self._setZoneCompleteSubframeCall = None
self._leftQuietZoneLocalCallbacks = None
self._setZoneCompleteLocalCallbacks = None
del self._tempFSM
taskMgr.remove('goHomeFailed')
del self.walkDoneEvent
self.walkStateData.unload()
del self.walkStateData
del self.loader
if self.trialerFA:
self.trialerFA.exit()
del self.trialerFA
return
def _getQZState(self):
if hasattr(base, 'cr') and hasattr(base.cr, 'playGame'):
if hasattr(base.cr.playGame, 'quietZoneStateData') and base.cr.playGame.quietZoneStateData:
return base.cr.playGame.quietZoneStateData
return None
def addLeftQuietZoneCallback(self, callback, priority = None):
qzsd = self._getQZState()
if qzsd:
return qzsd.addLeftQuietZoneCallback(callback, priority)
else:
token = self._leftQuietZoneLocalCallbacks.add(callback, priority=priority)
if not self._leftQuietZoneSubframeCall:
self._leftQuietZoneSubframeCall = SubframeCall(self._doLeftQuietZoneCallbacks, taskMgr.getCurrentTask().getPriority() - 1)
return token
def removeLeftQuietZoneCallback(self, token):
if token is not None:
if token in self._leftQuietZoneLocalCallbacks:
self._leftQuietZoneLocalCallbacks.remove(token)
qzsd = self._getQZState()
if qzsd:
qzsd.removeLeftQuietZoneCallback(token)
return
def _doLeftQuietZoneCallbacks(self):
self._leftQuietZoneLocalCallbacks()
self._leftQuietZoneLocalCallbacks.clear()
self._leftQuietZoneSubframeCall = None
return
def addSetZoneCompleteCallback(self, callback, priority = None):
qzsd = self._getQZState()
if qzsd:
return qzsd.addSetZoneCompleteCallback(callback, priority)
else:
token = self._setZoneCompleteLocalCallbacks.add(callback, priority=priority)
if not self._setZoneCompleteSubframeCall:
self._setZoneCompleteSubframeCall = SubframeCall(self._doSetZoneCompleteLocalCallbacks, taskMgr.getCurrentTask().getPriority() - 1)
return token
def removeSetZoneCompleteCallback(self, token):
if token is not None:
if any(token==x[1] for x in self._setZoneCompleteLocalCallbacks._callbacks):
self._setZoneCompleteLocalCallbacks.remove(token)
qzsd = self._getQZState()
if qzsd:
qzsd.removeSetZoneCompleteCallback(token)
return
def _doSetZoneCompleteLocalCallbacks(self):
self._setZoneCompleteSubframeCall = None
localCallbacks = self._setZoneCompleteLocalCallbacks
self._setZoneCompleteLocalCallbacks()
localCallbacks.clear()
return
def setState(self, state):
if hasattr(self, 'fsm'):
curState = self.fsm.getName()
if state == 'pet' or curState == 'pet':
self.preserveFriendsList()
self.fsm.request(state)
def getState(self):
if hasattr(self, 'fsm'):
curState = self.fsm.getCurrentState().getName()
return curState
def getZoneId(self):
return self.zoneId
def getTaskZoneId(self):
return self.getZoneId()
def isPeriodTimerEffective(self):
return 1
def handleTeleportQuery(self, fromAvatar, toAvatar):
if base.config.GetBool('want-tptrack', False):
if toAvatar == localAvatar:
toAvatar.doTeleportResponse(fromAvatar, toAvatar, toAvatar.doId, 1, toAvatar.defaultShard, base.cr.playGame.getPlaceId(), self.getZoneId(), fromAvatar.doId)
else:
                self.notify.warning('handleTeleportQuery toAvatar.doId %s != localAvatar.doId %s' % (toAvatar.doId, localAvatar.doId))
else:
fromAvatar.d_teleportResponse(toAvatar.doId, 1, toAvatar.defaultShard, base.cr.playGame.getPlaceId(), self.getZoneId())
def enablePeriodTimer(self):
if self.isPeriodTimerEffective():
if base.cr.periodTimerExpired:
taskMgr.doMethodLater(5, self.redoPeriodTimer, 'redoPeriodTimer')
self.accept('periodTimerExpired', self.periodTimerExpired)
def disablePeriodTimer(self):
taskMgr.remove('redoPeriodTimer')
self.ignore('periodTimerExpired')
def redoPeriodTimer(self, task):
messenger.send('periodTimerExpired')
return Task.done
def periodTimerExpired(self):
self.fsm.request('final')
if base.localAvatar.book.isEntered:
base.localAvatar.book.exit()
base.localAvatar.b_setAnimState('CloseBook', 1, callback=self.__handlePeriodTimerBookClose)
else:
base.localAvatar.b_setAnimState('TeleportOut', 1, self.__handlePeriodTimerExitTeleport)
def exitPeriodTimerExpired(self):
pass
def __handlePeriodTimerBookClose(self):
base.localAvatar.b_setAnimState('TeleportOut', 1, self.__handlePeriodTimerExitTeleport)
def __handlePeriodTimerExitTeleport(self):
base.cr.loginFSM.request('periodTimeout')
def detectedPhoneCollision(self):
self.fsm.request('phone')
def detectedFishingCollision(self):
self.fsm.request('fishing')
def enterStart(self):
pass
def exitStart(self):
pass
def enterFinal(self):
pass
def exitFinal(self):
pass
def enterWalk(self, teleportIn = 0):
self.enterFLM()
self.walkStateData.enter()
if teleportIn == 0:
self.walkStateData.fsm.request('walking')
self.acceptOnce(self.walkDoneEvent, self.handleWalkDone)
if base.cr.productName in ['DisneyOnline-US', 'ES'] and not base.cr.isPaid() and base.localAvatar.tutorialAck:
base.localAvatar.chatMgr.obscure(0, 0)
base.localAvatar.chatMgr.normalButton.show()
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.questPage.acceptOnscreenHooks()
base.localAvatar.invPage.acceptOnscreenHooks()
base.localAvatar.questMap.acceptOnscreenHooks()
self.walkStateData.fsm.request('walking')
self.enablePeriodTimer()
def exitWalk(self):
self.exitFLM()
if base.cr.productName in ['DisneyOnline-US', 'ES'] and not base.cr.isPaid() and base.localAvatar.tutorialAck and not base.cr.whiteListChatEnabled:
base.localAvatar.chatMgr.obscure(1, 0)
self.disablePeriodTimer()
messenger.send('wakeup')
self.walkStateData.exit()
self.ignore(self.walkDoneEvent)
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
if base.cr.playGame.hood != None:
base.cr.playGame.hood.hideTitleText()
base.localAvatar.questPage.hideQuestsOnscreen()
base.localAvatar.questPage.ignoreOnscreenHooks()
base.localAvatar.invPage.ignoreOnscreenHooks()
base.localAvatar.invPage.hideInventoryOnscreen()
base.localAvatar.questMap.hide()
base.localAvatar.questMap.ignoreOnscreenHooks()
return
def handleWalkDone(self, doneStatus):
mode = doneStatus['mode']
if mode == 'StickerBook':
self.last = self.fsm.getCurrentState().getName()
self.fsm.request('stickerBook')
elif mode == 'Options':
self.last = self.fsm.getCurrentState().getName()
self.fsm.request('stickerBook', [base.localAvatar.optionsPage])
elif mode == 'Sit':
self.last = self.fsm.getCurrentState().getName()
self.fsm.request('sit')
else:
Place.notify.error('Invalid mode: %s' % mode)
def enterSit(self):
self.enterFLM()
base.localAvatar.laffMeter.start()
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.b_setAnimState('SitStart', 1)
self.accept('arrow_up', self.fsm.request, extraArgs=['walk'])
def exitSit(self):
self.exitFLM()
base.localAvatar.laffMeter.stop()
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
self.ignore('arrow_up')
def enterDrive(self):
self.enterFLM()
base.localAvatar.laffMeter.start()
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.b_setAnimState('SitStart', 1)
def exitDrive(self):
self.exitFLM()
base.localAvatar.laffMeter.stop()
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
def enterPush(self):
self.enterFLM()
base.localAvatar.laffMeter.start()
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.attachCamera()
base.localAvatar.startUpdateSmartCamera()
base.localAvatar.startPosHprBroadcast()
base.localAvatar.b_setAnimState('Push', 1)
def exitPush(self):
self.exitFLM()
base.localAvatar.laffMeter.stop()
base.localAvatar.setTeleportAvailable(0)
base.localAvatar.stopUpdateSmartCamera()
base.localAvatar.detachCamera()
base.localAvatar.stopPosHprBroadcast()
self.ignore('teleportQuery')
def enterStickerBook(self, page = None):
self.enterFLM()
base.localAvatar.laffMeter.start()
target = base.cr.doFind('DistributedTarget')
if target:
target.hideGui()
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
if page:
base.localAvatar.book.setPage(page)
base.localAvatar.b_setAnimState('OpenBook', 1, self.enterStickerBookGUI)
base.localAvatar.obscureMoveFurnitureButton(1)
def enterStickerBookGUI(self):
base.localAvatar.collisionsOn()
base.localAvatar.book.showButton()
base.localAvatar.book.enter()
base.localAvatar.setGuiConflict(1)
base.localAvatar.startSleepWatch(self.__handleFallingAsleep)
self.accept('bookDone', self.__handleBook)
base.localAvatar.b_setAnimState('ReadBook', 1)
self.enablePeriodTimer()
def __handleFallingAsleep(self, task):
base.localAvatar.book.exit()
base.localAvatar.b_setAnimState('CloseBook', 1, callback=self.__handleFallingAsleepBookClose)
return Task.done
def __handleFallingAsleepBookClose(self):
if hasattr(self, 'fsm'):
self.fsm.request('walk')
base.localAvatar.forceGotoSleep()
def exitStickerBook(self):
base.localAvatar.stopSleepWatch()
self.disablePeriodTimer()
self.exitFLM()
base.localAvatar.laffMeter.stop()
base.localAvatar.setGuiConflict(0)
base.localAvatar.book.exit()
base.localAvatar.book.hideButton()
base.localAvatar.collisionsOff()
self.ignore('bookDone')
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
base.localAvatar.obscureMoveFurnitureButton(-1)
target = base.cr.doFind('DistributedTarget')
if target:
target.showGui()
def __handleBook(self):
base.localAvatar.stopSleepWatch()
base.localAvatar.book.exit()
bookStatus = base.localAvatar.book.getDoneStatus()
if bookStatus['mode'] == 'close':
base.localAvatar.b_setAnimState('CloseBook', 1, callback=self.handleBookClose)
elif bookStatus['mode'] == 'teleport':
zoneId = bookStatus['hood']
base.localAvatar.collisionsOff()
base.localAvatar.b_setAnimState('CloseBook', 1, callback=self.handleBookCloseTeleport, extraArgs=[zoneId, zoneId])
elif bookStatus['mode'] == 'exit':
self.exitTo = bookStatus.get('exitTo')
base.localAvatar.collisionsOff()
base.localAvatar.b_setAnimState('CloseBook', 1, callback=self.__handleBookCloseExit)
elif bookStatus['mode'] == 'gohome':
zoneId = bookStatus['hood']
base.localAvatar.collisionsOff()
base.localAvatar.b_setAnimState('CloseBook', 1, callback=self.goHomeNow, extraArgs=[zoneId])
elif bookStatus['mode'] == 'startparty':
firstStart = bookStatus['firstStart']
hostId = bookStatus['hostId']
base.localAvatar.collisionsOff()
base.localAvatar.b_setAnimState('CloseBook', 1, callback=self.startPartyNow, extraArgs=[firstStart, hostId])
def handleBookCloseTeleport(self, hoodId, zoneId):
if localAvatar.hasActiveBoardingGroup():
rejectText = TTLocalizer.BoardingCannotLeaveZone
localAvatar.elevatorNotifier.showMe(rejectText)
return
self.requestLeave({'loader': ZoneUtil.getBranchLoaderName(zoneId),
'where': ZoneUtil.getToonWhereName(zoneId),
'how': 'teleportIn',
'hoodId': hoodId,
'zoneId': zoneId,
'shardId': None,
'avId': -1})
return
def __handleBookCloseExit(self):
base.localAvatar.b_setAnimState('TeleportOut', 1, self.__handleBookExitTeleport, [0])
def __handleBookExitTeleport(self, requestStatus):
if base.cr.timeManager:
base.cr.timeManager.setDisconnectReason(ToontownGlobals.DisconnectBookExit)
base.transitions.fadeScreen(1.0)
base.cr.gameFSM.request(self.exitTo)
def goHomeNow(self, curZoneId):
if localAvatar.hasActiveBoardingGroup():
rejectText = TTLocalizer.BoardingCannotLeaveZone
localAvatar.elevatorNotifier.showMe(rejectText)
return
hoodId = ToontownGlobals.MyEstate
self.requestLeave({'loader': 'safeZoneLoader',
'where': 'estate',
'how': 'teleportIn',
'hoodId': hoodId,
'zoneId': -1,
'shardId': None,
'avId': -1})
return
def startPartyNow(self, firstStart, hostId):
if localAvatar.hasActiveBoardingGroup():
rejectText = TTLocalizer.BoardingCannotLeaveZone
localAvatar.elevatorNotifier.showMe(rejectText)
return
base.localAvatar.creatingNewPartyWithMagicWord = False
base.localAvatar.aboutToPlanParty = False
hoodId = ToontownGlobals.PartyHood
if firstStart:
zoneId = 0
ToontownDistrictStats.refresh('shardInfoUpdated')
curShardTuples = base.cr.listActiveShards()
lowestPop = 100000000000000000L
shardId = None
for shardInfo in curShardTuples:
pop = shardInfo[2]
if pop < lowestPop:
lowestPop = pop
shardId = shardInfo[0]
if shardId == base.localAvatar.defaultShard:
shardId = None
base.cr.playGame.getPlace().requestLeave({'loader': 'safeZoneLoader',
'where': 'party',
'how': 'teleportIn',
'hoodId': hoodId,
'zoneId': zoneId,
'shardId': None, # ALPHA BANDAGE: should be shardId, but this causes the AI it teleports to to die right now.
'avId': -1})
else:
if hostId is None:
hostId = base.localAvatar.doId
base.cr.partyManager.sendAvatarToParty(hostId)
return
return
def handleBookClose(self):
if hasattr(self, 'fsm'):
self.fsm.request('walk')
if hasattr(self, 'toonSubmerged') and self.toonSubmerged == 1:
if hasattr(self, 'walkStateData'):
self.walkStateData.fsm.request('swimming', [self.loader.swimSound])
def requestLeave(self, requestStatus):
teleportDebug(requestStatus, 'requestLeave(%s)' % (requestStatus,))
if hasattr(self, 'fsm'):
self.doRequestLeave(requestStatus)
def doRequestLeave(self, requestStatus):
teleportDebug(requestStatus, 'requestLeave(%s)' % (requestStatus,))
self.fsm.request('DFA', [requestStatus])
def enterDFA(self, requestStatus):
teleportDebug(requestStatus, 'enterDFA(%s)' % (requestStatus,))
self.acceptOnce(self.dfaDoneEvent, self.enterDFACallback, [requestStatus])
self.dfa = DownloadForceAcknowledge.DownloadForceAcknowledge(self.dfaDoneEvent)
self.dfa.enter(base.cr.hoodMgr.getPhaseFromHood(requestStatus['hoodId']))
def exitDFA(self):
self.ignore(self.dfaDoneEvent)
def handleEnterTunnel(self, requestStatus, collEntry):
if localAvatar.hasActiveBoardingGroup():
rejectText = TTLocalizer.BoardingCannotLeaveZone
localAvatar.elevatorNotifier.showMe(rejectText)
dummyNP = NodePath('dummyNP')
dummyNP.reparentTo(render)
tunnelOrigin = requestStatus['tunnelOrigin']
dummyNP.setPos(localAvatar.getPos())
dummyNP.setH(tunnelOrigin.getH())
dummyNP.setPos(dummyNP, 0, 4, 0)
localAvatar.setPos(dummyNP.getPos())
dummyNP.removeNode()
del dummyNP
return
self.requestLeave(requestStatus)
def enterDFACallback(self, requestStatus, doneStatus):
teleportDebug(requestStatus, 'enterDFACallback%s' % ((requestStatus, doneStatus),))
self.dfa.exit()
del self.dfa
if doneStatus['mode'] == 'complete':
if requestStatus.get('tutorial', 0):
out = {'teleportIn': 'tunnelOut'}
requestStatus['zoneId'] = 22000
requestStatus['hoodId'] = 22000
else:
out = {'teleportIn': 'teleportOut',
'tunnelIn': 'tunnelOut',
'doorIn': 'doorOut'}
teleportDebug(requestStatus, 'requesting %s, requestStatus=%s' % (out[requestStatus['how']], requestStatus))
self.fsm.request(out[requestStatus['how']], [requestStatus])
elif doneStatus['mode'] == 'incomplete':
self.fsm.request('DFAReject')
else:
Place.notify.error('Unknown done status for DownloadForceAcknowledge: ' + `doneStatus`)
def enterDFAReject(self):
self.fsm.request('walk')
def exitDFAReject(self):
pass
def enterTrialerFA(self, requestStatus):
teleportDebug(requestStatus, 'enterTrialerFA(%s)' % requestStatus)
self.acceptOnce(self.trialerFADoneEvent, self.trialerFACallback, [requestStatus])
self.trialerFA = TrialerForceAcknowledge.TrialerForceAcknowledge(self.trialerFADoneEvent)
self.trialerFA.enter(requestStatus['hoodId'])
def exitTrialerFA(self):
pass
def trialerFACallback(self, requestStatus, doneStatus):
if doneStatus['mode'] == 'pass':
self.fsm.request('DFA', [requestStatus])
elif doneStatus['mode'] == 'fail':
self.fsm.request('trialerFAReject')
else:
Place.notify.error('Unknown done status for TrialerForceAcknowledge: %s' % doneStatus)
def enterTrialerFAReject(self):
self.fsm.request('walk')
def exitTrialerFAReject(self):
pass
def enterDoorIn(self, requestStatus):
NametagGlobals.setWant2dNametags(False)
door = base.cr.doId2do.get(requestStatus['doorDoId'])
if not door is None:
door.readyToExit()
base.localAvatar.obscureMoveFurnitureButton(1)
base.localAvatar.startQuestMap()
def exitDoorIn(self):
NametagGlobals.setWant2dNametags(True)
base.localAvatar.obscureMoveFurnitureButton(-1)
def enterDoorOut(self):
base.localAvatar.obscureMoveFurnitureButton(1)
def exitDoorOut(self):
base.localAvatar.obscureMoveFurnitureButton(-1)
base.localAvatar.stopQuestMap()
def handleDoorDoneEvent(self, requestStatus):
self.doneStatus = requestStatus
messenger.send(self.doneEvent)
def handleDoorTrigger(self):
self.fsm.request('doorOut')
def enterTunnelIn(self, requestStatus):
self.notify.debug('enterTunnelIn(requestStatus=' + str(requestStatus) + ')')
tunnelOrigin = base.render.find(requestStatus['tunnelName'])
self.accept('tunnelInMovieDone', self.__tunnelInMovieDone)
base.localAvatar.reconsiderCheesyEffect()
base.localAvatar.tunnelIn(tunnelOrigin)
base.localAvatar.startQuestMap()
def __tunnelInMovieDone(self):
self.ignore('tunnelInMovieDone')
self.fsm.request('walk')
def exitTunnelIn(self):
pass
def enterTunnelOut(self, requestStatus):
hoodId = requestStatus['hoodId']
zoneId = requestStatus['zoneId']
how = requestStatus['how']
tunnelOrigin = requestStatus['tunnelOrigin']
fromZoneId = ZoneUtil.getCanonicalZoneId(self.getZoneId())
tunnelName = requestStatus.get('tunnelName')
if tunnelName == None:
tunnelName = base.cr.hoodMgr.makeLinkTunnelName(self.loader.hood.id, fromZoneId)
self.doneStatus = {'loader': ZoneUtil.getLoaderName(zoneId),
'where': ZoneUtil.getToonWhereName(zoneId),
'how': how,
'hoodId': hoodId,
'zoneId': zoneId,
'shardId': None,
'tunnelName': tunnelName}
self.accept('tunnelOutMovieDone', self.__tunnelOutMovieDone)
base.localAvatar.tunnelOut(tunnelOrigin)
base.localAvatar.stopQuestMap()
return
def __tunnelOutMovieDone(self):
self.ignore('tunnelOutMovieDone')
messenger.send(self.doneEvent)
def exitTunnelOut(self):
pass
def enterTeleportOut(self, requestStatus, callback):
base.localAvatar.laffMeter.start()
base.localAvatar.b_setAnimState('TeleportOut', 1, callback, [requestStatus])
base.localAvatar.obscureMoveFurnitureButton(1)
def exitTeleportOut(self):
base.localAvatar.laffMeter.stop()
base.localAvatar.stopQuestMap()
base.localAvatar.obscureMoveFurnitureButton(-1)
def enterDied(self, requestStatus, callback = None):
if callback == None:
callback = self.__diedDone
base.localAvatar.laffMeter.start()
camera.wrtReparentTo(render)
base.localAvatar.b_setAnimState('Died', 1, callback, [requestStatus])
base.localAvatar.obscureMoveFurnitureButton(1)
return
def __diedDone(self, requestStatus):
self.doneStatus = requestStatus
messenger.send(self.doneEvent)
def exitDied(self):
base.localAvatar.laffMeter.stop()
base.localAvatar.obscureMoveFurnitureButton(-1)
def getEstateZoneAndGoHome(self, requestStatus):
self.doneStatus = requestStatus
avId = requestStatus['avId']
self.acceptOnce('setLocalEstateZone', self.goHome)
if avId > 0:
base.cr.estateMgr.getLocalEstateZone(avId)
else:
base.cr.estateMgr.getLocalEstateZone(base.localAvatar.getDoId())
if HouseGlobals.WANT_TELEPORT_TIMEOUT:
taskMgr.doMethodLater(HouseGlobals.TELEPORT_TIMEOUT, self.goHomeFailed, 'goHomeFailed')
def goHome(self, ownerId, zoneId):
self.notify.debug('goHome ownerId = %s' % ownerId)
taskMgr.remove('goHomeFailed')
if ownerId > 0 and ownerId != base.localAvatar.doId and not base.cr.isFriend(ownerId):
self.doneStatus['failed'] = 1
self.goHomeFailed(None)
return
if ownerId == 0 and zoneId == 0:
            if self.doneStatus['shardId'] is None or self.doneStatus['shardId'] == base.localAvatar.defaultShard:
self.doneStatus['failed'] = 1
self.goHomeFailed(None)
return
else:
self.doneStatus['hood'] = ToontownGlobals.MyEstate
self.doneStatus['zone'] = base.localAvatar.lastHood
self.doneStatus['loaderId'] = 'safeZoneLoader'
self.doneStatus['whereId'] = 'estate'
self.doneStatus['how'] = 'teleportIn'
messenger.send(self.doneEvent)
return
if self.doneStatus['zoneId'] == -1:
self.doneStatus['zoneId'] = zoneId
elif self.doneStatus['zoneId'] != zoneId:
self.doneStatus['where'] = 'house'
self.doneStatus['ownerId'] = ownerId
messenger.send(self.doneEvent)
messenger.send('localToonLeft')
return
def goHomeFailed(self, task):
self.notify.debug('goHomeFailed')
self.notifyUserGoHomeFailed()
self.ignore('setLocalEstateZone')
self.doneStatus['hood'] = base.localAvatar.lastHood
self.doneStatus['zone'] = base.localAvatar.lastHood
self.fsm.request('teleportIn', [self.doneStatus])
return Task.done
def notifyUserGoHomeFailed(self):
self.notify.debug('notifyUserGoHomeFailed')
failedToVisitAvId = self.doneStatus.get('avId', -1)
avName = None
if failedToVisitAvId != -1:
avatar = base.cr.identifyAvatar(failedToVisitAvId)
if avatar:
avName = avatar.getName()
if avName:
message = TTLocalizer.EstateTeleportFailedNotFriends % avName
else:
message = TTLocalizer.EstateTeleportFailed
base.localAvatar.setSystemMessage(0, message)
return
def enterTeleportIn(self, requestStatus):
self._tiToken = self.addSetZoneCompleteCallback(Functor(self._placeTeleportInPostZoneComplete, requestStatus), 100)
def _placeTeleportInPostZoneComplete(self, requestStatus):
teleportDebug(requestStatus, '_placeTeleportInPostZoneComplete(%s)' % (requestStatus,))
NametagGlobals.setWant2dNametags(False)
base.localAvatar.laffMeter.start()
base.localAvatar.startQuestMap()
base.localAvatar.reconsiderCheesyEffect()
base.localAvatar.obscureMoveFurnitureButton(1)
avId = requestStatus.get('avId', -1)
if avId != -1:
if avId in base.cr.doId2do:
teleportDebug(requestStatus, 'teleport to avatar')
avatar = base.cr.doId2do[avId]
avatar.forceToTruePosition()
base.localAvatar.gotoNode(avatar)
base.localAvatar.b_teleportGreeting(avId)
else:
friend = base.cr.identifyAvatar(avId)
if friend != None:
teleportDebug(requestStatus, 'friend not here, giving up')
base.localAvatar.setSystemMessage(avId, OTPLocalizer.WhisperTargetLeftVisit % (friend.getName(),))
friend.d_teleportGiveup(base.localAvatar.doId)
base.transitions.irisIn()
self.nextState = requestStatus.get('nextState', 'walk')
base.localAvatar.attachCamera()
base.localAvatar.startUpdateSmartCamera()
base.localAvatar.startPosHprBroadcast()
globalClock.tick()
base.localAvatar.b_setAnimState('TeleportIn', 1, callback=self.teleportInDone)
base.localAvatar.d_broadcastPositionNow()
base.localAvatar.b_setParent(ToontownGlobals.SPRender)
return
def teleportInDone(self):
if hasattr(self, 'fsm'):
teleportNotify.debug('teleportInDone: %s' % self.nextState)
self.fsm.request(self.nextState, [1])
def exitTeleportIn(self):
self.removeSetZoneCompleteCallback(self._tiToken)
self._tiToken = None
NametagGlobals.setWant2dNametags(True)
base.localAvatar.laffMeter.stop()
base.localAvatar.obscureMoveFurnitureButton(-1)
base.localAvatar.stopUpdateSmartCamera()
base.localAvatar.detachCamera()
base.localAvatar.stopPosHprBroadcast()
return
def requestTeleport(self, hoodId, zoneId, shardId, avId):
if avId > 0:
teleportNotify.debug('requestTeleport%s' % ((hoodId,
zoneId,
shardId,
avId),))
if localAvatar.hasActiveBoardingGroup():
if avId > 0:
teleportNotify.debug('requestTeleport: has active boarding group')
rejectText = TTLocalizer.BoardingCannotLeaveZone
localAvatar.elevatorNotifier.showMe(rejectText)
return
loaderId = ZoneUtil.getBranchLoaderName(zoneId)
whereId = ZoneUtil.getToonWhereName(zoneId)
if hoodId == ToontownGlobals.MyEstate:
loaderId = 'safeZoneLoader'
whereId = 'estate'
if hoodId == ToontownGlobals.PartyHood:
loaderId = 'safeZoneLoader'
whereId = 'party'
self.requestLeave({'loader': loaderId,
'where': whereId,
'how': 'teleportIn',
'hoodId': hoodId,
'zoneId': zoneId,
'shardId': shardId,
'avId': avId})
def enterQuest(self, npcToon):
base.localAvatar.b_setAnimState('neutral', 1)
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.laffMeter.start()
base.localAvatar.obscureMoveFurnitureButton(1)
def exitQuest(self):
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
base.localAvatar.laffMeter.stop()
base.localAvatar.obscureMoveFurnitureButton(-1)
def enterPurchase(self):
base.localAvatar.b_setAnimState('neutral', 1)
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.laffMeter.start()
base.localAvatar.obscureMoveFurnitureButton(1)
def exitPurchase(self):
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
base.localAvatar.laffMeter.stop()
base.localAvatar.obscureMoveFurnitureButton(-1)
def enterFishing(self):
base.localAvatar.b_setAnimState('neutral', 1)
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.laffMeter.start()
def exitFishing(self):
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
base.localAvatar.laffMeter.stop()
def enterBanking(self):
base.localAvatar.b_setAnimState('neutral', 1)
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.laffMeter.start()
base.localAvatar.obscureMoveFurnitureButton(1)
base.localAvatar.startSleepWatch(self.__handleFallingAsleepBanking)
self.enablePeriodTimer()
def __handleFallingAsleepBanking(self, arg):
if hasattr(self, 'fsm'):
messenger.send('bankAsleep')
self.fsm.request('walk')
base.localAvatar.forceGotoSleep()
def exitBanking(self):
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
base.localAvatar.laffMeter.stop()
base.localAvatar.obscureMoveFurnitureButton(-1)
base.localAvatar.stopSleepWatch()
self.disablePeriodTimer()
def enterPhone(self):
base.localAvatar.b_setAnimState('neutral', 1)
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.laffMeter.start()
base.localAvatar.obscureMoveFurnitureButton(1)
base.localAvatar.startSleepWatch(self.__handleFallingAsleepPhone)
self.enablePeriodTimer()
def __handleFallingAsleepPhone(self, arg):
if hasattr(self, 'fsm'):
self.fsm.request('walk')
messenger.send('phoneAsleep')
base.localAvatar.forceGotoSleep()
def exitPhone(self):
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
base.localAvatar.laffMeter.stop()
base.localAvatar.obscureMoveFurnitureButton(-1)
base.localAvatar.stopSleepWatch()
self.disablePeriodTimer()
def enterStopped(self):
base.localAvatar.b_setAnimState('neutral', 1)
Emote.globalEmote.disableBody(base.localAvatar, 'enterStopped')
self.accept('teleportQuery', self.handleTeleportQuery)
if base.localAvatar.isDisguised:
base.localAvatar.setTeleportAvailable(0)
else:
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.laffMeter.start()
base.localAvatar.obscureMoveFurnitureButton(1)
base.localAvatar.startSleepWatch(self.__handleFallingAsleepStopped)
self.enablePeriodTimer()
def __handleFallingAsleepStopped(self, arg):
if hasattr(self, 'fsm'):
self.fsm.request('walk')
base.localAvatar.forceGotoSleep()
messenger.send('stoppedAsleep')
def exitStopped(self):
Emote.globalEmote.releaseBody(base.localAvatar, 'exitStopped')
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
base.localAvatar.laffMeter.stop()
base.localAvatar.obscureMoveFurnitureButton(-1)
base.localAvatar.stopSleepWatch()
self.disablePeriodTimer()
messenger.send('exitingStoppedState')
def enterPet(self):
base.localAvatar.b_setAnimState('neutral', 1)
Emote.globalEmote.disableBody(base.localAvatar, 'enterPet')
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.setTeleportAllowed(0)
base.localAvatar.laffMeter.start()
self.enterFLM()
def exitPet(self):
base.localAvatar.setTeleportAvailable(0)
base.localAvatar.setTeleportAllowed(1)
Emote.globalEmote.releaseBody(base.localAvatar, 'exitPet')
self.ignore('teleportQuery')
base.localAvatar.laffMeter.stop()
self.exitFLM()
def enterQuietZone(self, requestStatus):
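        # Added note: the quiet zone is handled by a QuietZoneState helper; when it
        # finishes, it fires the unique 'quietZoneDone' event that handleQuietZoneDone()
        # below is wired to via acceptOnce().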
self.quietZoneDoneEvent = uniqueName('quietZoneDone')
self.acceptOnce(self.quietZoneDoneEvent, self.handleQuietZoneDone)
self.quietZoneStateData = QuietZoneState.QuietZoneState(self.quietZoneDoneEvent)
self.quietZoneStateData.load()
self.quietZoneStateData.enter(requestStatus)
def exitQuietZone(self):
self.ignore(self.quietZoneDoneEvent)
del self.quietZoneDoneEvent
self.quietZoneStateData.exit()
self.quietZoneStateData.unload()
self.quietZoneStateData = None
return
def handleQuietZoneDone(self):
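        # base.cr.handlerArgs is the requestStatus dict carried through the quiet zone;
        # its 'how' key (e.g. 'teleportIn' from requestTeleport above) names the next FSM state.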
how = base.cr.handlerArgs['how']
self.fsm.request(how, [base.cr.handlerArgs])
| ToontownUprising/src | toontown/hood/Place.py | Python | mit | 37,237 | 0.002793 |
"""
Manage OS-level configuration
"""
import logging
l = logging.getLogger("angr.simos")
from archinfo import ArchARM, ArchMIPS32, ArchX86, ArchAMD64
from simuvex import SimState, SimIRSB, SimStateSystem, SimActionData
from simuvex import s_options as o
from simuvex.s_procedure import SimProcedure, SimProcedureContinuation
from simuvex.s_type import SimTypePointer, SimTypeFunction, SimTypeTop
from cle.metaelf import MetaELF
from cle.backedcgc import BackedCGC
class SimOS(object):
"""A class describing OS/arch-level configuration"""
def __init__(self, project):
self.arch = project.arch
self.proj = project
self.continue_addr = None
self.configure_project()
def configure_project(self):
"""Configure the project to set up global settings (like SimProcedures)"""
self.continue_addr = self.proj._extern_obj.get_pseudo_addr('angr##simproc_continue')
self.proj.hook(self.continue_addr, SimProcedureContinuation)
def state_blank(self, addr=None, initial_prefix=None, **kwargs):
if kwargs.get('mode', None) is None:
kwargs['mode'] = self.proj._default_analysis_mode
if kwargs.get('memory_backer', None) is None:
kwargs['memory_backer'] = self.proj.loader.memory
if kwargs.get('arch', None) is None:
kwargs['arch'] = self.proj.arch
state = SimState(**kwargs)
state.regs.sp = self.arch.initial_sp
if initial_prefix is not None:
for reg in state.arch.default_symbolic_registers:
state.registers.store(reg, state.se.Unconstrained(initial_prefix + "_" + reg,
state.arch.bits,
explicit_name=True))
for reg, val, is_addr, mem_region in state.arch.default_register_values:
if o.ABSTRACT_MEMORY in state.options and is_addr:
address = state.se.ValueSet(region=mem_region, bits=state.arch.bits, val=val)
state.registers.store(reg, address)
else:
state.registers.store(reg, val)
if addr is None: addr = self.proj.entry
state.regs.ip = addr
state.scratch.ins_addr = addr
state.scratch.bbl_addr = addr
state.scratch.stmt_idx = 0
state.scratch.jumpkind = 'Ijk_Boring'
state.procedure_data.hook_addr = self.continue_addr
return state
def state_entry(self, **kwargs):
return self.state_blank(**kwargs)
def state_full_init(self, **kwargs):
return self.state_entry(**kwargs)
def prepare_call_state(self, calling_state, initial_state=None,
preserve_registers=(), preserve_memory=()):
        '''
        This function prepares a state that is executing a call instruction.
        If given an initial_state, it copies all of the critical registers over to it from the
        calling_state. Otherwise, it prepares the calling_state for action.
        This is mostly used to create minimalistic states for CFG generation. Some ABIs, such as
        MIPS PIE and x86 PIE, require certain information to be maintained in certain registers.
        For example, for PIE MIPS, this function transfers t9, gp, and ra to the new state.
        '''
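        # Hedged usage sketch (added, not part of the original angr source; names are
        # illustrative only):
        #   callee_state = project._simos.prepare_call_state(state_at_call,
        #                                                    preserve_registers=('sp',))
        # would yield a copy of state_at_call (or of an explicit initial_state, with the
        # listed registers/memory copied over from the calling state) suitable for
        # exploring the callee.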
if isinstance(self.arch, ArchMIPS32):
if initial_state is not None:
initial_state = self.state_blank()
mips_caller_saves = ('s0', 's1', 's2', 's3', 's4', 's5', 's6', 's7', 'gp', 'sp', 'bp', 'ra')
preserve_registers = preserve_registers + mips_caller_saves + ('t9',)
if initial_state is None:
new_state = calling_state.copy()
else:
new_state = initial_state.copy()
for reg in set(preserve_registers):
new_state.registers.store(reg, calling_state.registers.load(reg))
for addr, val in set(preserve_memory):
new_state.memory.store(addr, calling_state.memory.load(addr, val))
return new_state
class SimLinux(SimOS):
"""OS-specific configuration for *nix-y OSes"""
def __init__(self, *args, **kwargs):
super(SimLinux, self).__init__(*args, **kwargs)
self._loader_addr = None
self._loader_lock_addr = None
self._loader_unlock_addr = None
self._vsyscall_addr = None
def configure_project(self):
super(SimLinux, self).configure_project()
self._loader_addr = self.proj._extern_obj.get_pseudo_addr('angr##loader')
self._loader_lock_addr = self.proj._extern_obj.get_pseudo_addr('angr##loader_lock')
self._loader_unlock_addr = self.proj._extern_obj.get_pseudo_addr('angr##loader_unlock')
self._vsyscall_addr = self.proj._extern_obj.get_pseudo_addr('angr##vsyscall')
self.proj.hook(self._loader_addr, LinuxLoader, kwargs={'project': self.proj})
self.proj.hook(self._loader_lock_addr, _dl_rtld_lock_recursive)
self.proj.hook(self._loader_unlock_addr, _dl_rtld_unlock_recursive)
self.proj.hook(self._vsyscall_addr, _vsyscall)
ld_obj = self.proj.loader.linux_loader_object
if ld_obj is not None:
tlsfunc = ld_obj.get_symbol('__tls_get_addr')
if tlsfunc is not None:
self.proj.hook(tlsfunc.rebased_addr, _tls_get_addr, kwargs={'ld': self.proj.loader})
_rtld_global = ld_obj.get_symbol('_rtld_global')
if _rtld_global is not None:
if isinstance(self.proj.arch, ArchAMD64):
self.proj.loader.memory.write_addr_at(_rtld_global.rebased_addr + 0xF08, self._loader_lock_addr)
self.proj.loader.memory.write_addr_at(_rtld_global.rebased_addr + 0xF10, self._loader_unlock_addr)
_rtld_global_ro = ld_obj.get_symbol('_rtld_global_ro')
if _rtld_global_ro is not None:
pass
tls_obj = self.proj.loader.tls_object
if tls_obj is not None:
if isinstance(self.proj.arch, ArchAMD64):
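                # The two constants below are ASCII for "_CANARY_" and "PTRGUARD"; offsets
                # 0x28 and 0x30 from the thread pointer are where glibc's x86-64 TCB keeps
                # the stack guard and pointer guard, so this plants recognizable sentinels.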
self.proj.loader.memory.write_addr_at(tls_obj.thread_pointer + 0x28, 0x5f43414e4152595f)
self.proj.loader.memory.write_addr_at(tls_obj.thread_pointer + 0x30, 0x5054524755415244)
elif isinstance(self.proj.arch, ArchX86):
self.proj.loader.memory.write_addr_at(tls_obj.thread_pointer + 0x10, self._vsyscall_addr)
elif isinstance(self.proj.arch, ArchARM):
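                # 0xffff0fe0 is the fixed address of the Linux ARM kuser helper
                # __kuser_get_tls, so hooking it lets emulated code fetch the thread pointer.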
self.proj.hook(0xffff0fe0, _kernel_user_helper_get_tls, kwargs={'ld': self.proj.loader})
# Only set up ifunc resolution if we are using the ELF backend on AMD64
if isinstance(self.proj.loader.main_bin, MetaELF):
if isinstance(self.proj.arch, ArchAMD64):
for binary in self.proj.loader.all_objects:
if not isinstance(binary, MetaELF):
continue
for reloc in binary.relocs:
if reloc.symbol is None or reloc.resolvedby is None:
continue
if reloc.resolvedby.type != 'STT_GNU_IFUNC':
continue
gotaddr = reloc.addr + binary.rebase_addr
gotvalue = self.proj.loader.memory.read_addr_at(gotaddr)
if self.proj.is_hooked(gotvalue):
continue
                        # Replace it with an ifunc-resolve simprocedure!
kwargs = {
'proj': self.proj,
'funcaddr': gotvalue,
'gotaddr': gotaddr,
'funcname': reloc.symbol.name
}
randaddr = self.proj._extern_obj.get_pseudo_addr('ifunc_' + reloc.symbol.name)
self.proj.hook(randaddr, IFuncResolver, kwargs=kwargs)
self.proj.loader.memory.write_addr_at(gotaddr, randaddr)
def state_blank(self, fs=None, **kwargs):
state = super(SimLinux, self).state_blank(**kwargs) #pylint:disable=invalid-name
if self.proj.loader.tls_object is not None:
if isinstance(state.arch, ArchAMD64):
state.regs.fs = self.proj.loader.tls_object.thread_pointer
elif isinstance(state.arch, ArchX86):
state.regs.gs = self.proj.loader.tls_object.thread_pointer >> 16
elif isinstance(state.arch, ArchMIPS32):
state.regs.ulr = self.proj.loader.tls_object.thread_pointer
state.register_plugin('posix', SimStateSystem(fs=fs))
if self.proj.loader.main_bin.is_ppc64_abiv1:
state.libc.ppc64_abiv = 'ppc64_1'
return state
def state_entry(self, args=None, env=None, sargc=None, **kwargs):
state = super(SimLinux, self).state_entry(**kwargs)
# Handle default values
if args is None:
args = []
if env is None:
env = {}
# Prepare argc
argc = state.BVV(len(args), state.arch.bits)
if sargc is not None:
argc = state.se.Unconstrained("argc", state.arch.bits)
# Make string table for args/env/auxv
table = StringTableSpec()
# Add args to string table
for arg in args:
table.add_string(arg)
table.add_null()
# Add environment to string table
for k, v in env.iteritems():
table.add_string(k + '=' + v)
table.add_null()
# Prepare the auxiliary vector and add it to the end of the string table
# TODO: Actually construct a real auxiliary vector
aux = []
for a, b in aux:
table.add_pointer(a)
table.add_pointer(b)
table.add_null()
table.add_null()
# Dump the table onto the stack, calculate pointers to args, env, and auxv
state.memory.store(state.regs.sp, state.BVV(0, 8*16), endness='Iend_BE')
argv = table.dump(state, state.regs.sp)
envp = argv + ((len(args) + 1) * state.arch.bytes)
auxv = argv + ((len(args) + len(env) + 2) * state.arch.bytes)
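        # Layout note (added): table.dump() lays out argv, envp, and auxv as consecutive
        # pointer arrays (each NULL-terminated); argc is then stored one word below argv
        # and sp is moved down to it, matching the usual System V process-entry stack layout.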
# Put argc on stack and fix the stack pointer
newsp = argv - state.arch.bytes
state.memory.store(newsp, argc, endness=state.arch.memory_endness)
state.regs.sp = newsp
if state.arch.name in ('PPC32',):
state.stack_push(state.BVV(0, 32))
state.stack_push(state.BVV(0, 32))
state.stack_push(state.BVV(0, 32))
state.stack_push(state.BVV(0, 32))
# store argc argv envp auxv in the posix plugin
state.posix.argv = argv
state.posix.argc = argc
state.posix.environ = envp
state.posix.auxv = auxv
self.set_entry_register_values(state)
return state
def set_entry_register_values(self, state):
for reg, val in state.arch.entry_register_values.iteritems():
if isinstance(val, (int, long)):
state.registers.store(reg, val, size=state.arch.bytes)
elif isinstance(val, (str,)):
if val == 'argc':
state.registers.store(reg, state.posix.argc, size=state.arch.bytes)
elif val == 'argv':
state.registers.store(reg, state.posix.argv)
elif val == 'envp':
state.registers.store(reg, state.posix.environ)
elif val == 'auxv':
state.registers.store(reg, state.posix.auxv)
elif val == 'ld_destructor':
# a pointer to the dynamic linker's destructor routine, to be called at exit
# or NULL. We like NULL. It makes things easier.
state.registers.store(reg, state.BVV(0, state.arch.bits))
elif val == 'toc':
if self.proj.loader.main_bin.is_ppc64_abiv1:
state.registers.store(reg, self.proj.loader.main_bin.ppc64_initial_rtoc)
else:
l.warning('Unknown entry point register value indicator "%s"', val)
else:
l.error('What the ass kind of default value is %s?', val)
def state_full_init(self, **kwargs):
kwargs['addr'] = self.proj._extern_obj.get_pseudo_addr('angr##loader')
return super(SimLinux, self).state_full_init(**kwargs)
class SimCGC(SimOS):
def state_blank(self, fs=None, **kwargs):
s = super(SimCGC, self).state_blank(**kwargs) # pylint:disable=invalid-name
s.register_plugin('posix', SimStateSystem(fs=fs))
# Create the CGC plugin
s.get_plugin('cgc')
# Set CGC-specific options
#s.options.add(o.CGC_NO_SYMBOLIC_RECEIVE_LENGTH)
s.options.add(o.CGC_ZERO_FILL_UNCONSTRAINED_MEMORY)
return s
def state_entry(self, **kwargs):
state = super(SimCGC, self).state_entry(**kwargs)
if isinstance(self.proj.loader.main_bin, BackedCGC):
for reg, val in self.proj.loader.main_bin.initial_register_values():
if reg in state.arch.registers:
setattr(state.regs, reg, val)
elif reg == 'eflags':
pass
elif reg == 'fctrl':
state.regs.fpround = (val & 0xC00) >> 10
elif reg == 'fstat':
state.regs.fc3210 = (val & 0x4700)
elif reg == 'ftag':
empty_bools = [((val >> (x*2)) & 3) == 3 for x in xrange(8)]
tag_chars = [state.BVV(0 if x else 1, 8) for x in empty_bools]
for i, tag in enumerate(tag_chars):
setattr(state.regs, 'fpu_t%d' % i, tag)
elif reg in ('fiseg', 'fioff', 'foseg', 'fooff', 'fop'):
pass
elif reg == 'mxcsr':
state.regs.sseround = (val & 0x600) >> 9
else:
l.error("What is this register %s I have to translate?", reg)
# Do all the writes
writes_backer = self.proj.loader.main_bin.writes_backer
stdout = 1
for size in writes_backer:
if size == 0:
continue
str_to_write = state.posix.files[1].content.load(state.posix.files[1].pos, size)
a = SimActionData(state, 'file_1_0', 'write', addr=state.BVV(state.posix.files[1].pos, state.arch.bits), data=str_to_write, size=size)
state.posix.write(stdout, str_to_write, size)
state.log.add_action(a)
else:
# Set CGC-specific variables
state.regs.eax = 0
state.regs.ebx = 0
state.regs.ecx = 0
state.regs.edx = 0
state.regs.edi = 0
state.regs.esi = 0
state.regs.esp = 0xbaaaaffc
state.regs.ebp = 0
#state.regs.eflags = s.BVV(0x202, 32)
# fpu values
state.regs.mm0 = 0
state.regs.mm1 = 0
state.regs.mm2 = 0
state.regs.mm3 = 0
state.regs.mm4 = 0
state.regs.mm5 = 0
state.regs.mm6 = 0
state.regs.mm7 = 0
state.regs.fpu_tags = 0
state.regs.fpround = 0
state.regs.fc3210 = 0x0300
state.regs.ftop = 0
# sse values
state.regs.sseround = 0
state.regs.xmm0 = 0
state.regs.xmm1 = 0
state.regs.xmm2 = 0
state.regs.xmm3 = 0
state.regs.xmm4 = 0
state.regs.xmm5 = 0
state.regs.xmm6 = 0
state.regs.xmm7 = 0
return state
#
# Loader-related simprocedures
#
class IFuncResolver(SimProcedure):
# pylint: disable=arguments-differ,unused-argument
def run(self, proj=None, funcaddr=None, gotaddr=None, funcname=None):
resolve = proj.factory.callable(funcaddr, concrete_only=True)
try:
value = resolve()
except AngrCallableError:
l.critical("Ifunc \"%s\" failed to resolve!", funcname)
#import IPython; IPython.embed()
raise
self.state.memory.store(gotaddr, value, endness=self.state.arch.memory_endness)
self.add_successor(self.state, value, self.state.se.true, 'Ijk_Boring')
def __repr__(self):
return '<IFuncResolver %s>' % self.kwargs.get('funcname', None)
class LinuxLoader(SimProcedure):
# pylint: disable=unused-argument,arguments-differ,attribute-defined-outside-init
local_vars = ('initializers',)
def run(self, project=None):
self.initializers = project.loader.get_initializers()
self.run_initializer(project)
def run_initializer(self, project=None):
if len(self.initializers) == 0:
project._simos.set_entry_register_values(self.state)
self.jump(project.entry)
else:
addr = self.initializers.pop(0)
self.call(addr, (self.state.posix.argc, self.state.posix.argv, self.state.posix.environ), 'run_initializer')
class _tls_get_addr(SimProcedure):
# pylint: disable=arguments-differ
def run(self, ptr, ld=None):
module_id = self.state.memory.load(ptr, self.state.arch.bytes, endness=self.state.arch.memory_endness).model.value
offset = self.state.memory.load(ptr+self.state.arch.bytes, self.state.arch.bytes, endness=self.state.arch.memory_endness).model.value
return self.state.BVV(ld.tls_object.get_addr(module_id, offset), self.state.arch.bits)
class _dl_rtld_lock_recursive(SimProcedure):
# pylint: disable=arguments-differ, unused-argument
def run(self, lock):
# For future reference:
# ++((pthread_mutex_t *)(lock))->__data.__count;
self.ret()
class _dl_rtld_unlock_recursive(SimProcedure):
def run(self):
self.ret()
class _vsyscall(SimProcedure):
# This is pretty much entirely copied from SimProcedure.ret
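    # Added note: this procedure is hooked at the pseudo-address written to
    # thread_pointer+0x10 for x86 above (glibc's sysinfo slot, normally the kernel's
    # __kernel_vsyscall), so all it has to do is behave like a bare return.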
def run(self):
if self.cleanup:
self.state.options.discard(o.AST_DEPS)
self.state.options.discard(o.AUTO_REFS)
ret_irsb = self.state.arch.disassemble_vex(self.state.arch.ret_instruction, mem_addr=self.addr)
ret_simirsb = SimIRSB(self.state, ret_irsb, inline=True, addr=self.addr)
if not ret_simirsb.flat_successors + ret_simirsb.unsat_successors:
ret_state = ret_simirsb.default_exit
else:
ret_state = (ret_simirsb.flat_successors + ret_simirsb.unsat_successors)[0]
if self.cleanup:
self.state.options.add(o.AST_DEPS)
self.state.options.add(o.AUTO_REFS)
self.add_successor(ret_state, ret_state.scratch.target, ret_state.scratch.guard, 'Ijk_Sys')
class _kernel_user_helper_get_tls(SimProcedure):
# pylint: disable=arguments-differ
def run(self, ld=None):
self.state.regs.r0 = ld.tls_object.thread_pointer
self.ret()
os_mapping = {
'unix': SimLinux,
'unknown': SimOS,
'windows': SimOS,
'cgc': SimCGC
}
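# Hedged usage sketch (added; not in the original file): a Project would typically select
# its SimOS along the lines of
#   simos_cls = os_mapping.get(os_name, SimOS)
#   simos = simos_cls(project)
# where os_name comes from the loaded binary's metadata (the exact attribute, e.g.
# something like loader.main_bin.os, is an assumption here).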
from .surveyors.caller import Callable
from .errors import AngrCallableError
from .tablespecs import StringTableSpec
| GuardianRG/angr | angr/simos.py | Python | bsd-2-clause | 19,350 | 0.004393 |
from __future__ import unicode_literals
import time
import pytest
@pytest.mark.selenium
def test_ui(selenium):
selenium.browser.get(selenium.url('/download'))
time.sleep(3)
| clld/tsezacp | tests/test_selenium.py | Python | apache-2.0 | 183 | 0 |