text
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
|
---|---|---|---|---|---|---|
from django.contrib.gis import admin
from .models import Line, Point, Polygon, Roads
@admin.register(Line, Point, Polygon, Roads)
class OSMAdmin(admin.OSMGeoAdmin):
fields = ('way', 'osm_id', 'ref', 'name') | quecolectivo/server | djangoserver/quecolectivo/api/admin.py | Python | gpl-3.0 | 211 | 0.009479 |
import asjson
from flask.views import MethodView
from functools import wraps
from flask.ext.mongoengine.wtf import model_form
from flask import request, render_template, Blueprint, redirect, abort, session, make_response
from .models import User, SessionStorage
from mongoengine import DoesNotExist
auth = Blueprint('auth', __name__, template_folder='templates')
class UserAuth(MethodView):
@staticmethod
def get():
form = model_form(User)(request.form)
return render_template('auth/index.html', form=form)
@staticmethod
def post():
if request.form:
try:
username = request.form['name']
password = request.form['password']
user = User.objects.get(name=username)
if user and user.password == password:
# prepare response/redirect
response = make_response(redirect('/panel_control'))
if 'session' in request.cookies:
session_id = request.cookies['session']
else:
session_id = session['csrf_token']
# Setting user-cookie
response.set_cookie('session_id', value=session_id)
# After.We update our storage session(remove old + add new record)
record = SessionStorage()
record.remove_old_session(username)
record.user = username
record.session_key = session_id
record.save()
# And redirect to admin-panel
return response
else:
raise DoesNotExist
except DoesNotExist:
return abort(401)
@staticmethod
def is_admin():
# Выуживаем куки из различных мест,т.к. отправлять могут в виде атрибута заголовков
cookies = request.cookies
if not cookies: # Ничего не нашли на первой иттерации.Попробуем вытащить из заголовка
try:
cookies = asjson.loads(request.headers['Set-Cookie'])
except KeyError:
pass
if 'session_id' in cookies:
session_id = cookies['session_id']
return bool(SessionStorage.objects.filter(session_key=session_id))
else:
return False
def requires_auth(f):
@wraps(f)
def decorated(*args, **kwargs):
if not UserAuth.is_admin():
return redirect('auth')
return f(*args, **kwargs)
return decorated
auth.add_url_rule('/auth/', view_func=UserAuth.as_view('auth'))
| VeeSot/blog | auth/views.py | Python | gpl-2.0 | 2,792 | 0.0015 |
"""
Dashboard page for Studio
"""
from edxapp_acceptance.pages.studio.index import DashboardPage
from bok_choy.promise import BrokenPromise
from regression.pages.studio import BASE_URL
from regression.pages.lms import BASE_URL_LMS
class DashboardPageExtended(DashboardPage):
"""
This class is an extended class of Studio Dashboard Page,
where we add methods that are different or not used in DashboardPage
"""
url = BASE_URL + '/home'
def is_browser_on_page(self):
"""
Verifies if the browser is on the correct page
"""
return self.q(css='.courses-tab.active').present
def select_course(self, course_title):
"""
Selects the course we want to perform tests on
"""
course_names = self.q(css='.course-link h3')
for vals in course_names:
if course_title in vals.text:
vals.click()
return
raise BrokenPromise('Course title not found')
def click_logout_button(self):
"""
Clicks username drop down than logout button
"""
self.q(css='.account-username').click()
self.wait_for_element_visibility(
'.action-signout', 'Sign out button visibility')
self.q(css='.action-signout').click()
def click_view_live_button(self):
"""
Clicks view live button
"""
self.browser.execute_script(
"document.querySelectorAll('[data-course-key = \"course-v1:"
"ArbiRaees+AR-1000+fall\"] .view-button')[0].click();")
self.browser.switch_to_window(self.browser.window_handles[-1])
def click_terms_of_service(self):
"""
Clicks Terms of Service link
"""
self.q(css='a[href="' + BASE_URL_LMS + '/edx-terms-service"]').click()
def click_privacy_policy(self):
"""
Clicks Privacy Policy link
"""
self.q(
css='a[href="' + BASE_URL_LMS + '/edx-privacy-policy"]').click()
| raeeschachar/edx-e2e-mirror | regression/pages/studio/studio_home.py | Python | agpl-3.0 | 2,009 | 0 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2008-2012 Alistek Ltd (http://www.alistek.com) All Rights Reserved.
# General contacts <info@alistek.com>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This module is GPLv3 or newer and incompatible
# with OpenERP SA "AGPL + Private Use License"!
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from osv import fields
from osv import osv
import netsvc
import tools
from xml.dom import minidom
import os, base64
import urllib2
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from tools.translate import _
from report_aeroo_ooo.DocumentConverter import DocumentConversionException
from report_aeroo_ooo.report import OpenOffice_service
from report_aeroo.report_aeroo import aeroo_lock
_url = 'http://www.alistek.com/aeroo_banner/v6_1_report_aeroo_ooo.png'
class aeroo_config_installer(osv.osv_memory):
_name = 'aeroo_config.installer'
_inherit = 'res.config.installer'
_rec_name = 'host'
_logo_image = None
def _get_image(self, cr, uid, context=None):
if self._logo_image:
return self._logo_image
try:
im = urllib2.urlopen(_url.encode("UTF-8"))
if im.headers.maintype!='image':
raise TypeError(im.headers.maintype)
except Exception, e:
path = os.path.join('report_aeroo','config_pixmaps','module_banner.png')
image_file = file_data = tools.file_open(path,'rb')
try:
file_data = image_file.read()
self._logo_image = base64.encodestring(file_data)
return self._logo_image
finally:
image_file.close()
else:
self._logo_image = base64.encodestring(im.read())
return self._logo_image
def _get_image_fn(self, cr, uid, ids, name, args, context=None):
image = self._get_image(cr, uid, context)
return dict.fromkeys(ids, image) # ok to use .fromkeys() as the image is same for all
_columns = {
'host':fields.char('Host', size=64, required=True),
'port':fields.integer('Port', required=True),
'ooo_restart_cmd': fields.char('OOO restart command', size=256, \
help='Enter the shell command that will be executed to restart the LibreOffice/OpenOffice background process.'+ \
'The command will be executed as the user of the OpenERP server process,'+ \
'so you may need to prefix it with sudo and configure your sudoers file to have this command executed without password.'),
'state':fields.selection([
('init','Init'),
('error','Error'),
('done','Done'),
],'State', select=True, readonly=True),
'msg': fields.text('Message', readonly=True),
'error_details': fields.text('Error Details', readonly=True),
'link':fields.char('Installation Manual', size=128, help='Installation (Dependencies and Base system setup)', readonly=True),
'config_logo': fields.function(_get_image_fn, string='Image', type='binary', method=True),
}
def default_get(self, cr, uid, fields, context=None):
config_obj = self.pool.get('oo.config')
data = super(aeroo_config_installer, self).default_get(cr, uid, fields, context=context)
ids = config_obj.search(cr, 1, [], context=context)
if ids:
res = config_obj.read(cr, 1, ids[0], context=context)
del res['id']
data.update(res)
return data
def check(self, cr, uid, ids, context=None):
config_obj = self.pool.get('oo.config')
data = self.read(cr, uid, ids, ['host','port','ooo_restart_cmd'])[0]
del data['id']
config_id = config_obj.search(cr, 1, [], context=context)
if config_id:
config_obj.write(cr, 1, config_id, data, context=context)
else:
config_id = config_obj.create(cr, 1, data, context=context)
try:
fp = tools.file_open('report_aeroo_ooo/test_temp.odt', mode='rb')
file_data = fp.read()
DC = netsvc.Service._services.setdefault('openoffice', \
OpenOffice_service(cr, data['host'], data['port']))
with aeroo_lock:
DC.putDocument(file_data)
DC.saveByStream()
fp.close()
DC.closeDocument()
del DC
except DocumentConversionException, e:
netsvc.Service.remove('openoffice')
error_details = str(e)
state = 'error'
except Exception, e:
error_details = str(e)
state = 'error'
else:
error_details = ''
state = 'done'
if state=='error':
msg = _('Connection to OpenOffice.org instance was not established or convertion to PDF unsuccessful!')
else:
msg = _('Connection to the OpenOffice.org instance was successfully established and PDF convertion is working.')
return self.write(cr, uid, ids, {'msg':msg,'error_details':error_details,'state':state})
_defaults = {
'config_logo': _get_image,
'host':'localhost',
'port':8100,
'ooo_restart_cmd': 'sudo /etc/init.d/libreoffice restart',
'state':'init',
'link':'http://www.alistek.com/wiki/index.php/Aeroo_Reports_Linux_server#Installation_.28Dependencies_and_Base_system_setup.29',
}
aeroo_config_installer()
| dhp-denero/LibrERP | report_aeroo_ooo/installer.py | Python | agpl-3.0 | 6,702 | 0.008207 |
#Attribute set in both superclass and subclass
class C(object):
def __init__(self):
self.var = 0
class D(C):
def __init__(self):
self.var = 1 # self.var will be overwritten
C.__init__(self)
#Attribute set in both superclass and subclass
class E(object):
def __init__(self):
self.var = 0 # self.var will be overwritten
class F(E):
def __init__(self):
E.__init__(self)
self.var = 1
| github/codeql | python/ql/test/query-tests/Classes/overwriting-attribute/overwriting_attribute.py | Python | mit | 451 | 0.015521 |
import feedparser
def getLatest():
feed = feedparser.parse("http://rss.thepiratebay.se/0")
title = feed['entries'][0]['title']
link = feed['entries'][0]['comments'].replace('http://', 'https://')
return "%s - %s" % (title, link)
| b0nk/botxxy | src/tpb.py | Python | gpl-2.0 | 241 | 0.016598 |
#!/usr/bin/python
#-*- coding: utf-8 -*-
from xlrd import open_workbook
x_data1=[]
y_data1=[]
wb = open_workbook('phase_detector.xlsx')
for s in wb.sheets():
print 'Sheet:',s.name
for row in range(s.nrows):
print 'the row is:',row+1
values = []
for col in range(s.ncols):
values.append(s.cell(row,col).value)
print values
x_data1.append(values[0])
y_data1.append(values[1])
print x_data1
print y_data1
| MiracleWong/PythonBasic | PythonExcel/testExcel.py | Python | mit | 471 | 0.012739 |
#!/usr/bin/python3
import sys
import os
def printUsage():
sys.exit('Usage: %s server|client' % sys.argv[0])
if ((len(sys.argv)!=2) or (sys.argv[1] != 'client') and (sys.argv[1] != 'server')):
printUsage()
print("Generating daemon script\n")
fileContents = open('dyndns.sh').read( os.path.getsize('dyndns.sh') )
fileContents = fileContents.replace('{DYNDNS_PATH}', os.getcwd())
fileContents = fileContents.replace('{VERSION}', sys.argv[1])
fileContents = fileContents.replace('{USER}', os.getlogin())
print("Writing daemon script in /etc/init.d\n")
daemonPath = '/etc/init.d/dyndns'
daemon = open(daemonPath, 'w')
daemon.write(fileContents)
daemon.close()
print('Changing permissions\n')
os.chmod(daemonPath, 0o755)
print('Installing the init script')
os.system('update-rc.d dyndns defaults')
print('done.\nYou can start the service by using:\nsudo service dyndns start') | MilkyWeb/dyndns | install.py | Python | mit | 881 | 0.010216 |
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.urls.base import reverse_lazy as reverse
from django.utils.translation import ugettext_lazy as _
from ..forms.journals import JournalEntryForm, JournalForm, JournalLeaderEntryForm
from ..models.journals import Journal, JournalEntry, JournalLeaderEntry, Subject
from .generic import CreateView, DeleteView, DetailView, TemplateView, UpdateView
class AlternatingView(TemplateView):
template_name = "leprikon/alternating.html"
def get_title(self):
return _("Alternating in school year {}").format(self.request.school_year)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["alternate_leader_entries"] = self.request.leader.get_alternate_leader_entries(self.request.school_year)
return context
class JournalQuerySetMixin:
def get_queryset(self):
qs = super().get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(leaders=self.request.leader)
return qs
class JournalView(JournalQuerySetMixin, DetailView):
model = Journal
template_name_suffix = "_journal"
class JournalCreateView(CreateView):
model = Journal
form_class = JournalForm
template_name = "leprikon/journal_form.html"
title = _("New journal")
def dispatch(self, request, subject):
kwargs = {"id": subject}
if not self.request.user.is_staff:
kwargs["leaders"] = self.request.leader
self.subject = get_object_or_404(Subject, **kwargs)
self.success_url = reverse("leprikon:subject_journals", args=(self.subject.subject_type.slug, self.subject.id))
return super().dispatch(request)
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["subject"] = self.subject
return kwargs
def get_message(self):
return _("New journal {} has been created.").format(self.object)
class JournalUpdateView(JournalQuerySetMixin, UpdateView):
model = Journal
form_class = JournalForm
success_url = reverse("leprikon:summary")
template_name = "leprikon/journal_form.html"
title = _("Change journal")
class JournalDeleteView(DeleteView):
model = Journal
title = _("Delete journal")
message = _("Journal has been deleted.")
def get_queryset(self):
qs = super().get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(subject__leaders=self.request.leader)
return qs
def get_object(self):
obj = super().get_object()
if obj.all_journal_entries:
raise Http404()
return obj
def get_question(self):
return _("Do You really want to delete the journal {}?").format(self.object)
class JournalEntryCreateView(CreateView):
model = JournalEntry
form_class = JournalEntryForm
template_name = "leprikon/journalentry_form.html"
title = _("New journal entry")
message = _("The journal entry has been created.")
def dispatch(self, request, *args, **kwargs):
if self.request.user.is_staff:
self.journal = get_object_or_404(Journal, id=int(kwargs.pop("journal")))
else:
self.journal = get_object_or_404(Journal, id=int(kwargs.pop("journal")), leaders=self.request.leader)
return super().dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["journal"] = self.journal
return kwargs
class JournalEntryUpdateView(UpdateView):
model = JournalEntry
form_class = JournalEntryForm
template_name = "leprikon/journalentry_form.html"
title = _("Change journal entry")
message = _("The journal entry has been updated.")
def get_object(self):
obj = super().get_object()
if self.request.user.is_staff or self.request.leader in obj.journal.all_leaders + obj.all_alternates:
return obj
else:
raise Http404()
class JournalEntryDeleteView(DeleteView):
model = JournalEntry
title = _("Delete journal entry")
message = _("The journal entry has been deleted.")
def get_queryset(self):
qs = super().get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(journal__leaders=self.request.leader)
return qs
def get_object(self):
obj = super().get_object()
if obj.affects_submitted_timesheets:
raise Http404()
return obj
def get_question(self):
return _("Do You really want to delete journal entry?")
class JournalLeaderEntryUpdateView(UpdateView):
model = JournalLeaderEntry
form_class = JournalLeaderEntryForm
template_name = "leprikon/journalleaderentry_form.html"
title = _("Change timesheet entry")
message = _("The timesheet entry has been updated.")
def get_object(self):
obj = super().get_object()
if (
self.request.user.is_staff
or obj.timesheet.leader == self.request.leader
or self.request.leader in obj.journal_entry.journal.all_leaders
):
return obj
else:
raise Http404()
class JournalLeaderEntryDeleteView(DeleteView):
model = JournalLeaderEntry
title = _("Delete timesheet entry")
message = _("The timesheet entry has been deleted.")
def get_queryset(self):
return (
super()
.get_queryset()
.filter(
timesheet__leader=self.request.leader,
timesheet__submitted=False,
)
)
def get_question(self):
return _("Do You really want to delete timesheet entry?")
| leprikon-cz/leprikon | leprikon/views/journals.py | Python | bsd-3-clause | 5,762 | 0.001736 |
import json
from sets import Set
from sys import maxint
import math
# tmp hacky functions for vec3
def norm2 (a):
return dot(a, a)
def dot ( a, b ):
return a[0] * b[0] + a[1] * b[1] + a[2] * b[2]
def area (a, b, c):
u = [ b[0] - a[0], b[1] - a[1], b[2] - a[2] ]
v = [ c[0] - a[0], c[1] - a[1], c[2] - a[2] ]
dot_uv = dot(u, v)
cross2 = norm2(u) * norm2(v) - dot_uv * dot_uv
return math.sqrt(cross2) * 0.5
class DiagramJson:
def __init__(self):
self.json = {
'form': {
'vertices': {},
'vertices_2_force_faces': {}, # face array
'vertices_2_force_cells': {},
'vertices_external': None, # converted from set: vid: 1
'edges': {}
},
'force': {
'vertices': {},
'edges': {},
'faces_e': {},
'faces_v': {},
'cells': {}
},
'strength_scaler': {
'min': maxint,
'max': 0
},
'force_face_2_strength': {}
}
class Txt2JsonParser:
def __init__(self):
self.diagramJson = DiagramJson()
# # tmp data structures used only when parsing
# self.form_edge_2_vertex = {}
self.force_face_2_form_edge = {} # inverse index, for caluclate edge width i.e. area of faces (strength)
# self.form_vertex_external_count = {} # vid: count - 0, 1, 2
def readFormVertex(self, filename):
f = open(filename)
v = self.diagramJson.json['form']['vertices']
v2fa = self.diagramJson.json['form']['vertices_2_force_faces']
for line in f:
vertex = line.strip().split('\t')
# print vertex
v[vertex[0]] = map(float, vertex[1:])
# create array for form_vertices to force_face array (cells)
v2fa[vertex[0]] = []
# print self.diagramJson.json
f.close()
def readFormEdge(self, filename_edge_vertex, filename_edge_to_force_face, filename_edge_ex):
f_edge_vertex = open(filename_edge_vertex)
edges = self.diagramJson.json['form']['edges']
for line in f_edge_vertex:
edge = line.strip().split('\t')
e = edges[edge[0]] = {}
e['vertex'] = edge[1:]
# e['external'] = False
# print edge[0], e['vertex']
# print edges
f_edge_vertex.close()
v2fa = self.diagramJson.json['form']['vertices_2_force_faces']
f_edge_to_force_face = open(filename_edge_to_force_face)
for line in f_edge_to_force_face:
edge = line.strip().split('\t')
f = edge[1] if edge[1] != "Null" else None
edges[edge[0]]['force_face'] = f
edge_vertex = edges[edge[0]]['vertex']
for v in edge_vertex:
v2fa[v].append(f)
# force_face_2_form_edge (tmp structure) for compute strength
if f != None:
self.force_face_2_form_edge[f] = edge[0]
f_edge_to_force_face.close()
vertex_ex_set = Set()
f_edge_ex = open(filename_edge_ex)
for line in f_edge_ex:
edge = line.strip().split('\t')
for e in edge:
edges[e]['external'] = True
vertex_ex_set.add(edges[e]['vertex'][0])
vertex_ex_set.add(edges[e]['vertex'][1])
f_edge_ex.close()
self.diagramJson.json['form']['vertices_external'] = dict.fromkeys(vertex_ex_set, 1)
# label external force edge
for e in edges:
is_ex_vertex_0 = edges[e]['vertex'][0] in vertex_ex_set
is_ex_vertex_1 = edges[e]['vertex'][1] in vertex_ex_set
if is_ex_vertex_0 != is_ex_vertex_1:
# print edges[e]['vertex'][0], ':', is_ex_vertex_0, ' , ', edges[e]['vertex'][1], ':', is_ex_vertex_1
# force vector: from v0 to v1
edges[e]['ex_force'] = True
# print edges
# print self.diagramJson.json
def readForceVertex(self, filename):
f = open(filename)
v = self.diagramJson.json['force']['vertices']
for line in f:
vertex = line.strip().split('\t')
# print vertex
v[vertex[0]] = map(float, vertex[1:])
# print self.diagramJson.json
f.close()
def readForceEdge(self, filename_edge_vertex):
f_edge_vertex = open(filename_edge_vertex)
edges = self.diagramJson.json['force']['edges']
for line in f_edge_vertex:
edge = line.strip().split('\t')
edges[edge[0]] = edge[1:]
# print edges
f_edge_vertex.close()
# print self.diagramJson.json
def readForceFaceEdge(self, filename_face_edge):
f_face_edge = open(filename_face_edge)
edges = self.diagramJson.json['force']['edges']
faces_e = self.diagramJson.json['force']['faces_e']
# faces_v = self.diagramJson.json['force']['faces_v']
for line in f_face_edge:
face = line.strip().split('\t')
faces_e[face[0]] = face[1:]
# # convert face edge to face vertex
# cur_face_vertex = Set()
# for e in face[1:]:
# # extend vertex array
# # cur_face_vertex.extend(edges[e])
# for v in edges[e]:
# cur_face_vertex.add(v)
# faces_v[face[0]] = list(cur_face_vertex)
# print faces_v[face[0]]
f_face_edge.close()
# print self.diagramJson.json
def readForceFaceVertex(self, filename_face_vertex):
f_face_vertex = open(filename_face_vertex)
# fan shape order
faces_v = self.diagramJson.json['force']['faces_v']
strengthScaler = self.diagramJson.json['strength_scaler']
force_face_2_strength = self.diagramJson.json['force_face_2_strength']
v = self.diagramJson.json['force']['vertices']
e = self.diagramJson.json['form']['edges']
for line in f_face_vertex:
face = line.strip().split('\t')
faces_v[face[0]] = face[1:]
strength = 0
if len(face) == 4:
# tri
strength = area( v[face[1]], v[face[2]], v[face[3]] )
elif len(face) == 5:
# quad
strength = area( v[face[1]], v[face[2]], v[face[3]] ) + area( v[face[1]], v[face[3]], v[face[4]] )
else:
print 'Error: face ', face[0], ' is not tri or quad!!'
# if face[0] == '17f' or face[0] == '19f':
# print face[0], face[1:], map( lambda vid: v[vid], face[1:] ), area(v[face[1]], v[face[2]], v[face[3]]), strength
# e[ self.force_face_2_form_edge[face[0]] ]['strength'] = strength
force_face_2_strength[ face[0] ] = strength
curEdge = e[ self.force_face_2_form_edge[face[0]] ]
if 'external' not in curEdge and 'ex_force' not in curEdge:
strengthScaler['max'] = max(strength, strengthScaler['max'])
strengthScaler['min'] = min(strength, strengthScaler['min'])
f_face_vertex.close()
if __name__ == "__main__":
# foldername = "example_01"
# foldername = "example_02"
# foldername = "example_03"
foldername = "example_04"
parser = Txt2JsonParser()
parser.readFormVertex(foldername + "/form_v.txt")
parser.readFormEdge(foldername + "/form_e_v.txt", \
foldername + "/form_e_to_force_f.txt", \
foldername + "/form_e_ex.txt")
parser.readForceVertex(foldername + "/force_v.txt")
parser.readForceEdge(foldername + "/force_e_v.txt")
# parser.readForceFaceEdge(foldername + "/force_f_e.txt")
parser.readForceFaceVertex(foldername + "/force_f_v.txt")
with open(foldername + '/diagram.json', 'w') as out:
json.dump(parser.diagramJson.json, out)
| shrekshao/Polyhedron3D | assets/models/test/txt2json_parser.py | Python | mit | 8,122 | 0.006156 |
#! /usr/bin/env python
import sys, pgq.setadmin
if __name__ == '__main__':
script = pgq.setadmin.SetAdmin('set_admin', sys.argv[1:])
script.start()
| ssinger/skytools-cvs | python/setadm.py | Python | isc | 159 | 0.012579 |
# -*- coding: utf-8 -*-
#
# 2015-11-03 Cornelius Kölbel <cornelius@privacyidea.org>
# Add check if an admin user exists
# 2014-12-15 Cornelius Kölbel, info@privacyidea.org
# Initial creation
#
# (c) Cornelius Kölbel
# Info: http://www.privacyidea.org
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from privacyidea.models import Admin
from privacyidea.lib.token import check_user_pass
from privacyidea.lib.policydecorators import libpolicy, login_mode
from privacyidea.lib.crypto import hash_with_pepper, verify_with_pepper
class ROLE(object):
ADMIN = "admin"
USER = "user"
VALIDATE = "validate"
def verify_db_admin(username, password):
"""
This function is used to verify the username and the password against the
database table "Admin".
:param username: The administrator username
:param password: The password
:return: True if password is correct for the admin
:rtype: bool
"""
success = False
qa = Admin.query.filter(Admin.username == username).first()
if qa:
success = verify_with_pepper(qa.password, password)
return success
def db_admin_exist(username):
"""
Checks if a local admin in the database exists
:param username: The username of the admin
:return: True, if exist
"""
return bool(get_db_admin(username))
def create_db_admin(app, username, email=None, password=None):
pw_dig = None
if password:
pw_dig = hash_with_pepper(password)
user = Admin(email=email, username=username, password=pw_dig)
user.save()
def list_db_admin():
admins = Admin.query.all()
print("Name \t email")
print(30*"=")
for admin in admins:
print("{0!s} \t {1!s}".format(admin.username, admin.email))
def get_db_admins():
admins = Admin.query.all()
return admins
def get_db_admin(username):
return Admin.query.filter(Admin.username == username).first()
def delete_db_admin(username):
print("Deleting admin {0!s}".format(username))
Admin.query.filter(Admin.username == username).first().delete()
@libpolicy(login_mode)
def check_webui_user(user_obj,
password,
options=None,
superuser_realms=None,
check_otp=False):
"""
This function is used to authenticate the user at the web ui.
It checks against the userstore or against OTP/privacyidea (check_otp).
It returns a tuple of
* true/false if the user authenticated successfully
* the role of the user
* the "detail" dictionary of the response
:param user_obj: The user who tries to authenticate
:type user_obj: User Object
:param password: Password, static and or OTP
:param options: additional options like g and clientip
:type options: dict
:param superuser_realms: list of realms, that contain admins
:type superuser_realms: list
:param check_otp: If set, the user is not authenticated against the
userstore but against privacyidea
:return: tuple of bool, string and dict/None
"""
options = options or {}
superuser_realms = superuser_realms or []
user_auth = False
role = ROLE.USER
details = None
if check_otp:
# check if the given password matches an OTP token
check, details = check_user_pass(user_obj, password, options=options)
if check:
user_auth = True
else:
# check the password of the user against the userstore
if user_obj.check_password(password):
user_auth = True
# If the realm is in the SUPERUSER_REALM then the authorization role
# is risen to "admin".
if user_obj.realm in superuser_realms:
role = ROLE.ADMIN
return user_auth, role, details
| wheldom01/privacyidea | privacyidea/lib/auth.py | Python | agpl-3.0 | 4,375 | 0 |
# Copyright (c) 2014, The MITRE Corporation. All rights reserved.
# For license information, see the LICENSE.txt file
from __future__ import absolute_import
from django.conf import settings
from django.test import Client, TestCase
class TETestObj(object):
def __init__(self, target, expected_stubs, expected_operand=None, expected_nsmap=None):
self.target = target
self.expected_stub_set = set(expected_stubs)
self.expected_operand = expected_operand
self.expected_nsmap = expected_nsmap
def check_result(self, xpath_builders, operand=None, nsmap=None):
xpath_stubs = ['/'.join(xb.xpath_parts) for xb in xpath_builders]
xpath_stub_set = set(xpath_stubs)
if self.expected_stub_set != xpath_stub_set:
raise ValueError('Expected XPath Stubs failure!\n'
'Expected: %s\n'
'Actual : %s\n' % (self.expected_stub_set, xpath_stub_set))
if self.expected_operand is not None:
if self.expected_operand != operand:
raise ValueError('Expected operand failure!\n'
'Expected: %s\n'
'Actual : %s\n' % (self.expected_operand, operand))
if self.expected_nsmap is not None:
if self.expected_nsmap != nsmap:
raise ValueError('Expected nsmap failure!\n'
'Expected: %s\n'
'Actual : %\n' % (self.expected_nsmap, nsmap))
no_wc_001 = TETestObj(target='STIX_Package/STIX_Header/Handling/Marking/Marking_Structure/Terms_Of_Use',
expected_stubs=[
'/stix:STIX_Package/stix:STIX_Header/stix:Handling/marking:Marking/marking:Marking_Structure/'
'terms:Terms_Of_Use',
])
# l_wc_001 = TETestObj(target='**/NameElement',
# expected_stubs=['//xal:NameElement', ])
l_wc_002 = TETestObj(target='*/STIX_Header/Title',
expected_stubs=['/*/stix:STIX_Header/stix:Title', ])
l_wc_003 = TETestObj(target='**/@cybox_major_version',
expected_stubs=['//@cybox_major_version',])
m_wc_001 = TETestObj(target='STIX_Package/*/Title',
expected_stubs=['/stix:STIX_Package/*/stix:Title'])
# m_wc_002 = TETestObj(target='STIX_Package/**/NameElement',
# expected_stubs=['/stix:STIX_Package//xal:NameElement'])
t_wc_001 = TETestObj(target='STIX_Package/STIX_Header/*',
expected_stubs=['/stix:STIX_Package/stix:STIX_Header/*',
'/stix:STIX_Package/stix:STIX_Header/@*'])
t_wc_002 = TETestObj(target='STIX_Package/TTPs/**',
expected_stubs=['/stix:STIX_Package/stix:TTPs//*',
'/stix:STIX_Package/stix:TTPs//@*'])
class BaseXmlQueryHandlerTests(TestCase):
def test_01(self):
"""
Test the target_to_xpath_stubs2() function
:return:
"""
test_tes = (no_wc_001,
# l_wc_001, l_wc_002,
l_wc_002,
m_wc_001, # m_wc_002,
t_wc_001, t_wc_002)
from taxii_services.query_handlers.stix_xml_111_handler import StixXml111QueryHandler
for test_te in test_tes:
xpath_builders, nsmap = StixXml111QueryHandler.target_to_xpath_builders(None, test_te.target)
test_te.check_result(xpath_builders, nsmap)
| TAXIIProject/django-taxii-services | tests/test_query_handler.py | Python | bsd-3-clause | 3,576 | 0.003356 |
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2012-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module exports :class:`AkkarBommer2010`,
class:`AkkarBommer2010SWISS01`,
class:`AkkarBommer2010SWISS04`,
class:`AkkarBommer2010SWISS08`,
"""
from __future__ import division
import numpy as np
from scipy.constants import g
from openquake.hazardlib.gsim.base import GMPE, CoeffsTable
from openquake.hazardlib import const
from openquake.hazardlib.imt import PGA, PGV, SA
from openquake.hazardlib.gsim.akkar_bommer_2010_swiss_coeffs import (
COEFFS_FS_ROCK_SWISS01,
COEFFS_FS_ROCK_SWISS04,
COEFFS_FS_ROCK_SWISS08
)
from openquake.hazardlib.gsim.utils_swiss_gmpe import _apply_adjustments
class AkkarBommer2010(GMPE):
"""
Implements GMPE developed by Sinan Akkar and Julian J. Bommer
and published as "Empirical Equations for the Prediction of PGA, PGV,
and Spectral Accelerations in Europe, the Mediterranean Region, and
the Middle East", Seismological Research Letters, 81(2), 195-206.
SA at 4 s (not supported by the original equations) has been added in the
context of the SHARE project and assumed to be equal to SA at 3 s but
scaled with proper factor.
Equation coefficients for PGA and SA periods up to 0.05 seconds have been
taken from updated model as described in 'Extending ground-motion
prediction equations for spectral accelerations to higher response
frequencies',Julian J. Bommer, Sinan Akkar, Stephane Drouet,
Bull. Earthquake Eng. (2012) volume 10, pages 379 - 399.
Coefficients for PGV and SA above 0.05 seconds are taken from the
original 2010 publication.
"""
#: Supported tectonic region type is 'active shallow crust' because the
#: equations have been derived from data from Southern Europe, North
#: Africa, and active areas of the Middle East, as explained in the
# 'Introduction', page 195.
DEFINED_FOR_TECTONIC_REGION_TYPE = const.TRT.ACTIVE_SHALLOW_CRUST
#: Set of :mod:`intensity measure types <openquake.hazardlib.imt>`
#: this GSIM can calculate. A set should contain classes from module
#: :mod:`openquake.hazardlib.imt`.
DEFINED_FOR_INTENSITY_MEASURE_TYPES = set([
PGA,
PGV,
SA
])
#: Supported intensity measure component is the geometric mean of two
#: horizontal components
#: :attr:`~openquake.hazardlib.const.IMC.AVERAGE_HORIZONTAL`, see page 196.
DEFINED_FOR_INTENSITY_MEASURE_COMPONENT = const.IMC.AVERAGE_HORIZONTAL
#: Supported standard deviation types are inter-event, intra-event
#: and total, see equation 2, page 199.
DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([
const.StdDev.TOTAL,
const.StdDev.INTER_EVENT,
const.StdDev.INTRA_EVENT
])
#: Required site parameter is only Vs30 (used to distinguish rock
#: and stiff and soft soil).
REQUIRES_SITES_PARAMETERS = set(('vs30', ))
#: Required rupture parameters are magnitude and rake (eq. 1, page 199).
REQUIRES_RUPTURE_PARAMETERS = set(('rake', 'mag'))
#: Required distance measure is RRup (eq. 1, page 199).
REQUIRES_DISTANCES = set(('rjb', ))
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# extracting dictionary of coefficients specific to required
# intensity measure type.
C = self.COEFFS[imt]
imean = (self._compute_magnitude(rup, C) +
self._compute_distance(rup, dists, imt, C) +
self._get_site_amplification(sites, imt, C) +
self._get_mechanism(sites, rup, imt, C))
# Convert units to g,
# but only for PGA and SA (not PGV):
if isinstance(imt, (PGA, SA)):
mean = np.log((10.0 ** (imean - 2.0)) / g)
else:
# PGV:
mean = np.log(10.0 ** imean)
# apply scaling factor for SA at 4 s
if isinstance(imt, SA) and imt.period == 4.0:
mean /= 0.8
istddevs = self._get_stddevs(
C, stddev_types, num_sites=len(sites.vs30)
)
stddevs = np.log(10 ** np.array(istddevs))
return mean, stddevs
def _get_stddevs(self, C, stddev_types, num_sites):
"""
Return standard deviations as defined in table 1, p. 200.
"""
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(C['SigmaTot'] + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(C['Sigma1'] + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(C['tau'] + np.zeros(num_sites))
return stddevs
def _compute_magnitude(self, rup, C):
"""
Compute the first term of the equation described on p. 199:
``b1 + b2 * M + b3 * M**2``
"""
return C['b1'] + (C['b2'] * rup.mag) + (C['b3'] * (rup.mag ** 2))
def _compute_distance(self, rup, dists, imt, C):
"""
Compute the second term of the equation described on p. 199:
``(b4 + b5 * M) * log(sqrt(Rjb ** 2 + b6 ** 2))``
"""
return (((C['b4'] + C['b5'] * rup.mag)
* np.log10((np.sqrt(dists.rjb ** 2.0 + C['b6'] ** 2.0)))))
def _get_site_amplification(self, sites, imt, C):
"""
Compute the third term of the equation described on p. 199:
``b7 * Ss + b8 * Sa``
"""
Ss, Sa = self._get_site_type_dummy_variables(sites)
return (C['b7'] * Ss) + (C['b8'] * Sa)
def _get_site_type_dummy_variables(self, sites):
"""
Get site type dummy variables, ``Ss`` (for soft and stiff soil sites)
and ``Sa`` (for rock sites).
"""
Ss = np.zeros((len(sites.vs30),))
Sa = np.zeros((len(sites.vs30),))
# Soft soil; Vs30 < 360 m/s. Page 199.
idxSs = (sites.vs30 < 360.0)
# Stiff soil Class A; 360 m/s <= Vs30 <= 750 m/s. Page 199.
idxSa = (sites.vs30 >= 360.0) & (sites.vs30 <= 750.0)
Ss[idxSs] = 1
Sa[idxSa] = 1
return Ss, Sa
def _get_mechanism(self, sites, rup, imt, C):
"""
Compute the fourth term of the equation described on p. 199:
``b9 * Fn + b10 * Fr``
"""
Fn, Fr = self._get_fault_type_dummy_variables(sites, rup, imt)
return (C['b9'] * Fn) + (C['b10'] * Fr)
def _get_fault_type_dummy_variables(self, sites, rup, imt):
"""
Same classification of SadighEtAl1997. Akkar and Bommer 2010 is based
on Akkar and Bommer 2007b; read Strong-Motion Dataset and Record
Processing on p. 514 (Akkar and Bommer 2007b).
"""
Fn, Fr = 0, 0
if rup.rake >= -135 and rup.rake <= -45:
# normal
Fn = 1
elif rup.rake >= 45 and rup.rake <= 135:
# reverse
Fr = 1
return Fn, Fr
#: For PGA and SA up to 0.05 seconds, coefficients are taken from table 5,
#: page 385 of 'Extending ground-motion prediction equations for spectral
#: accelerations to higher response frequencies', while for PGV and SA with
#: periods greater than 0.05 coefficients are taken from table 1, pages
#: 200-201 of 'Empirical Equations for the Prediction of PGA, PGV,
#: and Spectral Accelerations in Europe, the Mediterranean Region, and
#: the Middle East'
COEFFS = CoeffsTable(sa_damping=5, table="""\
IMT b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 Sigma1 tau SigmaTot
pga 1.43525 0.74866 -0.06520 -2.72950 0.25139 7.74959 0.08320 0.00766 -0.05823 0.07087 0.2611 0.1056 0.281646179
0.01 1.43153 0.75258 -0.06557 -2.73290 0.25170 7.73304 0.08105 0.00745 -0.05886 0.07169 0.2616 0.1051 0.281922986
0.02 1.48690 0.75966 -0.06767 -2.82146 0.26510 7.20661 0.07825 0.00618 -0.06111 0.06756 0.2635 0.1114 0.286080775
0.03 1.64821 0.73507 -0.06700 -2.89764 0.27607 6.87179 0.06376 -0.00528 -0.06189 0.06529 0.2675 0.1137 0.290661212
0.04 2.08925 0.65032 -0.06218 -3.02618 0.28999 7.42328 0.05045 -0.02091 -0.06278 0.05935 0.2709 0.1152 0.294377054
0.05 2.49228 0.58575 -0.06043 -3.20215 0.31485 7.75532 0.03798 -0.03143 -0.06708 0.06382 0.2728 0.1181 0.297266631
0.10 2.11994 0.75179 -0.07448 -3.10538 0.30253 8.21405 0.02667 -0.00062 -0.04906 0.07910 0.2728 0.1167 0.296713212
0.15 1.64489 0.83683 -0.07544 -2.75848 0.25490 8.31786 0.02578 0.01703 -0.04184 0.07840 0.2788 0.1192 0.303212928
0.20 0.92065 0.96815 -0.07903 -2.49264 0.21790 8.21914 0.06557 0.02105 -0.02098 0.08438 0.2821 0.1081 0.302102665
0.25 0.13978 1.13068 -0.08761 -2.33824 0.20089 7.20688 0.09810 0.03919 -0.04853 0.08577 0.2871 0.0990 0.303689661
0.30 -0.84006 1.37439 -0.10349 -2.19123 0.18139 6.54299 0.12847 0.04340 -0.05554 0.09221 0.2902 0.0976 0.306172827
0.35 -1.32207 1.47055 -0.10873 -2.12993 0.17485 6.24751 0.16213 0.06695 -0.04722 0.09003 0.2983 0.1054 0.316373276
0.40 -1.70320 1.55930 -0.11388 -2.12718 0.17137 6.57173 0.21222 0.09201 -0.05145 0.09903 0.2998 0.1101 0.319377598
0.45 -1.97201 1.61645 -0.11742 -2.16619 0.17700 6.78082 0.24121 0.11675 -0.05202 0.09943 0.3037 0.1123 0.323797746
0.50 -2.76925 1.83268 -0.13202 -2.12969 0.16877 7.17423 0.25944 0.13562 -0.04283 0.08579 0.3078 0.1163 0.329038797
0.55 -3.51672 2.02523 -0.14495 -2.04211 0.15617 6.76170 0.26498 0.14446 -0.04259 0.06945 0.3070 0.1274 0.332384958
0.60 -3.92759 2.08471 -0.14648 -1.88144 0.13621 6.10103 0.27718 0.15156 -0.03853 0.05932 0.3007 0.1430 0.332970704
0.65 -4.49490 2.21154 -0.15522 -1.79031 0.12916 5.19135 0.28574 0.15239 -0.03423 0.05111 0.3004 0.1546 0.337848072
0.70 -4.62925 2.21764 -0.15491 -1.79800 0.13495 4.46323 0.30348 0.15652 -0.04146 0.04661 0.2978 0.1626 0.339298688
0.75 -4.95053 2.29142 -0.15983 -1.81321 0.13920 4.27945 0.31516 0.16333 -0.04050 0.04253 0.2973 0.1602 0.337714865
0.80 -5.32863 2.38389 -0.16571 -1.77273 0.13273 4.37011 0.32153 0.17366 -0.03946 0.03373 0.2927 0.1584 0.332812034
0.85 -5.75799 2.50635 -0.17479 -1.77068 0.13096 4.62192 0.33520 0.18480 -0.03786 0.02867 0.2917 0.1543 0.32999603
0.90 -5.82689 2.50287 -0.17367 -1.76295 0.13059 4.65393 0.34849 0.19061 -0.02884 0.02475 0.2915 0.1521 0.328795772
0.95 -5.90592 2.51405 -0.17417 -1.79854 0.13535 4.84540 0.35919 0.19411 -0.02209 0.02502 0.2912 0.1484 0.326833291
1.00 -6.17066 2.58558 -0.17938 -1.80717 0.13599 4.97596 0.36619 0.19519 -0.02269 0.02121 0.2895 0.1483 0.325273946
1.05 -6.60337 2.69584 -0.18646 -1.73843 0.12485 5.04489 0.37278 0.19461 -0.02613 0.01115 0.2888 0.1465 0.323832812
1.10 -6.90379 2.77044 -0.19171 -1.71109 0.12227 5.00975 0.37756 0.19423 -0.02655 0.00140 0.2896 0.1427 0.322848958
1.15 -6.96180 2.75857 -0.18890 -1.66588 0.11447 5.08902 0.38149 0.19402 -0.02088 0.00148 0.2871 0.1435 0.320965201
1.20 -6.99236 2.73427 -0.18491 -1.59120 0.10265 5.03274 0.38120 0.19309 -0.01623 0.00413 0.2878 0.1439 0.321770182
1.25 -6.74613 2.62375 -0.17392 -1.52886 0.09129 5.08347 0.38782 0.19392 -0.01826 0.00413 0.2863 0.1453 0.321060399
1.30 -6.51719 2.51869 -0.16330 -1.46527 0.08005 5.14423 0.38862 0.19273 -0.01902 -0.00369 0.2869 0.1427 0.320429243
1.35 -6.55821 2.52238 -0.16307 -1.48223 0.08173 5.29006 0.38677 0.19082 -0.01842 -0.00897 0.2885 0.1428 0.321906959
1.40 -6.61945 2.52611 -0.16274 -1.48257 0.08213 5.33490 0.38625 0.19285 -0.01607 -0.00876 0.2875 0.1458 0.322356774
1.45 -6.62737 2.49858 -0.15910 -1.43310 0.07577 5.19412 0.38285 0.19161 -0.01288 -0.00564 0.2857 0.1477 0.321620553
1.50 -6.71787 2.49486 -0.15689 -1.35301 0.06379 5.15750 0.37867 0.18812 -0.01208 -0.00215 0.2839 0.1468 0.319608276
1.55 -6.80776 2.50291 -0.15629 -1.31227 0.05697 5.27441 0.37267 0.18568 -0.00845 -0.00047 0.2845 0.1450 0.319319981
1.60 -6.83632 2.51009 -0.15676 -1.33260 0.05870 5.54539 0.36952 0.18149 -0.00533 -0.00006 0.2844 0.1457 0.319549448
1.65 -6.88684 2.54048 -0.15995 -1.40931 0.06860 5.93828 0.36531 0.17617 -0.00852 -0.00301 0.2841 0.1503 0.321407685
1.70 -6.94600 2.57151 -0.16294 -1.47676 0.07672 6.36599 0.35936 0.17301 -0.01204 -0.00744 0.2840 0.1537 0.32292366
1.75 -7.09166 2.62938 -0.16794 -1.54037 0.08428 6.82292 0.35284 0.16945 -0.01386 -0.01387 0.2840 0.1558 0.323928449
1.80 -7.22818 2.66824 -0.17057 -1.54273 0.08325 7.11603 0.34775 0.16743 -0.01402 -0.01492 0.2834 0.1582 0.324565556
1.85 -7.29772 2.67565 -0.17004 -1.50936 0.07663 7.31928 0.34561 0.16730 -0.01526 -0.01192 0.2828 0.1592 0.32453117
1.90 -7.35522 2.67749 -0.16934 -1.46988 0.07065 7.25988 0.34142 0.16325 -0.01563 -0.00703 0.2826 0.1611 0.325293667
1.95 -7.40716 2.68206 -0.16906 -1.43816 0.06525 7.25344 0.33720 0.16171 -0.01848 -0.00351 0.2832 0.1642 0.327358947
2.00 -7.50404 2.71004 -0.17130 -1.44395 0.06602 7.26059 0.33298 0.15839 -0.02258 -0.00486 0.2835 0.1657 0.328372867
2.05 -7.55598 2.72737 -0.17291 -1.45794 0.06774 7.40320 0.33010 0.15496 -0.02626 -0.00731 0.2836 0.1665 0.328863513
2.10 -7.53463 2.71709 -0.17221 -1.46662 0.06940 7.46168 0.32645 0.15337 -0.02920 -0.00871 0.2832 0.1663 0.328417311
2.15 -7.50811 2.71035 -0.17212 -1.49679 0.07429 7.51273 0.32439 0.15264 -0.03484 -0.01225 0.2830 0.1661 0.328143581
2.20 -8.09168 2.91159 -0.18920 -1.55644 0.08428 7.77062 0.31354 0.14430 -0.03985 -0.01927 0.2830 0.1627 0.326435736
2.25 -8.11057 2.92087 -0.19044 -1.59537 0.09052 7.87702 0.30997 0.14430 -0.04155 -0.02322 0.2830 0.1627 0.326435736
2.30 -8.16272 2.93325 -0.19155 -1.60461 0.09284 7.91753 0.30826 0.14412 -0.04238 -0.02626 0.2829 0.1633 0.326648588
2.35 -7.94704 2.85328 -0.18539 -1.57428 0.09077 7.61956 0.32071 0.14321 -0.04963 -0.02342 0.2815 0.1632 0.325386678
2.40 -7.96679 2.85363 -0.18561 -1.57833 0.09288 7.59643 0.31801 0.14301 -0.04910 -0.02570 0.2826 0.1645 0.326990841
2.45 -7.97878 2.84900 -0.18527 -1.57728 0.09428 7.50338 0.31401 0.14324 -0.04812 -0.02643 0.2825 0.1665 0.327915385
2.50 -7.88403 2.81817 -0.18320 -1.60381 0.09887 7.53947 0.31104 0.14332 -0.04710 -0.02769 0.2818 0.1681 0.328129319
2.55 -7.68101 2.75720 -0.17905 -1.65212 0.10680 7.61893 0.30875 0.14343 -0.04607 -0.02819 0.2818 0.1688 0.328488478
2.60 -7.72574 2.82043 -0.18717 -1.88782 0.14049 8.12248 0.31122 0.14255 -0.05106 -0.02966 0.2838 0.1741 0.332946317
2.65 -7.53288 2.74824 -0.18142 -1.89525 0.14356 7.92236 0.30935 0.14223 -0.05024 -0.02930 0.2845 0.1759 0.334486263
2.70 -7.41587 2.69012 -0.17632 -1.87041 0.14283 7.49999 0.30688 0.14074 -0.04887 -0.02963 0.2854 0.1772 0.335936006
2.75 -7.34541 2.65352 -0.17313 -1.86079 0.14340 7.26668 0.30635 0.14052 -0.04743 -0.02919 0.2862 0.1783 0.337196278
2.80 -7.24561 2.61028 -0.16951 -1.85612 0.14444 7.11861 0.30534 0.13923 -0.04731 -0.02751 0.2867 0.1794 0.338202972
2.85 -7.07107 2.56123 -0.16616 -1.90422 0.15127 7.36277 0.30508 0.13933 -0.04522 -0.02776 0.2869 0.1788 0.338054803
2.90 -6.99332 2.52699 -0.16303 -1.89704 0.15039 7.45038 0.30362 0.13776 -0.04203 -0.02615 0.2874 0.1784 0.338268119
2.95 -6.95669 2.51006 -0.16142 -1.90132 0.15081 7.60234 0.29987 0.13584 -0.03863 -0.02487 0.2872 0.1783 0.338045456
3.00 -6.92924 2.45899 -0.15513 -1.76801 0.13314 7.21950 0.29772 0.13198 -0.03855 -0.02469 0.2876 0.1785 0.338490783
4.00 -6.92924 2.45899 -0.15513 -1.76801 0.13314 7.21950 0.29772 0.13198 -0.03855 -0.02469 0.2876 0.1785 0.338490783
pgv -2.12833 1.21448 -0.08137 -2.46942 0.22349 6.41443 0.20354 0.08484 -0.05856 0.01305 0.2562 0.1083 0.278149834
""")
class AkkarBommer2010SWISS01(AkkarBommer2010):
"""
This class extends :class:`AkkarBommer2010`
adjusted to be used for the Swiss Hazard Model [2014].
This GMPE is valid for a fixed value of vs30=600m/s
# kappa value
K-adjustments corresponding to model 01 - as prepared by Ben Edwards
K-value for PGA were not provided but infered from SA[0.01s]
the model considers a fixed value of vs30=600 to match the
reference vs30=1100m/s
# small-magnitude correction
# single station sigma - inter-event magnitude/distance adjustment
Disclaimer: these equations are modified to be used for the
Swiss Seismic Hazard Model [2014].
The use of these models is the soly responsability of the hazard modeler.
Model implmented by laurentiu.danciu@gmail.com
"""
DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([
const.StdDev.TOTAL
])
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
sites.vs30 = 600 * np.ones(len(sites.vs30))
mean, stddevs = super(AkkarBommer2010SWISS01, self).\
get_mean_and_stddevs(sites, rup, dists, imt, stddev_types)
tau_ss = 'tau'
log_phi_ss = np.log(10)
mean, stddevs = _apply_adjustments(
AkkarBommer2010.COEFFS, self.COEFFS_FS_ROCK[imt], tau_ss,
mean, stddevs, sites, rup, dists.rjb, imt, stddev_types,
log_phi_ss)
return mean, np.log(10 ** np.array(stddevs))
COEFFS_FS_ROCK = COEFFS_FS_ROCK_SWISS01
class AkkarBommer2010SWISS04(AkkarBommer2010SWISS01):
"""
This class extends :class:`AkkarBommer2010` following same strategy
as for :class:`AkkarBommer2010SWISS01`
"""
DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([
const.StdDev.TOTAL
])
COEFFS_FS_ROCK = COEFFS_FS_ROCK_SWISS04
class AkkarBommer2010SWISS08(AkkarBommer2010SWISS01):
"""
This class extends :class:`AkkarBommer2010` following same strategy
as for :class:`AkkarBommer2010SWISS01` to be used for the
Swiss Hazard Model [2014].
"""
DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([
const.StdDev.TOTAL
])
COEFFS_FS_ROCK = COEFFS_FS_ROCK_SWISS08
| rcgee/oq-hazardlib | openquake/hazardlib/gsim/akkar_bommer_2010.py | Python | agpl-3.0 | 21,696 | 0.003134 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('players', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='player',
name='last_updated',
field=models.DateTimeField(null=True, blank=True),
preserve_default=True,
),
]
| robrocker7/h1z1map | server/players/migrations/0002_player_last_updated.py | Python | apache-2.0 | 442 | 0 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved
# Copyright (c) 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
import time
import uuid
from keystoneauth1 import loading as ks_loading
from neutronclient.common import exceptions as neutron_client_exc
from neutronclient.v2_0 import client as clientv20
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import uuidutils
import six
from nova.api.openstack import extensions
from nova.compute import utils as compute_utils
from nova import exception
from nova.i18n import _, _LE, _LI, _LW
from nova.network import base_api
from nova.network import model as network_model
from nova.network.neutronv2 import constants
from nova import objects
from nova.pci import manager as pci_manager
from nova.pci import request as pci_request
from nova.pci import whitelist as pci_whitelist
neutron_opts = [
cfg.StrOpt('url',
default='http://127.0.0.1:9696',
help='URL for connecting to neutron'),
cfg.StrOpt('region_name',
help='Region name for connecting to neutron in admin context'),
cfg.StrOpt('ovs_bridge',
default='br-int',
help='Default OVS bridge name to use if not specified '
'by Neutron'),
cfg.IntOpt('extension_sync_interval',
default=600,
help='Number of seconds before querying neutron for'
' extensions'),
]
NEUTRON_GROUP = 'neutron'
CONF = cfg.CONF
CONF.register_opts(neutron_opts, NEUTRON_GROUP)
deprecations = {'cafile': [cfg.DeprecatedOpt('ca_certificates_file',
group=NEUTRON_GROUP)],
'insecure': [cfg.DeprecatedOpt('api_insecure',
group=NEUTRON_GROUP)],
'timeout': [cfg.DeprecatedOpt('url_timeout',
group=NEUTRON_GROUP)]}
_neutron_options = ks_loading.register_session_conf_options(
CONF, NEUTRON_GROUP, deprecated_opts=deprecations)
ks_loading.register_auth_conf_options(CONF, NEUTRON_GROUP)
CONF.import_opt('default_floating_pool', 'nova.network.floating_ips')
CONF.import_opt('flat_injected', 'nova.network.manager')
LOG = logging.getLogger(__name__)
soft_external_network_attach_authorize = extensions.soft_core_authorizer(
'network', 'attach_external_network')
_SESSION = None
_ADMIN_AUTH = None
DEFAULT_SECGROUP = 'default'
def list_opts():
opts = copy.deepcopy(_neutron_options)
opts.insert(0, ks_loading.get_auth_common_conf_options()[0])
# NOTE(dims): There are a lot of auth plugins, we just generate
# the config options for a few common ones
plugins = ['password', 'v2password', 'v3password']
for name in plugins:
plugin = ks_loading.get_plugin_loader(name)
for plugin_option in ks_loading.get_auth_plugin_conf_options(plugin):
for option in opts:
if option.name == plugin_option.name:
break
else:
opts.append(plugin_option)
opts.sort(key=lambda x: x.name)
return [(NEUTRON_GROUP, opts)]
def reset_state():
global _ADMIN_AUTH
global _SESSION
_ADMIN_AUTH = None
_SESSION = None
def _load_auth_plugin(conf):
auth_plugin = ks_loading.load_auth_from_conf_options(conf, NEUTRON_GROUP)
if auth_plugin:
return auth_plugin
err_msg = _('Unknown auth type: %s') % conf.neutron.auth_type
raise neutron_client_exc.Unauthorized(message=err_msg)
def get_client(context, admin=False):
# NOTE(dprince): In the case where no auth_token is present we allow use of
# neutron admin tenant credentials if it is an admin context. This is to
# support some services (metadata API) where an admin context is used
# without an auth token.
global _ADMIN_AUTH
global _SESSION
auth_plugin = None
if not _SESSION:
_SESSION = ks_loading.load_session_from_conf_options(
CONF, NEUTRON_GROUP)
if admin or (context.is_admin and not context.auth_token):
if not _ADMIN_AUTH:
_ADMIN_AUTH = _load_auth_plugin(CONF)
auth_plugin = _ADMIN_AUTH
elif context.auth_token:
auth_plugin = context.get_auth_plugin()
if not auth_plugin:
# We did not get a user token and we should not be using
# an admin token so log an error
raise neutron_client_exc.Unauthorized()
return clientv20.Client(session=_SESSION,
auth=auth_plugin,
endpoint_override=CONF.neutron.url,
region_name=CONF.neutron.region_name)
def _is_not_duplicate(item, items, items_list_name, instance):
present = item in items
# The expectation from this function's perspective is that the
# item is not part of the items list so if it is part of it
# we should at least log it as a warning
if present:
LOG.warning(_LW("%(item)s already exists in list: %(list_name)s "
"containing: %(items)s. ignoring it"),
{'item': item,
'list_name': items_list_name,
'items': items},
instance=instance)
return not present
class API(base_api.NetworkAPI):
"""API for interacting with the neutron 2.x API."""
def __init__(self, skip_policy_check=False):
super(API, self).__init__(skip_policy_check=skip_policy_check)
self.last_neutron_extension_sync = None
self.extensions = {}
def setup_networks_on_host(self, context, instance, host=None,
teardown=False):
"""Setup or teardown the network structures."""
def _get_available_networks(self, context, project_id,
net_ids=None, neutron=None):
"""Return a network list available for the tenant.
The list contains networks owned by the tenant and public networks.
If net_ids specified, it searches networks with requested IDs only.
"""
if not neutron:
neutron = get_client(context)
if net_ids:
# If user has specified to attach instance only to specific
# networks then only add these to **search_opts. This search will
# also include 'shared' networks.
search_opts = {'id': net_ids}
nets = neutron.list_networks(**search_opts).get('networks', [])
else:
# (1) Retrieve non-public network list owned by the tenant.
search_opts = {'tenant_id': project_id, 'shared': False}
nets = neutron.list_networks(**search_opts).get('networks', [])
# (2) Retrieve public network list.
search_opts = {'shared': True}
nets += neutron.list_networks(**search_opts).get('networks', [])
_ensure_requested_network_ordering(
lambda x: x['id'],
nets,
net_ids)
return nets
def _create_port(self, port_client, instance, network_id, port_req_body,
fixed_ip=None, security_group_ids=None,
available_macs=None, dhcp_opts=None):
"""Attempts to create a port for the instance on the given network.
:param port_client: The client to use to create the port.
:param instance: Create the port for the given instance.
:param network_id: Create the port on the given network.
:param port_req_body: Pre-populated port request. Should have the
device_id, device_owner, and any required neutron extension values.
:param fixed_ip: Optional fixed IP to use from the given network.
:param security_group_ids: Optional list of security group IDs to
apply to the port.
:param available_macs: Optional set of available MAC addresses,
from which one will be used at random.
:param dhcp_opts: Optional DHCP options.
:returns: ID of the created port.
:raises PortLimitExceeded: If neutron fails with an OverQuota error.
:raises NoMoreFixedIps: If neutron fails with
IpAddressGenerationFailure error.
:raises: PortBindingFailed: If port binding failed.
"""
try:
if fixed_ip:
port_req_body['port']['fixed_ips'] = [
{'ip_address': str(fixed_ip)}]
port_req_body['port']['network_id'] = network_id
port_req_body['port']['admin_state_up'] = True
port_req_body['port']['tenant_id'] = instance.project_id
if security_group_ids:
port_req_body['port']['security_groups'] = security_group_ids
if available_macs is not None:
if not available_macs:
raise exception.PortNotFree(
instance=instance.uuid)
mac_address = available_macs.pop()
port_req_body['port']['mac_address'] = mac_address
if dhcp_opts is not None:
port_req_body['port']['extra_dhcp_opts'] = dhcp_opts
port = port_client.create_port(port_req_body)
port_id = port['port']['id']
if (port['port'].get('binding:vif_type') ==
network_model.VIF_TYPE_BINDING_FAILED):
port_client.delete_port(port_id)
raise exception.PortBindingFailed(port_id=port_id)
LOG.debug('Successfully created port: %s', port_id,
instance=instance)
return port_id
except neutron_client_exc.InvalidIpForNetworkClient:
LOG.warning(_LW('Neutron error: %(ip)s is not a valid IP address '
'for network %(network_id)s.'),
{'ip': fixed_ip, 'network_id': network_id},
instance=instance)
msg = (_('Fixed IP %(ip)s is not a valid ip address for '
'network %(network_id)s.') %
{'ip': fixed_ip, 'network_id': network_id})
raise exception.InvalidInput(reason=msg)
except neutron_client_exc.IpAddressInUseClient:
LOG.warning(_LW('Neutron error: Fixed IP %s is '
'already in use.'), fixed_ip, instance=instance)
msg = _("Fixed IP %s is already in use.") % fixed_ip
raise exception.FixedIpAlreadyInUse(message=msg)
except neutron_client_exc.OverQuotaClient:
LOG.warning(_LW(
'Neutron error: Port quota exceeded in tenant: %s'),
port_req_body['port']['tenant_id'], instance=instance)
raise exception.PortLimitExceeded()
except neutron_client_exc.IpAddressGenerationFailureClient:
LOG.warning(_LW('Neutron error: No more fixed IPs in network: %s'),
network_id, instance=instance)
raise exception.NoMoreFixedIps(net=network_id)
except neutron_client_exc.MacAddressInUseClient:
LOG.warning(_LW('Neutron error: MAC address %(mac)s is already '
'in use on network %(network)s.'),
{'mac': mac_address, 'network': network_id},
instance=instance)
raise exception.PortInUse(port_id=mac_address)
except neutron_client_exc.NeutronClientException:
with excutils.save_and_reraise_exception():
LOG.exception(_LE('Neutron error creating port on network %s'),
network_id, instance=instance)
def _check_external_network_attach(self, context, nets):
"""Check if attaching to external network is permitted."""
if not soft_external_network_attach_authorize(context):
for net in nets:
# Perform this check here rather than in validate_networks to
# ensure the check is performed every time
# allocate_for_instance is invoked
if net.get('router:external') and not net.get('shared'):
raise exception.ExternalNetworkAttachForbidden(
network_uuid=net['id'])
def _unbind_ports(self, context, ports,
neutron, port_client=None):
"""Unbind the given ports by clearing their device_id and
device_owner.
:param context: The request context.
:param ports: list of port IDs.
:param neutron: neutron client for the current context.
:param port_client: The client with appropriate karma for
updating the ports.
"""
port_binding = self._has_port_binding_extension(context,
refresh_cache=True, neutron=neutron)
if port_client is None:
# Requires admin creds to set port bindings
port_client = (neutron if not port_binding else
get_client(context, admin=True))
for port_id in ports:
# A port_id is optional in the NetworkRequest object so check here
# in case the caller forgot to filter the list.
if port_id is None:
continue
port_req_body = {'port': {'device_id': '', 'device_owner': ''}}
if port_binding:
port_req_body['port']['binding:host_id'] = None
port_req_body['port']['binding:profile'] = {}
if constants.DNS_INTEGRATION in self.extensions:
port_req_body['port']['dns_name'] = ''
try:
port_client.update_port(port_id, port_req_body)
except neutron_client_exc.NotFound:
pass
except Exception:
LOG.exception(_LE("Unable to clear device ID "
"for port '%s'"), port_id)
def _process_requested_networks(self, context, instance, neutron,
requested_networks, hypervisor_macs=None):
"""Processes and validates requested networks for allocation.
Iterates over the list of NetworkRequest objects, validating the
request and building sets of ports, networks and MAC addresses to
use for allocating ports for the instance.
:param instance: allocate networks on this instance
:type instance: nova.objects.Instance
:param neutron: neutron client session
:type neutron: neutronclient.v2_0.client.Client
:param requested_networks: list of NetworkRequests
:type requested_networks: nova.objects.NetworkRequestList
:param hypervisor_macs: None or a set of MAC addresses that the
instance should use. hypervisor_macs are supplied by the hypervisor
driver (contrast with requested_networks which is user supplied).
NB: NeutronV2 currently assigns hypervisor supplied MAC addresses
to arbitrary networks, which requires openflow switches to
function correctly if more than one network is being used with
the bare metal hypervisor (which is the only one known to limit
MAC addresses).
:type hypervisor_macs: set
:returns: tuple of:
- ports: dict mapping of port id to port dict
- net_ids: list of requested network ids
- ordered_networks: list of nova.objects.NetworkRequest objects
for requested networks (either via explicit network request
or the network for an explicit port request)
- available_macs: set of available MAC addresses to use if creating
a port later; this is the set of hypervisor_macs after removing
any MAC addresses from explicitly requested ports.
:raises nova.exception.PortNotFound: If a requested port is not found
in Neutron.
:raises nova.exception.PortNotUsable: If a requested port is not owned
by the same tenant that the instance is created under. This error
can also be raised if hypervisor_macs is not None and a requested
port's MAC address is not in that set.
:raises nova.exception.PortInUse: If a requested port is already
attached to another instance.
:raises nova.exception.PortNotUsableDNS: If a requested port has a
value assigned to its dns_name attribute.
"""
available_macs = None
if hypervisor_macs is not None:
# Make a copy we can mutate: records macs that have not been used
# to create a port on a network. If we find a mac with a
# pre-allocated port we also remove it from this set.
available_macs = set(hypervisor_macs)
ports = {}
net_ids = []
ordered_networks = []
if requested_networks:
for request in requested_networks:
# Process a request to use a pre-existing neutron port.
if request.port_id:
# Make sure the port exists.
port = self._show_port(context, request.port_id,
neutron_client=neutron)
# Make sure the instance has access to the port.
if port['tenant_id'] != instance.project_id:
raise exception.PortNotUsable(port_id=request.port_id,
instance=instance.uuid)
# Make sure the port isn't already attached to another
# instance.
if port.get('device_id'):
raise exception.PortInUse(port_id=request.port_id)
# Make sure that if the user assigned a value to the port's
# dns_name attribute, it is equal to the instance's
# hostname
if port.get('dns_name'):
if port['dns_name'] != instance.hostname:
raise exception.PortNotUsableDNS(
port_id=request.port_id,
instance=instance.uuid, value=port['dns_name'],
hostname=instance.hostname)
# Make sure the port is usable
if (port.get('binding:vif_type') ==
network_model.VIF_TYPE_BINDING_FAILED):
raise exception.PortBindingFailed(
port_id=request.port_id)
if hypervisor_macs is not None:
if port['mac_address'] not in hypervisor_macs:
LOG.debug("Port %(port)s mac address %(mac)s is "
"not in the set of hypervisor macs: "
"%(hyper_macs)s",
{'port': request.port_id,
'mac': port['mac_address'],
'hyper_macs': hypervisor_macs},
instance=instance)
raise exception.PortNotUsable(
port_id=request.port_id,
instance=instance.uuid)
# Don't try to use this MAC if we need to create a
# port on the fly later. Identical MACs may be
# configured by users into multiple ports so we
# discard rather than popping.
available_macs.discard(port['mac_address'])
# If requesting a specific port, automatically process
# the network for that port as if it were explicitly
# requested.
request.network_id = port['network_id']
ports[request.port_id] = port
# Process a request to use a specific neutron network.
if request.network_id:
net_ids.append(request.network_id)
ordered_networks.append(request)
return ports, net_ids, ordered_networks, available_macs
def _clean_security_groups(self, security_groups):
"""Cleans security groups requested from Nova API
Neutron already passes a 'default' security group when
creating ports so it's not necessary to specify it to the
request.
"""
if security_groups == [DEFAULT_SECGROUP]:
security_groups = []
return security_groups
def _process_security_groups(self, instance, neutron, security_groups):
"""Processes and validates requested security groups for allocation.
Iterates over the list of requested security groups, validating the
request and filtering out the list of security group IDs to use for
port allocation.
:param instance: allocate networks on this instance
:type instance: nova.objects.Instance
:param neutron: neutron client session
:type neutron: neutronclient.v2_0.client.Client
:param security_groups: list of requested security group name or IDs
to use when allocating new ports for the instance
:return: list of security group IDs to use when allocating new ports
:raises nova.exception.NoUniqueMatch: If multiple security groups
are requested with the same name.
:raises nova.exception.SecurityGroupNotFound: If a requested security
group is not in the tenant-filtered list of available security
groups in Neutron.
"""
security_group_ids = []
# TODO(arosen) Should optimize more to do direct query for security
# group if len(security_groups) == 1
if len(security_groups):
search_opts = {'tenant_id': instance.project_id}
user_security_groups = neutron.list_security_groups(
**search_opts).get('security_groups')
for security_group in security_groups:
name_match = None
uuid_match = None
for user_security_group in user_security_groups:
if user_security_group['name'] == security_group:
# If there was a name match in a previous iteration
# of the loop, we have a conflict.
if name_match:
raise exception.NoUniqueMatch(
_("Multiple security groups found matching"
" '%s'. Use an ID to be more specific.") %
security_group)
name_match = user_security_group['id']
if user_security_group['id'] == security_group:
uuid_match = user_security_group['id']
# If a user names the security group the same as
# another's security groups uuid, the name takes priority.
if name_match:
security_group_ids.append(name_match)
elif uuid_match:
security_group_ids.append(uuid_match)
else:
raise exception.SecurityGroupNotFound(
security_group_id=security_group)
return security_group_ids
def allocate_for_instance(self, context, instance, **kwargs):
"""Allocate network resources for the instance.
:param context: The request context.
:param instance: nova.objects.instance.Instance object.
:param requested_networks: optional value containing
network_id, fixed_ip, and port_id
:param security_groups: security groups to allocate for instance
:param macs: None or a set of MAC addresses that the instance
should use. macs is supplied by the hypervisor driver (contrast
with requested_networks which is user supplied).
NB: NeutronV2 currently assigns hypervisor supplied MAC addresses
to arbitrary networks, which requires openflow switches to
function correctly if more than one network is being used with
the bare metal hypervisor (which is the only one known to limit
MAC addresses).
:param dhcp_options: None or a set of key/value pairs that should
determine the DHCP BOOTP response, eg. for PXE booting an instance
configured with the baremetal hypervisor. It is expected that these
are already formatted for the neutron v2 api.
See nova/virt/driver.py:dhcp_options_for_instance for an example.
:param bind_host_id: the host ID to attach to the ports being created.
"""
hypervisor_macs = kwargs.get('macs', None)
# The neutron client and port_client (either the admin context or
# tenant context) are read here. The reason for this is that there are
# a number of different calls for the instance allocation.
# We do not want to create a new neutron session for each of these
# calls.
neutron = get_client(context)
# Requires admin creds to set port bindings
port_client = (neutron if not
self._has_port_binding_extension(context,
refresh_cache=True, neutron=neutron) else
get_client(context, admin=True))
# Store the admin client - this is used later
admin_client = port_client if neutron != port_client else None
LOG.debug('allocate_for_instance()', instance=instance)
if not instance.project_id:
msg = _('empty project id for instance %s')
raise exception.InvalidInput(
reason=msg % instance.uuid)
requested_networks = kwargs.get('requested_networks')
dhcp_opts = kwargs.get('dhcp_options', None)
bind_host_id = kwargs.get('bind_host_id')
ports, net_ids, ordered_networks, available_macs = (
self._process_requested_networks(context,
instance, neutron, requested_networks, hypervisor_macs))
nets = self._get_available_networks(context, instance.project_id,
net_ids, neutron=neutron)
if not nets:
# NOTE(chaochin): If user specifies a network id and the network
# can not be found, raise NetworkNotFound error.
if requested_networks:
for request in requested_networks:
if not request.port_id and request.network_id:
raise exception.NetworkNotFound(
network_id=request.network_id)
else:
LOG.debug("No network configured", instance=instance)
return network_model.NetworkInfo([])
# if this function is directly called without a requested_network param
# or if it is indirectly called through allocate_port_for_instance()
# with None params=(network_id=None, requested_ip=None, port_id=None,
# pci_request_id=None):
if (not requested_networks
or requested_networks.is_single_unspecified):
# If no networks were requested and none are available, consider
# it a bad request.
if not nets:
raise exception.InterfaceAttachFailedNoNetwork(
project_id=instance.project_id)
# bug/1267723 - if no network is requested and more
# than one is available then raise NetworkAmbiguous Exception
if len(nets) > 1:
msg = _("Multiple possible networks found, use a Network "
"ID to be more specific.")
raise exception.NetworkAmbiguous(msg)
ordered_networks.append(
objects.NetworkRequest(network_id=nets[0]['id']))
# NOTE(melwitt): check external net attach permission after the
# check for ambiguity, there could be another
# available net which is permitted bug/1364344
self._check_external_network_attach(context, nets)
security_groups = self._clean_security_groups(
kwargs.get('security_groups', []))
security_group_ids = self._process_security_groups(
instance, neutron, security_groups)
preexisting_port_ids = []
created_port_ids = []
ports_in_requested_order = []
nets_in_requested_order = []
for request in ordered_networks:
# Network lookup for available network_id
network = None
for net in nets:
if net['id'] == request.network_id:
network = net
break
# if network_id did not pass validate_networks() and not available
# here then skip it safely not continuing with a None Network
else:
continue
nets_in_requested_order.append(network)
port_security_enabled = network.get('port_security_enabled', True)
if port_security_enabled:
if not network.get('subnets'):
# Neutron can't apply security groups to a port
# for a network without L3 assignements.
raise exception.SecurityGroupCannotBeApplied()
else:
if security_group_ids:
# We don't want to apply security groups on port
# for a network defined with
# 'port_security_enabled=False'.
raise exception.SecurityGroupCannotBeApplied()
zone = 'compute:%s' % instance.availability_zone
port_req_body = {'port': {'device_id': instance.uuid,
'device_owner': zone}}
try:
self._populate_neutron_extension_values(
context, instance, request.pci_request_id, port_req_body,
network=network, neutron=neutron,
bind_host_id=bind_host_id)
if request.port_id:
port = ports[request.port_id]
port_client.update_port(port['id'], port_req_body)
preexisting_port_ids.append(port['id'])
ports_in_requested_order.append(port['id'])
else:
created_port = self._create_port(
port_client, instance, request.network_id,
port_req_body, request.address,
security_group_ids, available_macs, dhcp_opts)
created_port_ids.append(created_port)
ports_in_requested_order.append(created_port)
self._update_port_dns_name(context, instance, network,
ports_in_requested_order[-1],
neutron)
except Exception:
with excutils.save_and_reraise_exception():
self._unbind_ports(context,
preexisting_port_ids,
neutron, port_client)
self._delete_ports(neutron, instance, created_port_ids)
nw_info = self.get_instance_nw_info(
context, instance, networks=nets_in_requested_order,
port_ids=ports_in_requested_order,
admin_client=admin_client,
preexisting_port_ids=preexisting_port_ids,
update_cells=True)
# NOTE(danms): Only return info about ports we created in this run.
# In the initial allocation case, this will be everything we created,
# and in later runs will only be what was created that time. Thus,
# this only affects the attach case, not the original use for this
# method.
return network_model.NetworkInfo([vif for vif in nw_info
if vif['id'] in created_port_ids +
preexisting_port_ids])
def _refresh_neutron_extensions_cache(self, context, neutron=None):
"""Refresh the neutron extensions cache when necessary."""
if (not self.last_neutron_extension_sync or
((time.time() - self.last_neutron_extension_sync)
>= CONF.neutron.extension_sync_interval)):
if neutron is None:
neutron = get_client(context)
extensions_list = neutron.list_extensions()['extensions']
self.last_neutron_extension_sync = time.time()
self.extensions.clear()
self.extensions = {ext['name']: ext for ext in extensions_list}
def _has_port_binding_extension(self, context, refresh_cache=False,
neutron=None):
if refresh_cache:
self._refresh_neutron_extensions_cache(context, neutron=neutron)
return constants.PORTBINDING_EXT in self.extensions
@staticmethod
def _populate_neutron_binding_profile(instance, pci_request_id,
port_req_body):
"""Populate neutron binding:profile.
Populate it with SR-IOV related information
"""
if pci_request_id:
pci_dev = pci_manager.get_instance_pci_devs(
instance, pci_request_id).pop()
devspec = pci_whitelist.get_pci_device_devspec(pci_dev)
profile = {'pci_vendor_info': "%s:%s" % (pci_dev.vendor_id,
pci_dev.product_id),
'pci_slot': pci_dev.address,
'physical_network':
devspec.get_tags().get('physical_network')
}
port_req_body['port']['binding:profile'] = profile
def _populate_neutron_extension_values(self, context, instance,
pci_request_id, port_req_body,
network=None, neutron=None,
bind_host_id=None):
"""Populate neutron extension values for the instance.
If the extensions loaded contain QOS_QUEUE then pass the rxtx_factor.
"""
self._refresh_neutron_extensions_cache(context, neutron=neutron)
if constants.QOS_QUEUE in self.extensions:
flavor = instance.get_flavor()
rxtx_factor = flavor.get('rxtx_factor')
port_req_body['port']['rxtx_factor'] = rxtx_factor
has_port_binding_extension = (
self._has_port_binding_extension(context, neutron=neutron))
if has_port_binding_extension:
port_req_body['port']['binding:host_id'] = bind_host_id
self._populate_neutron_binding_profile(instance,
pci_request_id,
port_req_body)
if constants.DNS_INTEGRATION in self.extensions:
# If the DNS integration extension is enabled in Neutron, most
# ports will get their dns_name attribute set in the port create or
# update requests in allocate_for_instance. So we just add the
# dns_name attribute to the payload of those requests. The
# exception is when the port binding extension is enabled in
# Neutron and the port is on a network that has a non-blank
# dns_domain attribute. This case requires to be processed by
# method _update_port_dns_name
if (not has_port_binding_extension
or not network.get('dns_domain')):
port_req_body['port']['dns_name'] = instance.hostname
def _update_port_dns_name(self, context, instance, network, port_id,
neutron):
"""Update an instance port dns_name attribute with instance.hostname.
The dns_name attribute of a port on a network with a non-blank
dns_domain attribute will be sent to the external DNS service
(Designate) if DNS integration is enabled in Neutron. This requires the
assignment of the dns_name to the port to be done with a Neutron client
using the user's context. allocate_for_instance uses a port with admin
context if the port binding extensions is enabled in Neutron. In this
case, we assign in this method the dns_name attribute to the port with
an additional update request. Only a very small fraction of ports will
require this additional update request.
"""
if (constants.DNS_INTEGRATION in self.extensions and
self._has_port_binding_extension(context) and
network.get('dns_domain')):
try:
port_req_body = {'port': {'dns_name': instance.hostname}}
neutron.update_port(port_id, port_req_body)
except neutron_client_exc.BadRequest:
LOG.warning(_LW('Neutron error: Instance hostname '
'%(hostname)s is not a valid DNS name'),
{'hostname': instance.hostname}, instance=instance)
msg = (_('Instance hostname %(hostname)s is not a valid DNS '
'name') % {'hostname': instance.hostname})
raise exception.InvalidInput(reason=msg)
def _delete_ports(self, neutron, instance, ports, raise_if_fail=False):
exceptions = []
for port in ports:
try:
neutron.delete_port(port)
except neutron_client_exc.NeutronClientException as e:
if e.status_code == 404:
LOG.warning(_LW("Port %s does not exist"), port,
instance=instance)
else:
exceptions.append(e)
LOG.warning(
_LW("Failed to delete port %s for instance."),
port, instance=instance, exc_info=True)
if len(exceptions) > 0 and raise_if_fail:
raise exceptions[0]
def deallocate_for_instance(self, context, instance, **kwargs):
"""Deallocate all network resources related to the instance."""
LOG.debug('deallocate_for_instance()', instance=instance)
search_opts = {'device_id': instance.uuid}
neutron = get_client(context)
data = neutron.list_ports(**search_opts)
ports = [port['id'] for port in data.get('ports', [])]
requested_networks = kwargs.get('requested_networks') or []
# NOTE(danms): Temporary and transitional
if isinstance(requested_networks, objects.NetworkRequestList):
requested_networks = requested_networks.as_tuples()
ports_to_skip = set([port_id for nets, fips, port_id, pci_request_id
in requested_networks])
# NOTE(boden): requested_networks only passed in when deallocating
# from a failed build / spawn call. Therefore we need to include
# preexisting ports when deallocating from a standard delete op
# in which case requested_networks is not provided.
ports_to_skip |= set(self._get_preexisting_port_ids(instance))
ports = set(ports) - ports_to_skip
# Reset device_id and device_owner for the ports that are skipped
self._unbind_ports(context, ports_to_skip, neutron)
# Delete the rest of the ports
self._delete_ports(neutron, instance, ports, raise_if_fail=True)
# NOTE(arosen): This clears out the network_cache only if the instance
# hasn't already been deleted. This is needed when an instance fails to
# launch and is rescheduled onto another compute node. If the instance
# has already been deleted this call does nothing.
base_api.update_instance_cache_with_nw_info(self, context, instance,
network_model.NetworkInfo([]))
def allocate_port_for_instance(self, context, instance, port_id,
network_id=None, requested_ip=None,
bind_host_id=None):
"""Allocate a port for the instance."""
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=network_id,
address=requested_ip,
port_id=port_id,
pci_request_id=None)])
return self.allocate_for_instance(context, instance,
requested_networks=requested_networks,
bind_host_id=bind_host_id)
def deallocate_port_for_instance(self, context, instance, port_id):
"""Remove a specified port from the instance.
Return network information for the instance
"""
neutron = get_client(context)
preexisting_ports = self._get_preexisting_port_ids(instance)
if port_id in preexisting_ports:
self._unbind_ports(context, [port_id], neutron)
else:
self._delete_ports(neutron, instance, [port_id],
raise_if_fail=True)
return self.get_instance_nw_info(context, instance)
def list_ports(self, context, **search_opts):
"""List ports for the client based on search options."""
return get_client(context).list_ports(**search_opts)
def show_port(self, context, port_id):
"""Return the port for the client given the port id.
:param context: Request context.
:param port_id: The id of port to be queried.
:returns: A dict containing port data keyed by 'port', e.g.
::
{'port': {'port_id': 'abcd',
'fixed_ip_address': '1.2.3.4'}}
"""
return dict(port=self._show_port(context, port_id))
def _show_port(self, context, port_id, neutron_client=None, fields=None):
"""Return the port for the client given the port id.
:param context: Request context.
:param port_id: The id of port to be queried.
:param neutron_client: A neutron client.
:param fields: The condition fields to query port data.
:returns: A dict of port data.
e.g. {'port_id': 'abcd', 'fixed_ip_address': '1.2.3.4'}
"""
if not neutron_client:
neutron_client = get_client(context)
try:
if fields:
result = neutron_client.show_port(port_id, fields=fields)
else:
result = neutron_client.show_port(port_id)
return result.get('port')
except neutron_client_exc.PortNotFoundClient:
raise exception.PortNotFound(port_id=port_id)
except neutron_client_exc.Unauthorized:
raise exception.Forbidden()
except neutron_client_exc.NeutronClientException as exc:
msg = (_("Failed to access port %(port_id)s: %(reason)s") %
{'port_id': port_id, 'reason': exc})
raise exception.NovaException(message=msg)
def _get_instance_nw_info(self, context, instance, networks=None,
port_ids=None, admin_client=None,
preexisting_port_ids=None, **kwargs):
# NOTE(danms): This is an inner method intended to be called
# by other code that updates instance nwinfo. It *must* be
# called with the refresh_cache-%(instance_uuid) lock held!
LOG.debug('_get_instance_nw_info()', instance=instance)
# Ensure that we have an up to date copy of the instance info cache.
# Otherwise multiple requests could collide and cause cache
# corruption.
compute_utils.refresh_info_cache_for_instance(context, instance)
nw_info = self._build_network_info_model(context, instance, networks,
port_ids, admin_client,
preexisting_port_ids)
return network_model.NetworkInfo.hydrate(nw_info)
def _gather_port_ids_and_networks(self, context, instance, networks=None,
port_ids=None):
"""Return an instance's complete list of port_ids and networks."""
if ((networks is None and port_ids is not None) or
(port_ids is None and networks is not None)):
message = _("This method needs to be called with either "
"networks=None and port_ids=None or port_ids and "
"networks as not none.")
raise exception.NovaException(message=message)
ifaces = compute_utils.get_nw_info_for_instance(instance)
# This code path is only done when refreshing the network_cache
if port_ids is None:
port_ids = [iface['id'] for iface in ifaces]
net_ids = [iface['network']['id'] for iface in ifaces]
if networks is None:
networks = self._get_available_networks(context,
instance.project_id,
net_ids)
# an interface was added/removed from instance.
else:
# Prepare the network ids list for validation purposes
networks_ids = [network['id'] for network in networks]
# Validate that interface networks doesn't exist in networks.
# Though this issue can and should be solved in methods
# that prepare the networks list, this method should have this
# ignore-duplicate-networks/port-ids mechanism to reduce the
# probability of failing to boot the VM.
networks = networks + [
{'id': iface['network']['id'],
'name': iface['network']['label'],
'tenant_id': iface['network']['meta']['tenant_id']}
for iface in ifaces
if _is_not_duplicate(iface['network']['id'],
networks_ids,
"networks",
instance)]
# Include existing interfaces so they are not removed from the db.
# Validate that the interface id is not in the port_ids
port_ids = [iface['id'] for iface in ifaces
if _is_not_duplicate(iface['id'],
port_ids,
"port_ids",
instance)] + port_ids
return networks, port_ids
@base_api.refresh_cache
def add_fixed_ip_to_instance(self, context, instance, network_id):
"""Add a fixed IP to the instance from specified network."""
neutron = get_client(context)
search_opts = {'network_id': network_id}
data = neutron.list_subnets(**search_opts)
ipam_subnets = data.get('subnets', [])
if not ipam_subnets:
raise exception.NetworkNotFoundForInstance(
instance_id=instance.uuid)
zone = 'compute:%s' % instance.availability_zone
search_opts = {'device_id': instance.uuid,
'device_owner': zone,
'network_id': network_id}
data = neutron.list_ports(**search_opts)
ports = data['ports']
for p in ports:
for subnet in ipam_subnets:
fixed_ips = p['fixed_ips']
fixed_ips.append({'subnet_id': subnet['id']})
port_req_body = {'port': {'fixed_ips': fixed_ips}}
try:
neutron.update_port(p['id'], port_req_body)
return self._get_instance_nw_info(context, instance)
except Exception as ex:
msg = ("Unable to update port %(portid)s on subnet "
"%(subnet_id)s with failure: %(exception)s")
LOG.debug(msg, {'portid': p['id'],
'subnet_id': subnet['id'],
'exception': ex}, instance=instance)
raise exception.NetworkNotFoundForInstance(
instance_id=instance.uuid)
@base_api.refresh_cache
def remove_fixed_ip_from_instance(self, context, instance, address):
"""Remove a fixed IP from the instance."""
neutron = get_client(context)
zone = 'compute:%s' % instance.availability_zone
search_opts = {'device_id': instance.uuid,
'device_owner': zone,
'fixed_ips': 'ip_address=%s' % address}
data = neutron.list_ports(**search_opts)
ports = data['ports']
for p in ports:
fixed_ips = p['fixed_ips']
new_fixed_ips = []
for fixed_ip in fixed_ips:
if fixed_ip['ip_address'] != address:
new_fixed_ips.append(fixed_ip)
port_req_body = {'port': {'fixed_ips': new_fixed_ips}}
try:
neutron.update_port(p['id'], port_req_body)
except Exception as ex:
msg = ("Unable to update port %(portid)s with"
" failure: %(exception)s")
LOG.debug(msg, {'portid': p['id'], 'exception': ex},
instance=instance)
return self._get_instance_nw_info(context, instance)
raise exception.FixedIpNotFoundForSpecificInstance(
instance_uuid=instance.uuid, ip=address)
def _get_port_vnic_info(self, context, neutron, port_id):
"""Retrieve port vnic info
Invoked with a valid port_id.
Return vnic type and the attached physical network name.
"""
phynet_name = None
port = self._show_port(context, port_id, neutron_client=neutron,
fields=['binding:vnic_type', 'network_id'])
vnic_type = port.get('binding:vnic_type',
network_model.VNIC_TYPE_NORMAL)
if vnic_type in network_model.VNIC_TYPES_SRIOV:
net_id = port['network_id']
net = neutron.show_network(net_id,
fields='provider:physical_network').get('network')
phynet_name = net.get('provider:physical_network')
return vnic_type, phynet_name
def create_pci_requests_for_sriov_ports(self, context, pci_requests,
requested_networks):
"""Check requested networks for any SR-IOV port request.
Create a PCI request object for each SR-IOV port, and add it to the
pci_requests object that contains a list of PCI request object.
"""
if not requested_networks:
return
neutron = get_client(context, admin=True)
for request_net in requested_networks:
phynet_name = None
vnic_type = network_model.VNIC_TYPE_NORMAL
if request_net.port_id:
vnic_type, phynet_name = self._get_port_vnic_info(
context, neutron, request_net.port_id)
pci_request_id = None
if vnic_type in network_model.VNIC_TYPES_SRIOV:
request = objects.InstancePCIRequest(
count=1,
spec=[{pci_request.PCI_NET_TAG: phynet_name}],
request_id=str(uuid.uuid4()))
pci_requests.requests.append(request)
pci_request_id = request.request_id
# Add pci_request_id into the requested network
request_net.pci_request_id = pci_request_id
def _ports_needed_per_instance(self, context, neutron, requested_networks):
ports_needed_per_instance = 0
if requested_networks is None or len(requested_networks) == 0:
nets = self._get_available_networks(context, context.project_id,
neutron=neutron)
if len(nets) > 1:
# Attaching to more than one network by default doesn't
# make sense, as the order will be arbitrary and the guest OS
# won't know which to configure
msg = _("Multiple possible networks found, use a Network "
"ID to be more specific.")
raise exception.NetworkAmbiguous(msg)
else:
ports_needed_per_instance = 1
else:
net_ids_requested = []
# TODO(danms): Remove me when all callers pass an object
if isinstance(requested_networks[0], tuple):
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest.from_tuple(t)
for t in requested_networks])
for request in requested_networks:
if request.port_id:
port = self._show_port(context, request.port_id,
neutron_client=neutron)
if port.get('device_id', None):
raise exception.PortInUse(port_id=request.port_id)
if not port.get('fixed_ips'):
raise exception.PortRequiresFixedIP(
port_id=request.port_id)
request.network_id = port['network_id']
else:
ports_needed_per_instance += 1
net_ids_requested.append(request.network_id)
# NOTE(jecarey) There is currently a race condition.
# That is, if you have more than one request for a specific
# fixed IP at the same time then only one will be allocated
# the ip. The fixed IP will be allocated to only one of the
# instances that will run. The second instance will fail on
# spawn. That instance will go into error state.
# TODO(jecarey) Need to address this race condition once we
# have the ability to update mac addresses in Neutron.
if request.address:
# TODO(jecarey) Need to look at consolidating list_port
# calls once able to OR filters.
search_opts = {'network_id': request.network_id,
'fixed_ips': 'ip_address=%s' % (
request.address),
'fields': 'device_id'}
existing_ports = neutron.list_ports(
**search_opts)['ports']
if existing_ports:
i_uuid = existing_ports[0]['device_id']
raise exception.FixedIpAlreadyInUse(
address=request.address,
instance_uuid=i_uuid)
# Now check to see if all requested networks exist
if net_ids_requested:
nets = self._get_available_networks(
context, context.project_id, net_ids_requested,
neutron=neutron)
for net in nets:
if not net.get('subnets'):
raise exception.NetworkRequiresSubnet(
network_uuid=net['id'])
if len(nets) != len(net_ids_requested):
requested_netid_set = set(net_ids_requested)
returned_netid_set = set([net['id'] for net in nets])
lostid_set = requested_netid_set - returned_netid_set
if lostid_set:
id_str = ''
for _id in lostid_set:
id_str = id_str and id_str + ', ' + _id or _id
raise exception.NetworkNotFound(network_id=id_str)
return ports_needed_per_instance
def validate_networks(self, context, requested_networks, num_instances):
"""Validate that the tenant can use the requested networks.
Return the number of instances than can be successfully allocated
with the requested network configuration.
"""
LOG.debug('validate_networks() for %s', requested_networks)
neutron = get_client(context)
ports_needed_per_instance = self._ports_needed_per_instance(
context, neutron, requested_networks)
# Note(PhilD): Ideally Nova would create all required ports as part of
# network validation, but port creation requires some details
# from the hypervisor. So we just check the quota and return
# how many of the requested number of instances can be created
if ports_needed_per_instance:
quotas = neutron.show_quota(tenant_id=context.project_id)['quota']
if quotas.get('port', -1) == -1:
# Unlimited Port Quota
return num_instances
# We only need the port count so only ask for ids back.
params = dict(tenant_id=context.project_id, fields=['id'])
ports = neutron.list_ports(**params)['ports']
free_ports = quotas.get('port') - len(ports)
if free_ports < 0:
msg = (_("The number of defined ports: %(ports)d "
"is over the limit: %(quota)d") %
{'ports': len(ports),
'quota': quotas.get('port')})
raise exception.PortLimitExceeded(msg)
ports_needed = ports_needed_per_instance * num_instances
if free_ports >= ports_needed:
return num_instances
else:
return free_ports // ports_needed_per_instance
return num_instances
def _get_instance_uuids_by_ip(self, context, address):
"""Retrieve instance uuids associated with the given IP address.
:returns: A list of dicts containing the uuids keyed by 'instance_uuid'
e.g. [{'instance_uuid': uuid}, ...]
"""
search_opts = {"fixed_ips": 'ip_address=%s' % address}
data = get_client(context).list_ports(**search_opts)
ports = data.get('ports', [])
return [{'instance_uuid': port['device_id']} for port in ports
if port['device_id']]
def _get_port_id_by_fixed_address(self, client,
instance, address):
"""Return port_id from a fixed address."""
zone = 'compute:%s' % instance.availability_zone
search_opts = {'device_id': instance.uuid,
'device_owner': zone}
data = client.list_ports(**search_opts)
ports = data['ports']
port_id = None
for p in ports:
for ip in p['fixed_ips']:
if ip['ip_address'] == address:
port_id = p['id']
break
if not port_id:
raise exception.FixedIpNotFoundForAddress(address=address)
return port_id
@base_api.refresh_cache
def associate_floating_ip(self, context, instance,
floating_address, fixed_address,
affect_auto_assigned=False):
"""Associate a floating IP with a fixed IP."""
# Note(amotoki): 'affect_auto_assigned' is not respected
# since it is not used anywhere in nova code and I could
# find why this parameter exists.
client = get_client(context)
port_id = self._get_port_id_by_fixed_address(client, instance,
fixed_address)
fip = self._get_floating_ip_by_address(client, floating_address)
param = {'port_id': port_id,
'fixed_ip_address': fixed_address}
client.update_floatingip(fip['id'], {'floatingip': param})
if fip['port_id']:
port = self._show_port(context, fip['port_id'],
neutron_client=client)
orig_instance_uuid = port['device_id']
msg_dict = dict(address=floating_address,
instance_id=orig_instance_uuid)
LOG.info(_LI('re-assign floating IP %(address)s from '
'instance %(instance_id)s'), msg_dict,
instance=instance)
orig_instance = objects.Instance.get_by_uuid(context,
orig_instance_uuid)
# purge cached nw info for the original instance
base_api.update_instance_cache_with_nw_info(self, context,
orig_instance)
def get_all(self, context):
"""Get all networks for client."""
client = get_client(context)
networks = client.list_networks().get('networks')
network_objs = []
for network in networks:
network_objs.append(objects.Network(context=context,
name=network['name'],
label=network['name'],
uuid=network['id']))
return objects.NetworkList(context=context,
objects=network_objs)
def get(self, context, network_uuid):
"""Get specific network for client."""
client = get_client(context)
try:
network = client.show_network(network_uuid).get('network') or {}
except neutron_client_exc.NetworkNotFoundClient:
raise exception.NetworkNotFound(network_id=network_uuid)
net_obj = objects.Network(context=context,
name=network['name'],
label=network['name'],
uuid=network['id'])
return net_obj
def delete(self, context, network_uuid):
"""Delete a network for client."""
raise NotImplementedError()
def disassociate(self, context, network_uuid):
"""Disassociate a network for client."""
raise NotImplementedError()
def associate(self, context, network_uuid, host=base_api.SENTINEL,
project=base_api.SENTINEL):
"""Associate a network for client."""
raise NotImplementedError()
def get_fixed_ip(self, context, id):
"""Get a fixed IP from the id."""
raise NotImplementedError()
def get_fixed_ip_by_address(self, context, address):
"""Return instance uuids given an address."""
uuid_maps = self._get_instance_uuids_by_ip(context, address)
if len(uuid_maps) == 1:
return uuid_maps[0]
elif not uuid_maps:
raise exception.FixedIpNotFoundForAddress(address=address)
else:
raise exception.FixedIpAssociatedWithMultipleInstances(
address=address)
def _setup_net_dict(self, client, network_id):
if not network_id:
return {}
pool = client.show_network(network_id)['network']
return {pool['id']: pool}
def _setup_port_dict(self, context, client, port_id):
if not port_id:
return {}
port = self._show_port(context, port_id, neutron_client=client)
return {port['id']: port}
def _setup_pools_dict(self, client):
pools = self._get_floating_ip_pools(client)
return {i['id']: i for i in pools}
def _setup_ports_dict(self, client, project_id=None):
search_opts = {'tenant_id': project_id} if project_id else {}
ports = client.list_ports(**search_opts)['ports']
return {p['id']: p for p in ports}
def get_floating_ip(self, context, id):
"""Return floating IP object given the floating IP id."""
client = get_client(context)
try:
fip = client.show_floatingip(id)['floatingip']
except neutron_client_exc.NeutronClientException as e:
if e.status_code == 404:
raise exception.FloatingIpNotFound(id=id)
else:
with excutils.save_and_reraise_exception():
LOG.exception(_LE('Unable to access floating IP %s'), id)
pool_dict = self._setup_net_dict(client,
fip['floating_network_id'])
port_dict = self._setup_port_dict(context, client, fip['port_id'])
return self._format_floating_ip_model(fip, pool_dict, port_dict)
def _get_floating_ip_pools(self, client, project_id=None):
search_opts = {constants.NET_EXTERNAL: True}
if project_id:
search_opts.update({'tenant_id': project_id})
data = client.list_networks(**search_opts)
return data['networks']
def get_floating_ip_pools(self, context):
"""Return floating IP pool names."""
client = get_client(context)
pools = self._get_floating_ip_pools(client)
# Note(salv-orlando): Return a list of names to be consistent with
# nova.network.api.get_floating_ip_pools
return [n['name'] or n['id'] for n in pools]
def _format_floating_ip_model(self, fip, pool_dict, port_dict):
pool = pool_dict[fip['floating_network_id']]
result = {'id': fip['id'],
'address': fip['floating_ip_address'],
'pool': pool['name'] or pool['id'],
'project_id': fip['tenant_id'],
# In Neutron v2, an exact fixed_ip_id does not exist.
'fixed_ip_id': fip['port_id'],
}
# In Neutron v2 API fixed_ip_address and instance uuid
# (= device_id) are known here, so pass it as a result.
result['fixed_ip'] = {'address': fip['fixed_ip_address']}
if fip['port_id']:
instance_uuid = port_dict[fip['port_id']]['device_id']
result['instance'] = {'uuid': instance_uuid}
# TODO(mriedem): remove this workaround once the get_floating_ip*
# API methods are converted to use nova objects.
result['fixed_ip']['instance_uuid'] = instance_uuid
else:
result['instance'] = None
return result
def get_floating_ip_by_address(self, context, address):
"""Return a floating IP given an address."""
client = get_client(context)
fip = self._get_floating_ip_by_address(client, address)
pool_dict = self._setup_net_dict(client,
fip['floating_network_id'])
port_dict = self._setup_port_dict(context, client, fip['port_id'])
return self._format_floating_ip_model(fip, pool_dict, port_dict)
def get_floating_ips_by_project(self, context):
client = get_client(context)
project_id = context.project_id
fips = self._safe_get_floating_ips(client, tenant_id=project_id)
if not fips:
return []
pool_dict = self._setup_pools_dict(client)
port_dict = self._setup_ports_dict(client, project_id)
return [self._format_floating_ip_model(fip, pool_dict, port_dict)
for fip in fips]
def get_instance_id_by_floating_address(self, context, address):
"""Return the instance id a floating IP's fixed IP is allocated to."""
client = get_client(context)
fip = self._get_floating_ip_by_address(client, address)
if not fip['port_id']:
return None
try:
port = self._show_port(context, fip['port_id'],
neutron_client=client)
except exception.PortNotFound:
# NOTE: Here is a potential race condition between _show_port() and
# _get_floating_ip_by_address(). fip['port_id'] shows a port which
# is the server instance's. At _get_floating_ip_by_address(),
# Neutron returns the list which includes the instance. Just after
# that, the deletion of the instance happens and Neutron returns
# 404 on _show_port().
LOG.debug('The port(%s) is not found', fip['port_id'])
return None
return port['device_id']
def get_vifs_by_instance(self, context, instance):
raise NotImplementedError()
def get_vif_by_mac_address(self, context, mac_address):
raise NotImplementedError()
def _get_floating_ip_pool_id_by_name_or_id(self, client, name_or_id):
search_opts = {constants.NET_EXTERNAL: True, 'fields': 'id'}
if uuidutils.is_uuid_like(name_or_id):
search_opts.update({'id': name_or_id})
else:
search_opts.update({'name': name_or_id})
data = client.list_networks(**search_opts)
nets = data['networks']
if len(nets) == 1:
return nets[0]['id']
elif len(nets) == 0:
raise exception.FloatingIpPoolNotFound()
else:
msg = (_("Multiple floating IP pools matches found for name '%s'")
% name_or_id)
raise exception.NovaException(message=msg)
def allocate_floating_ip(self, context, pool=None):
"""Add a floating IP to a project from a pool."""
client = get_client(context)
pool = pool or CONF.default_floating_pool
pool_id = self._get_floating_ip_pool_id_by_name_or_id(client, pool)
param = {'floatingip': {'floating_network_id': pool_id}}
try:
fip = client.create_floatingip(param)
except (neutron_client_exc.IpAddressGenerationFailureClient,
neutron_client_exc.ExternalIpAddressExhaustedClient) as e:
raise exception.NoMoreFloatingIps(six.text_type(e))
except neutron_client_exc.OverQuotaClient as e:
raise exception.FloatingIpLimitExceeded(six.text_type(e))
except neutron_client_exc.BadRequest as e:
raise exception.FloatingIpBadRequest(six.text_type(e))
return fip['floatingip']['floating_ip_address']
def _safe_get_floating_ips(self, client, **kwargs):
"""Get floating IP gracefully handling 404 from Neutron."""
try:
return client.list_floatingips(**kwargs)['floatingips']
# If a neutron plugin does not implement the L3 API a 404 from
# list_floatingips will be raised.
except neutron_client_exc.NotFound:
return []
except neutron_client_exc.NeutronClientException as e:
# bug/1513879 neutron client is currently using
# NeutronClientException when there is no L3 API
if e.status_code == 404:
return []
with excutils.save_and_reraise_exception():
LOG.exception(_LE('Unable to access floating IP for %s'),
', '.join(['%s %s' % (k, v)
for k, v in six.iteritems(kwargs)]))
def _get_floating_ip_by_address(self, client, address):
"""Get floating IP from floating IP address."""
if not address:
raise exception.FloatingIpNotFoundForAddress(address=address)
fips = self._safe_get_floating_ips(client, floating_ip_address=address)
if len(fips) == 0:
raise exception.FloatingIpNotFoundForAddress(address=address)
elif len(fips) > 1:
raise exception.FloatingIpMultipleFoundForAddress(address=address)
return fips[0]
def _get_floating_ips_by_fixed_and_port(self, client, fixed_ip, port):
"""Get floating IPs from fixed IP and port."""
return self._safe_get_floating_ips(client, fixed_ip_address=fixed_ip,
port_id=port)
def release_floating_ip(self, context, address,
affect_auto_assigned=False):
"""Remove a floating IP with the given address from a project."""
# Note(amotoki): We cannot handle a case where multiple pools
# have overlapping IP address range. In this case we cannot use
# 'address' as a unique key.
# This is a limitation of the current nova.
# Note(amotoki): 'affect_auto_assigned' is not respected
# since it is not used anywhere in nova code and I could
# find why this parameter exists.
self._release_floating_ip(context, address)
def disassociate_and_release_floating_ip(self, context, instance,
floating_ip):
"""Removes (deallocates) and deletes the floating IP.
This api call was added to allow this to be done in one operation
if using neutron.
"""
@base_api.refresh_cache
def _release_floating_ip_and_refresh_cache(self, context, instance,
floating_ip):
self._release_floating_ip(context, floating_ip['address'],
raise_if_associated=False)
if instance:
_release_floating_ip_and_refresh_cache(self, context, instance,
floating_ip)
else:
self._release_floating_ip(context, floating_ip['address'],
raise_if_associated=False)
def _release_floating_ip(self, context, address,
raise_if_associated=True):
client = get_client(context)
fip = self._get_floating_ip_by_address(client, address)
if raise_if_associated and fip['port_id']:
raise exception.FloatingIpAssociated(address=address)
client.delete_floatingip(fip['id'])
@base_api.refresh_cache
def disassociate_floating_ip(self, context, instance, address,
affect_auto_assigned=False):
"""Disassociate a floating IP from the instance."""
# Note(amotoki): 'affect_auto_assigned' is not respected
# since it is not used anywhere in nova code and I could
# find why this parameter exists.
client = get_client(context)
fip = self._get_floating_ip_by_address(client, address)
client.update_floatingip(fip['id'], {'floatingip': {'port_id': None}})
def migrate_instance_start(self, context, instance, migration):
"""Start to migrate the network of an instance."""
# NOTE(wenjianhn): just pass to make migrate instance doesn't
# raise for now.
pass
def migrate_instance_finish(self, context, instance, migration):
"""Finish migrating the network of an instance."""
self._update_port_binding_for_instance(context, instance,
migration['dest_compute'])
def add_network_to_project(self, context, project_id, network_uuid=None):
"""Force add a network to the project."""
raise NotImplementedError()
def _nw_info_get_ips(self, client, port):
network_IPs = []
for fixed_ip in port['fixed_ips']:
fixed = network_model.FixedIP(address=fixed_ip['ip_address'])
floats = self._get_floating_ips_by_fixed_and_port(
client, fixed_ip['ip_address'], port['id'])
for ip in floats:
fip = network_model.IP(address=ip['floating_ip_address'],
type='floating')
fixed.add_floating_ip(fip)
network_IPs.append(fixed)
return network_IPs
def _nw_info_get_subnets(self, context, port, network_IPs):
subnets = self._get_subnets_from_port(context, port)
for subnet in subnets:
subnet['ips'] = [fixed_ip for fixed_ip in network_IPs
if fixed_ip.is_in_subnet(subnet)]
return subnets
def _nw_info_build_network(self, port, networks, subnets):
network_name = None
network_mtu = None
for net in networks:
if port['network_id'] == net['id']:
network_name = net['name']
tenant_id = net['tenant_id']
network_mtu = net.get('mtu')
break
else:
tenant_id = port['tenant_id']
LOG.warning(_LW("Network %(id)s not matched with the tenants "
"network! The ports tenant %(tenant_id)s will be "
"used."),
{'id': port['network_id'], 'tenant_id': tenant_id})
bridge = None
ovs_interfaceid = None
# Network model metadata
should_create_bridge = None
vif_type = port.get('binding:vif_type')
port_details = port.get('binding:vif_details', {})
if vif_type == network_model.VIF_TYPE_OVS:
bridge = port_details.get(network_model.VIF_DETAILS_BRIDGE_NAME,
CONF.neutron.ovs_bridge)
ovs_interfaceid = port['id']
elif vif_type == network_model.VIF_TYPE_BRIDGE:
bridge = port_details.get(network_model.VIF_DETAILS_BRIDGE_NAME,
"brq" + port['network_id'])
should_create_bridge = True
elif vif_type == network_model.VIF_TYPE_DVS:
# The name of the DVS port group will contain the neutron
# network id
bridge = port['network_id']
elif (vif_type == network_model.VIF_TYPE_VHOSTUSER and
port_details.get(network_model.VIF_DETAILS_VHOSTUSER_OVS_PLUG,
False)):
bridge = port_details.get(network_model.VIF_DETAILS_BRIDGE_NAME,
CONF.neutron.ovs_bridge)
ovs_interfaceid = port['id']
# Prune the bridge name if necessary. For the DVS this is not done
# as the bridge is a '<network-name>-<network-UUID>'.
if bridge is not None and vif_type != network_model.VIF_TYPE_DVS:
bridge = bridge[:network_model.NIC_NAME_LEN]
network = network_model.Network(
id=port['network_id'],
bridge=bridge,
injected=CONF.flat_injected,
label=network_name,
tenant_id=tenant_id,
mtu=network_mtu
)
network['subnets'] = subnets
port_profile = port.get('binding:profile')
if port_profile:
physical_network = port_profile.get('physical_network')
if physical_network:
network['physical_network'] = physical_network
if should_create_bridge is not None:
network['should_create_bridge'] = should_create_bridge
return network, ovs_interfaceid
def _get_preexisting_port_ids(self, instance):
"""Retrieve the preexisting ports associated with the given instance.
These ports were not created by nova and hence should not be
deallocated upon instance deletion.
"""
net_info = compute_utils.get_nw_info_for_instance(instance)
if not net_info:
LOG.debug('Instance cache missing network info.',
instance=instance)
return [vif['id'] for vif in net_info
if vif.get('preserve_on_delete')]
def _build_network_info_model(self, context, instance, networks=None,
port_ids=None, admin_client=None,
preexisting_port_ids=None):
"""Return list of ordered VIFs attached to instance.
:param context: Request context.
:param instance: Instance we are returning network info for.
:param networks: List of networks being attached to an instance.
If value is None this value will be populated
from the existing cached value.
:param port_ids: List of port_ids that are being attached to an
instance in order of attachment. If value is None
this value will be populated from the existing
cached value.
:param admin_client: A neutron client for the admin context.
:param preexisting_port_ids: List of port_ids that nova didn't
allocate and there shouldn't be deleted when
an instance is de-allocated. Supplied list will
be added to the cached list of preexisting port
IDs for this instance.
"""
search_opts = {'tenant_id': instance.project_id,
'device_id': instance.uuid, }
if admin_client is None:
client = get_client(context, admin=True)
else:
client = admin_client
data = client.list_ports(**search_opts)
current_neutron_ports = data.get('ports', [])
nw_info_refresh = networks is None and port_ids is None
networks, port_ids = self._gather_port_ids_and_networks(
context, instance, networks, port_ids)
nw_info = network_model.NetworkInfo()
if preexisting_port_ids is None:
preexisting_port_ids = []
preexisting_port_ids = set(
preexisting_port_ids + self._get_preexisting_port_ids(instance))
current_neutron_port_map = {}
for current_neutron_port in current_neutron_ports:
current_neutron_port_map[current_neutron_port['id']] = (
current_neutron_port)
for port_id in port_ids:
current_neutron_port = current_neutron_port_map.get(port_id)
if current_neutron_port:
vif_active = False
if (current_neutron_port['admin_state_up'] is False
or current_neutron_port['status'] == 'ACTIVE'):
vif_active = True
network_IPs = self._nw_info_get_ips(client,
current_neutron_port)
subnets = self._nw_info_get_subnets(context,
current_neutron_port,
network_IPs)
devname = "tap" + current_neutron_port['id']
devname = devname[:network_model.NIC_NAME_LEN]
network, ovs_interfaceid = (
self._nw_info_build_network(current_neutron_port,
networks, subnets))
preserve_on_delete = (current_neutron_port['id'] in
preexisting_port_ids)
nw_info.append(network_model.VIF(
id=current_neutron_port['id'],
address=current_neutron_port['mac_address'],
network=network,
vnic_type=current_neutron_port.get('binding:vnic_type',
network_model.VNIC_TYPE_NORMAL),
type=current_neutron_port.get('binding:vif_type'),
profile=current_neutron_port.get('binding:profile'),
details=current_neutron_port.get('binding:vif_details'),
ovs_interfaceid=ovs_interfaceid,
devname=devname,
active=vif_active,
preserve_on_delete=preserve_on_delete))
elif nw_info_refresh:
LOG.info(_LI('Port %s from network info_cache is no '
'longer associated with instance in Neutron. '
'Removing from network info_cache.'), port_id,
instance=instance)
return nw_info
def _get_subnets_from_port(self, context, port):
"""Return the subnets for a given port."""
fixed_ips = port['fixed_ips']
# No fixed_ips for the port means there is no subnet associated
# with the network the port is created on.
# Since list_subnets(id=[]) returns all subnets visible for the
# current tenant, returned subnets may contain subnets which is not
# related to the port. To avoid this, the method returns here.
if not fixed_ips:
return []
search_opts = {'id': [ip['subnet_id'] for ip in fixed_ips]}
data = get_client(context).list_subnets(**search_opts)
ipam_subnets = data.get('subnets', [])
subnets = []
for subnet in ipam_subnets:
subnet_dict = {'cidr': subnet['cidr'],
'gateway': network_model.IP(
address=subnet['gateway_ip'],
type='gateway'),
}
# attempt to populate DHCP server field
search_opts = {'network_id': subnet['network_id'],
'device_owner': 'network:dhcp'}
data = get_client(context).list_ports(**search_opts)
dhcp_ports = data.get('ports', [])
for p in dhcp_ports:
for ip_pair in p['fixed_ips']:
if ip_pair['subnet_id'] == subnet['id']:
subnet_dict['dhcp_server'] = ip_pair['ip_address']
break
subnet_object = network_model.Subnet(**subnet_dict)
for dns in subnet.get('dns_nameservers', []):
subnet_object.add_dns(
network_model.IP(address=dns, type='dns'))
for route in subnet.get('host_routes', []):
subnet_object.add_route(
network_model.Route(cidr=route['destination'],
gateway=network_model.IP(
address=route['nexthop'],
type='gateway')))
subnets.append(subnet_object)
return subnets
def get_dns_domains(self, context):
"""Return a list of available dns domains.
These can be used to create DNS entries for floating IPs.
"""
raise NotImplementedError()
def add_dns_entry(self, context, address, name, dns_type, domain):
"""Create specified DNS entry for address."""
raise NotImplementedError()
def modify_dns_entry(self, context, name, address, domain):
"""Create specified DNS entry for address."""
raise NotImplementedError()
def delete_dns_entry(self, context, name, domain):
"""Delete the specified dns entry."""
raise NotImplementedError()
def delete_dns_domain(self, context, domain):
"""Delete the specified dns domain."""
raise NotImplementedError()
def get_dns_entries_by_address(self, context, address, domain):
"""Get entries for address and domain."""
raise NotImplementedError()
def get_dns_entries_by_name(self, context, name, domain):
"""Get entries for name and domain."""
raise NotImplementedError()
def create_private_dns_domain(self, context, domain, availability_zone):
"""Create a private DNS domain with nova availability zone."""
raise NotImplementedError()
def create_public_dns_domain(self, context, domain, project=None):
"""Create a private DNS domain with optional nova project."""
raise NotImplementedError()
def setup_instance_network_on_host(self, context, instance, host):
"""Setup network for specified instance on host."""
self._update_port_binding_for_instance(context, instance, host)
def cleanup_instance_network_on_host(self, context, instance, host):
"""Cleanup network for specified instance on host."""
pass
def _update_port_binding_for_instance(self, context, instance, host):
if not self._has_port_binding_extension(context, refresh_cache=True):
return
neutron = get_client(context, admin=True)
search_opts = {'device_id': instance.uuid,
'tenant_id': instance.project_id}
data = neutron.list_ports(**search_opts)
ports = data['ports']
for p in ports:
# If the host hasn't changed, like in the case of resizing to the
# same host, there is nothing to do.
if p.get('binding:host_id') != host:
try:
neutron.update_port(p['id'],
{'port': {'binding:host_id': host}})
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_LE("Unable to update host of port %s"),
p['id'], instance=instance)
def update_instance_vnic_index(self, context, instance, vif, index):
"""Update instance vnic index.
When the 'VNIC index' extension is supported this method will update
the vnic index of the instance on the port.
"""
self._refresh_neutron_extensions_cache(context)
if constants.VNIC_INDEX_EXT in self.extensions:
neutron = get_client(context)
port_req_body = {'port': {'vnic_index': index}}
try:
neutron.update_port(vif['id'], port_req_body)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_LE('Unable to update instance VNIC index '
'for port %s.'),
vif['id'], instance=instance)
def _ensure_requested_network_ordering(accessor, unordered, preferred):
"""Sort a list with respect to the preferred network ordering."""
if preferred:
unordered.sort(key=lambda i: preferred.index(accessor(i)))
| Snergster/virl-salt | openstack/nova/files/mitaka/nova+network+neutronv2+api.py | Python | gpl-2.0 | 90,885 | 0.000275 |
from .activations import ActivationsPanel
from .experiments import ExperimentsPanel
from .occlusion import OcclusionPanel
| JarnoRFB/qtpyvis | qtgui/panels/__init__.py | Python | mit | 122 | 0 |
#-*- coding: utf-8 -*-
'''
python-libtorrent for Kodi (script.module.libtorrent)
Copyright (C) 2015-2016 DiMartino, srg70, RussakHH, aisman
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
import os
import xbmc, xbmcgui, xbmcvfs, xbmcaddon
from net import HTTP
__libbaseurl__ = "https://github.com/DiMartinoXBMC/script.module.libtorrent/raw/master/python_libtorrent"
__settings__ = xbmcaddon.Addon(id='script.module.libtorrent')
__version__ = __settings__.getAddonInfo('version')
__plugin__ = __settings__.getAddonInfo('name') + " v." + __version__
__icon__=os.path.join(xbmc.translatePath('special://home'), 'addons',
'script.module.libtorrent', 'icon.png')
__language__ = __settings__.getLocalizedString
from python_libtorrent.platform_pulsar import get_platform, get_libname
def log(msg):
try:
xbmc.log("### [%s]: %s" % (__plugin__,msg,), level=xbmc.LOGNOTICE )
except UnicodeEncodeError:
xbmc.log("### [%s]: %s" % (__plugin__,msg.encode("utf-8", "ignore"),), level=xbmc.LOGNOTICE )
except:
xbmc.log("### [%s]: %s" % (__plugin__,'ERROR LOG',), level=xbmc.LOGNOTICE )
def getSettingAsBool(setting):
return __settings__.getSetting(setting).lower() == "true"
class LibraryManager():
def __init__(self, dest_path, platform):
self.dest_path = dest_path
self.platform = platform
self.root=os.path.dirname(os.path.dirname(__file__))
def check_exist(self):
for libname in get_libname(self.platform):
if not xbmcvfs.exists(os.path.join(self.dest_path,libname)):
return False
return True
def check_update(self):
need_update=False
for libname in get_libname(self.platform):
if libname!='liblibtorrent.so':
self.libpath = os.path.join(self.dest_path, libname)
self.sizepath=os.path.join(self.root, self.platform['system'], self.platform['version'], libname+'.size.txt')
size=str(os.path.getsize(self.libpath))
size_old=open( self.sizepath, "r" ).read()
if size_old!=size:
need_update=True
return need_update
def update(self):
if self.check_update():
for libname in get_libname(self.platform):
self.libpath = os.path.join(self.dest_path, libname)
xbmcvfs.delete(self.libpath)
self.download()
def download(self):
xbmcvfs.mkdirs(self.dest_path)
for libname in get_libname(self.platform):
dest = os.path.join(self.dest_path, libname)
log("try to fetch %s" % libname)
url = "%s/%s/%s/%s.zip" % (__libbaseurl__, self.platform['system'], self.platform['version'], libname)
if libname!='liblibtorrent.so':
try:
self.http = HTTP()
self.http.fetch(url, download=dest + ".zip", progress=True)
log("%s -> %s" % (url, dest))
xbmc.executebuiltin('XBMC.Extract("%s.zip","%s")' % (dest, self.dest_path), True)
xbmcvfs.delete(dest + ".zip")
except:
text = 'Failed download %s!' % libname
xbmc.executebuiltin("XBMC.Notification(%s,%s,%s,%s)" % (__plugin__,text,750,__icon__))
else:
xbmcvfs.copy(os.path.join(self.dest_path, 'libtorrent.so'), dest)
return True
def android_workaround(self, new_dest_path):
for libname in get_libname(self.platform):
libpath=os.path.join(self.dest_path, libname)
size=str(os.path.getsize(libpath))
new_libpath=os.path.join(new_dest_path, libname)
if not xbmcvfs.exists(new_libpath):
xbmcvfs.copy(libpath, new_libpath)
log('Copied %s -> %s' %(libpath, new_libpath))
else:
new_size=str(os.path.getsize(new_libpath))
if size!=new_size:
xbmcvfs.delete(new_libpath)
xbmcvfs.copy(libpath, new_libpath)
log('Deleted and copied (%s) %s -> (%s) %s' %(size, libpath, new_size, new_libpath))
return new_dest_path
| chimkentec/KodiMODo_rep | script.module.libtorrent/python_libtorrent/python_libtorrent/functions.py | Python | gpl-3.0 | 5,308 | 0.008855 |
import matplotlib.pyplot as plt
import numpy as np
import sys
import time
import scipy.signal as sig
infile = sys.argv[1]
indata = np.load(infile)
spec = indata[0]
samp_rate = indata[1]
fftsize = indata[2]
center_freq = indata[3] # MHz
halffft = int(0.5*fftsize)
freqs = 0.5*samp_rate*np.array(range(-halffft,halffft))/(halffft)
#plt.plot(spec)
delta_nu = samp_rate/fftsize
plt.plot(freqs,spec)
plt.xlabel('relative to center [Mhz]')
#plt.figure()
RFI = [[1419.4-0.210, 0.02],
[1419.4-1.937, 0.015],
[1419.4-4.4, 0.015],
[1419.4+3.0, 0.01],
[center_freq, 4*delta_nu], # remove dip in the center of band, always about 4 fft points wide. Use 8, else errors
[1416.4-0.8, 0.04],
[1420.4-2, 0.01],
[1425, 0.01],
[1424.4-1.8, 0.01],
[1424.4+0.5845, 0.01],
[1424.4+0.483, 0.005],
]
flags = []
#plt.plot(spec)
for item in RFI:
RFI_freq = item[0]
RFI_width = item[1]
ch0_freq = center_freq - 0.5*samp_rate
ind_low = int(np.floor((RFI_freq-0.5*RFI_width - ch0_freq)/delta_nu))
ind_high = int(np.ceil((RFI_freq+0.5*RFI_width - ch0_freq)/delta_nu))
if ind_low>0 and ind_high<len(spec):
margin = min(ind_high-ind_low, ind_low, len(spec)-ind_high)
RFI_part = spec[ind_low-margin:ind_high+margin]
xdata = np.arange(len(RFI_part))
weights = np.ones_like(RFI_part)
weights[margin:-margin] = 0.0 # Ignore RFI when fitting
pf = np.polyfit(xdata, RFI_part, deg=1, w=weights)
interpdata = np.polyval(pf, xdata)
#plt.figure()
#plt.plot(xdata, interpdata)
spec[ind_low:ind_high] = interpdata[margin:-margin]
else:
print 'Ignoring', item
plt.figure()
calspec = spec * 750/1.6
plt.plot(calspec)
plt.ylabel('Roughly [K]')
#plt.figure()
#fftsize = 0.8*fftsize
#halffft = int(0.5*fftsize)
#freqs = 0.5*samp_rate*np.array(range(-halffft,halffft))/(halffft)
#l = len(spec)
#lind = 0.1*l
#hind = 0.9*l
#newspec = spec[lind:hind-1]
#print np.shape(newspec), np.shape(freqs)
#plt.plot(freqs, newspec)
#xdata = np.arange(len(newspec))
#weights = np.ones_like(newspec)
#margin = 0.25*len(newspec)
#weights[margin:-margin] = 0.0 # Ignore RFI when fitting
#pf = np.polyfit(xdata, newspec, w=weights, deg=8)
#interpdata = np.polyval(pf, xdata)
#plt.plot(freqs,interpdata)
#plt.figure()
#plt.plot(freqs, newspec-interpdata)
#plt.figure()
#dec = sig.decimate(spec, 8, axis=0)
#plt.plot(dec)
plt.show()
| varenius/salsa | USRP/usrp_gnuradio_dev/plot_array_file.py | Python | mit | 2,472 | 0.016181 |
from direct.distributed.MsgTypes import *
OTP_DO_ID_SERVER_ROOT = 4007
OTP_DO_ID_FRIEND_MANAGER = 4501
OTP_DO_ID_LEADERBOARD_MANAGER = 4502
OTP_DO_ID_SERVER = 4600
OTP_DO_ID_UBER_DOG = 4601
OTP_CHANNEL_AI_AND_UD_BROADCAST = 4602
OTP_CHANNEL_UD_BROADCAST = 4603
OTP_CHANNEL_AI_BROADCAST = 4604
OTP_NET_MSGR_CHANNEL_ID_ALL_AI = 4605
OTP_NET_MSGR_CHANNEL_ID_UBER_DOG = 4606
OTP_NET_MSGR_CHANNEL_ID_AI_ONLY = 4607
OTP_DO_ID_COMMON = 4615
OTP_DO_ID_GATEWAY = 4616
OTP_DO_ID_PIRATES = 4617
OTP_DO_ID_TOONTOWN = 4618
OTP_DO_ID_FAIRIES = 4619
OTP_DO_ID_CARS = 4620
OTP_DO_ID_AVATARS = 4630
OTP_DO_ID_FRIENDS = 4640
OTP_DO_ID_GUILDS = 4650
OTP_DO_ID_ESCROW = 4660
OTP_DO_ID_CLIENT_SERVICES_MANAGER = 4665
OTP_DO_ID_TTI_FRIENDS_MANAGER = 4666
OTP_DO_ID_GLOBAL_PARTY_MANAGER = 4477
OTP_DO_ID_PIRATES_AVATAR_MANAGER = 4674
OTP_DO_ID_PIRATES_CREW_MANAGER = 4675
OTP_DO_ID_PIRATES_INVENTORY_MANAGER = 4677
OTP_DO_ID_PIRATES_SPEEDCHAT_RELAY = 4711
OTP_DO_ID_PIRATES_SHIP_MANAGER = 4678
OTP_DO_ID_PIRATES_TRAVEL_AGENT = 4679
OTP_DO_ID_PIRATES_FRIENDS_MANAGER = 4680
OTP_DO_ID_CHAT_MANAGER = 4681
OTP_DO_ID_TOONTOWN_AVATAR_MANAGER = 4682
OTP_DO_ID_TOONTOWN_DELIVERY_MANAGER = 4683
OTP_DO_ID_TOONTOWN_TEMP_STORE_MANAGER = 4684
OTP_DO_ID_TOONTOWN_SPEEDCHAT_RELAY = 4712
OTP_DO_ID_SWITCHBOARD_MANAGER = 4685
OTP_DO_ID_AVATAR_FRIENDS_MANAGER = 4686
OTP_DO_ID_PLAYER_FRIENDS_MANAGER = 4687
OTP_DO_ID_CENTRAL_LOGGER = 4688
OTP_DO_ID_CARS_AVATAR_MANAGER = 4689
OTP_DO_ID_TOONTOWN_MAIL_MANAGER = 4690
OTP_DO_ID_TOONTOWN_PARTY_MANAGER = 4691
OTP_DO_ID_TOONTOWN_RAT_MANAGER = 4692
OTP_DO_ID_STATUS_DATABASE = 4693
OTP_DO_ID_TOONTOWN_AWARD_MANAGER = 4694
OTP_DO_ID_TOONTOWN_CODE_REDEMPTION_MANAGER = 4695
OTP_DO_ID_TOONTOWN_IN_GAME_NEWS_MANAGER = 4696
OTP_DO_ID_TOONTOWN_NON_REPEATABLE_RANDOM_SOURCE = 4697
OTP_DO_ID_AI_TRADE_AVATAR = 4698
OTP_DO_ID_TOONTOWN_WHITELIST_MANAGER = 4699
OTP_DO_ID_PIRATES_MATCH_MAKER = 4700
OTP_DO_ID_PIRATES_GUILD_MANAGER = 4701
OTP_DO_ID_PIRATES_AWARD_MAKER = 4702
OTP_DO_ID_PIRATES_CODE_REDEMPTION = 4703
OTP_DO_ID_PIRATES_SETTINGS_MANAGER = 4704
OTP_DO_ID_PIRATES_HOLIDAY_MANAGER = 4705
OTP_DO_ID_PIRATES_CREW_MATCH_MANAGER = 4706
OTP_DO_ID_PIRATES_AVATAR_ACCESSORIES_MANAGER = 4710
OTP_DO_ID_TOONTOWN_CPU_INFO_MANAGER = 4713
OTP_DO_ID_TOONTOWN_SECURITY_MANAGER = 4714
OTP_DO_ID_SNAPSHOT_DISPATCHER = 4800
OTP_DO_ID_SNAPSHOT_RENDERER = 4801
OTP_DO_ID_SNAPSHOT_RENDERER_01 = 4801
OTP_DO_ID_SNAPSHOT_RENDERER_02 = 4802
OTP_DO_ID_SNAPSHOT_RENDERER_03 = 4803
OTP_DO_ID_SNAPSHOT_RENDERER_04 = 4804
OTP_DO_ID_SNAPSHOT_RENDERER_05 = 4805
OTP_DO_ID_SNAPSHOT_RENDERER_06 = 4806
OTP_DO_ID_SNAPSHOT_RENDERER_07 = 4807
OTP_DO_ID_SNAPSHOT_RENDERER_08 = 4808
OTP_DO_ID_SNAPSHOT_RENDERER_09 = 4809
OTP_DO_ID_SNAPSHOT_RENDERER_10 = 4810
OTP_DO_ID_SNAPSHOT_RENDERER_11 = 4811
OTP_DO_ID_SNAPSHOT_RENDERER_12 = 4812
OTP_DO_ID_SNAPSHOT_RENDERER_13 = 4813
OTP_DO_ID_SNAPSHOT_RENDERER_14 = 4814
OTP_DO_ID_SNAPSHOT_RENDERER_15 = 4815
OTP_DO_ID_SNAPSHOT_RENDERER_16 = 4816
OTP_DO_ID_SNAPSHOT_RENDERER_17 = 4817
OTP_DO_ID_SNAPSHOT_RENDERER_18 = 4818
OTP_DO_ID_SNAPSHOT_RENDERER_19 = 4819
OTP_DO_ID_SNAPSHOT_RENDERER_20 = 4820
OTP_DO_ID_PIRATES_INVENTORY_MANAGER_BASE = 5001
OTP_ZONE_ID_INVALID = 0
OTP_ZONE_ID_OLD_QUIET_ZONE = 1
OTP_ZONE_ID_MANAGEMENT = 2
OTP_ZONE_ID_DISTRICTS = 3
OTP_ZONE_ID_DISTRICTS_STATS = 4
OTP_ZONE_ID_ELEMENTS = 5
OTP_NET_MESSENGER_CHANNEL = (OTP_DO_ID_UBER_DOG << 32) + OTP_ZONE_ID_MANAGEMENT
OTP_MOD_CHANNEL = 6200
OTP_ADMIN_CHANNEL = 6400
OTP_SYSADMIN_CHANNEL = 6500 | Spiderlover/Toontown | otp/distributed/OtpDoGlobals.py | Python | mit | 3,475 | 0.000288 |
# ################ A simple graphical interface which communicates with the server #####################################
# While client_gui only allows to set the facelets with the mouse, this file (client_gui2) also takes input from the
# webcam and includes sliders for some opencv parameters.
from tkinter import *
import socket
import twophase.cubie as cubie
import twophase.vision_params as vision_params
# ################################## some global variables and constants ###############################################
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = '8080'
width = 60 # width of a facelet in pixels
facelet_id = [[[0 for col in range(3)] for row in range(3)] for fc in range(6)]
colorpick_id = [0 for i in range(6)]
curcol = None
t = ("U", "R", "F", "D", "L", "B")
cols = ("yellow", "green", "red", "white", "blue", "orange")
########################################################################################################################
# ################################################ Diverse functions ###################################################
def show_text(txt):
"""Display messages."""
print(txt)
display.insert(INSERT, txt)
root.update_idletasks()
def create_facelet_rects(a):
"""Initialize the facelet grid on the canvas."""
offset = ((1, 0), (2, 1), (1, 1), (1, 2), (0, 1), (3, 1))
for f in range(6):
for row in range(3):
y = 10 + offset[f][1] * 3 * a + row * a
for col in range(3):
x = 10 + offset[f][0] * 3 * a + col * a
facelet_id[f][row][col] = canvas.create_rectangle(x, y, x + a, y + a, fill="grey")
if row == 1 and col == 1:
canvas.create_text(x + width // 2, y + width // 2, font=("", 14), text=t[f], state=DISABLED)
for f in range(6):
canvas.itemconfig(facelet_id[f][1][1], fill=cols[f])
def create_colorpick_rects(a):
"""Initialize the "paintbox" on the canvas."""
global curcol
global cols
for i in range(6):
x = (i % 3) * (a + 5) + 7 * a
y = (i // 3) * (a + 5) + 7 * a
colorpick_id[i] = canvas.create_rectangle(x, y, x + a, y + a, fill=cols[i])
canvas.itemconfig(colorpick_id[0], width=4)
curcol = cols[0]
def get_definition_string():
"""Generate the cube definition string from the facelet colors."""
color_to_facelet = {}
for i in range(6):
color_to_facelet.update({canvas.itemcget(facelet_id[i][1][1], "fill"): t[i]})
s = ''
for f in range(6):
for row in range(3):
for col in range(3):
s += color_to_facelet[canvas.itemcget(facelet_id[f][row][col], "fill")]
return s
########################################################################################################################
# ############################### Solve the displayed cube with a local or remote server ###############################
def solve():
"""Connect to the server and return the solving maneuver."""
display.delete(1.0, END) # clear output window
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
show_text('Failed to create socket')
return
# host = 'f9f0b2jt6zmzyo6b.myfritz.net' # my RaspberryPi, if online
host = txt_host.get(1.0, END).rstrip() # default is localhost
port = int(txt_port.get(1.0, END)) # default is port 8080
try:
remote_ip = socket.gethostbyname(host)
except socket.gaierror:
show_text('Hostname could not be resolved.')
return
try:
s.connect((remote_ip, port))
except BaseException as e:
show_text('Cannot connect to server! ' + e.__doc__)
return
show_text('Connected with ' + remote_ip + '\n')
try:
defstr = get_definition_string() + '\n'
except BaseException as e:
show_text('Invalid facelet configuration.\nWrong or missing colors. ' + e.__doc__)
return
show_text(defstr)
try:
s.sendall((defstr + '\n').encode())
except BaseException as e:
show_text('Cannot send cube configuration to server. ' + e.__doc__)
return
show_text(s.recv(2048).decode())
########################################################################################################################
# ################################# Functions to change the facelet colors #############################################
def clean():
"""Restore the cube to a clean cube."""
for f in range(6):
for row in range(3):
for col in range(3):
canvas.itemconfig(facelet_id[f][row][col], fill=canvas.itemcget(facelet_id[f][1][1], "fill"))
def empty():
"""Remove the facelet colors except the center facelets colors."""
for f in range(6):
for row in range(3):
for col in range(3):
if row != 1 or col != 1:
canvas.itemconfig(facelet_id[f][row][col], fill="grey")
def random():
"""Generate a random cube and sets the corresponding facelet colors."""
cc = cubie.CubieCube()
cc.randomize()
fc = cc.to_facelet_cube()
idx = 0
for f in range(6):
for row in range(3):
for col in range(3):
canvas.itemconfig(facelet_id[f][row][col], fill=cols[fc.f[idx]])
idx += 1
########################################################################################################################
# ################################### Edit the facelet colors ##########################################################
def click(_event):
"""Define how to react on left mouse clicks"""
global curcol
idlist = canvas.find_withtag("current")
if len(idlist) > 0:
if idlist[0] in colorpick_id:
curcol = canvas.itemcget("current", "fill")
for i in range(6):
canvas.itemconfig(colorpick_id[i], width=1)
canvas.itemconfig("current", width=5)
else:
canvas.itemconfig("current", fill=curcol)
########################################################################################################################
# ######################################### functions to set the slider values #########################################
def set_rgb_L(val):
vision_params.rgb_L = int(val)
def set_orange_L(val):
vision_params.orange_L = int(val)
def set_orange_H(val):
vision_params.orange_H = int(val)
def set_yellow_H(val):
vision_params.yellow_H = int(val)
def set_green_H(val):
vision_params.green_H = int(val)
def set_blue_H(val):
vision_params.blue_H = int(val)
def set_sat_W(val):
vision_params.sat_W = int(val)
def set_val_W(val):
vision_params.val_W = int(val)
def set_sigma_C(val):
vision_params.sigma_C = int(val)
def set_delta_C(val):
vision_params.delta_C = int(val)
def transfer():
"""Transfer the facelet colors detected by the opencv vision to the GUI editor."""
if len(vision_params.face_col) == 0:
return
centercol = vision_params.face_col[1][1]
vision_params.cube_col[centercol] = vision_params.face_col
vision_params.cube_hsv[centercol] = vision_params.face_hsv
dc = {}
for i in range(6):
dc[canvas.itemcget(facelet_id[i][1][1], "fill")] = i # map color to face number
for i in range(3):
for j in range(3):
canvas.itemconfig(facelet_id[dc[centercol]][i][j], fill=vision_params.face_col[i][j])
# ######################################################################################################################
# ###################################### Generate and display the TK_widgets ##########################################
root = Tk()
root.wm_title("Solver Client")
canvas = Canvas(root, width=12 * width + 20, height=9 * width + 20)
canvas.pack()
bsolve = Button(text="Solve", height=2, width=10, relief=RAISED, command=solve)
bsolve_window = canvas.create_window(10 + 10.5 * width, 10 + 6.5 * width, anchor=NW, window=bsolve)
bclean = Button(text="Clean", height=1, width=10, relief=RAISED, command=clean)
bclean_window = canvas.create_window(10 + 10.5 * width, 10 + 7.5 * width, anchor=NW, window=bclean)
bempty = Button(text="Empty", height=1, width=10, relief=RAISED, command=empty)
bempty_window = canvas.create_window(10 + 10.5 * width, 10 + 8 * width, anchor=NW, window=bempty)
brandom = Button(text="Random", height=1, width=10, relief=RAISED, command=random)
brandom_window = canvas.create_window(10 + 10.5 * width, 10 + 8.5 * width, anchor=NW, window=brandom)
display = Text(height=7, width=39)
text_window = canvas.create_window(10 + 6.5 * width, 10 + .5 * width, anchor=NW, window=display)
hp = Label(text=' Hostname and Port')
hp_window = canvas.create_window(10 + 0 * width, 10 + 0.6 * width, anchor=NW, window=hp)
txt_host = Text(height=1, width=20)
txt_host_window = canvas.create_window(10 + 0 * width, 10 + 1 * width, anchor=NW, window=txt_host)
txt_host.insert(INSERT, DEFAULT_HOST)
txt_port = Text(height=1, width=20)
txt_port_window = canvas.create_window(10 + 0 * width, 10 + 1.5 * width, anchor=NW, window=txt_port)
txt_port.insert(INSERT, DEFAULT_PORT)
canvas.bind("<Button-1>", click)
create_facelet_rects(width)
create_colorpick_rects(width)
s_orange_L = Scale(root, from_=1, to=14, length=width * 1.4, showvalue=0, label='red-orange', orient=HORIZONTAL,
command=set_orange_L)
canvas.create_window(10, 12 + 6.0 * width, anchor=NW, window=s_orange_L)
s_orange_L.set(vision_params.orange_L)
s_orange_H = Scale(root, from_=8, to=40, length=width * 1.4, showvalue=0, label='orange-yellow', orient=HORIZONTAL,
command=set_orange_H)
canvas.create_window(10, 12 + 6.6 * width, anchor=NW, window=s_orange_H)
s_orange_H.set(vision_params.orange_H)
s_yellow_H = Scale(root, from_=31, to=80, length=width * 1.4, showvalue=0, label='yellow-green', orient=HORIZONTAL,
command=set_yellow_H)
canvas.create_window(10, 12 + 7.2 * width, anchor=NW, window=s_yellow_H)
s_yellow_H.set(vision_params.yellow_H)
s_green_H = Scale(root, from_=70, to=120, length=width * 1.4, showvalue=0, label='green-blue', orient=HORIZONTAL,
command=set_green_H)
canvas.create_window(10, 12 + 7.8 * width, anchor=NW, window=s_green_H)
s_green_H.set(vision_params.green_H)
s_blue_H = Scale(root, from_=120, to=180, length=width * 1.4, showvalue=0, label='blue-red', orient=HORIZONTAL,
command=set_blue_H)
canvas.create_window(10, 12 + 8.4 * width, anchor=NW, window=s_blue_H)
s_blue_H.set(vision_params.blue_H)
s_rgb_L = Scale(root, from_=0, to=140, length=width * 1.4, showvalue=0, label='black-filter', orient=HORIZONTAL,
command=set_rgb_L)
canvas.create_window(10 + width * 1.5, 12 + 6 * width, anchor=NW, window=s_rgb_L)
s_rgb_L.set(vision_params.rgb_L)
s_sat_W = Scale(root, from_=120, to=0, length=width * 1.4, showvalue=0, label='white-filter s', orient=HORIZONTAL,
command=set_sat_W)
canvas.create_window(10 + width * 1.5, 12 + 6.6 * width, anchor=NW, window=s_sat_W)
s_sat_W.set(vision_params.sat_W)
s_val_W = Scale(root, from_=80, to=255, length=width * 1.4, showvalue=0, label='white-filter v', orient=HORIZONTAL,
command=set_val_W)
canvas.create_window(10 + width * 1.5, 12 + 7.2 * width, anchor=NW, window=s_val_W)
s_val_W.set(vision_params.val_W)
s_sigma_C = Scale(root, from_=30, to=0, length=width * 1.4, showvalue=0, label='color-filter \u03c3', orient=HORIZONTAL,
command=set_sigma_C)
canvas.create_window(10 + width * 1.5, 12 + 7.8 * width, anchor=NW, window=s_sigma_C)
s_sigma_C.set(vision_params.sigma_C)
s_delta_C = Scale(root, from_=10, to=0, length=width * 1.4, showvalue=0, label='color-filter \u03b4', orient=HORIZONTAL,
command=set_delta_C)
canvas.create_window(10 + width * 1.5, 12 + 8.4 * width, anchor=NW, window=s_delta_C)
s_delta_C.set(vision_params.delta_C)
btransfer = Button(text="Webcam import", height=2, width=13, relief=RAISED, command=transfer)
canvas.create_window(10 + 0.5 * width, 10 + 2.1 * width, anchor=NW, window=btransfer)
root.mainloop()
########################################################################################################################
| hkociemba/RubiksCube-TwophaseSolver | package_src/twophase/client_gui2.py | Python | gpl-3.0 | 12,351 | 0.00421 |
def extractDreamstlTumblrCom(item):
'''
Parser for 'dreamstl.tumblr.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('the s ranks that i raised', 'The S-Ranks that I Raised', 'translated'),
('the s ranks that i\'ve raised', 'The S-Ranks that I Raised', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False | fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractDreamstlTumblrCom.py | Python | bsd-3-clause | 761 | 0.027595 |
# -*- coding: utf-8 -*-
from django.contrib import admin
from .catalogadmin import CatalogAdmin
from .publicationadmin import PublicationAdmin
from .typeadmin import TypeAdmin
from ..models import Type, Catalog, Publication
admin.site.register(Type, TypeAdmin)
admin.site.register(Catalog, CatalogAdmin)
admin.site.register(Publication, PublicationAdmin)
| mbourqui/django-publications-bootstrap | publications_bootstrap/admin/__init__.py | Python | mit | 358 | 0 |
__author__ = 'Dominic Miglar <dominic.miglar@bitmovin.net>'
import unittest
from bitcodin import get_input
from bitcodin.exceptions import BitcodinNotFoundError
from bitcodin.test.bitcodin_test_case import BitcodinTestCase
class GetNonExistentInputTestCase(BitcodinTestCase):
def setUp(self):
super(GetNonExistentInputTestCase, self).setUp()
def runTest(self):
with self.assertRaises(BitcodinNotFoundError):
encoding_profile = get_input(0)
def tearDown(self):
super(GetNonExistentInputTestCase, self).tearDown()
if __name__ == '__main__':
unittest.main()
| bitmovin/bitcodin-python | bitcodin/test/input/testcase_get_non_existent_input.py | Python | unlicense | 614 | 0 |
import olymap.item
def test_get_animal():
tests = (
({}, None),
({'IT': {'an': ['1']}}, True),
({'IT': {'an': ['0']}}, None),
({'IT': {'de': ['1']}}, None),
({'IM': {'an': ['1']}}, None),
)
for box, answer in tests:
assert olymap.item.get_animal(box) == answer
def test_get_attack_bonus():
tests = (
({}, 0),
({'IM': {'ab': ['60']}}, 60),
({'IM': {'ab': ['0']}}, 0),
({'IM': {'de': ['60']}}, 0),
({'IT': {'ab': ['60']}}, 0),
)
for box, answer in tests:
assert olymap.item.get_attack_bonus(box) == answer
def test_get_aura_bonus():
tests = (
({}, None),
({'IM': {'ba': ['60']}}, '60'),
({'IM': {'ba': ['0']}}, '0'),
({'IM': {'de': ['60']}}, None),
({'IT': {'ba': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_aura_bonus(box) == answer
def test_get_defense_bonus():
tests = (
({}, 0),
({'IM': {'db': ['60']}}, 60),
({'IM': {'db': ['0']}}, 0),
({'IM': {'de': ['60']}}, 0),
({'IT': {'db': ['60']}}, 0),
)
for box, answer in tests:
assert olymap.item.get_defense_bonus(box) == answer
def test_get_fly_capacity():
tests = (
({}, None),
({'IT': {'fc': ['100']}}, '100'),
({'IT': {'fc': ['0']}}, '0'),
({'IT': {'de': ['60']}}, None),
({'IM': {'fc': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_fly_capacity(box) == answer
def test_get_item_attack():
tests = (
({}, None),
({'IT': {'at': ['60']}}, '60'),
({'IT': {'at': ['0']}}, '0'),
({'IT': {'de': ['60']}}, None),
({'IM': {'at': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_item_attack(box) == answer
def test_get_item_bonuses():
tests = (
({}, {'attack_bonus': 0, 'defense_bonus': 0, 'missile_bonus': 0, 'aura_bonus': None}),
({'IM': {'ab': ['60'], 'mb': ['61'], 'db': ['62'], 'ba': ['63']}}, {'attack_bonus': 60, 'defense_bonus': 62, 'missile_bonus': 61, 'aura_bonus': '63'}),
({'IM': {'ab': ['60']}}, {'attack_bonus': 60, 'defense_bonus': 0, 'missile_bonus': 0, 'aura_bonus': None}),
)
for box, answer in tests:
assert olymap.item.get_item_bonuses(box) == answer
def test_get_item_defense():
tests = (
({}, None),
({'IT': {'de': ['60']}}, '60'),
({'IT': {'de': ['0']}}, '0'),
({'IT': {'at': ['60']}}, None),
({'IM': {'de': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_item_defense(box) == answer
def test_get_item_missile():
tests = (
({}, None),
({'IT': {'mi': ['60']}}, '60'),
({'IT': {'mi': ['0']}}, '0'),
({'IT': {'de': ['60']}}, None),
({'IM': {'mi': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_item_missile(box) == answer
def test_get_land_capacity():
tests = (
({}, None),
({'IT': {'lc': ['100']}}, '100'),
({'IT': {'lc': ['0']}}, '0'),
({'IT': {'de': ['60']}}, None),
({'IM': {'lc': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_land_capacity(box) == answer
def test_get_lore():
tests = (
({}, None),
({'IM': {'lo': ['100']}}, '100'),
({'IM': {'lo': ['0']}}, '0'),
({'IM': {'de': ['60']}}, None),
({'IT': {'lo': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_lore(box) == answer
def test_get_man_item():
tests = (
({}, None),
({'IT': {'mu': ['1']}}, True),
({'IT': {'mu': ['0']}}, None),
({'IT': {'de': ['1']}}, None),
({'IM': {'mu': ['1']}}, None),
)
for box, answer in tests:
assert olymap.item.get_man_item(box) == answer
def test_get_missile_bonus():
tests = (
({}, 0),
({'IM': {'mb': ['60']}}, 60),
({'IM': {'mb': ['0']}}, 0),
({'IM': {'mi': ['60']}}, 0),
({'IT': {'mb': ['60']}}, 0),
)
for box, answer in tests:
assert olymap.item.get_missile_bonus(box) == answer
def test_get_plural():
tests = (
({}, None),
({'na': ['single'], 'IT': {'pl': ['plural']}}, 'plural'),
({'na': ['single']}, 'single'),
({'na': ['single'], 'IT': {'de': ['plural']}}, 'single'),
({'na': ['single'], 'IM': {'pl': ['plural']}}, 'single'),
)
for box, answer in tests:
assert olymap.item.get_plural(box) == answer
def test_get_prominent():
tests = (
({}, None),
({'IT': {'pr': ['1']}}, True),
({'IT': {'pr': ['0']}}, None),
({'IT': {'de': ['1']}}, None),
({'IM': {'pr': ['1']}}, None),
)
for box, answer in tests:
assert olymap.item.get_prominent(box) == answer
def test_get_ride_capacity():
tests = (
({}, None),
({'IT': {'rc': ['100']}}, '100'),
({'IT': {'rc': ['0']}}, '0'),
({'IT': {'de': ['60']}}, None),
({'IM': {'rc': ['60']}}, None),
)
for box, answer in tests:
assert olymap.item.get_ride_capacity(box) == answer
| olympiag3/olypy | tests/unit/test_olymap_item.py | Python | apache-2.0 | 5,307 | 0.000942 |
# Copyright 2013 University of Chicago
class EEAgentParameterException(Exception):
def __init__(self, message):
Exception.__init__(self, message)
class EEAgentUnauthorizedException(Exception):
pass
class EEAgentSupDException(Exception):
def __init__(self, message):
Exception.__init__(self, message)
| ooici/eeagent | eeagent/eeagent_exceptions.py | Python | apache-2.0 | 337 | 0.002967 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, json, sys
from frappe import _
from frappe.utils import cint, flt, now, cstr, strip_html
from frappe.model import default_fields
from frappe.model.naming import set_new_name
from frappe.modules import load_doctype_module
_classes = {}
def get_controller(doctype):
"""Returns the **class** object of the given DocType.
For `custom` type, returns `frappe.model.document.Document`.
:param doctype: DocType name as string."""
from frappe.model.document import Document
if not doctype in _classes:
module_name, custom = frappe.db.get_value("DocType", doctype, ["module", "custom"]) \
or ["Core", False]
if custom:
_class = Document
else:
module = load_doctype_module(doctype, module_name)
classname = doctype.replace(" ", "").replace("-", "")
if hasattr(module, classname):
_class = getattr(module, classname)
if issubclass(_class, BaseDocument):
_class = getattr(module, classname)
else:
raise ImportError, doctype
else:
raise ImportError, doctype
_classes[doctype] = _class
return _classes[doctype]
class BaseDocument(object):
ignore_in_getter = ("doctype", "_meta", "meta", "_table_fields", "_valid_columns")
def __init__(self, d):
self.update(d)
self.dont_update_if_missing = []
if hasattr(self, "__setup__"):
self.__setup__()
@property
def meta(self):
if not hasattr(self, "_meta"):
self._meta = frappe.get_meta(self.doctype)
return self._meta
def update(self, d):
if "doctype" in d:
self.set("doctype", d.get("doctype"))
# first set default field values of base document
for key in default_fields:
if key in d:
self.set(key, d.get(key))
for key, value in d.iteritems():
self.set(key, value)
return self
def update_if_missing(self, d):
if isinstance(d, BaseDocument):
d = d.get_valid_dict()
if "doctype" in d:
self.set("doctype", d.get("doctype"))
for key, value in d.iteritems():
# dont_update_if_missing is a list of fieldnames, for which, you don't want to set default value
if (self.get(key) is None) and (value is not None) and (key not in self.dont_update_if_missing):
self.set(key, value)
def get_db_value(self, key):
return frappe.db.get_value(self.doctype, self.name, key)
def get(self, key=None, filters=None, limit=None, default=None):
if key:
if isinstance(key, dict):
return _filter(self.get_all_children(), key, limit=limit)
if filters:
if isinstance(filters, dict):
value = _filter(self.__dict__.get(key, []), filters, limit=limit)
else:
default = filters
filters = None
value = self.__dict__.get(key, default)
else:
value = self.__dict__.get(key, default)
if value is None and key not in self.ignore_in_getter \
and key in (d.fieldname for d in self.meta.get_table_fields()):
self.set(key, [])
value = self.__dict__.get(key)
return value
else:
return self.__dict__
def getone(self, key, filters=None):
return self.get(key, filters=filters, limit=1)[0]
def set(self, key, value, as_value=False):
if isinstance(value, list) and not as_value:
self.__dict__[key] = []
self.extend(key, value)
else:
self.__dict__[key] = value
def delete_key(self, key):
if key in self.__dict__:
del self.__dict__[key]
def append(self, key, value=None):
if value==None:
value={}
if isinstance(value, (dict, BaseDocument)):
if not self.__dict__.get(key):
self.__dict__[key] = []
value = self._init_child(value, key)
self.__dict__[key].append(value)
# reference parent document
value.parent_doc = self
return value
else:
raise ValueError, "Document attached to child table must be a dict or BaseDocument, not " + str(type(value))[1:-1]
def extend(self, key, value):
if isinstance(value, list):
for v in value:
self.append(key, v)
else:
raise ValueError
def remove(self, doc):
self.get(doc.parentfield).remove(doc)
def _init_child(self, value, key):
if not self.doctype:
return value
if not isinstance(value, BaseDocument):
if "doctype" not in value:
value["doctype"] = self.get_table_field_doctype(key)
if not value["doctype"]:
raise AttributeError, key
value = get_controller(value["doctype"])(value)
value.init_valid_columns()
value.parent = self.name
value.parenttype = self.doctype
value.parentfield = key
if not getattr(value, "idx", None):
value.idx = len(self.get(key) or []) + 1
if not getattr(value, "name", None):
value.__dict__['__islocal'] = 1
return value
def get_valid_dict(self):
d = {}
for fieldname in self.meta.get_valid_columns():
d[fieldname] = self.get(fieldname)
return d
def init_valid_columns(self):
for key in default_fields:
if key not in self.__dict__:
self.__dict__[key] = None
if self.doctype in ("DocField", "DocPerm") and self.parent in ("DocType", "DocField", "DocPerm"):
from frappe.model.meta import get_table_columns
valid = get_table_columns(self.doctype)
else:
valid = self.meta.get_valid_columns()
for key in valid:
if key not in self.__dict__:
self.__dict__[key] = None
def is_new(self):
return self.get("__islocal")
def as_dict(self, no_nulls=False):
doc = self.get_valid_dict()
doc["doctype"] = self.doctype
for df in self.meta.get_table_fields():
children = self.get(df.fieldname) or []
doc[df.fieldname] = [d.as_dict(no_nulls=no_nulls) for d in children]
if no_nulls:
for k in doc.keys():
if doc[k] is None:
del doc[k]
for key in ("_user_tags", "__islocal", "__onload", "_starred_by"):
if self.get(key):
doc[key] = self.get(key)
return frappe._dict(doc)
def as_json(self):
from frappe.utils.response import json_handler
return json.dumps(self.as_dict(), indent=1, sort_keys=True, default=json_handler)
def get_table_field_doctype(self, fieldname):
return self.meta.get_field(fieldname).options
def get_parentfield_of_doctype(self, doctype):
fieldname = [df.fieldname for df in self.meta.get_table_fields() if df.options==doctype]
return fieldname[0] if fieldname else None
def db_insert(self):
"""INSERT the document (with valid columns) in the database."""
if not self.name:
# name will be set by document class in most cases
set_new_name(self)
d = self.get_valid_dict()
columns = d.keys()
try:
frappe.db.sql("""insert into `tab{doctype}`
({columns}) values ({values})""".format(
doctype = self.doctype,
columns = ", ".join(["`"+c+"`" for c in columns]),
values = ", ".join(["%s"] * len(columns))
), d.values())
except Exception, e:
if e.args[0]==1062:
if self.meta.autoname=="hash":
self.name = None
self.db_insert()
return
type, value, traceback = sys.exc_info()
frappe.msgprint(_("Duplicate name {0} {1}").format(self.doctype, self.name))
raise frappe.NameError, (self.doctype, self.name, e), traceback
else:
raise
self.set("__islocal", False)
def db_update(self):
if self.get("__islocal") or not self.name:
self.db_insert()
return
d = self.get_valid_dict()
columns = d.keys()
try:
frappe.db.sql("""update `tab{doctype}`
set {values} where name=%s""".format(
doctype = self.doctype,
values = ", ".join(["`"+c+"`=%s" for c in columns])
), d.values() + [d.get("name")])
except Exception, e:
if e.args[0]==1062:
type, value, traceback = sys.exc_info()
fieldname = str(e).split("'")[-2]
frappe.msgprint(_("{0} must be unique".format(self.meta.get_label(fieldname))))
raise frappe.ValidationError, (self.doctype, self.name, e), traceback
else:
raise
def db_set(self, fieldname, value, update_modified=True):
self.set(fieldname, value)
self.set("modified", now())
self.set("modified_by", frappe.session.user)
frappe.db.set_value(self.doctype, self.name, fieldname, value,
self.modified, self.modified_by, update_modified=update_modified)
def _fix_numeric_types(self):
for df in self.meta.get("fields"):
if df.fieldtype == "Check":
self.set(df.fieldname, cint(self.get(df.fieldname)))
elif self.get(df.fieldname) is not None:
if df.fieldtype == "Int":
self.set(df.fieldname, cint(self.get(df.fieldname)))
elif df.fieldtype in ("Float", "Currency", "Percent"):
self.set(df.fieldname, flt(self.get(df.fieldname)))
if self.docstatus is not None:
self.docstatus = cint(self.docstatus)
def _get_missing_mandatory_fields(self):
"""Get mandatory fields that do not have any values"""
def get_msg(df):
if df.fieldtype == "Table":
return "{}: {}: {}".format(_("Error"), _("Data missing in table"), _(df.label))
elif self.parentfield:
return "{}: {} #{}: {}: {}".format(_("Error"), _("Row"), self.idx,
_("Value missing for"), _(df.label))
else:
return "{}: {}: {}".format(_("Error"), _("Value missing for"), _(df.label))
missing = []
for df in self.meta.get("fields", {"reqd": 1}):
if self.get(df.fieldname) in (None, []) or not strip_html(cstr(self.get(df.fieldname))).strip():
missing.append((df.fieldname, get_msg(df)))
return missing
def get_invalid_links(self, is_submittable=False):
def get_msg(df, docname):
if self.parentfield:
return "{} #{}: {}: {}".format(_("Row"), self.idx, _(df.label), docname)
else:
return "{}: {}".format(_(df.label), docname)
invalid_links = []
cancelled_links = []
for df in self.meta.get_link_fields() + self.meta.get("fields",
{"fieldtype":"Dynamic Link"}):
docname = self.get(df.fieldname)
if docname:
if df.fieldtype=="Link":
doctype = df.options
if not doctype:
frappe.throw(_("Options not set for link field {0}").format(df.fieldname))
else:
doctype = self.get(df.options)
if not doctype:
frappe.throw(_("{0} must be set first").format(self.meta.get_label(df.options)))
# MySQL is case insensitive. Preserve case of the original docname in the Link Field.
value = frappe.db.get_value(doctype, docname)
setattr(self, df.fieldname, value)
if not value:
invalid_links.append((df.fieldname, docname, get_msg(df, docname)))
elif (df.fieldname != "amended_from"
and (is_submittable or self.meta.is_submittable) and frappe.get_meta(doctype).is_submittable
and cint(frappe.db.get_value(doctype, docname, "docstatus"))==2):
cancelled_links.append((df.fieldname, docname, get_msg(df, docname)))
return invalid_links, cancelled_links
def _validate_selects(self):
if frappe.flags.in_import:
return
for df in self.meta.get_select_fields():
if df.fieldname=="naming_series" or not (self.get(df.fieldname) and df.options):
continue
options = (df.options or "").split("\n")
# if only empty options
if not filter(None, options):
continue
# strip and set
self.set(df.fieldname, cstr(self.get(df.fieldname)).strip())
value = self.get(df.fieldname)
if value not in options and not (frappe.flags.in_test and value.startswith("_T-")):
# show an elaborate message
prefix = _("Row #{0}:").format(self.idx) if self.get("parentfield") else ""
label = _(self.meta.get_label(df.fieldname))
comma_options = '", "'.join(_(each) for each in options)
frappe.throw(_('{0} {1} cannot be "{2}". It should be one of "{3}"').format(prefix, label,
value, comma_options))
def _validate_constants(self):
if frappe.flags.in_import or self.is_new():
return
constants = [d.fieldname for d in self.meta.get("fields", {"set_only_once": 1})]
if constants:
values = frappe.db.get_value(self.doctype, self.name, constants, as_dict=True)
for fieldname in constants:
if self.get(fieldname) != values.get(fieldname):
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(fieldname)),
frappe.CannotChangeConstantError)
def _validate_update_after_submit(self):
current = frappe.db.get_value(self.doctype, self.name, "*", as_dict=True)
for key, value in current.iteritems():
df = self.meta.get_field(key)
if df and not df.allow_on_submit and (self.get(key) or value) and self.get(key) != value:
frappe.throw(_("Not allowed to change {0} after submission").format(df.label),
frappe.UpdateAfterSubmitError)
def precision(self, fieldname, parentfield=None):
"""Returns float precision for a particular field (or get global default).
:param fieldname: Fieldname for which precision is required.
:param parentfield: If fieldname is in child table."""
from frappe.model.meta import get_field_precision
if parentfield and not isinstance(parentfield, basestring):
parentfield = parentfield.parentfield
cache_key = parentfield or "main"
if not hasattr(self, "_precision"):
self._precision = frappe._dict()
if cache_key not in self._precision:
self._precision[cache_key] = frappe._dict()
if fieldname not in self._precision[cache_key]:
self._precision[cache_key][fieldname] = None
doctype = self.meta.get_field(parentfield).options if parentfield else self.doctype
df = frappe.get_meta(doctype).get_field(fieldname)
if df.fieldtype in ("Currency", "Float", "Percent"):
self._precision[cache_key][fieldname] = get_field_precision(df, self)
return self._precision[cache_key][fieldname]
def get_formatted(self, fieldname, doc=None, currency=None):
from frappe.utils.formatters import format_value
df = self.meta.get_field(fieldname)
if not df and fieldname in default_fields:
from frappe.model.meta import get_default_df
df = get_default_df(fieldname)
return format_value(self.get(fieldname), df=df, doc=doc or self, currency=currency)
def is_print_hide(self, fieldname, for_print=True):
"""Returns true if fieldname is to be hidden for print.
Print Hide can be set via the Print Format Builder or in the controller as a list
of hidden fields. Example
class MyDoc(Document):
def __setup__(self):
self.print_hide = ["field1", "field2"]
:param fieldname: Fieldname to be checked if hidden.
"""
df = self.meta.get_field(fieldname)
return df and (df.get("__print_hide") or df.print_hide)
def in_format_data(self, fieldname):
"""Returns True if shown via Print Format::`format_data` property.
Called from within standard print format."""
doc = getattr(self, "parent_doc", self)
if hasattr(doc, "format_data_map"):
return fieldname in doc.format_data_map
else:
return True
def _filter(data, filters, limit=None):
"""pass filters as:
{"key": "val", "key": ["!=", "val"],
"key": ["in", "val"], "key": ["not in", "val"], "key": "^val",
"key" : True (exists), "key": False (does not exist) }"""
out = []
for d in data:
add = True
for f in filters:
fval = filters[f]
if fval is True:
fval = ("not None", fval)
elif fval is False:
fval = ("None", fval)
elif not isinstance(fval, (tuple, list)):
if isinstance(fval, basestring) and fval.startswith("^"):
fval = ("^", fval[1:])
else:
fval = ("=", fval)
if not frappe.compare(getattr(d, f, None), fval[0], fval[1]):
add = False
break
if add:
out.append(d)
if limit and (len(out)-1)==limit:
break
return out
| gangadhar-kadam/verve_live_frappe | frappe/model/base_document.py | Python | mit | 15,292 | 0.029362 |
# **********************************************************************
#
# Copyright (c) 2003-2017 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import Test
class AI(Test.A):
def callA(self, current=None):
return "A"
class BI(Test.B, AI):
def callB(self, current=None):
return "B"
class CI(Test.C, AI):
def callC(self, current=None):
return "C"
class DI(Test.D, BI, CI):
def callD(self, current=None):
return "D"
class EI(Test.E):
def callE(self, current=None):
return "E"
class FI(Test.F, EI):
def callF(self, current=None):
return "F"
class GI(Test.G):
def __init__(self, communicator):
self._communicator = communicator
def shutdown(self, current=None):
self._communicator.shutdown()
def callG(self, current=None):
return "G"
class HI(Test.H, GI):
def __init__(self, communicator):
GI.__init__(self, communicator)
def callH(self, current=None):
return "H"
| ljx0305/ice | python/test/Ice/facets/TestI.py | Python | gpl-2.0 | 1,190 | 0.006723 |
# -*- coding: utf-8 -*-
from rest_framework import status as http_status
import itertools
from flask import request
from framework import status
from framework.exceptions import HTTPError
from framework.flask import redirect # VOL-aware redirect
from framework.auth.decorators import must_be_signed
from website.archiver import ARCHIVER_SUCCESS, ARCHIVER_FAILURE
from addons.base.views import DOWNLOAD_ACTIONS
from website import settings
from osf.exceptions import NodeStateError
from website.project.decorators import (
must_be_valid_project, must_be_contributor_or_public,
must_have_permission, must_be_contributor_and_not_group_member,
must_not_be_registration, must_be_registration,
must_not_be_retracted_registration
)
from osf import features
from osf.models import Identifier, RegistrationSchema
from website.project.utils import serialize_node
from osf.utils.permissions import ADMIN
from website import language
from website.ember_osf_web.decorators import ember_flag_is_active
from website.project import signals as project_signals
from website.project.metadata.schemas import _id_to_name
from website import util
from website.project.metadata.utils import serialize_meta_schema
from website.project.model import has_anonymous_link
from website.archiver.decorators import fail_archive_on_error
from .node import _view_project
from api.waffle.utils import flag_is_active
@must_be_valid_project
@must_not_be_retracted_registration
@must_be_contributor_or_public
def node_register_page(auth, node, **kwargs):
"""Display the registration metadata for a registration.
:return: serialized Node
"""
if node.is_registration:
return serialize_node(node, auth)
else:
status.push_status_message(
'You have been redirected to the project\'s registrations page. From here you can initiate a new Draft Registration to complete the registration process',
trust=False,
id='redirected_to_registrations',
)
return redirect(node.web_url_for('node_registrations', view='draft', _guid=True))
@must_be_valid_project
@must_have_permission(ADMIN)
@must_be_contributor_and_not_group_member
def node_registration_retraction_redirect(auth, node, **kwargs):
return redirect(node.web_url_for('node_registration_retraction_get', _guid=True))
@must_be_valid_project
@must_not_be_retracted_registration
@must_have_permission(ADMIN)
@must_be_contributor_and_not_group_member
def node_registration_retraction_get(auth, node, **kwargs):
"""Prepares node object for registration retraction page.
:return: serialized Node to be retracted
:raises: 400: BAD_REQUEST if registration already pending retraction
"""
if not node.is_registration:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Invalid Request',
'message_long': 'Withdrawal of non-registrations is not permitted.'
})
if node.is_pending_retraction:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Invalid Request',
'message_long': 'This registration is already pending withdrawal.'
})
return serialize_node(node, auth, primary=True)
@must_be_valid_project
@must_have_permission(ADMIN)
@must_be_contributor_and_not_group_member
def node_registration_retraction_post(auth, node, **kwargs):
"""Handles retraction of public registrations
:param auth: Authentication object for User
:return: Redirect URL for successful POST
"""
if node.is_pending_retraction:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Invalid Request',
'message_long': 'This registration is already pending withdrawal'
})
if not node.is_registration:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Invalid Request',
'message_long': 'Withdrawal of non-registrations is not permitted.'
})
if node.root_id != node.id:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
'message_short': 'Invalid Request',
'message_long': 'Withdrawal of non-parent registrations is not permitted.'
})
data = request.get_json()
try:
node.retract_registration(auth.user, data.get('justification', None))
node.save()
node.retraction.ask(node.get_active_contributors_recursive(unique_users=True))
except NodeStateError as err:
raise HTTPError(http_status.HTTP_403_FORBIDDEN, data=dict(message_long=str(err)))
return {'redirectUrl': node.web_url_for('view_project')}
@must_be_valid_project
@must_not_be_retracted_registration
@must_be_contributor_or_public
@ember_flag_is_active(features.EMBER_REGISTRATION_FORM_DETAIL)
def node_register_template_page(auth, node, metaschema_id, **kwargs):
if flag_is_active(request, features.EMBER_REGISTRIES_DETAIL_PAGE):
# Registration meta page obviated during redesign
return redirect(node.url)
if node.is_registration and bool(node.registered_schema):
try:
meta_schema = RegistrationSchema.objects.get(_id=metaschema_id)
except RegistrationSchema.DoesNotExist:
# backwards compatability for old urls, lookup by name
meta_schema = RegistrationSchema.objects.filter(name=_id_to_name(metaschema_id)).order_by('-schema_version').first()
if not meta_schema:
raise HTTPError(http_status.HTTP_404_NOT_FOUND, data={
'message_short': 'Invalid schema name',
'message_long': 'No registration schema with that name could be found.'
})
ret = _view_project(node, auth, primary=True)
my_meta = serialize_meta_schema(meta_schema)
if has_anonymous_link(node, auth):
for indx, schema_page in enumerate(my_meta['schema']['pages']):
for idx, schema_question in enumerate(schema_page['questions']):
if schema_question['title'] in settings.ANONYMIZED_TITLES:
del my_meta['schema']['pages'][indx]['questions'][idx]
ret['node']['registered_schema'] = serialize_meta_schema(meta_schema)
return ret
else:
status.push_status_message(
'You have been redirected to the project\'s registrations page. From here you can initiate a new Draft Registration to complete the registration process',
trust=False,
id='redirected_to_registrations',
)
return redirect(node.web_url_for('node_registrations', view=kwargs.get('template'), _guid=True))
@must_be_valid_project # returns project
@must_have_permission(ADMIN)
@must_be_contributor_and_not_group_member
@must_not_be_registration
def project_before_register(auth, node, **kwargs):
"""Returns prompt informing user that addons, if any, won't be registered."""
# TODO: Avoid generating HTML code in Python; all HTML should be in display layer
messages = {
'full': {
'addons': set(),
'message': 'The content and version history of <strong>{0}</strong> will be copied to the registration.',
},
'partial': {
'addons': set(),
'message': 'The current version of the content in <strong>{0}</strong> will be copied to the registration, but version history will be lost.'
},
'none': {
'addons': set(),
'message': 'The contents of <strong>{0}</strong> cannot be registered at this time, and will not be included as part of this registration.',
},
}
errors = {}
addon_set = [n.get_addons() for n in itertools.chain([node], node.get_descendants_recursive(primary_only=True))]
for addon in itertools.chain(*addon_set):
if not addon.complete:
continue
archive_errors = getattr(addon, 'archive_errors', None)
error = None
if archive_errors:
error = archive_errors()
if error:
errors[addon.config.short_name] = error
continue
name = addon.config.short_name
if name in settings.ADDONS_ARCHIVABLE:
messages[settings.ADDONS_ARCHIVABLE[name]]['addons'].add(addon.config.full_name)
else:
messages['none']['addons'].add(addon.config.full_name)
error_messages = list(errors.values())
prompts = [
m['message'].format(util.conjunct(m['addons']))
for m in messages.values() if m['addons']
]
if node.has_pointers_recursive:
prompts.append(
language.BEFORE_REGISTER_HAS_POINTERS.format(
category=node.project_or_component
)
)
return {
'prompts': prompts,
'errors': error_messages
}
def osf_admin_change_status_identifier(node):
if node.get_identifier_value('doi'):
node.request_identifier_update(category='doi')
def get_referent_by_identifier(category, value):
"""Look up identifier by `category` and `value` and redirect to its referent
if found.
"""
try:
identifier = Identifier.objects.get(category=category, value=value)
except Identifier.DoesNotExist:
raise HTTPError(http_status.HTTP_404_NOT_FOUND)
if identifier.referent.url:
return redirect(identifier.referent.url)
raise HTTPError(http_status.HTTP_404_NOT_FOUND)
@fail_archive_on_error
@must_be_signed
@must_be_registration
def registration_callbacks(node, payload, *args, **kwargs):
if payload.get('action', None) in DOWNLOAD_ACTIONS:
return {'status': 'success'}
errors = payload.get('errors')
src_provider = payload['source']['provider']
if errors:
node.archive_job.update_target(
src_provider,
ARCHIVER_FAILURE,
errors=errors,
)
else:
# Dataverse requires two seperate targets, one
# for draft files and one for published files
if src_provider == 'dataverse':
src_provider += '-' + (payload['destination']['name'].split(' ')[-1].lstrip('(').rstrip(')').strip())
node.archive_job.update_target(
src_provider,
ARCHIVER_SUCCESS,
)
project_signals.archive_callback.send(node)
| mfraezz/osf.io | website/project/views/register.py | Python | apache-2.0 | 10,365 | 0.002605 |
import LingerActions.LingerBaseAction as lingerActions
class StopProcessAndChildrenAction(lingerActions.LingerBaseAction):
"""Logging that there was a change in a file"""
def __init__(self, configuration):
super(StopProcessAndChildrenAction, self).__init__(configuration)
# Fields
self.process_adapter = self.configuration['process_adapter']
def get_process_adapter(self):
return self.get_adapter_by_uuid(self.process_adapter)
def act(self, configuration):
self.logger.debug("In Stop Children action")
self.get_process_adapter().stop_with_all_children()
class StopProcessAndChildrenActionFactory(lingerActions.LingerBaseActionFactory):
"""StopProcessAndChildrenActionFactory generates StopProcessAndChildrenAction instances"""
def __init__(self):
super(StopProcessAndChildrenActionFactory, self).__init__()
self.item = StopProcessAndChildrenAction
def get_instance_name(self):
return "StopProcessAndChildrenAction"
def get_fields(self):
fields, optional_fields = super(StopProcessAndChildrenActionFactory, self).get_fields()
fields += [('process_adapter','uuid')]
return (fields, optional_fields)
| GreenBlast/Linger | LingerActions/StopProcessAndChildrenAction.py | Python | mit | 1,242 | 0.005636 |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import os
import pwd
import re
import sys
import time
from twitter.common import app
from apache.thermos.cli.common import get_path_detector
from apache.thermos.common.ckpt import CheckpointDispatcher
from apache.thermos.monitoring.detector import TaskDetector
from gen.apache.thermos.ttypes import ProcessState, TaskState
@app.command
@app.command_option("--verbosity", default=0, dest='verbose', type='int',
help="Display more verbosity")
@app.command_option("--only", default=None, dest='only', type='choice',
choices=('active', 'finished'), help="Display only tasks of this type.")
def status(args, options):
"""Get the status of task(s).
Usage: thermos status [options] [task_name(s) or task_regexp(s)]
"""
path_detector = get_path_detector()
def format_task(detector, task_id):
checkpoint_filename = detector.get_checkpoint(task_id)
checkpoint_stat = os.stat(checkpoint_filename)
try:
checkpoint_owner = pwd.getpwuid(checkpoint_stat.st_uid).pw_name
except KeyError:
checkpoint_owner = 'uid:%s' % checkpoint_stat.st_uid
print(' %-20s [owner: %8s]' % (task_id, checkpoint_owner), end='')
if options.verbose == 0:
print()
if options.verbose > 0:
state = CheckpointDispatcher.from_file(checkpoint_filename)
if state is None or state.header is None:
print(' - checkpoint stream CORRUPT or outdated format')
return
print(' state: %8s' % TaskState._VALUES_TO_NAMES.get(state.statuses[-1].state, 'Unknown'),
end='')
print(' start: %25s' % time.asctime(time.localtime(state.header.launch_time_ms / 1000.0)))
if options.verbose > 1:
print(' user: %s' % state.header.user, end='')
if state.header.ports:
print(' ports: %s' % ' '.join('%s -> %s' % (key, val)
for key, val in state.header.ports.items()))
else:
print(' ports: None')
print(' sandbox: %s' % state.header.sandbox)
if options.verbose > 2:
print(' process table:')
for process, process_history in state.processes.items():
print(' - %s runs: %s' % (process, len(process_history)), end='')
last_run = process_history[-1]
print(' last: pid=%s, rc=%s, finish:%s, state:%s' % (
last_run.pid or 'None',
last_run.return_code if last_run.return_code is not None else '',
time.asctime(time.localtime(last_run.stop_time)) if last_run.stop_time else 'None',
ProcessState._VALUES_TO_NAMES.get(last_run.state, 'Unknown')))
print()
matchers = map(re.compile, args or ['.*'])
active = []
finished = []
for root in path_detector.get_paths():
detector = TaskDetector(root)
active.extend((detector, t_id) for _, t_id in detector.get_task_ids(state='active')
if any(pattern.match(t_id) for pattern in matchers))
finished.extend((detector, t_id)for _, t_id in detector.get_task_ids(state='finished')
if any(pattern.match(t_id) for pattern in matchers))
found = False
if options.only is None or options.only == 'active':
if active:
print('Active tasks:')
found = True
for detector, task_id in active:
format_task(detector, task_id)
print()
if options.only is None or options.only == 'finished':
if finished:
print('Finished tasks:')
found = True
for detector, task_id in finished:
format_task(detector, task_id)
print()
if not found:
print('No tasks found.')
sys.exit(1)
| protochron/aurora | src/main/python/apache/thermos/cli/commands/status.py | Python | apache-2.0 | 4,163 | 0.010569 |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
from common import *
import testdata
class oldstyle:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle(%s)" % self.value
def __add__(self, other): return self.value + other
def __sub__(self, other): return self.value - other
def __mul__(self, other): return self.value * other
def __div__(self, other): return self.value / other
def __floordiv__(self, other): return self.value // other
def __mod__(self, other): return self.value % other
def __divmod__(self, other): return divmod(self.value, other)
def __pow__(self, other): return self.value ** other
def __lshift__(self, other): return self.value << other
def __rshift__(self, other): return self.value >> other
def __and__(self, other): return self.value & other
def __xor__(self, other): return self.value ^ other
def __or__(self, other): return self.value | other
class oldstyle_reflect:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle_reflect(%s)" % self.value
def __radd__(self, other): return other + self.value
def __rsub__(self, other): return other - self.value
def __rmul__(self, other):
print("\toldstyle_reflect.__rmul__")
return other * self.value
def __rdiv__(self, other): return other / self.value
def __rfloordiv__(self, other): return other // self.value
def __rmod__(self, other): return other % self.value
def __rdivmod__(self, other): return divmod(other, self.value)
def __rpow__(self, other): return other ** self.value
def __rlshift__(self, other): return other << self.value
def __rrshift__(self, other): return other >> self.value
def __rand__(self, other): return self.value & other
def __rxor__(self, other): return self.value ^ other
def __ror__(self, other): return self.value | other
class oldstyle_inplace:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle_inplace(%s)" % self.value
def __iadd__(self, other): return self.value + other
def __isub__(self, other): return self.value - other
def __imul__(self, other): return self.value * other
def __idiv__(self, other): return self.value / other
def __ifloordiv__(self, other): return self.value // other
def __imod__(self, other): return self.value % other
def __idivmod__(self, other): return divmod(self.value, other)
def __ipow__(self, other): return self.value ** other
def __ilshift__(self, other): return self.value << other
def __irshift__(self, other): return self.value >> other
def __iand__(self, other): return self.value & other
def __ixor__(self, other): return self.value ^ other
def __ior__(self, other): return self.value | other
class oldstyle_notdefined:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle_notdefined(%s)" % self.value
class newstyle(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle(%s, %r)" % (self.value, type(self.value))
def __add__(self, other): return self.value + other
def __sub__(self, other): return self.value - other
def __mul__(self, other): return self.value * other
def __div__(self, other): return self.value / other
def __floordiv__(self, other): return self.value // other
def __mod__(self, other): return self.value % other
def __divmod__(self, other): return divmod(self.value, other)
def __pow__(self, other): return self.value ** other
def __lshift__(self, other): return self.value << other
def __rshift__(self, other): return self.value >> other
def __and__(self, other): return self.value & other
def __xor__(self, other): return self.value ^ other
def __or__(self, other): return self.value | other
class newstyle_reflect(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle_reflect(%s, %r)" % (self.value, type(self.value))
def __radd__(self, other): return other + self.value
def __rsub__(self, other): return other - self.value
def __rmul__(self, other):
print("\tnewstyle_reflect.__rmul__")
return other * self.value
def __rdiv__(self, other): return other / self.value
def __rfloordiv__(self, other): return other // self.value
def __rmod__(self, other): return other % self.value
def __rdivmod__(self, other): return divmod(other, self.value)
def __rpow__(self, other): return other ** self.value
def __rlshift__(self, other): return other << self.value
def __rrshift__(self, other): return other >> self.value
def __rand__(self, other): return self.value & other
def __rxor__(self, other): return self.value ^ other
def __ror__(self, other): return self.value | other
class newstyle_inplace(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle_inplace(%s, %r)" % (self.value, type(self.value))
def __iadd__(self, other): return self.value + other
def __isub__(self, other): return self.value - other
def __imul__(self, other): return self.value * other
def __idiv__(self, other): return self.value / other
def __ifloordiv__(self, other): return self.value // other
def __imod__(self, other): return self.value % other
def __idivmod__(self, other): return divmod(self.value, other)
def __ipow__(self, other): return self.value ** other
def __ilshift__(self, other): return self.value << other
def __irshift__(self, other): return self.value >> other
def __iand__(self, other): return self.value & other
def __ixor__(self, other): return self.value ^ other
def __ior__(self, other): return self.value | other
class newstyle_notdefined(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle_notdefined(%s, %r)" % (self.value, type(self.value))
import sys
class common(object):
def normal(self, leftc, rightc):
for a in leftc:
for b in rightc:
try:
printwith("case", a, "+", b, type(a), type(b))
printwithtype(a + b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "-", b, type(a), type(b))
printwithtype(a - b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "*", b, type(a), type(b))
printwithtype(a * b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "/", b, type(a), type(b))
printwithtype(a / b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "//", b, type(a), type(b))
printwithtype(a // b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "%", b, type(a), type(b))
printwithtype(a % b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "**", b, type(a), type(b))
printwithtype(a ** b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "<<", b, type(a), type(b))
printwithtype(a << b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, ">>", b, type(a), type(b))
printwithtype(a >> b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "&", b, type(a), type(b))
printwithtype(a & b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "^", b, type(a), type(b))
printwithtype(a ^ b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "|", b, type(a), type(b))
printwithtype(a | b)
except:
printwith("same", sys.exc_info()[0])
def clone_list(self, l):
l2 = []
for x in l:
if x is newstyle_inplace:
l2.append(newstyle_inplace(x.value))
elif x is oldstyle_inplace:
l2.append(oldstyle_inplace(x.value))
else :
l2.append(x)
return l2
def inplace(self, leftc, rightc):
rc = self.clone_list(rightc)
for b in rc:
lc = self.clone_list(leftc)
for a in lc:
try:
op = "+"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a += b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "-"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a -= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "*"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a *= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "//"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a //= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "%"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a %= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "**"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a **= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "<<"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a <<= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = ">>"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a >>= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "&"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a &= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "^"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a ^= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "|"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a |= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
class ops_simple(common):
def __init__(self):
self.collection = testdata.merge_lists(
[None],
testdata.list_bool,
testdata.list_int,
testdata.list_float,
testdata.list_long[:-1], # the last number is very long
testdata.list_complex,
testdata.list_myint,
testdata.list_myfloat,
testdata.list_mylong,
testdata.list_mycomplex,
testdata.get_Int64_Byte(),
)
self.collection_oldstyle = [oldstyle(x) for x in self.collection]
self.collection_oldstyle_reflect = [oldstyle_reflect(x) for x in self.collection]
self.collection_oldstyle_notdefined = [oldstyle_notdefined(x) for x in self.collection]
self.collection_newstyle = [newstyle(x) for x in self.collection]
self.collection_newstyle_reflect = [newstyle_reflect(x) for x in self.collection]
self.collection_newstyle_notdefined = [newstyle_notdefined(x) for x in self.collection]
self.collection_oldstyle_inplace = [oldstyle_inplace(x) for x in self.collection]
self.collection_newstyle_inplace = [newstyle_inplace(x) for x in self.collection]
def test_normal(self): super(ops_simple, self).normal(self.collection, self.collection)
def test_normal_oc_left(self): super(ops_simple, self).normal(self.collection_oldstyle, self.collection)
def test_normal_oc_right(self): super(ops_simple, self).normal(self.collection, self.collection_oldstyle)
def test_normal_nc_left(self): super(ops_simple, self).normal(self.collection_newstyle, self.collection)
def test_normal_nc_right(self): super(ops_simple, self).normal(self.collection, self.collection_newstyle)
def test_reflect_oc_right(self): super(ops_simple, self).normal(self.collection, self.collection_oldstyle_reflect)
def test_reflect_nc_right(self): super(ops_simple, self).normal(self.collection, self.collection_newstyle_reflect)
def test_oc_notdefined(self): super(ops_simple, self).normal(self.collection_oldstyle_notdefined, self.collection)
def test_nc_notdefined(self): super(ops_simple, self).normal(self.collection_newstyle_notdefined, self.collection)
def test_oc_notdefined_oc_reflect(self): super(ops_simple, self).normal(self.collection_oldstyle_notdefined, self.collection_oldstyle_reflect)
def test_nc_notdefined_nc_reflect(self): super(ops_simple, self).normal(self.collection_newstyle_notdefined, self.collection_newstyle_reflect)
def test_inplace(self): super(ops_simple, self).inplace(self.collection, self.collection)
def test_inplace_ol(self): super(ops_simple, self).inplace(self.collection_oldstyle_inplace, self.collection)
def test_inplace_nl(self): super(ops_simple, self).inplace(self.collection_newstyle_inplace, self.collection)
runtests(ops_simple)
| IronLanguages/ironpython3 | Tests/compat/sbs_simple_ops.py | Python | apache-2.0 | 17,059 | 0.013424 |
#!/usr/bin/env python
''' Debug & Test support for matplot to python conversion.
'''
import os
import numpy as np
from scipy.io import loadmat
def dmpdat(s, e):
""" Dump a data structure with its name & shape.
Params:
-------
s: str. The name of the structure
e: expression. An expression to dump. Implicitly assumes e is
array_like
"""
print("%s:" % s)
print(e)
print("%s.shape:" % s)
print(e.shape)
print("%s.dtype:" % s)
print(e.dtype)
print("-------------------------------------------")
def hbrk(msg=None):
if msg is not None:
print(msg)
exit(-1)
def brk(s, e):
""" Used for debugging, just break the script, dumping data.
"""
dmpdat(s, e)
exit(-1)
def chkdat(t, s, e, rtol=1e-05, atol=1e-08):
""" Check this matrix against data dumped by octave, with
given tolerance
"""
mat = loadmat(os.path.join('check_data', t, s) + '.mat')['ex']
is_equal = np.allclose(e, mat, rtol=rtol, atol=atol)
#is_equal = np.array_equal(e, mat)
print("%s:%s:iEqual=%d" % (t, s, is_equal))
if not is_equal:
dmpdat(s + '<python>', e)
dmpdat(s + '<matlab>', mat)
np.savetxt(os.path.join("check_data", t, s) + '_python_err', e)
np.savetxt(os.path.join("check_data", t, s) + '_matlab_err', mat)
print("FAILED check on expr: %s, signal: %s" % (s, t))
#hbrk()
return is_equal
| tooringanalytics/pyambiguity | m2py.py | Python | mit | 1,433 | 0.001396 |
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['RelativeDifference'] , ['PolyTrend'] , ['Seasonal_Minute'] , ['ARX'] ); | antoinecarme/pyaf | tests/model_control/detailed/transf_RelativeDifference/model_control_one_enabled_RelativeDifference_PolyTrend_Seasonal_Minute_ARX.py | Python | bsd-3-clause | 167 | 0.047904 |
from django import forms
from django.contrib.admin.widgets import AutocompleteSelect
from django.forms import ModelChoiceField
from django.test import TestCase, override_settings
from django.utils import translation
from .models import Album, Band
class AlbumForm(forms.ModelForm):
class Meta:
model = Album
fields = ['band', 'featuring']
widgets = {
'band': AutocompleteSelect(
Album._meta.get_field('band').remote_field,
attrs={'class': 'my-class'},
),
'featuring': AutocompleteSelect(
Album._meta.get_field('featuring').remote_field,
)
}
class NotRequiredBandForm(forms.Form):
band = ModelChoiceField(
queryset=Album.objects.all(),
widget=AutocompleteSelect(Album._meta.get_field('band').remote_field),
required=False,
)
class RequiredBandForm(forms.Form):
band = ModelChoiceField(
queryset=Album.objects.all(),
widget=AutocompleteSelect(Album._meta.get_field('band').remote_field),
required=True,
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AutocompleteMixinTests(TestCase):
empty_option = '<option value=""></option>'
maxDiff = 1000
def test_build_attrs(self):
form = AlbumForm()
attrs = form['band'].field.widget.get_context(name='my_field', value=None, attrs={})['widget']['attrs']
self.assertEqual(attrs, {
'class': 'my-classadmin-autocomplete',
'data-ajax--cache': 'true',
'data-ajax--type': 'GET',
'data-ajax--url': '/admin_widgets/band/autocomplete/',
'data-theme': 'admin-autocomplete',
'data-allow-clear': 'false',
'data-placeholder': ''
})
def test_build_attrs_not_required_field(self):
form = NotRequiredBandForm()
attrs = form['band'].field.widget.build_attrs({})
self.assertJSONEqual(attrs['data-allow-clear'], True)
def test_build_attrs_required_field(self):
form = RequiredBandForm()
attrs = form['band'].field.widget.build_attrs({})
self.assertJSONEqual(attrs['data-allow-clear'], False)
def test_get_url(self):
rel = Album._meta.get_field('band').remote_field
w = AutocompleteSelect(rel)
url = w.get_url()
self.assertEqual(url, '/admin_widgets/band/autocomplete/')
def test_render_options(self):
beatles = Band.objects.create(name='The Beatles', style='rock')
who = Band.objects.create(name='The Who', style='rock')
# With 'band', a ForeignKey.
form = AlbumForm(initial={'band': beatles.pk})
output = form.as_table()
selected_option = '<option value="%s" selected>The Beatles</option>' % beatles.pk
option = '<option value="%s">The Who</option>' % who.pk
self.assertIn(selected_option, output)
self.assertNotIn(option, output)
# With 'featuring', a ManyToManyField.
form = AlbumForm(initial={'featuring': [beatles.pk, who.pk]})
output = form.as_table()
selected_option = '<option value="%s" selected>The Beatles</option>' % beatles.pk
option = '<option value="%s" selected>The Who</option>' % who.pk
self.assertIn(selected_option, output)
self.assertIn(option, output)
def test_render_options_required_field(self):
"""Empty option is present if the field isn't required."""
form = NotRequiredBandForm()
output = form.as_table()
self.assertIn(self.empty_option, output)
def test_render_options_not_required_field(self):
"""Empty option isn't present if the field isn't required."""
form = RequiredBandForm()
output = form.as_table()
self.assertNotIn(self.empty_option, output)
def test_media(self):
rel = Album._meta.get_field('band').remote_field
base_files = (
'admin/js/vendor/jquery/jquery.min.js',
'admin/js/vendor/select2/select2.full.min.js',
# Language file is inserted here.
'admin/js/jquery.init.js',
'admin/js/autocomplete.js',
)
languages = (
('de', 'de'),
# Language with code 00 does not exist.
('00', None),
# Language files are case sensitive.
('sr-cyrl', 'sr-Cyrl'),
('zh-cn', 'zh-CN'),
)
for lang, select_lang in languages:
with self.subTest(lang=lang):
if select_lang:
expected_files = (
base_files[:2] +
(('admin/js/vendor/select2/i18n/%s.js' % select_lang),) +
base_files[2:]
)
else:
expected_files = base_files
with translation.override(lang):
self.assertEqual(AutocompleteSelect(rel).media._js, expected_files)
| edmorley/django | tests/admin_widgets/test_autocomplete_widget.py | Python | bsd-3-clause | 5,005 | 0.000999 |
"""HTTP related handlers.
Note that some other HTTP handlers live in more specific modules: _auth.py,
_gzip.py, etc.
Copyright 2002-2006 John J Lee <jjl@pobox.com>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
import time, htmlentitydefs, logging, \
fakesocket, urllib2, urllib, httplib, sgmllib
from urllib2 import URLError, HTTPError, BaseHandler
from cStringIO import StringIO
from _clientcookie import CookieJar
from _headersutil import is_html
from _html import unescape, unescape_charref
from _request import Request
from _response import closeable_response, response_seek_wrapper
import _rfc3986
import _sockettimeout
debug = logging.getLogger("mechanize").debug
debug_robots = logging.getLogger("mechanize.robots").debug
# monkeypatch urllib2.HTTPError to show URL
## def urllib2_str(self):
## return 'HTTP Error %s: %s (%s)' % (
## self.code, self.msg, self.geturl())
## urllib2.HTTPError.__str__ = urllib2_str
CHUNK = 1024 # size of chunks fed to HTML HEAD parser, in bytes
DEFAULT_ENCODING = 'latin-1'
#try:
# socket._fileobject("fake socket", close=True)
#except TypeError:
# python <= 2.4
# create_readline_wrapper = socket._fileobject
#else:
def create_readline_wrapper(fh):
return fakesocket._fileobject(fh, close=True)
# This adds "refresh" to the list of redirectables and provides a redirection
# algorithm that doesn't go into a loop in the presence of cookies
# (Python 2.4 has this new algorithm, 2.3 doesn't).
class HTTPRedirectHandler(BaseHandler):
# maximum number of redirections to any single URL
# this is needed because of the state that cookies introduce
max_repeats = 4
# maximum total number of redirections (regardless of URL) before
# assuming we're in a loop
max_redirections = 10
# Implementation notes:
# To avoid the server sending us into an infinite loop, the request
# object needs to track what URLs we have already seen. Do this by
# adding a handler-specific attribute to the Request object. The value
# of the dict is used to count the number of times the same URL has
# been visited. This is needed because visiting the same URL twice
# does not necessarily imply a loop, thanks to state introduced by
# cookies.
# Always unhandled redirection codes:
# 300 Multiple Choices: should not handle this here.
# 304 Not Modified: no need to handle here: only of interest to caches
# that do conditional GETs
# 305 Use Proxy: probably not worth dealing with here
# 306 Unused: what was this for in the previous versions of protocol??
def redirect_request(self, newurl, req, fp, code, msg, headers):
"""Return a Request or None in response to a redirect.
This is called by the http_error_30x methods when a redirection
response is received. If a redirection should take place, return a
new Request to allow http_error_30x to perform the redirect;
otherwise, return None to indicate that an HTTPError should be
raised.
"""
if code in (301, 302, 303, "refresh") or \
(code == 307 and not req.has_data()):
# Strictly (according to RFC 2616), 301 or 302 in response to
# a POST MUST NOT cause a redirection without confirmation
# from the user (of urllib2, in this case). In practice,
# essentially all clients do redirect in this case, so we do
# the same.
# XXX really refresh redirections should be visiting; tricky to
# fix, so this will wait until post-stable release
new = Request(newurl,
headers=req.headers,
origin_req_host=req.get_origin_req_host(),
unverifiable=True,
visit=False,
)
new._origin_req = getattr(req, "_origin_req", req)
return new
else:
raise HTTPError(req.get_full_url(), code, msg, headers, fp)
def http_error_302(self, req, fp, code, msg, headers):
# Some servers (incorrectly) return multiple Location headers
# (so probably same goes for URI). Use first header.
if headers.has_key('location'):
newurl = headers.getheaders('location')[0]
elif headers.has_key('uri'):
newurl = headers.getheaders('uri')[0]
else:
return
newurl = _rfc3986.clean_url(newurl, "latin-1")
newurl = _rfc3986.urljoin(req.get_full_url(), newurl)
# XXX Probably want to forget about the state of the current
# request, although that might interact poorly with other
# handlers that also use handler-specific request attributes
new = self.redirect_request(newurl, req, fp, code, msg, headers)
if new is None:
return
# loop detection
# .redirect_dict has a key url if url was previously visited.
if hasattr(req, 'redirect_dict'):
visited = new.redirect_dict = req.redirect_dict
if (visited.get(newurl, 0) >= self.max_repeats or
len(visited) >= self.max_redirections):
raise HTTPError(req.get_full_url(), code,
self.inf_msg + msg, headers, fp)
else:
visited = new.redirect_dict = req.redirect_dict = {}
visited[newurl] = visited.get(newurl, 0) + 1
# Don't close the fp until we are sure that we won't use it
# with HTTPError.
fp.read()
fp.close()
return self.parent.open(new)
http_error_301 = http_error_303 = http_error_307 = http_error_302
http_error_refresh = http_error_302
inf_msg = "The HTTP server returned a redirect error that would " \
"lead to an infinite loop.\n" \
"The last 30x error message was:\n"
# XXX would self.reset() work, instead of raising this exception?
class EndOfHeadError(Exception): pass
class AbstractHeadParser:
# only these elements are allowed in or before HEAD of document
head_elems = ("html", "head",
"title", "base",
"script", "style", "meta", "link", "object")
_entitydefs = htmlentitydefs.name2codepoint
_encoding = DEFAULT_ENCODING
def __init__(self):
self.http_equiv = []
def start_meta(self, attrs):
http_equiv = content = None
for key, value in attrs:
if key == "http-equiv":
http_equiv = self.unescape_attr_if_required(value)
elif key == "content":
content = self.unescape_attr_if_required(value)
if http_equiv is not None and content is not None:
self.http_equiv.append((http_equiv, content))
def end_head(self):
raise EndOfHeadError()
def handle_entityref(self, name):
#debug("%s", name)
self.handle_data(unescape(
'&%s;' % name, self._entitydefs, self._encoding))
def handle_charref(self, name):
#debug("%s", name)
self.handle_data(unescape_charref(name, self._encoding))
def unescape_attr(self, name):
#debug("%s", name)
return unescape(name, self._entitydefs, self._encoding)
def unescape_attrs(self, attrs):
#debug("%s", attrs)
escaped_attrs = {}
for key, val in attrs.items():
escaped_attrs[key] = self.unescape_attr(val)
return escaped_attrs
def unknown_entityref(self, ref):
self.handle_data("&%s;" % ref)
def unknown_charref(self, ref):
self.handle_data("&#%s;" % ref)
try:
import HTMLParser
except ImportError:
pass
else:
class XHTMLCompatibleHeadParser(AbstractHeadParser,
HTMLParser.HTMLParser):
def __init__(self):
HTMLParser.HTMLParser.__init__(self)
AbstractHeadParser.__init__(self)
def handle_starttag(self, tag, attrs):
if tag not in self.head_elems:
raise EndOfHeadError()
try:
method = getattr(self, 'start_' + tag)
except AttributeError:
try:
method = getattr(self, 'do_' + tag)
except AttributeError:
pass # unknown tag
else:
method(attrs)
else:
method(attrs)
def handle_endtag(self, tag):
if tag not in self.head_elems:
raise EndOfHeadError()
try:
method = getattr(self, 'end_' + tag)
except AttributeError:
pass # unknown tag
else:
method()
def unescape(self, name):
# Use the entitydefs passed into constructor, not
# HTMLParser.HTMLParser's entitydefs.
return self.unescape_attr(name)
def unescape_attr_if_required(self, name):
return name # HTMLParser.HTMLParser already did it
class HeadParser(AbstractHeadParser, sgmllib.SGMLParser):
def _not_called(self):
assert False
def __init__(self):
sgmllib.SGMLParser.__init__(self)
AbstractHeadParser.__init__(self)
def handle_starttag(self, tag, method, attrs):
if tag not in self.head_elems:
raise EndOfHeadError()
if tag == "meta":
method(attrs)
def unknown_starttag(self, tag, attrs):
self.handle_starttag(tag, self._not_called, attrs)
def handle_endtag(self, tag, method):
if tag in self.head_elems:
method()
else:
raise EndOfHeadError()
def unescape_attr_if_required(self, name):
return self.unescape_attr(name)
def parse_head(fileobj, parser):
"""Return a list of key, value pairs."""
while 1:
data = fileobj.read(CHUNK)
try:
parser.feed(data)
except EndOfHeadError:
break
if len(data) != CHUNK:
# this should only happen if there is no HTML body, or if
# CHUNK is big
break
return parser.http_equiv
class HTTPEquivProcessor(BaseHandler):
"""Append META HTTP-EQUIV headers to regular HTTP headers."""
handler_order = 300 # before handlers that look at HTTP headers
def __init__(self, head_parser_class=HeadParser,
i_want_broken_xhtml_support=False,
):
self.head_parser_class = head_parser_class
self._allow_xhtml = i_want_broken_xhtml_support
def http_response(self, request, response):
if not hasattr(response, "seek"):
response = response_seek_wrapper(response)
http_message = response.info()
url = response.geturl()
ct_hdrs = http_message.getheaders("content-type")
if is_html(ct_hdrs, url, self._allow_xhtml):
try:
try:
html_headers = parse_head(response,
self.head_parser_class())
finally:
response.seek(0)
except (HTMLParser.HTMLParseError,
sgmllib.SGMLParseError):
pass
else:
for hdr, val in html_headers:
# add a header
http_message.dict[hdr.lower()] = val
text = hdr + ": " + val
for line in text.split("\n"):
http_message.headers.append(line + "\n")
return response
https_response = http_response
class HTTPCookieProcessor(BaseHandler):
"""Handle HTTP cookies.
Public attributes:
cookiejar: CookieJar instance
"""
def __init__(self, cookiejar=None):
if cookiejar is None:
cookiejar = CookieJar()
self.cookiejar = cookiejar
def http_request(self, request):
self.cookiejar.add_cookie_header(request)
return request
def http_response(self, request, response):
self.cookiejar.extract_cookies(response, request)
return response
https_request = http_request
https_response = http_response
try:
import robotparser
except ImportError:
pass
else:
class MechanizeRobotFileParser(robotparser.RobotFileParser):
def __init__(self, url='', opener=None):
robotparser.RobotFileParser.__init__(self, url)
self._opener = opener
self._timeout = _sockettimeout._GLOBAL_DEFAULT_TIMEOUT
def set_opener(self, opener=None):
import _opener
if opener is None:
opener = _opener.OpenerDirector()
self._opener = opener
def set_timeout(self, timeout):
self._timeout = timeout
def read(self):
"""Reads the robots.txt URL and feeds it to the parser."""
if self._opener is None:
self.set_opener()
req = Request(self.url, unverifiable=True, visit=False,
timeout=self._timeout)
try:
f = self._opener.open(req)
except HTTPError, f:
pass
#except (IOError, socket.error, OSError), exc:
except (IOError, OSError), exc:
debug_robots("ignoring error opening %r: %s" %
(self.url, exc))
return
lines = []
line = f.readline()
while line:
lines.append(line.strip())
line = f.readline()
status = f.code
if status == 401 or status == 403:
self.disallow_all = True
debug_robots("disallow all")
elif status >= 400:
self.allow_all = True
debug_robots("allow all")
elif status == 200 and lines:
debug_robots("parse lines")
self.parse(lines)
class RobotExclusionError(urllib2.HTTPError):
def __init__(self, request, *args):
apply(urllib2.HTTPError.__init__, (self,)+args)
self.request = request
class HTTPRobotRulesProcessor(BaseHandler):
# before redirections, after everything else
handler_order = 800
try:
from httplib import HTTPMessage
except:
from mimetools import Message
http_response_class = Message
else:
http_response_class = HTTPMessage
def __init__(self, rfp_class=MechanizeRobotFileParser):
self.rfp_class = rfp_class
self.rfp = None
self._host = None
def http_request(self, request):
scheme = request.get_type()
if scheme not in ["http", "https"]:
# robots exclusion only applies to HTTP
return request
if request.get_selector() == "/robots.txt":
# /robots.txt is always OK to fetch
return request
host = request.get_host()
# robots.txt requests don't need to be allowed by robots.txt :-)
origin_req = getattr(request, "_origin_req", None)
if (origin_req is not None and
origin_req.get_selector() == "/robots.txt" and
origin_req.get_host() == host
):
return request
if host != self._host:
self.rfp = self.rfp_class()
try:
self.rfp.set_opener(self.parent)
except AttributeError:
debug("%r instance does not support set_opener" %
self.rfp.__class__)
self.rfp.set_url(scheme+"://"+host+"/robots.txt")
self.rfp.set_timeout(request.timeout)
self.rfp.read()
self._host = host
ua = request.get_header("User-agent", "")
if self.rfp.can_fetch(ua, request.get_full_url()):
return request
else:
# XXX This should really have raised URLError. Too late now...
msg = "request disallowed by robots.txt"
raise RobotExclusionError(
request,
request.get_full_url(),
403, msg,
self.http_response_class(StringIO()), StringIO(msg))
https_request = http_request
class HTTPRefererProcessor(BaseHandler):
"""Add Referer header to requests.
This only makes sense if you use each RefererProcessor for a single
chain of requests only (so, for example, if you use a single
HTTPRefererProcessor to fetch a series of URLs extracted from a single
page, this will break).
There's a proper implementation of this in mechanize.Browser.
"""
def __init__(self):
self.referer = None
def http_request(self, request):
if ((self.referer is not None) and
not request.has_header("Referer")):
request.add_unredirected_header("Referer", self.referer)
return request
def http_response(self, request, response):
self.referer = response.geturl()
return response
https_request = http_request
https_response = http_response
def clean_refresh_url(url):
# e.g. Firefox 1.5 does (something like) this
if ((url.startswith('"') and url.endswith('"')) or
(url.startswith("'") and url.endswith("'"))):
url = url[1:-1]
return _rfc3986.clean_url(url, "latin-1") # XXX encoding
def parse_refresh_header(refresh):
"""
>>> parse_refresh_header("1; url=http://example.com/")
(1.0, 'http://example.com/')
>>> parse_refresh_header("1; url='http://example.com/'")
(1.0, 'http://example.com/')
>>> parse_refresh_header("1")
(1.0, None)
>>> parse_refresh_header("blah")
Traceback (most recent call last):
ValueError: invalid literal for float(): blah
"""
ii = refresh.find(";")
if ii != -1:
pause, newurl_spec = float(refresh[:ii]), refresh[ii+1:]
jj = newurl_spec.find("=")
key = None
if jj != -1:
key, newurl = newurl_spec[:jj], newurl_spec[jj+1:]
newurl = clean_refresh_url(newurl)
if key is None or key.strip().lower() != "url":
raise ValueError()
else:
pause, newurl = float(refresh), None
return pause, newurl
class HTTPRefreshProcessor(BaseHandler):
"""Perform HTTP Refresh redirections.
Note that if a non-200 HTTP code has occurred (for example, a 30x
redirect), this processor will do nothing.
By default, only zero-time Refresh headers are redirected. Use the
max_time attribute / constructor argument to allow Refresh with longer
pauses. Use the honor_time attribute / constructor argument to control
whether the requested pause is honoured (with a time.sleep()) or
skipped in favour of immediate redirection.
Public attributes:
max_time: see above
honor_time: see above
"""
handler_order = 1000
def __init__(self, max_time=0, honor_time=True):
self.max_time = max_time
self.honor_time = honor_time
self._sleep = time.sleep
def http_response(self, request, response):
code, msg, hdrs = response.code, response.msg, response.info()
if code == 200 and hdrs.has_key("refresh"):
refresh = hdrs.getheaders("refresh")[0]
try:
pause, newurl = parse_refresh_header(refresh)
except ValueError:
debug("bad Refresh header: %r" % refresh)
return response
if newurl is None:
newurl = response.geturl()
if (self.max_time is None) or (pause <= self.max_time):
if pause > 1E-3 and self.honor_time:
self._sleep(pause)
hdrs["location"] = newurl
# hardcoded http is NOT a bug
response = self.parent.error(
"http", request, response,
"refresh", msg, hdrs)
else:
debug("Refresh header ignored: %r" % refresh)
return response
https_response = http_response
class HTTPErrorProcessor(BaseHandler):
"""Process HTTP error responses.
The purpose of this handler is to to allow other response processors a
look-in by removing the call to parent.error() from
AbstractHTTPHandler.
For non-200 error codes, this just passes the job on to the
Handler.<proto>_error_<code> methods, via the OpenerDirector.error
method. Eventually, urllib2.HTTPDefaultErrorHandler will raise an
HTTPError if no other handler handles the error.
"""
handler_order = 1000 # after all other processors
def http_response(self, request, response):
code, msg, hdrs = response.code, response.msg, response.info()
if code != 200:
# hardcoded http is NOT a bug
response = self.parent.error(
"http", request, response, code, msg, hdrs)
return response
https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
def http_error_default(self, req, fp, code, msg, hdrs):
# why these error methods took the code, msg, headers args in the first
# place rather than a response object, I don't know, but to avoid
# multiple wrapping, we're discarding them
if isinstance(fp, urllib2.HTTPError):
response = fp
else:
response = urllib2.HTTPError(
req.get_full_url(), code, msg, hdrs, fp)
assert code == response.code
assert msg == response.msg
assert hdrs == response.hdrs
raise response
class AbstractHTTPHandler(BaseHandler):
def __init__(self, debuglevel=0):
self._debuglevel = debuglevel
def set_http_debuglevel(self, level):
self._debuglevel = level
def do_request_(self, request):
host = request.get_host()
if not host:
raise URLError('no host given')
if request.has_data(): # POST
data = request.get_data()
if not request.has_header('Content-type'):
request.add_unredirected_header(
'Content-type',
'application/x-www-form-urlencoded')
if not request.has_header('Content-length'):
request.add_unredirected_header(
'Content-length', '%d' % len(data))
scheme, sel = urllib.splittype(request.get_selector())
sel_host, sel_path = urllib.splithost(sel)
if not request.has_header('Host'):
request.add_unredirected_header('Host', sel_host or host)
for name, value in self.parent.addheaders:
name = name.capitalize()
if not request.has_header(name):
request.add_unredirected_header(name, value)
return request
def do_open(self, http_class, req):
"""Return an addinfourl object for the request, using http_class.
http_class must implement the HTTPConnection API from httplib.
The addinfourl return value is a file-like object. It also
has methods and attributes including:
- info(): return a mimetools.Message object for the headers
- geturl(): return the original request URL
- code: HTTP status code
"""
host_port = req.get_host()
if not host_port:
raise URLError('no host given')
try:
h = http_class(host_port, timeout=req.timeout)
except TypeError:
# Python < 2.6, no per-connection timeout support
h = http_class(host_port)
h.set_debuglevel(self._debuglevel)
headers = dict(req.headers)
headers.update(req.unredirected_hdrs)
# We want to make an HTTP/1.1 request, but the addinfourl
# class isn't prepared to deal with a persistent connection.
# It will try to read all remaining data from the socket,
# which will block while the server waits for the next request.
# So make sure the connection gets closed after the (only)
# request.
headers["Connection"] = "close"
headers = dict(
[(name.title(), val) for name, val in headers.items()])
try:
h.request(req.get_method(), req.get_selector(), req.data, headers)
r = h.getresponse()
#except socket.error, err: # XXX what error?
except (Exception), err: # XXX what error?
raise URLError(err)
# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.
# Wrap the HTTPResponse object in socket's file object adapter
# for Windows. That adapter calls recv(), so delegate recv()
# to read(). This weird wrapping allows the returned object to
# have readline() and readlines() methods.
# XXX It might be better to extract the read buffering code
# out of socket._fileobject() and into a base class.
r.recv = r.read
fp = create_readline_wrapper(r)
resp = closeable_response(fp, r.msg, req.get_full_url(),
r.status, r.reason)
return resp
class HTTPHandler(AbstractHTTPHandler):
def http_open(self, req):
return self.do_open(httplib.HTTPConnection, req)
http_request = AbstractHTTPHandler.do_request_
if hasattr(httplib, 'HTTPS'):
class HTTPSConnectionFactory:
def __init__(self, key_file, cert_file):
self._key_file = key_file
self._cert_file = cert_file
def __call__(self, hostport):
return httplib.HTTPSConnection(
hostport,
key_file=self._key_file, cert_file=self._cert_file)
class HTTPSHandler(AbstractHTTPHandler):
def __init__(self, client_cert_manager=None):
AbstractHTTPHandler.__init__(self)
self.client_cert_manager = client_cert_manager
def https_open(self, req):
if self.client_cert_manager is not None:
key_file, cert_file = self.client_cert_manager.find_key_cert(
req.get_full_url())
conn_factory = HTTPSConnectionFactory(key_file, cert_file)
else:
conn_factory = httplib.HTTPSConnection
return self.do_open(conn_factory, req)
https_request = AbstractHTTPHandler.do_request_
| Andrew-Dickinson/FantasyFRC | customMechanize/_googleappengine.py | Python | gpl-2.0 | 26,831 | 0.001565 |
BADGES = [
{
'badge_id': 'tech',
'img_path': '/static/img/badges/wrench.svg',
'name': 'Junction Technical Staff',
'description': 'Actively serves on Junction staff',
'priority': 2000
},
{
'badge_id': 'staff',
'img_path': '/static/img/badges/award_fill.svg',
'name': 'Junction Staff',
'description': 'Actively serves on Junction staff',
'priority': 1000
},
{
'badge_id': 'staff_emeritus',
'img_path': '/static/img/badges/heart_fill.svg',
'name': 'Staff Emeritus',
'description': 'Served on Junction staff once',
'priority': 0
},
{
'badge_id': 'butts',
'img_path': '/static/img/badges/heart_stroke.svg',
'name': 'BUTTS',
'description': 'Butts are nice',
'priority': 0
}
]
| JunctionAt/JunctionWWW | config/badges.py | Python | agpl-3.0 | 862 | 0 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from ..extern.six.moves.urllib.parse import parse_qs
from ..extern.six.moves.urllib.request import urlopen
from ..extern.six.moves import input
from ..utils.data import get_pkg_data_contents
from .standard_profile import (SAMPSimpleXMLRPCRequestHandler,
ThreadingXMLRPCServer)
__all__ = []
CROSS_DOMAIN = get_pkg_data_contents('data/crossdomain.xml')
CLIENT_ACCESS_POLICY = get_pkg_data_contents('data/clientaccesspolicy.xml')
class WebProfileRequestHandler(SAMPSimpleXMLRPCRequestHandler):
"""
Handler of XMLRPC requests performed through the Web Profile.
"""
def _send_CORS_header(self):
if self.headers.get('Origin') is not None:
method = self.headers.get('Access-Control-Request-Method')
if method and self.command == "OPTIONS":
# Preflight method
self.send_header('Content-Length', '0')
self.send_header('Access-Control-Allow-Origin',
self.headers.get('Origin'))
self.send_header('Access-Control-Allow-Methods', method)
self.send_header('Access-Control-Allow-Headers', 'Content-Type')
self.send_header('Access-Control-Allow-Credentials', 'true')
else:
# Simple method
self.send_header('Access-Control-Allow-Origin',
self.headers.get('Origin'))
self.send_header('Access-Control-Allow-Headers', 'Content-Type')
self.send_header('Access-Control-Allow-Credentials', 'true')
def end_headers(self):
self._send_CORS_header()
SAMPSimpleXMLRPCRequestHandler.end_headers(self)
def _serve_cross_domain_xml(self):
cross_domain = False
if self.path == "/crossdomain.xml":
# Adobe standard
response = CROSS_DOMAIN
self.send_response(200, 'OK')
self.send_header('Content-Type', 'text/x-cross-domain-policy')
self.send_header("Content-Length", "{0}".format(len(response)))
self.end_headers()
self.wfile.write(response.encode('utf-8'))
self.wfile.flush()
cross_domain = True
elif self.path == "/clientaccesspolicy.xml":
# Microsoft standard
response = CLIENT_ACCESS_POLICY
self.send_response(200, 'OK')
self.send_header('Content-Type', 'text/xml')
self.send_header("Content-Length", "{0}".format(len(response)))
self.end_headers()
self.wfile.write(response.encode('utf-8'))
self.wfile.flush()
cross_domain = True
return cross_domain
def do_POST(self):
if self._serve_cross_domain_xml():
return
return SAMPSimpleXMLRPCRequestHandler.do_POST(self)
def do_HEAD(self):
if not self.is_http_path_valid():
self.report_404()
return
if self._serve_cross_domain_xml():
return
def do_OPTIONS(self):
self.send_response(200, 'OK')
self.end_headers()
def do_GET(self):
if not self.is_http_path_valid():
self.report_404()
return
split_path = self.path.split('?')
if split_path[0] in ['/translator/{}'.format(clid) for clid in self.server.clients]:
# Request of a file proxying
urlpath = parse_qs(split_path[1])
try:
proxyfile = urlopen(urlpath["ref"][0])
self.send_response(200, 'OK')
self.end_headers()
self.wfile.write(proxyfile.read())
proxyfile.close()
except IOError:
self.report_404()
return
if self._serve_cross_domain_xml():
return
def is_http_path_valid(self):
valid_paths = (["/clientaccesspolicy.xml", "/crossdomain.xml"] +
['/translator/{}'.format(clid) for clid in self.server.clients])
return self.path.split('?')[0] in valid_paths
class WebProfileXMLRPCServer(ThreadingXMLRPCServer):
"""
XMLRPC server supporting the SAMP Web Profile.
"""
def __init__(self, addr, log=None, requestHandler=WebProfileRequestHandler,
logRequests=True, allow_none=True, encoding=None):
self.clients = []
ThreadingXMLRPCServer.__init__(self, addr, log, requestHandler,
logRequests, allow_none, encoding)
def add_client(self, client_id):
self.clients.append(client_id)
def remove_client(self, client_id):
try:
self.clients.remove(client_id)
except ValueError:
# No warning here because this method gets called for all clients,
# not just web clients, and we expect it to fail for non-web
# clients.
pass
def web_profile_text_dialog(request, queue):
samp_name = "unknown"
if isinstance(request[0], str):
# To support the old protocol version
samp_name = request[0]
else:
samp_name = request[0]["samp.name"]
text = \
"""A Web application which declares to be
Name: {}
Origin: {}
is requesting to be registered with the SAMP Hub.
Pay attention that if you permit its registration, such
application will acquire all current user privileges, like
file read/write.
Do you give your consent? [yes|no]""".format(samp_name, request[2])
print(text)
answer = input(">>> ")
queue.put(answer.lower() in ["yes", "y"])
| AustereCuriosity/astropy | astropy/samp/web_profile.py | Python | bsd-3-clause | 5,804 | 0.000689 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2009, Gustavo Narea <me@gustavonarea.net>.
# All Rights Reserved.
#
# This software is subject to the provisions of the BSD-like license at
# http://www.repoze.org/LICENSE.txt. A copy of the license should accompany
# this distribution. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL
# EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND
# FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Fixture collection for the test suite.""" | jokajak/itweb | data/env/lib/python2.6/site-packages/repoze.who_testutil-1.0.1-py2.6.egg/tests/fixture/__init__.py | Python | gpl-3.0 | 729 | 0.001372 |
import os, sys
PATH = os.path.join(os.path.dirname(__file__), '..')
sys.path += [
os.path.join(PATH, 'project/apps'),
os.path.join(PATH, 'project'),
os.path.join(PATH, '..'),
PATH]
os.environ['DJANGO_SETTINGS_MODULE'] = 'project.settings.production'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
| claudiob/pypeton | pypeton/files/django/deploy/django_wsgi_production.py | Python | mit | 344 | 0.026163 |
#!/usr/bin/env python3
from framework import do_exit, get_globals, main
def do_work():
global g_test_import
global globals1
print("do_work")
globals1 = get_globals()
g_test_import = globals1["g_test_import"]
print("do_work: g_test_import = %s" % str(g_test_import))
main(do_work)
| jtraver/dev | python3/globals/test1.py | Python | mit | 308 | 0.003247 |
from __future__ import print_function
from numpy import int16
import time
def connect(route,**args):
'''
route can either be I.I2C , or a radioLink instance
'''
return SHT21(route,**args)
class SHT21():
RESET = 0xFE
TEMP_ADDRESS = 0xF3
HUMIDITY_ADDRESS = 0xF5
selected=0xF3
NUMPLOTS=1
PLOTNAMES = ['Data']
ADDRESS = 0x40
name = 'Humidity/Temperature'
def __init__(self,I2C,**args):
self.I2C=I2C
self.ADDRESS = args.get('address',self.ADDRESS)
self.name = 'Humidity/Temperature'
'''
try:
print ('switching baud to 400k')
self.I2C.configI2C(400e3)
except:
print ('FAILED TO CHANGE BAUD RATE')
'''
self.params={'selectParameter':['temperature','humidity']}
self.init('')
def init(self,x):
self.I2C.writeBulk(self.ADDRESS,[self.RESET]) #soft reset
time.sleep(0.1)
def rawToTemp(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=175.72; v/= (1<<16); v-=46.85
return [v]
return False
def rawToRH(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=125.; v/= (1<<16); v-=6
return [v]
return False
@staticmethod
def _calculate_checksum(data, number_of_bytes):
"""5.7 CRC Checksum using the polynomial given in the datasheet
Credits: https://github.com/jaques/sht21_python/blob/master/sht21.py
"""
# CRC
POLYNOMIAL = 0x131 # //P(x)=x^8+x^5+x^4+1 = 100110001
crc = 0
# calculates 8-Bit checksum with given polynomial
for byteCtr in range(number_of_bytes):
crc ^= (data[byteCtr])
for bit in range(8, 0, -1):
if crc & 0x80:
crc = (crc << 1) ^ POLYNOMIAL
else:
crc = (crc << 1)
return crc
def selectParameter(self,param):
if param=='temperature':self.selected=self.TEMP_ADDRESS
elif param=='humidity':self.selected=self.HUMIDITY_ADDRESS
def getRaw(self):
self.I2C.writeBulk(self.ADDRESS,[self.selected])
if self.selected==self.TEMP_ADDRESS:time.sleep(0.1)
elif self.selected==self.HUMIDITY_ADDRESS:time.sleep(0.05)
vals = self.I2C.simpleRead(self.ADDRESS,3)
if vals:
if self._calculate_checksum(vals,2)!=vals[2]:
return False
print (vals)
if self.selected==self.TEMP_ADDRESS:return self.rawToTemp(vals)
elif self.selected==self.HUMIDITY_ADDRESS:return self.rawToRH(vals)
| jithinbp/SEELablet | SEEL/SENSORS/SHT21.py | Python | gpl-3.0 | 2,349 | 0.063857 |
# dialog_add.py.in - dialog to add a new repository
#
# Copyright (c) 2004-2005 Canonical
# 2005 Michiel Sikkes
#
# Authors:
# Michael Vogt <mvo@debian.org>
# Michiel Sikkes <michiels@gnome.org>
# Sebastian Heinlein <glatzor@ubuntu.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
import os
from gi.repository import Gtk
from gettext import gettext as _
from aptsources.sourceslist import SourceEntry
from softwareproperties.gtk.utils import *
class DialogAdd:
def __init__(self, parent, sourceslist, datadir, distro):
"""
Initialize the dialog that allows to add a new source entering the
raw apt line
"""
self.sourceslist = sourceslist
self.parent = parent
self.datadir = datadir
# gtk stuff
setup_ui(self, os.path.join(datadir, "gtkbuilder", "dialog-add.ui"), domain="software-properties")
self.dialog = self.dialog_add_custom
self.dialog.set_transient_for(self.parent)
self.entry = self.entry_source_line
self.button_add = self.button_add_source
self.entry.connect("changed", self.check_line)
# Create an example deb line from the currently used distro
if distro:
example = "%s %s %s %s" % (distro.binary_type,
distro.source_template.base_uri,
distro.codename,
distro.source_template.components[0].name)
else:
example = "deb http://ftp.debian.org sarge main"
# L10N: the example is of the format: deb http://ftp.debian.org sarge main
msg = _("The APT line includes the type, location and components of a "
"repository, for example '%s'.") % ("<i>%s</i>" % example)
self.label_example_line.set_label(msg)
def run(self):
res = self.dialog.run()
self.dialog.hide()
if res == Gtk.ResponseType.OK:
line = self.entry.get_text() + "\n"
else:
line = None
return line
def check_line(self, *args):
"""
Check for a valid apt line and set the sensitiveness of the
button 'add' accordingly
"""
line = self.entry.get_text() + "\n"
if line.startswith("ppa:"):
self.button_add.set_sensitive(True)
return
source_entry = SourceEntry(line)
if source_entry.invalid == True or source_entry.disabled == True:
self.button_add.set_sensitive(False)
else:
self.button_add.set_sensitive(True)
| ruibarreira/linuxtrail | usr/lib/python3/dist-packages/softwareproperties/gtk/DialogAdd.py | Python | gpl-3.0 | 3,128 | 0.005754 |
# proxy module
from codetools.util.equivalence import *
| enthought/etsproxy | enthought/util/equivalence.py | Python | bsd-3-clause | 56 | 0 |
def test_default_log(creplay, testlog):
ret = creplay('echo', 'foo', creplay_args=[], cwd=testlog.dirname)
assert ret.success
assert ret.stdout == 'foo\n'
assert ret.stderr == ''
def test_echo_n(creplay, logfile):
ret = creplay('echo', '-n', 'foo')
assert ret.success
assert ret.stdout == 'foo'
assert ret.stderr == ''
def test_err(creplay, logfile):
ret = creplay('foo', 'bar')
assert not ret.success
assert ret.stdout == ''
assert ret.stderr == 'Error\n'
def test_order(creplay, tmpdir, logfile):
ret = creplay('./script.py')
assert ret.success
assert ret.stdout == 'foo\n123baz\n'
assert ret.stderr == 'bar\n'
def test_record_replay(crecord, tmpdir, logfile, testlog):
ret = crecord('creplay', '-l', testlog.strpath, 'foo')
assert ret.success
assert ret.stdout == 'foo\nbaz\n'
assert ret.stderr == 'bar\n'
lines = set(logfile.read().split('\n')[1:-1])
# Unfortunately the order can get messed up.
assert lines == {'> foo', '! bar', '> baz', '= 0'}
| kvas-it/cli-mock | tests/test_creplay.py | Python | mit | 1,051 | 0 |
import os, json
iflnm = os.path.join('..','psiturk-rg-cont','trialdata.csv')
oflnm = "rawdata.csv"
with open(iflnm, 'rU') as ifl, open(oflnm, 'w') as ofl:
ofl.write('WID,Condition,Trial,TrialBase,Class,ContainmentType,ContainmentLevel,TrialNum,MotionDirection,Response,RT,Goal,Switched,RawResponse,WasBad\n')
for rln in ifl:
rln = rln.strip('\n')
wid, _, _, rdat = rln.split(',',3)
dat = json.loads(rdat.strip("\"'").replace("\"\"", "\""))
if isinstance(dat[5], bool):
trnm, order, rt, rawresp, mottype, wassw, score, realgoal, wasbad, cond = dat
trspl = trnm.split('_')
dowrite = True
trbase = trspl[0] + '_' + trspl[1]
tnum = trspl[1]
if trspl[0] == 'regular':
trclass = "regular"
conttype = "NA"
contlevel = "NA"
else:
trclass = "contained"
conttype = trspl[0]
contlevel = trspl[2]
if not wassw:
wassw = "False"
if rawresp == 201:
actresp = "R"
normresp = "R"
elif rawresp == 202:
actresp = "G"
normresp = "G"
elif rawresp == 299:
actresp = "NA"
normresp = "NA"
else:
dowrite = False
if realgoal == 201:
rg = "R"
elif realgoal == 202:
rg = "G"
else:
dowrite = False
else:
wassw = "True"
if rawresp == 201:
actresp = "R"
normresp = "G"
elif rawresp == 202:
actresp = "G"
normresp = "R"
elif rawresp == 299:
actresp = "NA"
normresp = "NA"
else:
dowrite = False
if realgoal == 201:
rg = "G"
elif realgoal == 202:
rg = "R"
else:
dowrite = False
if mottype == 1:
mot = 'Fwd'
elif mottype == 0:
mot = 'None'
else:
mot = 'Rev'
if wasbad:
wb = 'True'
else:
wb = 'False'
if dowrite:
ofl.write(wid + ',' + str(cond) + ',' + trnm + ',' + trbase + ',' + trclass + ',' + conttype + ',' + contlevel + ',' + tnum + ',')
ofl.write(mot + ',' + normresp + ',' + str(rt) + ',' + rg + ',' + wassw + ',' + actresp + ',' + wb + '\n')
| kasmith/cbmm-project-christmas | ContainmentAnalysis/parseData.py | Python | mit | 2,805 | 0.003565 |
"""
stubo
~~~~~
Stub-O-Matic - Enable automated testing by mastering system dependencies.
Use when reality is simply not good enough.
:copyright: (c) 2015 by OpenCredo.
:license: GPLv3, see LICENSE for more details.
"""
import os
import sys
version = "0.8.3"
version_info = tuple(version.split('.'))
def stubo_path():
# Find folder that this module is contained in
module = sys.modules[__name__]
return os.path.dirname(os.path.abspath(module.__file__))
def static_path(*args):
return os.path.join(stubo_path(), 'static', *args)
| rusenask/mirage | stubo/__init__.py | Python | gpl-3.0 | 585 | 0.005128 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Pyle makes it easy to use Python as a replacement for command line tools such as `sed` or `perl`.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future import standard_library
standard_library.install_aliases()
from future.utils import string_types
import argparse
import io
import re
import sh
import sys
import traceback
__version__ = "0.4.1"
STANDARD_MODULES = {
're': re,
'sh': sh
}
def truncate_ellipsis(line, length=30):
"""Truncate a line to the specified length followed by ``...`` unless its shorter than length already."""
return line if len(line) < length else line[:length - 3] + "..."
def pyle_evaluate(expressions=None, modules=(), inplace=False, files=None, print_traceback=False):
"""The main method of pyle."""
eval_globals = {}
eval_globals.update(STANDARD_MODULES)
for module_arg in modules or ():
for module in module_arg.strip().split(","):
module = module.strip()
if module:
eval_globals[module] = __import__(module)
if not expressions:
# Default 'do nothing' program
expressions = ['line']
encoding = sys.getdefaultencoding()
files = files or ['-']
eval_locals = {}
for file in files:
if file == '-':
file = sys.stdin
out_buf = sys.stdout if not inplace else io.StringIO()
out_line = None
with (io.open(file, 'r', encoding=encoding) if not hasattr(file, 'read') else file) as in_file:
for num, line in enumerate(in_file.readlines()):
was_whole_line = False
if line[-1] == '\n':
was_whole_line = True
line = line[:-1]
expr = ""
try:
for expr in expressions:
words = [word.strip()
for word in re.split(r'\s+', line)
if word]
eval_locals.update({
'line': line,
'words': words,
'filename': in_file.name,
'num': num
})
out_line = eval(expr, eval_globals, eval_locals)
if out_line is None:
continue
# If the result is something list-like or iterable,
# output each item space separated.
if not isinstance(out_line, string_types):
try:
out_line = u' '.join(str(part) for part in out_line)
except:
# Guess it wasn't a list after all.
out_line = str(out_line)
line = out_line
except Exception as e:
sys.stdout.flush()
sys.stderr.write("At %s:%d ('%s'): `%s`: %s\n" % (
in_file.name, num, truncate_ellipsis(line), expr, e))
if print_traceback:
traceback.print_exc(None, sys.stderr)
else:
if out_line is None:
continue
out_line = out_line or u''
out_buf.write(out_line)
if was_whole_line:
out_buf.write('\n')
if inplace:
with io.open(file, 'w', encoding=encoding) as out_file:
out_file.write(out_buf.getvalue())
out_buf.close()
def pyle(argv=None):
"""Execute pyle with the specified arguments, or sys.argv if no arguments specified."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("-m", "--modules", dest="modules", action='append',
help="import MODULE before evaluation. May be specified more than once.")
parser.add_argument("-i", "--inplace", dest="inplace", action='store_true', default=False,
help="edit files in place. When used with file name arguments, the files will be replaced by the output of the evaluation")
parser.add_argument("-e", "--expression", action="append",
dest="expressions", help="an expression to evaluate for each line")
parser.add_argument('files', nargs='*',
help="files to read as input. If used with --inplace, the files will be replaced with the output")
parser.add_argument("--traceback", action="store_true", default=False,
help="print a traceback on stderr when an expression fails for a line")
args = parser.parse_args() if not argv else parser.parse_args(argv)
pyle_evaluate(args.expressions, args.modules, args.inplace, args.files, args.traceback)
if __name__ == '__main__':
pyle()
| aljungberg/pyle | pyle.py | Python | bsd-3-clause | 5,025 | 0.004776 |
from operator import itemgetter
from lxml import html
from fuzzywuzzy import fuzz
from helpers.utils import cached_request, thread_pool, replace_all
class StreamsApi:
def __init__(self, data, cache):
self.data = data
self.cache = cache
def get(self, url='', ttl=3600):
base_url = 'livefootballol.me'
response = cached_request(url=url, cache=self.cache, base_url=base_url, ttl=ttl)
try:
response = html.fromstring(response)
except TypeError:
response = None
return response
def get_channels_pages(self):
data = self.get('channels')
items = ['channels']
if data is not None:
for page in data.xpath('//div[@id="system"]//div[@class="pagination"]//a[@class=""]'):
items.append(page.get('href'))
return items
def get_channels_page_links(self, url):
data = self.get(url)
items = []
if data is not None:
for channel in data.xpath('//div[@id="system"]//table//a[contains(@href, "acestream")]'):
items.append(channel.get('href'))
return items
def get_channels_links(self):
pages = self.get_channels_pages()
items = thread_pool(self.get_channels_page_links, pages)
return items
def get_channel_details(self, url):
data = self.get(url)
items = []
if data is None:
return items
try:
root = data.xpath('//div[@id="system"]//table')[0]
name = root.xpath('.//td[text()="Name"]//following-sibling::td[1]')[0]
lang = root.xpath('.//td[text()="Language"]//following-sibling::td[1]')[0]
rate = root.xpath('.//td[text()="Bitrate"]//following-sibling::td[1]')[0]
strm = root.xpath('.//a[starts-with(@href, "acestream:")]')
name = name.text_content().strip()
lang = lang.text_content().strip()
rate = rate.text_content().strip()
name = self.parse_name(name)
lang = 'Unknown' if lang == '' or lang.isdigit() else lang
lang = 'Bulgarian' if lang == 'Bulgaria' else lang
rate = 0 if rate == '' else int(rate.replace('Kbps', ''))
channel = { 'name': name, 'language': lang.title() }
stream = { 'rate': rate, 'language': lang[:3].upper(), 'url': None, 'hd_url': None, 'host': 'Acestream' }
for link in strm:
href = link.get('href')
text = link.getparent().text_content()
if 'HD' in text:
stream['hd_url'] = href
else:
stream['url'] = href
if stream['url'] is not None and lang != 'Unknown':
items.append({ 'channel': channel, 'stream': stream })
except (IndexError, ValueError):
pass
return items
def get_channels(self):
links = self.get_channels_links()
items = thread_pool(self.get_channel_details, links)
return items
def save_channels(self):
data = self.get_channels()
items = []
for item in data:
stream = item['stream']
channel = self.data.set_single('channel', item['channel'], 'name')
ch_id = "%s_%s" % (channel.id, stream['host'].lower())
stream.update({ 'channel': channel.id, 'ch_id': ch_id })
items.append(stream)
self.data.set_multiple('stream', items, 'ch_id')
def get_events_page(self):
data = self.get()
page = None
if data is not None:
link = data.xpath('//div[@id="system"]//a[starts-with(@href, "/live-football")]')
page = link[0].get('href') if len(link) else None
return page
def get_events_page_links(self):
link = self.get_events_page()
data = self.get(url=link, ttl=120)
items = []
if data is not None:
for link in data.xpath('//div[@id="system"]//list[1]//a[contains(@href, "/streaming/")]'):
items.append(link.get('href'))
return items
def get_event_channels(self, url):
data = self.get(url=url, ttl=60)
items = []
if data is None:
return items
try:
root = data.xpath('//div[@id="system"]//table')[0]
comp = root.xpath('.//td[text()="Competition"]//following-sibling::td[1]')[0]
team = root.xpath('.//td[text()="Match"]//following-sibling::td[1]')[0]
comp = comp.text_content().strip()
team = team.text_content().strip().split('-')
home = team[0].strip()
away = team[1].strip()
event = { 'competition': comp, 'home': home, 'away': away }
chann = []
for link in data.xpath('//div[@id="system"]//a[contains(@href, "/channels/")]'):
name = link.text_content()
name = self.parse_name(name)
chann.append(name)
if chann:
items.append({ 'event': event, 'channels': chann })
except (IndexError, ValueError):
pass
return items
def get_events(self):
links = self.get_events_page_links()
items = thread_pool(self.get_event_channels, links)
return items
def save_events(self):
fixtures = self.data.load_fixtures(today_only=True)
events = self.get_events()
items = []
for fixture in fixtures:
channels = self.get_fixture_channels(events, fixture)
streams = self.data.get_multiple('stream', 'channel', channels)
for stream in streams:
items.append({
'fs_id': "%s_%s" % (fixture.id, stream.id),
'fixture': fixture.id,
'stream': stream
})
self.data.set_multiple('event', items, 'fs_id')
def get_fixture_channels(self, events, fixture):
chann = []
items = []
for item in events:
evnt = item['event']
comp = fuzz.ratio(fixture.competition.name, evnt['competition'])
home = fuzz.ratio(fixture.home_team.name, evnt['home'])
away = fuzz.ratio(fixture.away_team.name, evnt['away'])
comb = (comp + home + away) / 3
items.append({ 'ratio': comb, 'channels': item['channels'] })
if items:
sort = sorted(items, key=itemgetter('ratio'), reverse=True)[0]
if sort['ratio'] > 70:
chann = self.data.get_multiple('channel', 'name', sort['channels'])
chann = [c.id for c in chann]
return chann
def parse_name(self, name):
find = ['Acestream', 'AceStream']
name = replace_all(name, find, '').strip()
return name
| jonian/kickoff-player | apis/streams.py | Python | gpl-3.0 | 6,137 | 0.017598 |
"""
WSGI config for mymenu project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mymenu.settings")
application = get_wsgi_application()
| quangnguyen-asnet/python-django | mymenu/mymenu/wsgi.py | Python | mit | 389 | 0 |
"""Views for items (modules)."""
from __future__ import absolute_import
import hashlib
import logging
from uuid import uuid4
from datetime import datetime
from pytz import UTC
import json
from collections import OrderedDict
from functools import partial
from static_replace import replace_static_urls
from xmodule_modifiers import wrap_xblock, request_token
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseBadRequest, HttpResponse, Http404
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_http_methods
from xblock.fields import Scope
from xblock.fragment import Fragment
import xmodule
from xmodule.tabs import StaticTab, CourseTabList
from xmodule.modulestore import ModuleStoreEnum, EdxJSONEncoder
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError, InvalidLocationError
from xmodule.modulestore.inheritance import own_metadata
from xmodule.modulestore.draft_and_published import DIRECT_ONLY_CATEGORIES
from xmodule.x_module import PREVIEW_VIEWS, STUDIO_VIEW, STUDENT_VIEW
from xmodule.course_module import DEFAULT_START_DATE
from django.contrib.auth.models import User
from util.date_utils import get_default_time_display
from util.json_request import expect_json, JsonResponse
from student.auth import has_studio_write_access, has_studio_read_access
from contentstore.utils import find_release_date_source, find_staff_lock_source, is_currently_visible_to_students, \
ancestor_has_staff_lock, has_children_visible_to_specific_content_groups
from contentstore.views.helpers import is_unit, xblock_studio_url, xblock_primary_child_category, \
xblock_type_display_name, get_parent_xblock
from contentstore.views.preview import get_preview_fragment
from edxmako.shortcuts import render_to_string
from models.settings.course_grading import CourseGradingModel
from cms.lib.xblock.runtime import handler_url, local_resource_url
from opaque_keys.edx.keys import UsageKey, CourseKey
from opaque_keys.edx.locator import LibraryUsageLocator
from cms.lib.xblock.authoring_mixin import VISIBILITY_VIEW
__all__ = [
'orphan_handler', 'xblock_handler', 'xblock_view_handler', 'xblock_outline_handler', 'xblock_container_handler'
]
log = logging.getLogger(__name__)
CREATE_IF_NOT_FOUND = ['course_info']
# Useful constants for defining predicates
NEVER = lambda x: False
ALWAYS = lambda x: True
# In order to allow descriptors to use a handler url, we need to
# monkey-patch the x_module library.
# TODO: Remove this code when Runtimes are no longer created by modulestores
xmodule.x_module.descriptor_global_handler_url = handler_url
xmodule.x_module.descriptor_global_local_resource_url = local_resource_url
def hash_resource(resource):
"""
Hash a :class:`xblock.fragment.FragmentResource`.
"""
md5 = hashlib.md5()
md5.update(repr(resource))
return md5.hexdigest()
def usage_key_with_run(usage_key_string):
"""
Converts usage_key_string to a UsageKey, adding a course run if necessary
"""
usage_key = UsageKey.from_string(usage_key_string)
usage_key = usage_key.replace(course_key=modulestore().fill_in_run(usage_key.course_key))
return usage_key
def _filter_entrance_exam_grader(graders):
"""
If the entrance exams feature is enabled we need to hide away the grader from
views/controls like the 'Grade as' dropdown that allows a course author to select
the grader type for a given section of a course
"""
if settings.FEATURES.get('ENTRANCE_EXAMS', False):
graders = [grader for grader in graders if grader.get('type') != u'Entrance Exam']
return graders
# pylint: disable=unused-argument
@require_http_methods(("DELETE", "GET", "PUT", "POST", "PATCH"))
@login_required
@expect_json
def xblock_handler(request, usage_key_string):
"""
The restful handler for xblock requests.
DELETE
json: delete this xblock instance from the course.
GET
json: returns representation of the xblock (locator id, data, and metadata).
if ?fields=graderType, it returns the graderType for the unit instead of the above.
html: returns HTML for rendering the xblock (which includes both the "preview" view and the "editor" view)
PUT or POST or PATCH
json: if xblock locator is specified, update the xblock instance. The json payload can contain
these fields, all optional:
:data: the new value for the data.
:children: the unicode representation of the UsageKeys of children for this xblock.
:metadata: new values for the metadata fields. Any whose values are None will be deleted not set
to None! Absent ones will be left alone.
:nullout: which metadata fields to set to None
:graderType: change how this unit is graded
:publish: can be:
'make_public': publish the content
'republish': publish this item *only* if it was previously published
'discard_changes' - reverts to the last published version
Note: If 'discard_changes', the other fields will not be used; that is, it is not possible
to update and discard changes in a single operation.
The JSON representation on the updated xblock (minus children) is returned.
if usage_key_string is not specified, create a new xblock instance, either by duplicating
an existing xblock, or creating an entirely new one. The json playload can contain
these fields:
:parent_locator: parent for new xblock, required for both duplicate and create new instance
:duplicate_source_locator: if present, use this as the source for creating a duplicate copy
:category: type of xblock, required if duplicate_source_locator is not present.
:display_name: name for new xblock, optional
:boilerplate: template name for populating fields, optional and only used
if duplicate_source_locator is not present
The locator (unicode representation of a UsageKey) for the created xblock (minus children) is returned.
"""
if usage_key_string:
usage_key = usage_key_with_run(usage_key_string)
access_check = has_studio_read_access if request.method == 'GET' else has_studio_write_access
if not access_check(request.user, usage_key.course_key):
raise PermissionDenied()
if request.method == 'GET':
accept_header = request.META.get('HTTP_ACCEPT', 'application/json')
if 'application/json' in accept_header:
fields = request.REQUEST.get('fields', '').split(',')
if 'graderType' in fields:
# right now can't combine output of this w/ output of _get_module_info, but worthy goal
return JsonResponse(CourseGradingModel.get_section_grader_type(usage_key))
# TODO: pass fields to _get_module_info and only return those
with modulestore().bulk_operations(usage_key.course_key):
response = _get_module_info(_get_xblock(usage_key, request.user))
return JsonResponse(response)
else:
return HttpResponse(status=406)
elif request.method == 'DELETE':
_delete_item(usage_key, request.user)
return JsonResponse()
else: # Since we have a usage_key, we are updating an existing xblock.
return _save_xblock(
request.user,
_get_xblock(usage_key, request.user),
data=request.json.get('data'),
children_strings=request.json.get('children'),
metadata=request.json.get('metadata'),
nullout=request.json.get('nullout'),
grader_type=request.json.get('graderType'),
publish=request.json.get('publish'),
)
elif request.method in ('PUT', 'POST'):
if 'duplicate_source_locator' in request.json:
parent_usage_key = usage_key_with_run(request.json['parent_locator'])
duplicate_source_usage_key = usage_key_with_run(request.json['duplicate_source_locator'])
source_course = duplicate_source_usage_key.course_key
dest_course = parent_usage_key.course_key
if (
not has_studio_write_access(request.user, dest_course) or
not has_studio_read_access(request.user, source_course)
):
raise PermissionDenied()
dest_usage_key = _duplicate_item(
parent_usage_key,
duplicate_source_usage_key,
request.user,
request.json.get('display_name'),
)
return JsonResponse({"locator": unicode(dest_usage_key), "courseKey": unicode(dest_usage_key.course_key)})
else:
return _create_item(request)
else:
return HttpResponseBadRequest(
"Only instance creation is supported without a usage key.",
content_type="text/plain"
)
# pylint: disable=unused-argument
@require_http_methods(("GET"))
@login_required
@expect_json
def xblock_view_handler(request, usage_key_string, view_name):
"""
The restful handler for requests for rendered xblock views.
Returns a json object containing two keys:
html: The rendered html of the view
resources: A list of tuples where the first element is the resource hash, and
the second is the resource description
"""
usage_key = usage_key_with_run(usage_key_string)
if not has_studio_read_access(request.user, usage_key.course_key):
raise PermissionDenied()
accept_header = request.META.get('HTTP_ACCEPT', 'application/json')
if 'application/json' in accept_header:
store = modulestore()
xblock = store.get_item(usage_key)
container_views = ['container_preview', 'reorderable_container_child_preview', 'container_child_preview']
# wrap the generated fragment in the xmodule_editor div so that the javascript
# can bind to it correctly
xblock.runtime.wrappers.append(partial(
wrap_xblock,
'StudioRuntime',
usage_id_serializer=unicode,
request_token=request_token(request),
))
if view_name in (STUDIO_VIEW, VISIBILITY_VIEW):
try:
fragment = xblock.render(view_name)
# catch exceptions indiscriminately, since after this point they escape the
# dungeon and surface as uneditable, unsaveable, and undeletable
# component-goblins.
except Exception as exc: # pylint: disable=broad-except
log.debug("Unable to render %s for %r", view_name, xblock, exc_info=True)
fragment = Fragment(render_to_string('html_error.html', {'message': str(exc)}))
elif view_name in (PREVIEW_VIEWS + container_views):
is_pages_view = view_name == STUDENT_VIEW # Only the "Pages" view uses student view in Studio
can_edit = has_studio_write_access(request.user, usage_key.course_key)
# Determine the items to be shown as reorderable. Note that the view
# 'reorderable_container_child_preview' is only rendered for xblocks that
# are being shown in a reorderable container, so the xblock is automatically
# added to the list.
reorderable_items = set()
if view_name == 'reorderable_container_child_preview':
reorderable_items.add(xblock.location)
paging = None
try:
if request.REQUEST.get('enable_paging', 'false') == 'true':
paging = {
'page_number': int(request.REQUEST.get('page_number', 0)),
'page_size': int(request.REQUEST.get('page_size', 0)),
}
except ValueError:
# pylint: disable=too-many-format-args
return HttpResponse(
content="Couldn't parse paging parameters: enable_paging: "
"%s, page_number: %s, page_size: %s".format(
request.REQUEST.get('enable_paging', 'false'),
request.REQUEST.get('page_number', 0),
request.REQUEST.get('page_size', 0)
),
status=400,
content_type="text/plain",
)
# Set up the context to be passed to each XBlock's render method.
context = {
'is_pages_view': is_pages_view, # This setting disables the recursive wrapping of xblocks
'is_unit_page': is_unit(xblock),
'can_edit': can_edit,
'root_xblock': xblock if (view_name == 'container_preview') else None,
'reorderable_items': reorderable_items,
'paging': paging,
}
fragment = get_preview_fragment(request, xblock, context)
# Note that the container view recursively adds headers into the preview fragment,
# so only the "Pages" view requires that this extra wrapper be included.
if is_pages_view:
fragment.content = render_to_string('component.html', {
'xblock_context': context,
'xblock': xblock,
'locator': usage_key,
'preview': fragment.content,
'label': xblock.display_name or xblock.scope_ids.block_type,
})
else:
raise Http404
hashed_resources = OrderedDict()
for resource in fragment.resources:
hashed_resources[hash_resource(resource)] = resource
return JsonResponse({
'html': fragment.content,
'resources': hashed_resources.items()
})
else:
return HttpResponse(status=406)
# pylint: disable=unused-argument
@require_http_methods(("GET"))
@login_required
@expect_json
def xblock_outline_handler(request, usage_key_string):
"""
The restful handler for requests for XBlock information about the block and its children.
This is used by the course outline in particular to construct the tree representation of
a course.
"""
usage_key = usage_key_with_run(usage_key_string)
if not has_studio_read_access(request.user, usage_key.course_key):
raise PermissionDenied()
response_format = request.REQUEST.get('format', 'html')
if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
store = modulestore()
root_xblock = store.get_item(usage_key)
return JsonResponse(create_xblock_info(
root_xblock,
include_child_info=True,
course_outline=True,
include_children_predicate=lambda xblock: not xblock.category == 'vertical'
))
else:
return Http404
@require_http_methods(("GET"))
@login_required
@expect_json
def xblock_container_handler(request, usage_key_string):
"""
The restful handler for requests for XBlock information about the block and its children.
This is used by the container page in particular to get additional information about publish state
and ancestor state.
"""
usage_key = usage_key_with_run(usage_key_string)
if not has_studio_read_access(request.user, usage_key.course_key):
raise PermissionDenied()
response_format = request.REQUEST.get('format', 'html')
if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
with modulestore().bulk_operations(usage_key.course_key):
response = _get_module_info(
_get_xblock(usage_key, request.user), include_ancestor_info=True, include_publishing_info=True
)
return JsonResponse(response)
else:
return Http404
def _update_with_callback(xblock, user, old_metadata=None, old_content=None):
"""
Updates the xblock in the modulestore.
But before doing so, it calls the xblock's editor_saved callback function.
"""
if callable(getattr(xblock, "editor_saved", None)):
if old_metadata is None:
old_metadata = own_metadata(xblock)
if old_content is None:
old_content = xblock.get_explicitly_set_fields_by_scope(Scope.content)
xblock.editor_saved(user, old_metadata, old_content)
# Update after the callback so any changes made in the callback will get persisted.
return modulestore().update_item(xblock, user.id)
def _save_xblock(user, xblock, data=None, children_strings=None, metadata=None, nullout=None,
grader_type=None, publish=None):
"""
Saves xblock w/ its fields. Has special processing for grader_type, publish, and nullout and Nones in metadata.
nullout means to truly set the field to None whereas nones in metadata mean to unset them (so they revert
to default).
"""
store = modulestore()
# Perform all xblock changes within a (single-versioned) transaction
with store.bulk_operations(xblock.location.course_key):
# Don't allow updating an xblock and discarding changes in a single operation (unsupported by UI).
if publish == "discard_changes":
store.revert_to_published(xblock.location, user.id)
# Returning the same sort of result that we do for other save operations. In the future,
# we may want to return the full XBlockInfo.
return JsonResponse({'id': unicode(xblock.location)})
old_metadata = own_metadata(xblock)
old_content = xblock.get_explicitly_set_fields_by_scope(Scope.content)
if data:
# TODO Allow any scope.content fields not just "data" (exactly like the get below this)
xblock.data = data
else:
data = old_content['data'] if 'data' in old_content else None
if children_strings is not None:
children = []
for child_string in children_strings:
children.append(usage_key_with_run(child_string))
# if new children have been added, remove them from their old parents
new_children = set(children) - set(xblock.children)
for new_child in new_children:
old_parent_location = store.get_parent_location(new_child)
if old_parent_location:
old_parent = store.get_item(old_parent_location)
old_parent.children.remove(new_child)
old_parent = _update_with_callback(old_parent, user)
else:
# the Studio UI currently doesn't present orphaned children, so assume this is an error
return JsonResponse({"error": "Invalid data, possibly caused by concurrent authors."}, 400)
# make sure there are no old children that became orphans
# In a single-author (no-conflict) scenario, all children in the persisted list on the server should be
# present in the updated list. If there are any children that have been dropped as part of this update,
# then that would be an error.
#
# We can be even more restrictive in a multi-author (conflict), by returning an error whenever
# len(old_children) > 0. However, that conflict can still be "merged" if the dropped child had been
# re-parented. Hence, the check for the parent in the any statement below.
#
# Note that this multi-author conflict error should not occur in modulestores (such as Split) that support
# atomic write transactions. In Split, if there was another author who moved one of the "old_children"
# into another parent, then that child would have been deleted from this parent on the server. However,
# this is error could occur in modulestores (such as Draft) that do not support atomic write-transactions
old_children = set(xblock.children) - set(children)
if any(
store.get_parent_location(old_child) == xblock.location
for old_child in old_children
):
# since children are moved as part of a single transaction, orphans should not be created
return JsonResponse({"error": "Invalid data, possibly caused by concurrent authors."}, 400)
# set the children on the xblock
xblock.children = children
# also commit any metadata which might have been passed along
if nullout is not None or metadata is not None:
# the postback is not the complete metadata, as there's system metadata which is
# not presented to the end-user for editing. So let's use the original (existing_item) and
# 'apply' the submitted metadata, so we don't end up deleting system metadata.
if nullout is not None:
for metadata_key in nullout:
setattr(xblock, metadata_key, None)
# update existing metadata with submitted metadata (which can be partial)
# IMPORTANT NOTE: if the client passed 'null' (None) for a piece of metadata that means 'remove it'. If
# the intent is to make it None, use the nullout field
if metadata is not None:
for metadata_key, value in metadata.items():
field = xblock.fields[metadata_key]
if value is None:
field.delete_from(xblock)
else:
try:
value = field.from_json(value)
except ValueError as verr:
reason = _("Invalid data")
if verr.message:
reason = _("Invalid data ({details})").format(details=verr.message)
return JsonResponse({"error": reason}, 400)
field.write_to(xblock, value)
# update the xblock and call any xblock callbacks
xblock = _update_with_callback(xblock, user, old_metadata, old_content)
# for static tabs, their containing course also records their display name
if xblock.location.category == 'static_tab':
course = store.get_course(xblock.location.course_key)
# find the course's reference to this tab and update the name.
static_tab = CourseTabList.get_tab_by_slug(course.tabs, xblock.location.name)
# only update if changed
if static_tab and static_tab['name'] != xblock.display_name:
static_tab['name'] = xblock.display_name
store.update_item(course, user.id)
result = {
'id': unicode(xblock.location),
'data': data,
'metadata': own_metadata(xblock)
}
if grader_type is not None:
result.update(CourseGradingModel.update_section_grader_type(xblock, grader_type, user))
# If publish is set to 'republish' and this item is not in direct only categories and has previously been published,
# then this item should be republished. This is used by staff locking to ensure that changing the draft
# value of the staff lock will also update the published version, but only at the unit level.
if publish == 'republish' and xblock.category not in DIRECT_ONLY_CATEGORIES:
if modulestore().has_published_version(xblock):
publish = 'make_public'
# Make public after updating the xblock, in case the caller asked for both an update and a publish.
# Used by Bok Choy tests and by republishing of staff locks.
if publish == 'make_public':
modulestore().publish(xblock.location, user.id)
# Note that children aren't being returned until we have a use case.
return JsonResponse(result, encoder=EdxJSONEncoder)
@login_required
@expect_json
def create_item(request):
"""
Exposes internal helper method without breaking existing bindings/dependencies
"""
return _create_item(request)
@login_required
@expect_json
def _create_item(request):
"""View for create items."""
usage_key = usage_key_with_run(request.json['parent_locator'])
if not has_studio_write_access(request.user, usage_key.course_key):
raise PermissionDenied()
category = request.json['category']
display_name = request.json.get('display_name')
if isinstance(usage_key, LibraryUsageLocator):
# Only these categories are supported at this time.
if category not in ['html', 'problem', 'video']:
return HttpResponseBadRequest(
"Category '%s' not supported for Libraries" % category, content_type='text/plain'
)
store = modulestore()
with store.bulk_operations(usage_key.course_key):
parent = store.get_item(usage_key)
dest_usage_key = usage_key.replace(category=category, name=uuid4().hex)
# get the metadata, display_name, and definition from the request
metadata = {}
data = None
template_id = request.json.get('boilerplate')
if template_id:
clz = parent.runtime.load_block_type(category)
if clz is not None:
template = clz.get_template(template_id)
if template is not None:
metadata = template.get('metadata', {})
data = template.get('data')
if display_name is not None:
metadata['display_name'] = display_name
# Entrance Exams: Chapter module positioning
child_position = None
if settings.FEATURES.get('ENTRANCE_EXAMS', False):
is_entrance_exam = request.json.get('is_entrance_exam', False)
if category == 'chapter' and is_entrance_exam:
metadata['is_entrance_exam'] = is_entrance_exam
metadata['in_entrance_exam'] = True # Inherited metadata, all children will have it
child_position = 0
# TODO need to fix components that are sending definition_data as strings, instead of as dicts
# For now, migrate them into dicts here.
if isinstance(data, basestring):
data = {'data': data}
created_block = store.create_child(
request.user.id,
usage_key,
dest_usage_key.block_type,
block_id=dest_usage_key.block_id,
definition_data=data,
metadata=metadata,
runtime=parent.runtime,
position=child_position
)
# Entrance Exams: Grader assignment
if settings.FEATURES.get('ENTRANCE_EXAMS', False):
course = store.get_course(usage_key.course_key)
if hasattr(course, 'entrance_exam_enabled') and course.entrance_exam_enabled:
if category == 'sequential' and request.json.get('parent_locator') == course.entrance_exam_id:
grader = {
"type": "Entrance Exam",
"min_count": 0,
"drop_count": 0,
"short_label": "Entrance",
"weight": 0
}
grading_model = CourseGradingModel.update_grader_from_json(
course.id,
grader,
request.user
)
CourseGradingModel.update_section_grader_type(
created_block,
grading_model['type'],
request.user
)
# VS[compat] cdodge: This is a hack because static_tabs also have references from the course module, so
# if we add one then we need to also add it to the policy information (i.e. metadata)
# we should remove this once we can break this reference from the course to static tabs
if category == 'static_tab':
display_name = display_name or _("Empty") # Prevent name being None
course = store.get_course(dest_usage_key.course_key)
course.tabs.append(
StaticTab(
name=display_name,
url_slug=dest_usage_key.name,
)
)
store.update_item(course, request.user.id)
return JsonResponse(
{"locator": unicode(created_block.location), "courseKey": unicode(created_block.location.course_key)}
)
def _duplicate_item(parent_usage_key, duplicate_source_usage_key, user, display_name=None):
"""
Duplicate an existing xblock as a child of the supplied parent_usage_key.
"""
store = modulestore()
with store.bulk_operations(duplicate_source_usage_key.course_key):
source_item = store.get_item(duplicate_source_usage_key)
# Change the blockID to be unique.
dest_usage_key = source_item.location.replace(name=uuid4().hex)
category = dest_usage_key.block_type
# Update the display name to indicate this is a duplicate (unless display name provided).
# Can't use own_metadata(), b/c it converts data for JSON serialization -
# not suitable for setting metadata of the new block
duplicate_metadata = {}
for field in source_item.fields.values():
if field.scope == Scope.settings and field.is_set_on(source_item):
duplicate_metadata[field.name] = field.read_from(source_item)
if display_name is not None:
duplicate_metadata['display_name'] = display_name
else:
if source_item.display_name is None:
duplicate_metadata['display_name'] = _("Duplicate of {0}").format(source_item.category)
else:
duplicate_metadata['display_name'] = _("Duplicate of '{0}'").format(source_item.display_name)
dest_module = store.create_item(
user.id,
dest_usage_key.course_key,
dest_usage_key.block_type,
block_id=dest_usage_key.block_id,
definition_data=source_item.get_explicitly_set_fields_by_scope(Scope.content),
metadata=duplicate_metadata,
runtime=source_item.runtime,
)
# Children are not automatically copied over (and not all xblocks have a 'children' attribute).
# Because DAGs are not fully supported, we need to actually duplicate each child as well.
if source_item.has_children:
dest_module.children = []
for child in source_item.children:
dupe = _duplicate_item(dest_module.location, child, user=user)
if dupe not in dest_module.children: # _duplicate_item may add the child for us.
dest_module.children.append(dupe)
store.update_item(dest_module, user.id)
if 'detached' not in source_item.runtime.load_block_type(category)._class_tags:
parent = store.get_item(parent_usage_key)
# If source was already a child of the parent, add duplicate immediately afterward.
# Otherwise, add child to end.
if source_item.location in parent.children:
source_index = parent.children.index(source_item.location)
parent.children.insert(source_index + 1, dest_module.location)
else:
parent.children.append(dest_module.location)
store.update_item(parent, user.id)
return dest_module.location
@login_required
@expect_json
def delete_item(request, usage_key):
"""
Exposes internal helper method without breaking existing bindings/dependencies
"""
_delete_item(usage_key, request.user)
def _delete_item(usage_key, user):
"""
Deletes an existing xblock with the given usage_key.
If the xblock is a Static Tab, removes it from course.tabs as well.
"""
store = modulestore()
with store.bulk_operations(usage_key.course_key):
# VS[compat] cdodge: This is a hack because static_tabs also have references from the course module, so
# if we add one then we need to also add it to the policy information (i.e. metadata)
# we should remove this once we can break this reference from the course to static tabs
if usage_key.category == 'static_tab':
course = store.get_course(usage_key.course_key)
existing_tabs = course.tabs or []
course.tabs = [tab for tab in existing_tabs if tab.get('url_slug') != usage_key.name]
store.update_item(course, user.id)
store.delete_item(usage_key, user.id)
# pylint: disable=unused-argument
@login_required
@require_http_methods(("GET", "DELETE"))
def orphan_handler(request, course_key_string):
"""
View for handling orphan related requests. GET gets all of the current orphans.
DELETE removes all orphans (requires is_staff access)
An orphan is a block whose category is not in the DETACHED_CATEGORY list, is not the root, and is not reachable
from the root via children
"""
course_usage_key = CourseKey.from_string(course_key_string)
if request.method == 'GET':
if has_studio_read_access(request.user, course_usage_key):
return JsonResponse([unicode(item) for item in modulestore().get_orphans(course_usage_key)])
else:
raise PermissionDenied()
if request.method == 'DELETE':
if request.user.is_staff:
deleted_items = _delete_orphans(course_usage_key, request.user.id, commit=True)
return JsonResponse({'deleted': deleted_items})
else:
raise PermissionDenied()
def _delete_orphans(course_usage_key, user_id, commit=False):
"""
Helper function to delete orphans for a given course.
If `commit` is False, this function does not actually remove
the orphans.
"""
store = modulestore()
items = store.get_orphans(course_usage_key)
if commit:
for itemloc in items:
# need to delete all versions
store.delete_item(itemloc, user_id, revision=ModuleStoreEnum.RevisionOption.all)
return [unicode(item) for item in items]
def _get_xblock(usage_key, user):
"""
Returns the xblock for the specified usage key. Note: if failing to find a key with a category
in the CREATE_IF_NOT_FOUND list, an xblock will be created and saved automatically.
"""
store = modulestore()
with store.bulk_operations(usage_key.course_key):
try:
return store.get_item(usage_key, depth=None)
except ItemNotFoundError:
if usage_key.category in CREATE_IF_NOT_FOUND:
# Create a new one for certain categories only. Used for course info handouts.
return store.create_item(user.id, usage_key.course_key, usage_key.block_type, block_id=usage_key.block_id)
else:
raise
except InvalidLocationError:
log.error("Can't find item by location.")
return JsonResponse({"error": "Can't find item by location: " + unicode(usage_key)}, 404)
def _get_module_info(xblock, rewrite_static_links=True, include_ancestor_info=False, include_publishing_info=False):
"""
metadata, data, id representation of a leaf module fetcher.
:param usage_key: A UsageKey
"""
with modulestore().bulk_operations(xblock.location.course_key):
data = getattr(xblock, 'data', '')
if rewrite_static_links:
data = replace_static_urls(
data,
None,
course_id=xblock.location.course_key
)
# Pre-cache has changes for the entire course because we'll need it for the ancestor info
# Except library blocks which don't [yet] use draft/publish
if not isinstance(xblock.location, LibraryUsageLocator):
modulestore().has_changes(modulestore().get_course(xblock.location.course_key, depth=None))
# Note that children aren't being returned until we have a use case.
xblock_info = create_xblock_info(
xblock, data=data, metadata=own_metadata(xblock), include_ancestor_info=include_ancestor_info
)
if include_publishing_info:
add_container_page_publishing_info(xblock, xblock_info)
return xblock_info
def create_xblock_info(xblock, data=None, metadata=None, include_ancestor_info=False, include_child_info=False,
course_outline=False, include_children_predicate=NEVER, parent_xblock=None, graders=None):
"""
Creates the information needed for client-side XBlockInfo.
If data or metadata are not specified, their information will not be added
(regardless of whether or not the xblock actually has data or metadata).
There are three optional boolean parameters:
include_ancestor_info - if true, ancestor info is added to the response
include_child_info - if true, direct child info is included in the response
course_outline - if true, the xblock is being rendered on behalf of the course outline.
There are certain expensive computations that do not need to be included in this case.
In addition, an optional include_children_predicate argument can be provided to define whether or
not a particular xblock should have its children included.
"""
is_library_block = isinstance(xblock.location, LibraryUsageLocator)
is_xblock_unit = is_unit(xblock, parent_xblock)
# this should not be calculated for Sections and Subsections on Unit page or for library blocks
has_changes = None
if (is_xblock_unit or course_outline) and not is_library_block:
has_changes = modulestore().has_changes(xblock)
if graders is None:
if not is_library_block:
graders = CourseGradingModel.fetch(xblock.location.course_key).graders
else:
graders = []
# Filter the graders data as needed
graders = _filter_entrance_exam_grader(graders)
# Compute the child info first so it can be included in aggregate information for the parent
should_visit_children = include_child_info and (course_outline and not is_xblock_unit or not course_outline)
if should_visit_children and xblock.has_children:
child_info = _create_xblock_child_info(
xblock,
course_outline,
graders,
include_children_predicate=include_children_predicate,
)
else:
child_info = None
if xblock.category != 'course':
visibility_state = _compute_visibility_state(xblock, child_info, is_xblock_unit and has_changes)
else:
visibility_state = None
published = modulestore().has_published_version(xblock) if not is_library_block else None
#instead of adding a new feature directly into xblock-info, we should add them into override_type.
override_type = {}
if getattr(xblock, "is_entrance_exam", None):
override_type['is_entrance_exam'] = xblock.is_entrance_exam
xblock_info = {
"id": unicode(xblock.location),
"display_name": xblock.display_name_with_default,
"category": xblock.category,
"edited_on": get_default_time_display(xblock.subtree_edited_on) if xblock.subtree_edited_on else None,
"published": published,
"published_on": get_default_time_display(xblock.published_on) if published and xblock.published_on else None,
"studio_url": xblock_studio_url(xblock, parent_xblock),
"released_to_students": datetime.now(UTC) > xblock.start,
"release_date": _get_release_date(xblock),
"visibility_state": visibility_state,
"has_explicit_staff_lock": xblock.fields['visible_to_staff_only'].is_set_on(xblock),
"start": xblock.fields['start'].to_json(xblock.start),
"graded": xblock.graded,
"due_date": get_default_time_display(xblock.due),
"due": xblock.fields['due'].to_json(xblock.due),
"format": xblock.format,
"course_graders": json.dumps([grader.get('type') for grader in graders]),
"has_changes": has_changes,
"override_type": override_type,
}
if data is not None:
xblock_info["data"] = data
if metadata is not None:
xblock_info["metadata"] = metadata
if include_ancestor_info:
xblock_info['ancestor_info'] = _create_xblock_ancestor_info(xblock, course_outline)
if child_info:
xblock_info['child_info'] = child_info
if visibility_state == VisibilityState.staff_only:
xblock_info["ancestor_has_staff_lock"] = ancestor_has_staff_lock(xblock, parent_xblock)
else:
xblock_info["ancestor_has_staff_lock"] = False
if course_outline:
if xblock_info["has_explicit_staff_lock"]:
xblock_info["staff_only_message"] = True
elif child_info and child_info["children"]:
xblock_info["staff_only_message"] = all([child["staff_only_message"] for child in child_info["children"]])
else:
xblock_info["staff_only_message"] = False
return xblock_info
def add_container_page_publishing_info(xblock, xblock_info): # pylint: disable=invalid-name
"""
Adds information about the xblock's publish state to the supplied
xblock_info for the container page.
"""
def safe_get_username(user_id):
"""
Guard against bad user_ids, like the infamous "**replace_user**".
Note that this will ignore our special known IDs (ModuleStoreEnum.UserID).
We should consider adding special handling for those values.
:param user_id: the user id to get the username of
:return: username, or None if the user does not exist or user_id is None
"""
if user_id:
try:
return User.objects.get(id=user_id).username
except: # pylint: disable=bare-except
pass
return None
xblock_info["edited_by"] = safe_get_username(xblock.subtree_edited_by)
xblock_info["published_by"] = safe_get_username(xblock.published_by)
xblock_info["currently_visible_to_students"] = is_currently_visible_to_students(xblock)
xblock_info["has_content_group_components"] = has_children_visible_to_specific_content_groups(xblock)
if xblock_info["release_date"]:
xblock_info["release_date_from"] = _get_release_date_from(xblock)
if xblock_info["visibility_state"] == VisibilityState.staff_only:
xblock_info["staff_lock_from"] = _get_staff_lock_from(xblock)
else:
xblock_info["staff_lock_from"] = None
class VisibilityState(object):
"""
Represents the possible visibility states for an xblock:
live - the block and all of its descendants are live to students (excluding staff only items)
Note: Live means both published and released.
ready - the block is ready to go live and all of its descendants are live or ready (excluding staff only items)
Note: content is ready when it is published and scheduled with a release date in the future.
unscheduled - the block and all of its descendants have no release date (excluding staff only items)
Note: it is valid for items to be published with no release date in which case they are still unscheduled.
needs_attention - the block or its descendants are not fully live, ready or unscheduled (excluding staff only items)
For example: one subsection has draft content, or there's both unreleased and released content in one section.
staff_only - all of the block's content is to be shown to staff only
Note: staff only items do not affect their parent's state.
"""
live = 'live'
ready = 'ready'
unscheduled = 'unscheduled'
needs_attention = 'needs_attention'
staff_only = 'staff_only'
def _compute_visibility_state(xblock, child_info, is_unit_with_changes):
"""
Returns the current publish state for the specified xblock and its children
"""
if xblock.visible_to_staff_only:
return VisibilityState.staff_only
elif is_unit_with_changes:
# Note that a unit that has never been published will fall into this category,
# as well as previously published units with draft content.
return VisibilityState.needs_attention
is_unscheduled = xblock.start == DEFAULT_START_DATE
is_live = datetime.now(UTC) > xblock.start
children = child_info and child_info.get('children', [])
if children and len(children) > 0:
all_staff_only = True
all_unscheduled = True
all_live = True
for child in child_info['children']:
child_state = child['visibility_state']
if child_state == VisibilityState.needs_attention:
return child_state
elif not child_state == VisibilityState.staff_only:
all_staff_only = False
if not child_state == VisibilityState.unscheduled:
all_unscheduled = False
if not child_state == VisibilityState.live:
all_live = False
if all_staff_only:
return VisibilityState.staff_only
elif all_unscheduled:
return VisibilityState.unscheduled if is_unscheduled else VisibilityState.needs_attention
elif all_live:
return VisibilityState.live if is_live else VisibilityState.needs_attention
else:
return VisibilityState.ready if not is_unscheduled else VisibilityState.needs_attention
if is_unscheduled:
return VisibilityState.unscheduled
elif is_live:
return VisibilityState.live
else:
return VisibilityState.ready
def _create_xblock_ancestor_info(xblock, course_outline):
"""
Returns information about the ancestors of an xblock. Note that the direct parent will also return
information about all of its children.
"""
ancestors = []
def collect_ancestor_info(ancestor, include_child_info=False):
"""
Collect xblock info regarding the specified xblock and its ancestors.
"""
if ancestor:
direct_children_only = lambda parent: parent == ancestor
ancestors.append(create_xblock_info(
ancestor,
include_child_info=include_child_info,
course_outline=course_outline,
include_children_predicate=direct_children_only
))
collect_ancestor_info(get_parent_xblock(ancestor))
collect_ancestor_info(get_parent_xblock(xblock), include_child_info=True)
return {
'ancestors': ancestors
}
def _create_xblock_child_info(xblock, course_outline, graders, include_children_predicate=NEVER):
"""
Returns information about the children of an xblock, as well as about the primary category
of xblock expected as children.
"""
child_info = {}
child_category = xblock_primary_child_category(xblock)
if child_category:
child_info = {
'category': child_category,
'display_name': xblock_type_display_name(child_category, default_display_name=child_category),
}
if xblock.has_children and include_children_predicate(xblock):
child_info['children'] = [
create_xblock_info(
child, include_child_info=True, course_outline=course_outline,
include_children_predicate=include_children_predicate,
parent_xblock=xblock,
graders=graders
) for child in xblock.get_children()
]
return child_info
def _get_release_date(xblock):
"""
Returns the release date for the xblock, or None if the release date has never been set.
"""
# Treat DEFAULT_START_DATE as a magic number that means the release date has not been set
return get_default_time_display(xblock.start) if xblock.start != DEFAULT_START_DATE else None
def _get_release_date_from(xblock):
"""
Returns a string representation of the section or subsection that sets the xblock's release date
"""
return _xblock_type_and_display_name(find_release_date_source(xblock))
def _get_staff_lock_from(xblock):
"""
Returns a string representation of the section or subsection that sets the xblock's release date
"""
source = find_staff_lock_source(xblock)
return _xblock_type_and_display_name(source) if source else None
def _xblock_type_and_display_name(xblock):
"""
Returns a string representation of the xblock's type and display name
"""
return _('{section_or_subsection} "{display_name}"').format(
section_or_subsection=xblock_type_display_name(xblock),
display_name=xblock.display_name_with_default)
| jazkarta/edx-platform-for-isc | cms/djangoapps/contentstore/views/item.py | Python | agpl-3.0 | 49,511 | 0.003716 |
from __future__ import absolute_import
# Copyright (c) 2010-2016 openpyxl
from openpyxl.compat import basestring, unicode
from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.descriptors import (
Alias,
Typed,
Integer,
Set,
MinMax,
)
from openpyxl.descriptors.excel import Percentage
from openpyxl.descriptors.nested import (
NestedNoneSet,
NestedValue,
NestedInteger,
)
from openpyxl.styles.colors import RGB
from openpyxl.xml.constants import DRAWING_NS
from openpyxl.descriptors.excel import ExtensionList as OfficeArtExtensionList
PRESET_COLORS = [
'aliceBlue', 'antiqueWhite', 'aqua', 'aquamarine',
'azure', 'beige', 'bisque', 'black', 'blanchedAlmond', 'blue',
'blueViolet', 'brown', 'burlyWood', 'cadetBlue', 'chartreuse',
'chocolate', 'coral', 'cornflowerBlue', 'cornsilk', 'crimson', 'cyan',
'darkBlue', 'darkCyan', 'darkGoldenrod', 'darkGray', 'darkGrey',
'darkGreen', 'darkKhaki', 'darkMagenta', 'darkOliveGreen', 'darkOrange',
'darkOrchid', 'darkRed', 'darkSalmon', 'darkSeaGreen', 'darkSlateBlue',
'darkSlateGray', 'darkSlateGrey', 'darkTurquoise', 'darkViolet',
'dkBlue', 'dkCyan', 'dkGoldenrod', 'dkGray', 'dkGrey', 'dkGreen',
'dkKhaki', 'dkMagenta', 'dkOliveGreen', 'dkOrange', 'dkOrchid', 'dkRed',
'dkSalmon', 'dkSeaGreen', 'dkSlateBlue', 'dkSlateGray', 'dkSlateGrey',
'dkTurquoise', 'dkViolet', 'deepPink', 'deepSkyBlue', 'dimGray',
'dimGrey', 'dodgerBlue', 'firebrick', 'floralWhite', 'forestGreen',
'fuchsia', 'gainsboro', 'ghostWhite', 'gold', 'goldenrod', 'gray',
'grey', 'green', 'greenYellow', 'honeydew', 'hotPink', 'indianRed',
'indigo', 'ivory', 'khaki', 'lavender', 'lavenderBlush', 'lawnGreen',
'lemonChiffon', 'lightBlue', 'lightCoral', 'lightCyan',
'lightGoldenrodYellow', 'lightGray', 'lightGrey', 'lightGreen',
'lightPink', 'lightSalmon', 'lightSeaGreen', 'lightSkyBlue',
'lightSlateGray', 'lightSlateGrey', 'lightSteelBlue', 'lightYellow',
'ltBlue', 'ltCoral', 'ltCyan', 'ltGoldenrodYellow', 'ltGray', 'ltGrey',
'ltGreen', 'ltPink', 'ltSalmon', 'ltSeaGreen', 'ltSkyBlue',
'ltSlateGray', 'ltSlateGrey', 'ltSteelBlue', 'ltYellow', 'lime',
'limeGreen', 'linen', 'magenta', 'maroon', 'medAquamarine', 'medBlue',
'medOrchid', 'medPurple', 'medSeaGreen', 'medSlateBlue',
'medSpringGreen', 'medTurquoise', 'medVioletRed', 'mediumAquamarine',
'mediumBlue', 'mediumOrchid', 'mediumPurple', 'mediumSeaGreen',
'mediumSlateBlue', 'mediumSpringGreen', 'mediumTurquoise',
'mediumVioletRed', 'midnightBlue', 'mintCream', 'mistyRose', 'moccasin',
'navajoWhite', 'navy', 'oldLace', 'olive', 'oliveDrab', 'orange',
'orangeRed', 'orchid', 'paleGoldenrod', 'paleGreen', 'paleTurquoise',
'paleVioletRed', 'papayaWhip', 'peachPuff', 'peru', 'pink', 'plum',
'powderBlue', 'purple', 'red', 'rosyBrown', 'royalBlue', 'saddleBrown',
'salmon', 'sandyBrown', 'seaGreen', 'seaShell', 'sienna', 'silver',
'skyBlue', 'slateBlue', 'slateGray', 'slateGrey', 'snow', 'springGreen',
'steelBlue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet',
'wheat', 'white', 'whiteSmoke', 'yellow', 'yellowGreen'
]
SCHEME_COLORS= ['bg1', 'tx1', 'bg2', 'tx2', 'accent1', 'accent2', 'accent3',
'accent4', 'accent5', 'accent6', 'hlink', 'folHlink', 'phClr', 'dk1', 'lt1',
'dk2', 'lt2'
]
class Transform(Serialisable):
pass
class SystemColor(Serialisable):
tagname = "sysClr"
# color transform options
tint = NestedInteger(allow_none=True)
shade = NestedInteger(allow_none=True)
comp = Typed(expected_type=Transform, allow_none=True)
inv = Typed(expected_type=Transform, allow_none=True)
gray = Typed(expected_type=Transform, allow_none=True)
alpha = NestedInteger(allow_none=True)
alphaOff = NestedInteger(allow_none=True)
alphaMod = NestedInteger(allow_none=True)
hue = NestedInteger(allow_none=True)
hueOff = NestedInteger(allow_none=True)
hueMod = NestedInteger(allow_none=True)
sat = NestedInteger(allow_none=True)
satOff = NestedInteger(allow_none=True)
satMod = NestedInteger(allow_none=True)
lum = NestedInteger(allow_none=True)
lumOff = NestedInteger(allow_none=True)
lumMod = NestedInteger(allow_none=True)
red = NestedInteger(allow_none=True)
redOff = NestedInteger(allow_none=True)
redMod = NestedInteger(allow_none=True)
green = NestedInteger(allow_none=True)
greenOff = NestedInteger(allow_none=True)
greenMod = NestedInteger(allow_none=True)
blue = NestedInteger(allow_none=True)
blueOff = NestedInteger(allow_none=True)
blueMod = NestedInteger(allow_none=True)
gamma = Typed(expected_type=Transform, allow_none=True)
invGamma = Typed(expected_type=Transform, allow_none=True)
val = Set(values=(["bg1", "tx1", "bg2", "tx2", "accent1", "accent2",
"accent3", "accent4", "accent5", "accent6", "hlink", "folHlink", "phClr",
"dk1", "lt1", "dk2", "lt2", ]))
lastClr = Typed(expected_type=RGB, allow_none=True)
__elements__ = ('tint', 'shade', 'comp', 'inv', 'gray', "alpha",
"alphaOff", "alphaMod", "hue", "hueOff", "hueMod", "hueOff", "sat",
"satOff", "satMod", "lum", "lumOff", "lumMod", "red", "redOff", "redMod",
"green", "greenOff", "greenMod", "blue", "blueOff", "blueMod", "gamma",
"invGamma")
def __init__(self,
val="bg1",
lastClr=None,
tint=None,
shade=None,
comp=None,
inv=None,
gray=None,
alpha=None,
alphaOff=None,
alphaMod=None,
hue=None,
hueOff=None,
hueMod=None,
sat=None,
satOff=None,
satMod=None,
lum=None,
lumOff=None,
lumMod=None,
red=None,
redOff=None,
redMod=None,
green=None,
greenOff=None,
greenMod=None,
blue=None,
blueOff=None,
blueMod=None,
gamma=None,
invGamma=None
):
self.val = val
self.lastClr = lastClr
self.tint = tint
self.shade = shade
self.comp = comp
self.inv = inv
self.gray = gray
self.alpha = alpha
self.alphaOff = alphaOff
self.alphaMod = alphaMod
self.hue = hue
self.hueOff = hueOff
self.hueMod = hueMod
self.sat = sat
self.satOff = satOff
self.satMod = satMod
self.lum = lum
self.lumOff = lumOff
self.lumMod = lumMod
self.red = red
self.redOff = redOff
self.redMod = redMod
self.green = green
self.greenOff = greenOff
self.greenMod = greenMod
self.blue = blue
self.blueOff = blueOff
self.blueMod = blueMod
self.gamma = gamma
self.invGamma = invGamma
class HSLColor(Serialisable):
tagname = "hslClr"
hue = Integer()
sat = MinMax(min=0, max=100)
lum = MinMax(min=0, max=100)
#TODO add color transform options
def __init__(self,
hue=None,
sat=None,
lum=None,
):
self.hue = hue
self.sat = sat
self.lum = lum
class RGBPercent(Serialisable):
tagname = "rgbClr"
r = MinMax(min=0, max=100)
g = MinMax(min=0, max=100)
b = MinMax(min=0, max=100)
#TODO add color transform options
def __init__(self,
r=None,
g=None,
b=None,
):
self.r = r
self.g = g
self.b = b
class ColorChoice(Serialisable):
tagname = "colorChoice"
namespace = DRAWING_NS
scrgbClr = Typed(expected_type=RGBPercent, allow_none=True)
RGBPercent = Alias('scrgbClr')
srgbClr = NestedValue(expected_type=unicode, allow_none=True) # needs pattern and can have transform
RGB = Alias('srgbClr')
hslClr = Typed(expected_type=HSLColor, allow_none=True)
sysClr = Typed(expected_type=SystemColor, allow_none=True)
schemeClr = NestedNoneSet(values=SCHEME_COLORS)
prstClr = NestedNoneSet(values=PRESET_COLORS)
__elements__ = ('scrgbClr', 'srgbClr', 'hslClr', 'sysClr', 'schemeClr', 'prstClr')
def __init__(self,
scrgbClr=None,
srgbClr=None,
hslClr=None,
sysClr=None,
schemeClr=None,
prstClr=None,
):
self.scrgbClr = scrgbClr
self.srgbClr = srgbClr
self.hslClr = hslClr
self.sysClr = sysClr
self.schemeClr = schemeClr
self.prstClr = prstClr
_COLOR_SET = ('dk1', 'lt1', 'dk2', 'lt2', 'accent1', 'accent2', 'accent3',
'accent4', 'accent5', 'accent6', 'hlink', 'folHlink')
class ColorMapping(Serialisable):
tagname = "clrMapOvr"
bg1 = Set(values=_COLOR_SET)
tx1 = Set(values=_COLOR_SET)
bg2 = Set(values=_COLOR_SET)
tx2 = Set(values=_COLOR_SET)
accent1 = Set(values=_COLOR_SET)
accent2 = Set(values=_COLOR_SET)
accent3 = Set(values=_COLOR_SET)
accent4 = Set(values=_COLOR_SET)
accent5 = Set(values=_COLOR_SET)
accent6 = Set(values=_COLOR_SET)
hlink = Set(values=_COLOR_SET)
folHlink = Set(values=_COLOR_SET)
extLst = Typed(expected_type=OfficeArtExtensionList, allow_none=True)
def __init__(self,
bg1="lt1",
tx1="dk1",
bg2="lt2",
tx2="dk2",
accent1="accent1",
accent2="accent2",
accent3="accent3",
accent4="accent4",
accent5="accent5",
accent6="accent6",
hlink="hlink",
folHlink="folHlink",
extLst=None,
):
self.bg1 = bg1
self.tx1 = tx1
self.bg2 = bg2
self.tx2 = tx2
self.accent1 = accent1
self.accent2 = accent2
self.accent3 = accent3
self.accent4 = accent4
self.accent5 = accent5
self.accent6 = accent6
self.hlink = hlink
self.folHlink = folHlink
self.extLst = extLst
class ColorChoiceDescriptor(Typed):
"""
Objects can choose from 7 different kinds of color system.
Assume RGBHex if a string is passed in.
"""
expected_type = ColorChoice
allow_none = True
def __set__(self, instance, value):
if isinstance(value, basestring):
value = ColorChoice(srgbClr=value)
super(ColorChoiceDescriptor, self).__set__(instance, value)
| aragos/tichu-tournament | python/openpyxl/drawing/colors.py | Python | mit | 11,201 | 0.002053 |
import collections
import re
import urlparse
class DSN(collections.MutableMapping):
''' Hold the results of a parsed dsn.
This is very similar to urlparse.ParseResult tuple.
http://docs.python.org/2/library/urlparse.html#results-of-urlparse-and-urlsplit
It exposes the following attributes:
scheme
schemes -- if your scheme has +'s in it, then this will contain a list of schemes split by +
path
paths -- the path segment split by /, so "/foo/bar" would be ["foo", "bar"]
host -- same as hostname (I just like host better)
hostname
hostloc -- host:port
username
password
netloc
query -- a dict of the query string
query_str -- the raw query string
port
fragment
'''
DSN_REGEXP = re.compile(r'^\S+://\S+')
FIELDS = ('scheme', 'netloc', 'path', 'params', 'query', 'fragment')
def __init__(self, dsn, **defaults):
''' Parse a dsn to parts similar to urlparse.
This is a nuts function that can serve as a good basis to parsing a custom dsn
:param dsn: the dsn to parse
:type dsn: str
:param defaults: any values you want to have defaults for if they aren't in the dsn
:type defaults: dict
'''
assert self.DSN_REGEXP.match(dsn), \
"{} is invalid, only full dsn urls (scheme://host...) allowed".format(dsn)
first_colon = dsn.find(':')
scheme = dsn[0:first_colon]
dsn_url = dsn[first_colon+1:]
url = urlparse.urlparse(dsn_url)
options = {}
if url.query:
for k, kv in urlparse.parse_qs(url.query, True, True).iteritems():
if len(kv) > 1:
options[k] = kv
else:
options[k] = kv[0]
self.scheme = scheme
self.hostname = url.hostname
self.path = url.path
self.params = url.params
self.query = options
self.fragment = url.fragment
self.username = url.username
self.password = url.password
self.port = url.port
self.query_str = url.query
for k, v in defaults.iteritems():
self.set_default(k, v)
def __iter__(self):
for f in self.FIELDS:
yield getattr(self, f, '')
def __len__(self):
return len(iter(self))
def __getitem__(self, field):
return getattr(self, field, None)
def __setitem__(self, field, value):
setattr(self, field, value)
def __delitem__(self, field):
delattr(self, field)
@property
def schemes(self):
'''the scheme, split by plus signs'''
return self.scheme.split('+')
@property
def netloc(self):
'''return username:password@hostname:port'''
s = ''
prefix = ''
if self.username:
s += self.username
prefix = '@'
if self.password:
s += ":{}".format(self.password)
prefix = '@'
s += "{}{}".format(prefix, self.hostloc)
return s
@property
def paths(self):
'''the path attribute split by /'''
return filter(None, self.path.split('/'))
@property
def host(self):
'''the hostname, but I like host better'''
return self.hostname
@property
def hostloc(self):
'''return host:port'''
hostloc = self.hostname
if self.port:
hostloc = '{}:{}'.format(hostloc, self.port)
return hostloc
def set_default(self, key, value):
''' Set a default value for key.
This is different than dict's setdefault because it will set default either
if the key doesn't exist, or if the value at the key evaluates to False, so
an empty string or a None will value will be updated.
:param key: the item to update
:type key: str
:param value: the items new value if key has a current value that evaluates to False
'''
if not getattr(self, key, None):
setattr(self, key, value)
def get_url(self):
'''return the dsn back into url form'''
return urlparse.urlunparse((
self.scheme,
self.netloc,
self.path,
self.params,
self.query_str,
self.fragment,
))
def copy(self):
return DSN(self.get_url())
def __str__(self):
return self.get_url()
| mylokin/servy | servy/utils/dsntool.py | Python | mit | 4,496 | 0.001779 |
import virtualenv
import textwrap
output = virtualenv.create_bootstrap_script(textwrap.dedent("""
import os, subprocess
def after_install(options, home_dir):
subprocess.call([join(home_dir, 'bin', 'pip'),
'install', 'ipython', 'django', 'psycopg2'])
"""))
f = open('bootstrap.py', 'w').write(output)
| Yellowen/Sharamaan | bin/bootstrap_creator.py | Python | gpl-2.0 | 327 | 0 |
import pytest
from named_dates.named_dates import\
day_of_nth_weekday, NoNthWeekdayError
# For reference throughout these tests, October 1, 2015 is
# a Thursday (weekday = 3).
def test_weekday_equals_first_of_month():
# Tests that day_of_nth_weekday works when the requested weekday is the
# first weekday is the month.
assert day_of_nth_weekday(2015, 10, 3, nth=1) == 1
assert day_of_nth_weekday(2015, 10, 3, nth=2) == 8
assert day_of_nth_weekday(2015, 10, 3, nth=3) == 15
assert day_of_nth_weekday(2015, 10, 3, nth=4) == 22
assert day_of_nth_weekday(2015, 10, 3, nth=5) == 29
with pytest.raises(NoNthWeekdayError):
day_of_nth_weekday(2015, 10, 3, nth=0)
with pytest.raises(NoNthWeekdayError):
day_of_nth_weekday(2015, 10, 3, nth=6)
def test_weekday_greater_than_first_of_month():
# Tests that day_of_nth_weekday works when the requested weekday is
# greater than the first weekday of the month.
assert day_of_nth_weekday(2015, 10, 5, nth=1) == 3
assert day_of_nth_weekday(2015, 10, 5, nth=2) == 10
assert day_of_nth_weekday(2015, 10, 5, nth=5) == 31
with pytest.raises(NoNthWeekdayError):
day_of_nth_weekday(2015, 10, 5, nth=6)
def test_weekday_less_than_first_of_month():
# Tests that day_of_nth_weekday works when the requested weekday is
# less than the first weekday of the month.
assert day_of_nth_weekday(2015, 10, 1, nth=1) == 6
assert day_of_nth_weekday(2015, 10, 1, nth=2) == 13
assert day_of_nth_weekday(2015, 10, 1, nth=3) == 20
assert day_of_nth_weekday(2015, 10, 1, nth=4) == 27
with pytest.raises(NoNthWeekdayError):
day_of_nth_weekday(2015, 10, 1, nth=5)
def test_from_end():
# October 31 is a Saturday (day 5)
assert day_of_nth_weekday(2015, 10, 5, nth=1, from_end=True) == 31
assert day_of_nth_weekday(2015, 10, 5, nth=2, from_end=True) == 24
assert day_of_nth_weekday(2015, 10, 5, nth=5, from_end=True) == 3
with pytest.raises(NoNthWeekdayError):
assert day_of_nth_weekday(2015, 10, 5, nth=6, from_end=True)
assert day_of_nth_weekday(2015, 10, 3, nth=1, from_end=True) == 29
assert day_of_nth_weekday(2015, 10, 3, nth=2, from_end=True) == 22
assert day_of_nth_weekday(2015, 10, 3, nth=5, from_end=True) == 1
with pytest.raises(NoNthWeekdayError):
assert day_of_nth_weekday(2015, 10, 3, nth=6, from_end=True)
assert day_of_nth_weekday(2015, 10, 6, nth=1, from_end=True) == 25
assert day_of_nth_weekday(2015, 10, 6, nth=2, from_end=True) == 18
assert day_of_nth_weekday(2015, 10, 6, nth=4, from_end=True) == 4
with pytest.raises(NoNthWeekdayError):
assert day_of_nth_weekday(2015, 10, 6, nth=5, from_end=True)
def test_bad_kwargs_disallowed():
with pytest.raises(TypeError):
day_of_nth_weekday(2015, 1, 1, bad_kwarg=1)
| pschoenfelder/named-dates | tests/test_day_of_nth_weekday.py | Python | mit | 2,862 | 0 |
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Fieldset, Layout
from django import forms
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.db import transaction
from django.forms import ModelForm
from django.utils.translation import ugettext_lazy as _
from django_filters import FilterSet
from easy_select2 import Select2
from crispy_layout_mixin import form_actions, to_row
from utils import (TIPO_TELEFONE, YES_NO_CHOICES, get_medicos,
get_or_create_grupo)
from .models import Especialidade, EspecialidadeMedico, Usuario
class EspecialidadeMedicoFilterSet(FilterSet):
class Meta:
model = EspecialidadeMedico
fields = ['especialidade']
def __init__(self, *args, **kwargs):
super(EspecialidadeMedicoFilterSet, self).__init__(*args, **kwargs)
row1 = to_row([('especialidade', 12)])
self.form.helper = FormHelper()
self.form.helper.form_method = 'GET'
self.form.helper.layout = Layout(
Fieldset(_('Pesquisar Médico'),
row1, form_actions(save_label='Filtrar'))
)
class MudarSenhaForm(forms.Form):
nova_senha = forms.CharField(
label="Nova Senha", max_length=30,
widget=forms.PasswordInput(
attrs={'class': 'form-control form-control-lg',
'name': 'senha',
'placeholder': 'Nova Senha'}))
confirmar_senha = forms.CharField(
label="Confirmar Senha", max_length=30,
widget=forms.PasswordInput(
attrs={'class': 'form-control form-control-lg',
'name': 'confirmar_senha',
'placeholder': 'Confirmar Senha'}))
class LoginForm(AuthenticationForm):
username = forms.CharField(
label="Username", max_length=30,
widget=forms.TextInput(
attrs={'class': 'form-control form-control-lg',
'name': 'username',
'placeholder': 'Usuário'}))
password = forms.CharField(
label="Password", max_length=30,
widget=forms.PasswordInput(
attrs={'class': 'form-control',
'name': 'password',
'placeholder': 'Senha'}))
class UsuarioForm(ModelForm):
# Usuário
password = forms.CharField(
max_length=20,
label=_('Senha'),
widget=forms.PasswordInput())
password_confirm = forms.CharField(
max_length=20,
label=_('Confirmar Senha'),
widget=forms.PasswordInput())
class Meta:
model = Usuario
fields = ['username', 'email', 'nome', 'password', 'password_confirm',
'data_nascimento', 'sexo', 'plano', 'tipo', 'cep', 'end',
'numero', 'complemento', 'bairro', 'referencia',
'primeiro_telefone', 'segundo_telefone']
widgets = {'email': forms.TextInput(
attrs={'style': 'text-transform:lowercase;'})}
def __init__(self, *args, **kwargs):
super(UsuarioForm, self).__init__(*args, **kwargs)
self.fields['primeiro_telefone'].widget.attrs['class'] = 'telefone'
self.fields['segundo_telefone'].widget.attrs['class'] = 'telefone'
def valida_igualdade(self, texto1, texto2, msg):
if texto1 != texto2:
raise ValidationError(msg)
return True
def clean(self):
if ('password' not in self.cleaned_data or
'password_confirm' not in self.cleaned_data):
raise ValidationError(_('Favor informar senhas atuais ou novas'))
msg = _('As senhas não conferem.')
self.valida_igualdade(
self.cleaned_data['password'],
self.cleaned_data['password_confirm'],
msg)
try:
validate_password(self.cleaned_data['password'])
except ValidationError as error:
raise ValidationError(error)
return self.cleaned_data
@transaction.atomic
def save(self, commit=False):
usuario = super(UsuarioForm, self).save(commit)
# Cria User
u = User.objects.create(username=usuario.username, email=usuario.email)
u.set_password(self.cleaned_data['password'])
u.is_active = True
u.groups.add(get_or_create_grupo(self.cleaned_data['tipo'].descricao))
u.save()
usuario.user = u
usuario.save()
return usuario
class UsuarioEditForm(ModelForm):
# Primeiro Telefone
primeiro_tipo = forms.ChoiceField(
widget=forms.Select(),
choices=TIPO_TELEFONE,
label=_('Tipo Telefone'))
primeiro_ddd = forms.CharField(max_length=2, label=_('DDD'))
primeiro_numero = forms.CharField(max_length=10, label=_('Número'))
primeiro_principal = forms.TypedChoiceField(
widget=forms.Select(),
label=_('Telefone Principal?'),
choices=YES_NO_CHOICES)
# Primeiro Telefone
segundo_tipo = forms.ChoiceField(
required=False,
widget=forms.Select(),
choices=TIPO_TELEFONE,
label=_('Tipo Telefone'))
segundo_ddd = forms.CharField(required=False, max_length=2, label=_('DDD'))
segundo_numero = forms.CharField(
required=False, max_length=10, label=_('Número'))
segundo_principal = forms.ChoiceField(
required=False,
widget=forms.Select(),
label=_('Telefone Principal?'),
choices=YES_NO_CHOICES)
class Meta:
model = Usuario
fields = ['username', 'email', 'nome', 'data_nascimento', 'sexo',
'plano', 'tipo', 'cep', 'end', 'numero', 'complemento',
'bairro', 'referencia', 'primeiro_telefone',
'segundo_telefone']
widgets = {'username': forms.TextInput(attrs={'readonly': 'readonly'}),
'email': forms.TextInput(
attrs={'style': 'text-transform:lowercase;'}),
}
def __init__(self, *args, **kwargs):
super(UsuarioEditForm, self).__init__(*args, **kwargs)
self.fields['primeiro_telefone'].widget.attrs['class'] = 'telefone'
self.fields['segundo_telefone'].widget.attrs['class'] = 'telefone'
def valida_igualdade(self, texto1, texto2, msg):
if texto1 != texto2:
raise ValidationError(msg)
return True
def clean_primeiro_numero(self):
cleaned_data = self.cleaned_data
telefone = Telefone()
telefone.tipo = self.data['primeiro_tipo']
telefone.ddd = self.data['primeiro_ddd']
telefone.numero = self.data['primeiro_numero']
telefone.principal = self.data['primeiro_principal']
cleaned_data['primeiro_telefone'] = telefone
return cleaned_data
def clean_segundo_numero(self):
cleaned_data = self.cleaned_data
telefone = Telefone()
telefone.tipo = self.data['segundo_tipo']
telefone.ddd = self.data['segundo_ddd']
telefone.numero = self.data['segundo_numero']
telefone.principal = self.data['segundo_principal']
cleaned_data['segundo_telefone'] = telefone
return cleaned_data
@transaction.atomic
def save(self, commit=False):
usuario = super(UsuarioEditForm, self).save(commit)
# Primeiro telefone
tel = usuario.primeiro_telefone
tel.tipo = self.data['primeiro_tipo']
tel.ddd = self.data['primeiro_ddd']
tel.numero = self.data['primeiro_numero']
tel.principal = self.data['primeiro_principal']
tel.save()
usuario.primeiro_telefone = tel
# Segundo telefone
tel = usuario.segundo_telefone
if tel:
tel.tipo = self.data['segundo_tipo']
tel.ddd = self.data['segundo_ddd']
tel.numero = self.data['segundo_numero']
tel.principal = self.data['segundo_principal']
tel.save()
usuario.segundo_telefone = tel
# User
u = usuario.user
u.email = usuario.email
u.groups.remove(u.groups.first())
u.groups.add(get_or_create_grupo(self.cleaned_data['tipo'].descricao))
u.save()
usuario.save()
return usuario
class EspecialidadeMedicoForm(ModelForm):
medico = forms.ModelChoiceField(
queryset=get_medicos(),
widget=Select2(select2attrs={'width': '535px'}))
especialidade = forms.ModelChoiceField(
queryset=Especialidade.objects.all(),
widget=Select2(select2attrs={'width': '535px'}))
class Meta:
model = EspecialidadeMedico
fields = ['especialidade', 'medico']
| eduardoedson/scp | usuarios/forms.py | Python | mit | 8,823 | 0 |
from django.db import models
# Create your models here.
class Server(models.Model):
# Server
title = models.CharField(max_length=1000, help_text="Server Title", blank=False)
abstract = models.CharField(max_length=2000, help_text="Server Abstract", blank=True)
keywords = models.CharField(max_length=2000, help_text="Comma Separated List of Keywords", blank=True)
# Contact
contact_person = models.CharField(max_length=1000, help_text="Person to Contact", blank=True)
contact_organization = models.CharField(max_length=1000, help_text="Contact Organization", blank=True)
contact_position = models.CharField(max_length=1000, help_text="Contact Position (Optional)", blank=True)
contact_street_address = models.CharField(max_length=1000, help_text="Street Address (Optional)", blank=True)
contact_city_address = models.CharField(max_length=1000, help_text="Address: City (Optional)", blank=True)
contact_state_address = models.CharField(max_length=1000, help_text="Address: State or Providence (Optional)", blank=True)
contact_code_address = models.CharField(max_length=1000, help_text="Address: Postal Code (Optional)", blank=True)
contact_country_address = models.CharField(max_length=1000, help_text="Address: Country (Optional)", blank=True)
contact_telephone = models.CharField(max_length=1000, help_text="Contact Telephone Number (Optional)", blank=True)
contact_email = models.CharField(max_length=1000, help_text="Contact Email Address", blank=True)
contact_site = models.CharField(max_length=1000, help_text="Contact Web Site", blank=True)
# This implementation
implementation_site = models.CharField(max_length=1000, help_text="Web Address for This Implementation", blank=False)
def __unicode__(self):
return self.implementation_site
# Add other implementation specific classes here
class StreamGauge(models.Model):
river_segment_id = models.CharField(max_length=1000, help_text="NHN River Segment ID for both US and Canadian River Reaches", blank=False)
sos_endpoint = models.CharField(max_length=1000, help_text="SOS Endpoint for this Stream Gauge and ID", blank=True)
stream_gauge_id = models.CharField(max_length=1000, help_text="Stream gauge ID that corresponds to the station in the SOS endpoint", blank=False, unique=True)
stream_gauge_name = models.CharField(max_length=1000, help_text="Stream gauge name", blank=True)
stream_gauge_offerings = models.CharField(max_length=10000, help_text="Comma separated list of offerings for this station through SOS endpoint", blank=True)
stream_gauge_parameters = models.CharField(max_length=50000, help_text="Comma separated list of observedProperty parameters for this station through SOS endpoint", blank=True)
stream_gauge_x = models.DecimalField(help_text="Longitude or X coodinate", blank=True, max_digits=20, decimal_places=8)
stream_gauge_y = models.DecimalField(help_text="Latitude or Y coordinate", blank=True, max_digits=20, decimal_places=8)
def __unicode__(self):
return self.stream_gauge_id
| asascience-open/chisp1_wps | wps/models.py | Python | gpl-3.0 | 3,153 | 0.012369 |
from django.forms import ModelForm
from django import forms
from crispy_forms import layout
from crispy_forms.layout import Layout, HTML
from hs_core.forms import BaseFormHelper, Helper
from hs_core.hydroshare import users
from hs_modelinstance.models import ModelOutput, ExecutedBy
from hs_modflow_modelinstance.models import StudyArea, GridDimensions, StressPeriod, \
GroundWaterFlow, BoundaryCondition, ModelCalibration, ModelInput, GeneralElements
class MetadataField(layout.Field):
def __init__(self, *args, **kwargs):
kwargs['css_class'] = 'form-control input-sm'
super(MetadataField, self).__init__(*args, **kwargs)
# ModelOutput element forms
class ModelOutputFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('includes_output'),
)
kwargs['element_name_label'] = 'Includes output files?'
super(ModelOutputFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class ModelOutputForm(ModelForm):
includes_output = forms.TypedChoiceField(choices=((True, 'Yes'), (False, 'No')),
widget=forms.RadioSelect(
attrs={'style': 'width:auto;margin-top:-5px'}))
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(ModelOutputForm, self).__init__(*args, **kwargs)
self.helper = ModelOutputFormHelper(allow_edit, res_short_id, element_id,
element_name='ModelOutput')
class Meta:
model = ModelOutput
fields = ('includes_output',)
class ModelOutputValidationForm(forms.Form):
includes_output = forms.TypedChoiceField(choices=((True, 'Yes'), (False, 'No')), required=False)
def clean_includes_output(self):
data = self.cleaned_data['includes_output']
if data == u'False':
return False
else:
return True
# ExecutedBy element forms
class ExecutedByFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# pop the model program shortid out of the kwargs dictionary
mp_id = kwargs.pop('mpshortid')
# get all model program resources and build option HTML elements for each one.
# ModelProgram shortid is concatenated to the selectbox id so that it is accessible in the
# template.
mp_resource = users.get_resource_list(type=['ModelProgramResource'])
options = '\n'.join(['<option value=%s>%s</option>' % (r.short_id, r.title) for r in
mp_resource])
options = '<option value=Unspecified>Unspecified</option>' + options
selectbox = HTML('<div class="div-selectbox">'
' <select class="selectbox" id="selectbox_'+mp_id+'">' + options +
'</select>'
'</div><br>')
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('model_name', style="display:none"),
selectbox,
HTML("""
<div id=program_details_div style="display:none">
<table id="program_details_table" class="modelprogram">
<tr><td>Description: </td><td></td></tr>
<tr><td>Release Date: </td><td></td></tr>
<tr><td>Version: </td><td></td></tr>
<tr><td>Language: </td><td></td></tr>
<tr><td>Operating System: </td><td></td></tr>
<tr><td>Url: </td><td></td></tr>
</table>
</div>
"""),
)
kwargs['element_name_label'] = 'Model Program used for execution'
super(ExecutedByFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class ExecutedByForm(ModelForm):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(ExecutedByForm, self).__init__(*args, **kwargs)
# set mpshort id to 'Unspecified' if a foreign key has not been established yet,
# otherwise use mp short id
mpshortid = 'Unspecified'
if self.instance.model_program_fk is not None:
mpshortid = self.instance.model_program_fk.short_id
kwargs = dict(mpshortid=mpshortid)
self.helper = ExecutedByFormHelper(allow_edit, res_short_id, element_id,
element_name='ExecutedBy', **kwargs)
class Meta:
model = ExecutedBy
exclude = ('content_object', 'model_program_fk',)
class ExecutedByValidationForm(forms.Form):
model_name = forms.CharField(max_length=200)
# StudyArea element forms
class StudyAreaFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('totalLength'),
MetadataField('totalWidth'),
MetadataField('maximumElevation'),
MetadataField('minimumElevation'),
)
kwargs['element_name_label'] = 'Study Area'
super(StudyAreaFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class StudyAreaForm(ModelForm):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(StudyAreaForm, self).__init__(*args, **kwargs)
self.helper = StudyAreaFormHelper(allow_edit, res_short_id, element_id,
element_name='StudyArea')
class Meta:
model = StudyArea
fields = ('totalLength',
'totalWidth',
'maximumElevation',
'minimumElevation',
)
class StudyAreaValidationForm(forms.Form):
totalLength = forms.CharField(max_length=100, required=False)
totalWidth = forms.CharField(max_length=100, required=False)
maximumElevation = forms.CharField(max_length=100, required=False)
minimumElevation = forms.CharField(max_length=100, required=False)
# GridDimensions element forms
class GridDimensionsFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('numberOfLayers'),
MetadataField('typeOfRows'),
MetadataField('numberOfRows'),
MetadataField('typeOfColumns'),
MetadataField('numberOfColumns'),
)
kwargs['element_name_label'] = 'Grid Dimensions'
super(GridDimensionsFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class GridDimensionsForm(ModelForm):
grid_type_choices = (('Choose a type', 'Choose a type'),) + GridDimensions.gridTypeChoices
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(GridDimensionsForm, self).__init__(*args, **kwargs)
self.helper = GridDimensionsFormHelper(allow_edit, res_short_id, element_id,
element_name='GridDimensions')
self.fields['typeOfRows'].choices = self.grid_type_choices
self.fields['typeOfColumns'].choices = self.grid_type_choices
class Meta:
model = GridDimensions
fields = ('numberOfLayers',
'typeOfRows',
'numberOfRows',
'typeOfColumns',
'numberOfColumns',
)
class GridDimensionsValidationForm(forms.Form):
numberOfLayers = forms.CharField(max_length=100, required=False)
typeOfRows = forms.CharField(max_length=100, required=False)
numberOfRows = forms.CharField(max_length=100, required=False)
typeOfColumns = forms.CharField(max_length=100, required=False)
numberOfColumns = forms.CharField(max_length=100, required=False)
# StressPeriod element forms
class StressPeriodFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('stressPeriodType'),
MetadataField('steadyStateValue'),
MetadataField('transientStateValueType'),
MetadataField('transientStateValue'),
)
kwargs['element_name_label'] = 'Stress Period'
super(StressPeriodFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class StressPeriodForm(ModelForm):
stress_period_type_choices = \
(('Choose a type', 'Choose a type'),) + StressPeriod.stressPeriodTypeChoices
transient_state_value_type_choices = \
(('Choose a type', 'Choose a type'),) + StressPeriod.transientStateValueTypeChoices
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(StressPeriodForm, self).__init__(*args, **kwargs)
self.helper = StressPeriodFormHelper(allow_edit, res_short_id, element_id,
element_name='StressPeriod')
self.fields['stressPeriodType'].choices = self.stress_period_type_choices
self.fields['transientStateValueType'].choices = self.transient_state_value_type_choices
class Meta:
model = StressPeriod
fields = ('stressPeriodType',
'steadyStateValue',
'transientStateValueType',
'transientStateValue',
)
class StressPeriodValidationForm(forms.Form):
stressPeriodType = forms.CharField(max_length=100, required=False)
steadyStateValue = forms.CharField(max_length=100, required=False)
transientStateValueType = forms.CharField(max_length=100, required=False)
transientStateValue = forms.CharField(max_length=100, required=False)
# GroundWaterFlow element forms
class GroundWaterFlowFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('flowPackage'),
MetadataField('unsaturatedZonePackage'),
MetadataField('horizontalFlowBarrierPackage'),
MetadataField('seawaterIntrusionPackage'),
MetadataField('flowParameter'),
)
kwargs['element_name_label'] = 'Groundwater Flow'
super(GroundWaterFlowFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class GroundWaterFlowForm(ModelForm):
flow_package_choices = \
(('Choose a package', 'Choose a package'),) + GroundWaterFlow.flowPackageChoices
flow_parameter_choices = \
(('Choose a parameter', 'Choose a parameter'),) + GroundWaterFlow.flowParameterChoices
unsaturatedZonePackage = forms.BooleanField(
label='Includes Unsaturated Zone Package package (UZF) ', widget=forms.CheckboxInput(
attrs={'style': 'width:auto;margin-top:-5px'}))
horizontalFlowBarrierPackage = forms.BooleanField(
label='Includes Horizontal Flow Barrier package (HFB6)', widget=forms.CheckboxInput(
attrs={'style': 'width:auto;margin-top:-5px'}))
seawaterIntrusionPackage = forms.BooleanField(
label='Includes Seawater Intrusion package (SWI2)', widget=forms.CheckboxInput(
attrs={'style': 'width:auto;margin-top:-5px'}))
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(GroundWaterFlowForm, self).__init__(*args, **kwargs)
self.helper = GroundWaterFlowFormHelper(allow_edit, res_short_id, element_id,
element_name='GroundWaterFlow')
self.fields['flowPackage'].choices = self.flow_package_choices
self.fields['flowParameter'].choices = self.flow_parameter_choices
class Meta:
model = GroundWaterFlow
fields = ('flowPackage',
'unsaturatedZonePackage',
'horizontalFlowBarrierPackage',
'seawaterIntrusionPackage',
'flowParameter',
)
class GroundWaterFlowValidationForm(forms.Form):
flowPackage = forms.CharField(max_length=100, required=False)
unsaturatedZonePackage = forms.BooleanField(required=False)
horizontalFlowBarrierPackage = forms.BooleanField(required=False)
seawaterIntrusionPackage = forms.BooleanField(required=False)
flowParameter = forms.CharField(max_length=100, required=False)
# BoundaryCondition element forms
class BoundaryConditionFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('specified_head_boundary_packages'),
MetadataField('other_specified_head_boundary_packages'),
MetadataField('specified_flux_boundary_packages'),
MetadataField('other_specified_flux_boundary_packages'),
MetadataField('head_dependent_flux_boundary_packages'),
MetadataField('other_head_dependent_flux_boundary_packages'),
)
kwargs['element_name_label'] = 'Boundary Condition'
super(BoundaryConditionFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class BoundaryConditionForm(ModelForm):
specified_head_boundary_packages = forms.MultipleChoiceField(
choices=BoundaryCondition.specifiedHeadBoundaryPackageChoices,
widget=forms.CheckboxSelectMultiple(attrs={'style': 'width:auto;margin-top:-5px'}))
specified_flux_boundary_packages = forms.MultipleChoiceField(
choices=BoundaryCondition.specifiedFluxBoundaryPackageChoices,
widget=forms.CheckboxSelectMultiple(attrs={'style': 'width:auto;margin-top:-5px'}))
head_dependent_flux_boundary_packages = forms.MultipleChoiceField(
choices=BoundaryCondition.headDependentFluxBoundaryPackageChoices,
widget=forms.CheckboxSelectMultiple(attrs={'style': 'width:auto;margin-top:-5px'}))
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(BoundaryConditionForm, self).__init__(*args, **kwargs)
self.helper = BoundaryConditionFormHelper(allow_edit, res_short_id, element_id,
element_name='BoundaryCondition')
if self.instance:
if self.instance.id:
self.fields['specified_head_boundary_packages'].initial = \
[types.description for types in
self.instance.specified_head_boundary_packages.all()]
self.fields['specified_flux_boundary_packages'].initial = \
[packages.description for packages in
self.instance.specified_flux_boundary_packages.all()]
self.fields['head_dependent_flux_boundary_packages'].initial = \
[packages.description for packages in
self.instance.head_dependent_flux_boundary_packages.all()]
class Meta:
model = BoundaryCondition
exclude = ('specified_head_boundary_packages',
'specified_flux_boundary_packages',
'head_dependent_flux_boundary_packages',
)
fields = ('other_specified_head_boundary_packages',
'other_specified_flux_boundary_packages',
'other_head_dependent_flux_boundary_packages',
)
class BoundaryConditionValidationForm(forms.Form):
specified_head_boundary_packages = forms.MultipleChoiceField(
choices=BoundaryCondition.specifiedHeadBoundaryPackageChoices, required=False)
specified_flux_boundary_packages = forms.MultipleChoiceField(
choices=BoundaryCondition.specifiedFluxBoundaryPackageChoices, required=False)
head_dependent_flux_boundary_packages = forms.MultipleChoiceField(
choices=BoundaryCondition.headDependentFluxBoundaryPackageChoices, required=False)
other_specified_head_boundary_packages = forms.CharField(max_length=200, required=False)
other_specified_flux_boundary_packages = forms.CharField(max_length=200, required=False)
other_head_dependent_flux_boundary_packages = forms.CharField(max_length=200, required=False)
# ModelCalibration element forms
class ModelCalibrationFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('calibratedParameter'),
MetadataField('observationType'),
MetadataField('observationProcessPackage'),
MetadataField('calibrationMethod'),
)
kwargs['element_name_label'] = 'Model Calibration'
super(ModelCalibrationFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class ModelCalibrationForm(ModelForm):
observation_process_package_choices = (('Choose a package', 'Choose a package'),) + \
ModelCalibration.observationProcessPackageChoices
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(ModelCalibrationForm, self).__init__(*args, **kwargs)
self.helper = ModelCalibrationFormHelper(allow_edit, res_short_id, element_id,
element_name='ModelCalibration')
self.fields['observationProcessPackage'].choices = self.observation_process_package_choices
class Meta:
model = ModelCalibration
fields = ('calibratedParameter',
'observationType',
'observationProcessPackage',
'calibrationMethod',
)
class ModelCalibrationValidationForm(forms.Form):
calibratedParameter = forms.CharField(max_length=100, required=False)
observationType = forms.CharField(max_length=100, required=False)
observationProcessPackage = forms.CharField(max_length=100, required=False)
calibrationMethod = forms.CharField(max_length=100, required=False)
# ModelInput element forms
class ModelInputFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('inputType'),
MetadataField('inputSourceName'),
MetadataField('inputSourceURL'),
)
kwargs['element_name_label'] = 'Model Input'
super(ModelInputFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class ModelInputForm(ModelForm):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(ModelInputForm, self).__init__(*args, **kwargs)
self.helper = ModelInputFormHelper(allow_edit, res_short_id, element_id,
element_name='ModelInput')
if res_short_id:
self.action = "/hydroshare/hsapi/_internal/%s/modelinput/add-metadata/" % res_short_id
else:
self.action = ""
@property
def form_id(self):
form_id = 'id_modelinput_%s' % self.number
return form_id
@property
def form_id_button(self):
return "'" + self.form_id + "'"
class Meta:
model = ModelInput
fields = ('inputType',
'inputSourceName',
'inputSourceURL',
)
class ModelInputValidationForm(forms.Form):
inputType = forms.CharField(max_length=100, required=False)
inputSourceName = forms.CharField(max_length=100, required=False)
inputSourceURL = forms.URLField(required=False)
# GeneralElements element forms
class GeneralElementsFormHelper(BaseFormHelper):
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, element_name=None,
*args, **kwargs):
# the order in which the model fields are listed for the FieldSet is the order these fields
# will be displayed
layout = Layout(
MetadataField('modelParameter'),
MetadataField('modelSolver'),
MetadataField('output_control_package'),
MetadataField('subsidencePackage'),
)
kwargs['element_name_label'] = 'General'
super(GeneralElementsFormHelper, self).__init__(allow_edit, res_short_id, element_id,
element_name, layout, *args, **kwargs)
class GeneralElementsForm(ModelForm):
model_solver_choices = \
(('Choose a solver', 'Choose a solver'),) + GeneralElements.modelSolverChoices
output_control_package = forms.MultipleChoiceField(
choices=GeneralElements.outputControlPackageChoices,
widget=forms.CheckboxSelectMultiple(attrs={'style': 'width:auto;margin-top:-5px'}))
subsidence_package_choices = \
(('Choose a package', 'Choose a package'),) + GeneralElements.subsidencePackageChoices
def __init__(self, allow_edit=True, res_short_id=None, element_id=None, *args, **kwargs):
super(GeneralElementsForm, self).__init__(*args, **kwargs)
self.helper = GeneralElementsFormHelper(allow_edit, res_short_id, element_id,
element_name='GeneralElements')
self.fields['modelSolver'].choices = self.model_solver_choices
self.fields['subsidencePackage'].choices = self.subsidence_package_choices
if self.instance:
if self.instance.id:
self.fields['output_control_package'].initial = \
[types.description for types in self.instance.output_control_package.all()]
class Meta:
model = GeneralElements
exclude = ('output_control_package',)
fields = ('modelParameter',
'modelSolver',
'subsidencePackage',
)
class GeneralElementsValidationForm(forms.Form):
modelParameter = forms.CharField(max_length=100, required=False)
modelSolver = forms.CharField(max_length=100, required=False)
output_control_package = forms.MultipleChoiceField(
choices=GeneralElements.outputControlPackageChoices,
required=False)
subsidencePackage = forms.CharField(max_length=100, required=False)
ModelInputLayoutEdit = Layout(
HTML('<div class="col-xs-12 col-sm-6"> '
'<div class="form-group" id="modelinput"> '
'{% load crispy_forms_tags %} '
'{% for form in model_input_formset.forms %} '
'<form id="{{form.form_id}}" action="{{ form.action }}" '
'method="POST" enctype="multipart/form-data"> '
'{% crispy form %} '
'<div class="row" style="margin-top:10px">'
'<div class="col-md-12">'
'<span class="glyphicon glyphicon-trash icon-button btn-remove" data-toggle="modal" '
'data-placement="auto" title="Delete Model Input" '
'data-target="#delete-modelinput-element-dialog_{{ form.number }}"></span>'
'</div>'
'<div class="col-md-3">'
'<button type="button" class="btn btn-primary pull-right btn-form-submit">'
'Save changes</button>' # TODO: TESTING
'</div>'
'</div>'
'{% crispy form.delete_modal_form %} '
'</form> '
'{% endfor %}</div>'
'</div> '
),
HTML('<div style="margin-top:10px" class="col-md-2">'
'<p><a id="add-modelinput" class="btn btn-success" data-toggle="modal" '
'data-target="#add-modelinput-dialog">'
'<i class="fa fa-plus"></i>Add Model Input</a>'
'</div>'
),
)
ModalDialogLayoutAddModelInput = Helper.get_element_add_modal_form('ModelInput',
'add_modelinput_modal_form')
| ResearchSoftwareInstitute/MyHPOM | hs_modflow_modelinstance/forms.py | Python | bsd-3-clause | 27,274 | 0.003886 |
'''
run with ex: mpiexec -n 10 python article_simulated_estimate_mpi.py
Created on Jul 11, 2014
@author: jonaswallin
'''
from __future__ import division
import time
import scipy.spatial as ss
import article_simulatedata
from mpi4py import MPI
import numpy as np
import BayesFlow as bm
import matplotlib
import matplotlib.pyplot as plt
import numpy.random as npr
import BayesFlow.plot as bm_plot
import matplotlib.ticker as ticker
from article_plotfunctions import plotQ_joint, plotQ, plot_theta
folderFigs = "/Users/jonaswallin/Dropbox/articles/FlowCap/figs/"
sim = 10**2
nCells = 1500
thin = 2
nPers = 80
save_fig = 0
Y = []
####
# COLLECTING THE DATA
####
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
Y,act_komp, mus, Thetas, Sigmas, P = np.array(article_simulatedata.simulate_data_v1(nCells = nCells, nPersons = nPers))
else:
Y = None
act_komp = None
#npr.seed(123546)
####
# Setting up model
####
hGMM = bm.hierarical_mixture_mpi(K = 4)
hGMM.set_data(Y)
hGMM.set_prior_param0()
hGMM.update_GMM()
hGMM.update_prior()
hGMM.set_p_labelswitch(1.)
hGMM.set_prior_actiavation(10)
hGMM.set_nu_MH_param(10,200)
for i,GMM in enumerate(hGMM.GMMs):
GMM._label =i
for i in range(min(sim,2000)):
hGMM.sample()
np.set_printoptions(precision=3)
#hGMM.reset_prior()
bm.distance_sort_MPI(hGMM)
hGMM.set_p_activation([0.7,0.7])
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
theta_sim = []
Q_sim = []
nu_sim = []
Y_sim = []
Y0_sim = []
##############
# MCMC PART
##############
##############
# BURN IN
##############
for i in range(min(np.int(np.ceil(0.1*sim)),8000)):#burn in
hGMM.sample()
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
mus_vec = np.zeros((len(Y), hGMM.K, hGMM.d))
actkomp_vec = np.zeros((len(Y), hGMM.K))
count = 0
hGMM.set_p_labelswitch(.4)
for i in range(sim):#
# sampling the thining
for k in range(thin):
# simulating
hGMM.sample()
##
# since label switching affects the posterior of mu, and active_komp
# it needs to be estimated each time
##
labels = hGMM.get_labelswitches()
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
for j in range(labels.shape[0]):
if labels[j,0] != -1:
mus_vec[j,labels[j,0],:], mus_vec[j,labels[j,1],:] = mus_vec[j,labels[j,1],:], mus_vec[j,labels[j,0],:]
actkomp_vec[j,labels[j,0]], actkomp_vec[j,labels[j,1]] = actkomp_vec[j,labels[j,1]], actkomp_vec[j,labels[j,0]]
###################
# storing data
# for post analysis
###################
mus_ = hGMM.get_mus()
thetas = hGMM.get_thetas()
Qs = hGMM.get_Qs()
nus = hGMM.get_nus()
if sim - i < nCells * nPers:
Y_sample = hGMM.sampleY()
active_komp = hGMM.get_activekompontent()
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
print "iter =%d"%i
count += 1
mus_vec += mus_
actkomp_vec += active_komp
theta_sim.append(thetas)
Q_sim.append(Qs/(nus.reshape(nus.shape[0],1,1)- Qs.shape[1]-1) )
nu_sim.append(nus)
# storing the samples equal to number to the first indiviual
if sim - i < nCells:
Y0_sim.append(hGMM.GMMs[0].simulate_one_obs().reshape(3))
Y_sim.append(Y_sample)
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
actkomp_vec /= count
mus_vec /= count
mus_ = mus_vec
hGMM.save_to_file("/Users/jonaswallin/Dropbox/temp/")
##
# fixing ploting options
##
matplotlib.rcParams['ps.useafm'] = True
matplotlib.rcParams['pdf.use14corefonts'] = True
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble']=[r"\usepackage{amsmath}"]
#hGMM.plot_GMM_scatter_all([0, 1])
mus_colors = ['r','b','k','m']
f, ax = hGMM.plot_mus([0,1,2], colors =mus_colors, size_point = 5 )
if MPI.COMM_WORLD.Get_rank() == 0: # @UndefinedVariable
######################
#ordering mus
mus_true_mean = []
mus_mean = []
for k in range(hGMM.K):
mus_true_mean.append(np.array(np.ma.masked_invalid(mus[:,k,:]).mean(0)))
mus_mean.append(np.array(np.ma.masked_invalid(mus_[:,k,:].T).mean(0)))
mus_true_mean = np.array(mus_true_mean)
mus_mean = np.array(mus_mean)
ss_mat = ss.distance.cdist( mus_true_mean, mus_mean, "euclidean")
#print ss_mat
col_index = []
for k in range(hGMM.K):
col_index.append( np.argmin(ss_mat[k,:]))
#print col_index
#####################
######################
theta_sim = np.array(theta_sim)
Q_sim = np.array(Q_sim)
nu_sim = np.array(nu_sim)
np.set_printoptions(precision=2)
perc_theta = []
perc_Q_vec = []
for k in range(hGMM.K):
perc_ = np.percentile(theta_sim[:,col_index[k],:] - Thetas[k],[2.5,50,97.5],axis=0)
perc_theta.append(np.array(perc_).T)
#print "%d & %s & %s & %s & \\hline" %(k, np.mean(theta_sim[:,col_index[k],:],0) - Thetas[k],perc_[0],perc_[1])
perc_Q = np.percentile(Q_sim[:,col_index[k],:] - Sigmas[k],[2.5,50,97.5],axis=0)
#print "Q = %s"%(np.mean(Q_sim[:,col_index[k],:],0))
perc_Q_vec.append(perc_Q)
theta_string = ""
Q_string = ""
theta_diff = np.mean(theta_sim[:,col_index[k],:],0) - Thetas[k]
Q_diff = np.mean(Q_sim[:,col_index[k],:] - Sigmas[k] ,0)
for d in range(hGMM.d):
theta_string += " %.2f (%.2f, %.2f) &"%(perc_[1][d], perc_[0][d], perc_[2][d])
for dd in range(hGMM.d):
Q_string += " %.3f (%.3f, %.3f) &"%(perc_Q[1][d,dd],perc_Q[0][d,dd],perc_Q[2][d,dd] )
Q_string = Q_string[:-1]
Q_string +="\\\ \n"
theta_string = theta_string[:-1]
print "theta[%d]= \n%s\n"%(k,theta_string)
print "Q[%d]= \n%s "%(k,Q_string)
perc_nu = np.percentile(nu_sim[:,col_index[k]] - 100,[2.5,50,97.5],axis=0)
print "nu = %.2f (%d, %d)"%(perc_nu[1],perc_nu[0],perc_nu[2])
Y_sim = np.array(Y_sim)
Y0_sim = np.array(Y0_sim)
for k in range(hGMM.K):
k_ = np.where(np.array(col_index)==k)[0][0]
print("k_ == %s"%k_)
mu_k = mus[:,k_,:].T
#print actkomp_vec[:,col_index[k]]
index = np.isnan(mu_k[:,0])==False
ax.scatter(mu_k[index,0],mu_k[index,1],mu_k[index,2], s=50, edgecolor=mus_colors[k],facecolors='none')
ax.view_init(48,22)
fig_nu = plt.figure(figsize=(6,0.5))
ax_nu = fig_nu.add_subplot(111)
for k in range(hGMM.K):
ax_nu.plot(nu_sim[:,col_index[k]])
f_histY = bm_plot.histnd(Y_sim, 50, [0, 100], [0,100])
f_histY0 = bm_plot.histnd(Y0_sim, 50, [0, 100], [0,100])
f_theta = plot_theta(np.array(perc_theta))
figs_Q = plotQ(perc_Q_vec)
fig_Q_joint = plotQ_joint(perc_Q_vec)
np.set_printoptions(precision=4, suppress=True)
for i, GMM in enumerate(hGMM.GMMs):
#print("p[%d,%d] = %s"%(hGMM.comm.Get_rank(),i,GMM.p))
hGMM.comm.Barrier()
if MPI.COMM_WORLD.Get_rank() == 0 and save_fig: # @UndefinedVariable
print col_index
fig_nu.savefig(folderFigs + "nus_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
fig_nu.savefig(folderFigs + "nus_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
f.savefig(folderFigs + "dcluster_centers_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
f.savefig(folderFigs + "dcluster_centers_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
f_histY.savefig(folderFigs + "hist2d_simulated.eps", type="eps",bbox_inches='tight')
f_histY.savefig(folderFigs + "hist2d_simulated.pdf", type="pdf",bbox_inches='tight')
f_histY0.savefig(folderFigs + "hist2d_indv_simulated.eps", type="eps",bbox_inches='tight')
f_histY0.savefig(folderFigs + "hist2d_indv_simulated.pdf", type="pdf",bbox_inches='tight')
f_theta.savefig(folderFigs + "theta_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
f_theta.savefig(folderFigs + "theta_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
fig_Q_joint.savefig(folderFigs + "Qjoint_simulated.pdf", type="pdf",transparent=True,bbox_inches='tight')
fig_Q_joint.savefig(folderFigs + "Qjoint_simulated.eps", type="eps",transparent=True,bbox_inches='tight')
for i,f_Q in enumerate(figs_Q):
f_Q.savefig(folderFigs + "Q%d_simulated.pdf"%(i+1), type="pdf",transparent=True,bbox_inches='tight')
f_Q.savefig(folderFigs + "Q%d_simulated.eps"%(i+1), type="eps",transparent=True,bbox_inches='tight')
else:
plt.show() | JonasWallin/BayesFlow | examples/article1/article_simulated_estimate_mpi.py | Python | gpl-2.0 | 8,046 | 0.048223 |
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for support of portable globes."""
import json
import os
import shlex
import subprocess
import sys
import time
import urlparse
import xml.sax.saxutils as saxutils
import distutils.dir_util
import distutils.errors
import errors
BYTES_PER_MEGABYTE = 1024.0 * 1024.0
NAME_TEMPLATE = "%s_%s"
class OsCommandError(Exception):
"""Thrown if os command fails."""
pass
# TODO: consider to use a lib like bleach that is specifically
# aimed at foiling XSS attacks.
# Additional characters that need to be escaped for HTML defined in a dictionary
# the character to its escape string.
# xml.sax.saxutils.escape() takes care of &, < and >.
_HTML_ESCAPE_TABLE = {
'"': """,
"'": "'",
"`": "`",
"|": "|"
}
def HtmlEscape(text):
"""Escapes a string for HTML.
Args:
text: source string that needs to be escaped for HTML.
Returns:
HTML escaped string.
"""
if not text:
return text
return saxutils.escape(text, _HTML_ESCAPE_TABLE)
def FileSize(file_path):
"""Returns size of file in megabytes."""
return os.path.getsize(file_path) / BYTES_PER_MEGABYTE
def SizeAsString(size):
"""Converts megabyte float to a string."""
if size < 1000.0:
return "%0.2fMB" % size
size /= 1024.0
if size < 1000.0:
return "%0.2fGB" % size
else:
return "%0.2fTB" % (size / 1024.0)
def FileSizeAsString(file_path):
"""Returns size of file as a string."""
return SizeAsString(FileSize(file_path))
def DirectorySize(directory):
"""Returns size of directory in megabytes."""
directory_size = 0
if os.path.isdir(directory):
for (path, unused_dirs, files) in os.walk(directory):
for file_name in files:
file_path = os.path.join(path, file_name)
directory_size += os.path.getsize(file_path)
return directory_size / BYTES_PER_MEGABYTE
def DirectorySizeAsString(directory):
"""Returns size of directory as a string."""
return SizeAsString(DirectorySize(directory))
def CreateDirectory(directory):
"""Create entire directory path."""
if os.path.exists(directory):
return
try:
os.makedirs(directory)
except OSError:
PrintAndLog("Raising error: Cannot create directory \'%s\'" % directory)
raise
def CopyDirectory(source, destination, logger):
"""Copy from source to destination, which will be created if it does not exist."""
cmd = "Copying %s to %s" % (source, destination)
PrintAndLog(cmd, logger)
try:
distutils.dir_util.copy_tree(source, destination)
except distutils.errors.DistutilsFileError:
PrintAndLog("Raising error: Cannot copy to directory %s" % destination)
raise
def DiskSpace(path):
"""Returns remaining disk space in Megabytes."""
mount_info = os.statvfs(path)
return mount_info.f_bsize * mount_info.f_bavail / BYTES_PER_MEGABYTE
def Uid():
"""Returns a uid for identifying a globe building sequence."""
return "%d_%f" % (os.getpid(), time.time())
def GlobesToText(globes, template, sort_item, reverse=False, is_text=False):
"""Fills in globe template for each globe and returns as array of strings."""
result = []
# If it is text, sort the lower case version of the text.
if is_text:
items = sorted(globes.iteritems(),
key=lambda globe_pair: globe_pair[1][sort_item].lower(),
reverse=reverse)
# If it is NOT text, use default less than comparison.
else:
items = sorted(globes.iteritems(),
key=lambda globe_pair: globe_pair[1][sort_item],
reverse=reverse)
for [unused_key, globe] in iter(items):
next_entry = template
for [globe_term, globe_value] in globe.iteritems():
replace_item = "[$%s]" % globe_term.upper()
if globe_term == "globe" or globe_term == "info_loaded":
pass
elif globe_term == "size":
next_entry = next_entry.replace(replace_item, SizeAsString(globe_value))
else:
next_entry = next_entry.replace(replace_item, globe_value)
result.append(next_entry)
return result
def GlobeNameReplaceParams(globe_name):
"""Returns a single replacement parameter for the globe name."""
return {"[$GLOBE_NAME]": globe_name}
def ReplaceParams(text, replace_params):
"""Replace keys with values in the given text."""
for (key, value) in replace_params.iteritems():
text = text.replace(key, value)
return text
def OutputFile(file_name, replace_params):
"""Outputs a file to standard out with the globe name replaced."""
fp = open(file_name)
text = fp.read()
fp.close()
print ReplaceParams(text, replace_params)
def CreateInfoFile(path, description):
"""Create globe info file."""
content = "Portable Globe\n"
content += GmTimeStamp()
content += "\n%s" % TimeStamp()
content += "Globe description: %s\n" % description
CreateFile(path, content)
def CreateFile(path, content):
"""Create globe info file."""
try:
fp = open(path, "w")
fp.write(content)
fp.close()
except IOError as error:
print error
sys.exit(1)
def TimeStamp():
"""Create timestamp based on local time."""
return time.strftime("%Y-%m-%d %H:%M:%S\n", time.localtime())
def GmTimeStamp():
"""Create timestamp based on Greenwich Mean Time."""
return time.strftime("%Y-%m-%d %H:%M:%S GMT\n", time.gmtime())
def ConvertToQtNode(level, col, row):
"""Converts col, row, and level to corresponding qtnode string."""
qtnode = "0"
half_ndim = 1 << (level - 1)
for unused_ in xrange(level):
if row >= half_ndim and col < half_ndim:
qtnode += "0"
row -= half_ndim
elif row >= half_ndim and col >= half_ndim:
qtnode += "1"
row -= half_ndim
col -= half_ndim
elif row < half_ndim and col >= half_ndim:
qtnode += "2"
col -= half_ndim
else:
qtnode += "3"
half_ndim >>= 1
return qtnode
def JsBoolString(bool_value):
"""Write boolean value as javascript boolean."""
if bool_value:
return "true"
else:
return "false"
def WriteHeader(content_type="text/html"):
"""Output header for web page."""
# Pick up one print from the Python print.
print "Content-Type: %s\n" % content_type
def ExecuteCmd(os_cmd, logger, dry_run=False):
"""Execute os command and log results.
Runs command, waits until it finishes, then analyses the return code, and
reports either "SUCCESS" or "FAILED".
Use if output of command is not desired, otherwise it should be redirected
to a file or use RunCmd below.
Args:
os_cmd: Linux shell command to execute.
logger: Logger responsible for outputting log messages.
dry_run: Whether command should only be printed but not run.
Throws:
OsCommandError
"""
PrintAndLog("Executing: %s" % os_cmd, logger)
if dry_run:
PrintAndLog("-- dry run --", logger)
return
try:
if isinstance(os_cmd, str):
os_cmd = shlex.split(os_cmd)
p = subprocess.Popen(os_cmd, shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
err_data = p.communicate()[1]
return_code = p.returncode
if return_code != 0:
PrintAndLog("Raising error: %s (return code %d)\n"
% (err_data, return_code), logger)
raise OsCommandError()
else:
PrintAndLog("SUCCESS", logger, None)
except Exception, e:
PrintAndLog("FAILED: %s" % e.__str__(), logger)
raise OsCommandError()
def ExecuteCmdInBackground(os_cmd, logger):
"""Execute os command in the background and log results.
Runs command in the background and returns immediately without waiting for
the execution to finish.
Use if the command will take longer time to finish than request timeout.
Args:
os_cmd: Linux shell command to execute.
logger: Logger responsible for outputting log messages.
Throws:
OsCommandError
"""
PrintAndLog("Executing in background: %s" % os_cmd, logger)
try:
subprocess.Popen(os_cmd + " &", shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
except Exception, e:
PrintAndLog("FAILED: %s" % e.__str__(), logger)
raise OsCommandError()
def RunCmd(os_cmd):
"""Execute os command and return list of results and errors.
Runs command, waits until it finishes, then returns the output of execution
(if succeeded) or error information (if failed).
Use if output of command is needed.
Args:
os_cmd: Linux shell command to execute.
Returns:
Array of result lines.
"""
try:
if isinstance(os_cmd, str):
os_cmd = shlex.split(os_cmd)
results = subprocess.check_output(os_cmd)
return results.split("\n")
except subprocess.CalledProcessError as e:
# print "FAILURE: %s" % e.__str__()
return ["", e.__str__()]
def PrintAndLog(msg, logger=None, prefix="\n"):
if prefix:
print "%s%s" % (prefix, msg)
else:
print msg
if logger:
logger.Log(msg)
def GetDbrootInfoJson(globe, name):
"""Get dbroot info as a json string.
Args:
globe: portable_globe object.
name: name of portable globe
Returns:
Dbroot info in Json formatted string.
"""
dbroot_info = {"name": name,
"has_imagery": globe.HasImagery(),
"has_terrain": globe.HasTerrain(),
"is_proto_imagery": globe.IsProtoImagery(),
}
return json.dumps(dbroot_info)
def NormalizeTargetPath(target):
"""Normalizes the target path.
Adds leading slash if needed, strips ending slashes.
Args:
target: The target path (fusion db publish point).
Returns:
Normalized target path.
"""
if not target:
return target
target = target.strip()
target = target.rstrip("/")
if not target:
return target
if target[0] != "/":
target = "/{0}".format(target)
return target
def GetServerAndPathFromUrl(url):
"""Gets a server and a path from the url.
Args:
url: the URL.
Returns:
tuple (server, path). The server is 'scheme://host:port'.
The path can be empty string.
Raises:
InvalidValueError: when the url is not valid.
"""
server = ""
path = ""
url_obj = urlparse.urlparse(url)
if url_obj.scheme and url_obj.netloc and url_obj.path:
server = "{0}://{1}".format(url_obj.scheme, url_obj.netloc)
path = url_obj.path
elif url_obj.scheme and url_obj.netloc:
server = "{0}://{1}".format(url_obj.scheme, url_obj.netloc)
elif url_obj.path:
path = url_obj.path
else:
raise errors.InvalidValueError("Invalid URL: %s" % url)
return (server, path)
def IsProcessRunningForGlobe(tool_name, base_dir):
"""Checks whether specified job is running for portable.
Checks if process is running by detecting it in the output returned by
executing "ps -ef | grep base_dir".
Args:
tool_name: tool name to check if it is present in list of running
processes.
base_dir: base directory for corresponding portable.
Returns:
whether specified job is running.
"""
ps_cmd = "ps -ef"
grep_cmd = "grep \"%s\"" % base_dir
ps_subprocess = subprocess.Popen(shlex.split(ps_cmd),
shell=False,
stdout=subprocess.PIPE)
grep_subprocess = subprocess.Popen(shlex.split(grep_cmd),
shell=False,
stdin=ps_subprocess.stdout,
stdout=subprocess.PIPE)
ps_subprocess.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits.
procs = grep_subprocess.communicate()[0]
if procs:
procs = procs.split("/n")
for proc in procs:
if proc.find(tool_name) > 0:
return True
return False
class Log(object):
"""Simple logger class."""
def __init__(self, log_file, enabled=True):
self.log_file_ = log_file
self.enabled_ = enabled
def CheckLogFolder(self):
return os.path.exists(os.path.dirname(self.log_file_))
def Clear(self):
"""Clear the log file."""
if not self.CheckLogFolder():
return
fp = open(self.log_file_, "w")
fp.close()
def Log(self, message):
"""Log message to cutter log."""
if not self.enabled_ or not self.CheckLogFolder():
return
fp = open(self.log_file_, "a")
fp.write("%s" % TimeStamp())
fp.write("%s\n" % message)
fp.close()
| tst-ahernandez/earthenterprise | earth_enterprise/src/fusion/portableglobe/cutter/cgi-bin/common/utils.py | Python | apache-2.0 | 12,945 | 0.01151 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Editres(AutotoolsPackage):
"""Dynamic resource editor for X Toolkit applications."""
homepage = "http://cgit.freedesktop.org/xorg/app/editres"
url = "https://www.x.org/archive/individual/app/editres-1.0.6.tar.gz"
version('1.0.6', '310c504347ca499874593ac96e935353')
depends_on('libxaw')
depends_on('libx11')
depends_on('libxt')
depends_on('libxmu')
depends_on('pkgconfig', type='build')
depends_on('util-macros', type='build')
| EmreAtes/spack | var/spack/repos/builtin/packages/editres/package.py | Python | lgpl-2.1 | 1,739 | 0.000575 |
import unittest
from pecan_swagger import utils
class TestUtils(unittest.TestCase):
def test_swagger_build(self):
from .resources import example_app
expected = {
"swagger": "2.0",
"info": {
"version": "1.0",
"title": "example_app"
},
"produces": [],
"consumes": [],
"paths": {
"/api": {
"get": {}
},
"/messages": {
"get": {},
"post": {}
},
"/profile": {
"get": {},
"post": {}
},
"/profile/image": {
"get": {},
"post": {}
},
"/profile/stats": {
"get": {}
}
}
}
actual = utils.swagger_build('example_app', '1.0')
self.assertDictEqual(expected, actual)
def test_swagger_build_wsme(self):
from .resources import example_wsme_app
expected = \
{
"consumes": [],
"info": {
"title": "example_wsme_app",
"version": "1.0"
},
"paths": {
"/api": {
"get": {}
},
"/messages": {
"get": {},
"post": {}
},
"/profile": {
"get": {},
"post": {}
},
"/profile/image": {
"get": {},
"post": {}
},
"/profile/stats": {
"get": {}
},
"/wsmemessages": {
"get": {
"description": "",
"parameters": [],
"responses": {
200: {
"description": "",
"schema": {
"items": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
'2.KARAMATSU',
'3.CHOROMATSU',
'4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
},
"type": "array"
}
}
}
},
"post": {
"description": "",
"parameters": [
{
"in": "query",
"name": "message",
"required": True,
"type": "string"
}
],
"responses": {
201: {
"description": "",
"schema": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
'2.KARAMATSU',
'3.CHOROMATSU',
'4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
}
}
}
}
},
"/wsmemessages/<specifier>": {
"delete": {
"description": "",
"parameters": [
{
"in": "query",
"name": "id",
"required": True,
"type": "string"
}
],
"responses": {
204: {
"description": ""
}
}
},
"get": {
"description": "",
"parameters": [
{
"in": "query",
"name": "id",
"required": True,
"type": "string"
}
],
"responses": {
200: {
"description": "",
"schema": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
'2.KARAMATSU',
'3.CHOROMATSU',
'4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
}
}
}
}
},
"/wsmemessages/detail": {
"get": {
"description": "",
"parameters": [
{
"in": "query",
"name": "id",
"required": True,
"type": "string"
}
],
"responses": {
200: {
"description": "",
"schema": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
'2.KARAMATSU',
'3.CHOROMATSU',
'4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
}
}
}
}
}
},
"produces": [],
"swagger": "2.0"
}
actual = utils.swagger_build('example_wsme_app', '1.0')
import codecs, json
fout = codecs.open('example_wsme_app.json', 'w', 'utf_8')
json.dump(actual, fout, sort_keys=True, indent=2)
self.maxDiff = None
self.assertDictEqual(expected, actual)
| shu-mutou/pecan-swagger | tests/test_utils.py | Python | bsd-3-clause | 10,382 | 0.000096 |
import unittest
from flask import current_app
from app import create_app, db
class BasicsTestCase(unittest.TestCase):
# Runs before each test
def setUp(self):
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
# Runs after each test
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
# Make sure the app exists
def test_app_exists(self):
self.assertFalse(current_app is None)
# Make sure the app is running with TESTING config
def test_app_is_testing(self):
self.assertTrue(current_app.config['TESTING'])
| bitmotive/flask-boilerplate | tests/test_basics.py | Python | mit | 705 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Law Follow Up
# Copyright (C) 2013 Sistemas ADHOC
# No email
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
from openerp import netsvc
from openerp.osv import osv, fields
from openerp.tools.translate import _
class commission_treatment(osv.osv):
"""Commission Treatment"""
_inherit = 'law_tracking.commission_treatment'
def _get_name(self, cr, uid, ids, field_names, arg, context=None):
if context is None:
context = {}
if isinstance(ids, (int, long)):
ids = [ids]
res = {}
for data in self.browse(cr, uid, ids, context=context):
chamber = ''
if data.partner_id.chamber == 'deputies':
chamber = _('Deputies')
else:
chamber = _('Senators')
if data.law_project_id:
res[data.id] = data.law_project_id.name + ' - ' + chamber + ' - ' + data.partner_id.name
# elif data.sen_law_project_id:
# res[data.id] = data.sen_law_project_id.name + ' - ' + data.partner_id.chamber + ' - ' + data.partner_id.name
else:
res[data.id] = ''
return res
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
ids = set()
if name:
ids.update(self.search(cr, user, args + [('partner_id.name',operator,name)], limit=(limit and (limit-len(ids)) or False) , context=context))
if not limit or len(ids) < limit:
ids.update(self.search(cr, user, args + [('law_project_id.name',operator,name)], limit=limit, context=context))
ids = list(ids)
else:
ids = self.search(cr, user, args, limit=limit, context=context)
result = self.name_get(cr, user, ids, context=context)
return result
def _get_has_treatments(self, cr, uid, ids, field_names, arg, context=None):
if context is None:
context = {}
if isinstance(ids, (int, long)):
ids = [ids]
res = {}
for data in self.browse(cr, uid, ids, context=context):
res[data.id] = False
if data.treatment_detail_ids:
res[data.id] = True
return res
_columns = {
'name': fields.function(_get_name, type='char', string='Name'),
'has_treatments': fields.function(_get_has_treatments, type='boolean', string='Has Treatments?'),
}
_sql_constraints = [
('unique', 'unique(law_project_id, partner_id)', 'Commission must be unique'),
]
def _check_commission(self, cr, uid, ids, context=None):
record = self.browse(cr, uid, ids, context=context)
for data in record:
for treatment_detail in data.treatment_detail_ids:
if treatment_detail.order_paper_id.commission_id != data.partner_id:
return False
return True
_constraints = [
(_check_commission, 'Error: All commission treatments should be from the same commission', ['En Comisiones']),
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| ingadhoc/odoo-law-tracking | law_tracking_x/commission_treatment.py | Python | agpl-3.0 | 4,111 | 0.006568 |
"""
This config file runs the simplest dev environment using sqlite, and db-based
sessions. Assumes structure:
/envroot/
/db # This is where it'll write the database file
/edx-platform # The location of this repo
/log # Where we're going to write log files
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614
from .common import *
import os
from path import path
from warnings import filterwarnings, simplefilter
from uuid import uuid4
# mongo connection settings
MONGO_PORT_NUM = int(os.environ.get('EDXAPP_TEST_MONGO_PORT', '27017'))
MONGO_HOST = os.environ.get('EDXAPP_TEST_MONGO_HOST', 'localhost')
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = 'localhost:8000-9000'
THIS_UUID = uuid4().hex[:5]
# can't test start dates with this True, but on the other hand,
# can test everything else :)
FEATURES['DISABLE_START_DATES'] = True
# Most tests don't use the discussion service, so we turn it off to speed them up.
# Tests that do can enable this flag, but must use the UrlResetMixin class to force urls.py
# to reload. For consistency in user-experience, keep the value of this setting in sync with
# the one in cms/envs/test.py
FEATURES['ENABLE_DISCUSSION_SERVICE'] = False
FEATURES['ENABLE_SERVICE_STATUS'] = True
FEATURES['ENABLE_HINTER_INSTRUCTOR_VIEW'] = True
FEATURES['ENABLE_INSTRUCTOR_LEGACY_DASHBOARD'] = True
FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['ENABLE_VERIFIED_CERTIFICATES'] = True
# Enable this feature for course staff grade downloads, to enable acceptance tests
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
FEATURES['ALLOW_COURSE_STAFF_GRADE_DOWNLOADS'] = True
# Toggles embargo on for testing
FEATURES['EMBARGO'] = True
# Need wiki for courseware views to work. TODO (vshnayder): shouldn't need it.
WIKI_ENABLED = True
# Makes the tests run much faster...
SOUTH_TESTS_MIGRATE = False # To disable migrations and use syncdb instead
# Nose Test Runner
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
_system = 'lms'
_report_dir = REPO_ROOT / 'reports' / _system
_report_dir.makedirs_p()
NOSE_ARGS = [
'--id-file', REPO_ROOT / '.testids' / _system / 'noseids',
'--xunit-file', _report_dir / 'nosetests.xml',
]
# Local Directories
TEST_ROOT = path("test_root")
# Want static files in the same dir for running on jenkins.
STATIC_ROOT = TEST_ROOT / "staticfiles"
STATUS_MESSAGE_PATH = TEST_ROOT / "status_message.json"
COURSES_ROOT = TEST_ROOT / "data"
DATA_DIR = COURSES_ROOT
COMMON_TEST_DATA_ROOT = COMMON_ROOT / "test" / "data"
# Where the content data is checked out. This may not exist on jenkins.
GITHUB_REPO_ROOT = ENV_ROOT / "data"
USE_I18N = True
LANGUAGE_CODE = 'en' # tests assume they will get English.
XQUEUE_INTERFACE = {
"url": "http://sandbox-xqueue.edx.org",
"django_auth": {
"username": "lms",
"password": "***REMOVED***"
},
"basic_auth": ('anant', 'agarwal'),
}
XQUEUE_WAITTIME_BETWEEN_REQUESTS = 5 # seconds
# Don't rely on a real staff grading backend
MOCK_STAFF_GRADING = True
MOCK_PEER_GRADING = True
# TODO (cpennington): We need to figure out how envs/test.py can inject things
# into common.py so that we don't have to repeat this sort of thing
STATICFILES_DIRS = [
COMMON_ROOT / "static",
PROJECT_ROOT / "static",
]
STATICFILES_DIRS += [
(course_dir, COMMON_TEST_DATA_ROOT / course_dir)
for course_dir in os.listdir(COMMON_TEST_DATA_ROOT)
if os.path.isdir(COMMON_TEST_DATA_ROOT / course_dir)
]
# Avoid having to run collectstatic before the unit test suite
# If we don't add these settings, then Django templates that can't
# find pipelined assets will raise a ValueError.
# http://stackoverflow.com/questions/12816941/unit-testing-with-django-pipeline
STATICFILES_STORAGE='pipeline.storage.NonPackagingPipelineStorage'
PIPELINE_ENABLED=False
update_module_store_settings(
MODULESTORE,
module_store_options={
'fs_root': TEST_ROOT / "data",
},
xml_store_options={
'data_dir': COMMON_TEST_DATA_ROOT,
},
doc_store_settings={
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'db': 'test_xmodule',
'collection': 'test_modulestore{0}'.format(THIS_UUID),
},
)
CONTENTSTORE = {
'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore',
'DOC_STORE_CONFIG': {
'host': MONGO_HOST,
'db': 'xcontent',
'port': MONGO_PORT_NUM,
}
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / 'db' / 'edx.db'
},
}
CACHES = {
# This is the cache used for most things.
# In staging/prod envs, the sessions also live here.
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_loc_mem_cache',
'KEY_FUNCTION': 'util.memcache.safe_key',
},
# The general cache is what you get if you use our util.cache. It's used for
# things like caching the course.xml file for different A/B test groups.
# We set it to be a DummyCache to force reloading of course.xml in dev.
# In staging environments, we would grab VERSION from data uploaded by the
# push process.
'general': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
'KEY_PREFIX': 'general',
'VERSION': 4,
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'mongo_metadata_inheritance': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': os.path.join(tempfile.gettempdir(), 'mongo_metadata_inheritance'),
'TIMEOUT': 300,
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'loc_cache': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_mem_cache',
},
}
# Dummy secret key for dev
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
# hide ratelimit warnings while running tests
filterwarnings('ignore', message='No request passed to the backend, unable to rate-limit')
# Ignore deprecation warnings (so we don't clutter Jenkins builds/production)
# https://docs.python.org/2/library/warnings.html#the-warnings-filter
simplefilter('ignore') # Change to "default" to see the first instance of each hit
# or "error" to convert all into errors
######### Third-party auth ##########
FEATURES['ENABLE_THIRD_PARTY_AUTH'] = True
################################## OPENID #####################################
FEATURES['AUTH_USE_OPENID'] = True
FEATURES['AUTH_USE_OPENID_PROVIDER'] = True
################################## SHIB #######################################
FEATURES['AUTH_USE_SHIB'] = True
FEATURES['SHIB_DISABLE_TOS'] = True
FEATURES['RESTRICT_ENROLL_BY_REG_METHOD'] = True
OPENID_CREATE_USERS = False
OPENID_UPDATE_DETAILS_FROM_SREG = True
OPENID_USE_AS_ADMIN_LOGIN = False
OPENID_PROVIDER_TRUSTED_ROOTS = ['*']
############################## OAUTH2 Provider ################################
FEATURES['ENABLE_OAUTH2_PROVIDER'] = True
########################### External REST APIs #################################
FEATURES['ENABLE_MOBILE_REST_API'] = True
FEATURES['ENABLE_VIDEO_ABSTRACTION_LAYER_API'] = True
###################### Payment ##############################3
# Enable fake payment processing page
FEATURES['ENABLE_PAYMENT_FAKE'] = True
# Configure the payment processor to use the fake processing page
# Since both the fake payment page and the shoppingcart app are using
# the same settings, we can generate this randomly and guarantee
# that they are using the same secret.
from random import choice
import string
RANDOM_SHARED_SECRET = ''.join(
choice(string.letters + string.digits + string.punctuation)
for x in range(250)
)
CC_PROCESSOR_NAME = 'CyberSource2'
CC_PROCESSOR['CyberSource2']['SECRET_KEY'] = RANDOM_SHARED_SECRET
CC_PROCESSOR['CyberSource2']['ACCESS_KEY'] = "0123456789012345678901"
CC_PROCESSOR['CyberSource2']['PROFILE_ID'] = "edx"
CC_PROCESSOR['CyberSource2']['PURCHASE_ENDPOINT'] = "/shoppingcart/payment_fake"
FEATURES['STORE_BILLING_INFO'] = True
########################### SYSADMIN DASHBOARD ################################
FEATURES['ENABLE_SYSADMIN_DASHBOARD'] = True
GIT_REPO_DIR = TEST_ROOT / "course_repos"
################################# CELERY ######################################
CELERY_ALWAYS_EAGER = True
CELERY_RESULT_BACKEND = 'cache'
BROKER_TRANSPORT = 'memory'
############################ STATIC FILES #############################
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_ROOT = TEST_ROOT / "uploads"
MEDIA_URL = "/static/uploads/"
STATICFILES_DIRS.append(("uploads", MEDIA_ROOT))
new_staticfiles_dirs = []
# Strip out any static files that aren't in the repository root
# so that the tests can run with only the edx-platform directory checked out
for static_dir in STATICFILES_DIRS:
# Handle both tuples and non-tuple directory definitions
try:
_, data_dir = static_dir
except ValueError:
data_dir = static_dir
if data_dir.startswith(REPO_ROOT):
new_staticfiles_dirs.append(static_dir)
STATICFILES_DIRS = new_staticfiles_dirs
FILE_UPLOAD_TEMP_DIR = TEST_ROOT / "uploads"
FILE_UPLOAD_HANDLERS = (
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
)
########################### Server Ports ###################################
# These ports are carefully chosen so that if the browser needs to
# access them, they will be available through the SauceLabs SSH tunnel
LETTUCE_SERVER_PORT = 8003
XQUEUE_PORT = 8040
YOUTUBE_PORT = 8031
LTI_PORT = 8765
VIDEO_SOURCE_PORT = 8777
################### Make tests faster
#http://slacy.com/blog/2012/04/make-your-tests-faster-in-django-1-4/
PASSWORD_HASHERS = (
# 'django.contrib.auth.hashers.PBKDF2PasswordHasher',
# 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
# 'django.contrib.auth.hashers.BCryptPasswordHasher',
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.MD5PasswordHasher',
# 'django.contrib.auth.hashers.CryptPasswordHasher',
)
### This enables the Metrics tab for the Instructor dashboard ###########
FEATURES['CLASS_DASHBOARD'] = True
################### Make tests quieter
# OpenID spews messages like this to stderr, we don't need to see them:
# Generated checkid_setup request to http://testserver/openid/provider/login/ with assocication {HMAC-SHA1}{51d49995}{s/kRmA==}
import openid.oidutil
openid.oidutil.log = lambda message, level = 0: None
PLATFORM_NAME = "edX"
SITE_NAME = "edx.org"
# set up some testing for microsites
MICROSITE_CONFIGURATION = {
"test_microsite": {
"domain_prefix": "testmicrosite",
"university": "test_microsite",
"platform_name": "Test Microsite",
"logo_image_url": "test_microsite/images/header-logo.png",
"email_from_address": "test_microsite@edx.org",
"payment_support_email": "test_microsite@edx.org",
"ENABLE_MKTG_SITE": False,
"SITE_NAME": "test_microsite.localhost",
"course_org_filter": "TestMicrositeX",
"course_about_show_social_links": False,
"css_overrides_file": "test_microsite/css/test_microsite.css",
"show_partners": False,
"show_homepage_promo_video": False,
"course_index_overlay_text": "This is a Test Microsite Overlay Text.",
"course_index_overlay_logo_file": "test_microsite/images/header-logo.png",
"homepage_overlay_html": "<h1>This is a Test Microsite Overlay HTML</h1>",
"ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER": False,
},
"default": {
"university": "default_university",
"domain_prefix": "www",
}
}
MICROSITE_ROOT_DIR = COMMON_ROOT / 'test' / 'test_microsites'
MICROSITE_TEST_HOSTNAME = 'testmicrosite.testserver'
FEATURES['USE_MICROSITES'] = True
# add extra template directory for test-only templates
MAKO_TEMPLATES['main'].extend([
COMMON_ROOT / 'test' / 'templates'
])
######### LinkedIn ########
LINKEDIN_API['COMPANY_ID'] = '0000000'
# Setting for the testing of Software Secure Result Callback
VERIFY_STUDENT["SOFTWARE_SECURE"] = {
"API_ACCESS_KEY": "BBBBBBBBBBBBBBBBBBBB",
"API_SECRET_KEY": "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC",
}
VIDEO_CDN_URL = {
'CN': 'http://api.xuetangx.com/edx/video?s3_url='
}
######### dashboard git log settings #########
MONGODB_LOG = {
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'user': '',
'password': '',
'db': 'xlog',
}
| c0710204/edx-platform | lms/envs/test.py | Python | agpl-3.0 | 12,700 | 0.002756 |
from django.contrib import admin
from django.contrib.contenttypes import generic
from models import Attribute, BaseModel
from django.utils.translation import ugettext_lazy as _
class MetaInline(generic.GenericTabularInline):
model = Attribute
extra = 0
class BaseAdmin(admin.ModelAdmin):
"""
def get_readonly_fields(self, request, obj=None):
fs = super(BaseAdmin, self).get_readonly_fields(request, obj)
fs += ('created_by', 'last_updated_by',)
return fs
def get_fieldsets(self, request, obj=None):
fs = super(BaseAdmin, self).get_fieldsets(request, obj)
fs[0][1]['fields'].remove('created_by')
fs[0][1]['fields'].remove('last_updated_by')
fs.extend([(_('Other informations'), {'fields':['created_by','last_updated_by'], 'classes':['collapse']})])
return fs
def changelist_view(self, request, extra_context=None):
if request.user.has_perm('%s.can_view_deleted' % self.model._meta.app_label):
if not "deleted_flag" in self.list_filter:
self.list_filter += ("deleted_flag",)
return super(BaseAdmin, self).changelist_view(request, extra_context)
def queryset(self, request):
return super(BaseAdmin, self).queryset(request).exclude(deleted_flag=True)
"""
def save_model(self, request, obj, form, change):
if not change:
obj.created_by = request.user
obj.last_updated_by = request.user
obj.save()
def save_formset(self, request, form, formset, change):
instances = formset.save(commit=False)
for instance in instances:
if isinstance(instance, BaseModel): #Check if it is the correct type of inline
if not instance.created_by_id:
instance.created_by = request.user
instance.last_updated_by = request.user
instance.save()
| Mercy-Nekesa/sokoapp | sokoapp/utils/admin.py | Python | mit | 1,954 | 0.006141 |
import os
import warnings
from django.conf import settings, global_settings
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest
from django.test import SimpleTestCase, TransactionTestCase, TestCase, signals
from django.test.utils import override_settings
from django.utils import unittest, six
@override_settings(TEST='override')
class FullyDecoratedTranTestCase(TransactionTestCase):
def test_override(self):
self.assertEqual(settings.TEST, 'override')
@override_settings(TEST='override2')
def test_method_override(self):
self.assertEqual(settings.TEST, 'override2')
def test_decorated_testcase_name(self):
self.assertEqual(FullyDecoratedTranTestCase.__name__, 'FullyDecoratedTranTestCase')
def test_decorated_testcase_module(self):
self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__)
@override_settings(TEST='override')
class FullyDecoratedTestCase(TestCase):
def test_override(self):
self.assertEqual(settings.TEST, 'override')
@override_settings(TEST='override2')
def test_method_override(self):
self.assertEqual(settings.TEST, 'override2')
class ClassDecoratedTestCaseSuper(TestCase):
"""
Dummy class for testing max recursion error in child class call to
super(). Refs #17011.
"""
def test_max_recursion_error(self):
pass
@override_settings(TEST='override')
class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper):
def test_override(self):
self.assertEqual(settings.TEST, 'override')
@override_settings(TEST='override2')
def test_method_override(self):
self.assertEqual(settings.TEST, 'override2')
def test_max_recursion_error(self):
"""
Overriding a method on a super class and then calling that method on
the super class should not trigger infinite recursion. See #17011.
"""
try:
super(ClassDecoratedTestCase, self).test_max_recursion_error()
except RuntimeError:
self.fail()
class SettingsTests(TestCase):
def setUp(self):
self.testvalue = None
signals.setting_changed.connect(self.signal_callback)
def tearDown(self):
signals.setting_changed.disconnect(self.signal_callback)
def signal_callback(self, sender, setting, value, **kwargs):
if setting == 'TEST':
self.testvalue = value
def test_override(self):
settings.TEST = 'test'
self.assertEqual('test', settings.TEST)
with self.settings(TEST='override'):
self.assertEqual('override', settings.TEST)
self.assertEqual('test', settings.TEST)
del settings.TEST
def test_override_change(self):
settings.TEST = 'test'
self.assertEqual('test', settings.TEST)
with self.settings(TEST='override'):
self.assertEqual('override', settings.TEST)
settings.TEST = 'test2'
self.assertEqual('test', settings.TEST)
del settings.TEST
def test_override_doesnt_leak(self):
self.assertRaises(AttributeError, getattr, settings, 'TEST')
with self.settings(TEST='override'):
self.assertEqual('override', settings.TEST)
settings.TEST = 'test'
self.assertRaises(AttributeError, getattr, settings, 'TEST')
@override_settings(TEST='override')
def test_decorator(self):
self.assertEqual('override', settings.TEST)
def test_context_manager(self):
self.assertRaises(AttributeError, getattr, settings, 'TEST')
override = override_settings(TEST='override')
self.assertRaises(AttributeError, getattr, settings, 'TEST')
override.enable()
self.assertEqual('override', settings.TEST)
override.disable()
self.assertRaises(AttributeError, getattr, settings, 'TEST')
def test_class_decorator(self):
# SimpleTestCase can be decorated by override_settings, but not ut.TestCase
class SimpleTestCaseSubclass(SimpleTestCase):
pass
class UnittestTestCaseSubclass(unittest.TestCase):
pass
decorated = override_settings(TEST='override')(SimpleTestCaseSubclass)
self.assertIsInstance(decorated, type)
self.assertTrue(issubclass(decorated, SimpleTestCase))
with six.assertRaisesRegex(self, Exception,
"Only subclasses of Django SimpleTestCase*"):
decorated = override_settings(TEST='override')(UnittestTestCaseSubclass)
def test_signal_callback_context_manager(self):
self.assertRaises(AttributeError, getattr, settings, 'TEST')
with self.settings(TEST='override'):
self.assertEqual(self.testvalue, 'override')
self.assertEqual(self.testvalue, None)
@override_settings(TEST='override')
def test_signal_callback_decorator(self):
self.assertEqual(self.testvalue, 'override')
#
# Regression tests for #10130: deleting settings.
#
def test_settings_delete(self):
settings.TEST = 'test'
self.assertEqual('test', settings.TEST)
del settings.TEST
self.assertRaises(AttributeError, getattr, settings, 'TEST')
def test_settings_delete_wrapped(self):
self.assertRaises(TypeError, delattr, settings, '_wrapped')
def test_override_settings_delete(self):
"""
Allow deletion of a setting in an overriden settings set (#18824)
"""
previous_i18n = settings.USE_I18N
with self.settings(USE_I18N=False):
del settings.USE_I18N
self.assertRaises(AttributeError, getattr, settings, 'USE_I18N')
self.assertEqual(settings.USE_I18N, previous_i18n)
def test_allowed_include_roots_string(self):
"""
ALLOWED_INCLUDE_ROOTS is not allowed to be incorrectly set to a string
rather than a tuple.
"""
self.assertRaises(ValueError, setattr, settings,
'ALLOWED_INCLUDE_ROOTS', '/var/www/ssi/')
class TrailingSlashURLTests(TestCase):
"""
Tests for the MEDIA_URL and STATIC_URL settings.
They must end with a slash to ensure there's a deterministic way to build
paths in templates.
"""
settings_module = settings
def setUp(self):
self._original_media_url = self.settings_module.MEDIA_URL
self._original_static_url = self.settings_module.STATIC_URL
def tearDown(self):
self.settings_module.MEDIA_URL = self._original_media_url
self.settings_module.STATIC_URL = self._original_static_url
def test_blank(self):
"""
The empty string is accepted, even though it doesn't end in a slash.
"""
self.settings_module.MEDIA_URL = ''
self.assertEqual('', self.settings_module.MEDIA_URL)
self.settings_module.STATIC_URL = ''
self.assertEqual('', self.settings_module.STATIC_URL)
def test_end_slash(self):
"""
It works if the value ends in a slash.
"""
self.settings_module.MEDIA_URL = '/foo/'
self.assertEqual('/foo/', self.settings_module.MEDIA_URL)
self.settings_module.MEDIA_URL = 'http://media.foo.com/'
self.assertEqual('http://media.foo.com/',
self.settings_module.MEDIA_URL)
self.settings_module.STATIC_URL = '/foo/'
self.assertEqual('/foo/', self.settings_module.STATIC_URL)
self.settings_module.STATIC_URL = 'http://static.foo.com/'
self.assertEqual('http://static.foo.com/',
self.settings_module.STATIC_URL)
def test_no_end_slash(self):
"""
An ImproperlyConfigured exception is raised if the value doesn't end
in a slash.
"""
with self.assertRaises(ImproperlyConfigured):
self.settings_module.MEDIA_URL = '/foo'
with self.assertRaises(ImproperlyConfigured):
self.settings_module.MEDIA_URL = 'http://media.foo.com'
with self.assertRaises(ImproperlyConfigured):
self.settings_module.STATIC_URL = '/foo'
with self.assertRaises(ImproperlyConfigured):
self.settings_module.STATIC_URL = 'http://static.foo.com'
def test_double_slash(self):
"""
If the value ends in more than one slash, presume they know what
they're doing.
"""
self.settings_module.MEDIA_URL = '/stupid//'
self.assertEqual('/stupid//', self.settings_module.MEDIA_URL)
self.settings_module.MEDIA_URL = 'http://media.foo.com/stupid//'
self.assertEqual('http://media.foo.com/stupid//',
self.settings_module.MEDIA_URL)
self.settings_module.STATIC_URL = '/stupid//'
self.assertEqual('/stupid//', self.settings_module.STATIC_URL)
self.settings_module.STATIC_URL = 'http://static.foo.com/stupid//'
self.assertEqual('http://static.foo.com/stupid//',
self.settings_module.STATIC_URL)
class SecureProxySslHeaderTest(TestCase):
settings_module = settings
def setUp(self):
self._original_setting = self.settings_module.SECURE_PROXY_SSL_HEADER
def tearDown(self):
self.settings_module.SECURE_PROXY_SSL_HEADER = self._original_setting
def test_none(self):
self.settings_module.SECURE_PROXY_SSL_HEADER = None
req = HttpRequest()
self.assertEqual(req.is_secure(), False)
def test_set_without_xheader(self):
self.settings_module.SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https')
req = HttpRequest()
self.assertEqual(req.is_secure(), False)
def test_set_with_xheader_wrong(self):
self.settings_module.SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https')
req = HttpRequest()
req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'wrongvalue'
self.assertEqual(req.is_secure(), False)
def test_set_with_xheader_right(self):
self.settings_module.SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https')
req = HttpRequest()
req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'https'
self.assertEqual(req.is_secure(), True)
class EnvironmentVariableTest(TestCase):
"""
Ensures proper settings file is used in setup_environ if
DJANGO_SETTINGS_MODULE is set in the environment.
"""
# Decide what to do with these tests when setup_environ() gets removed in Django 1.6
def setUp(self):
self.original_value = os.environ.get('DJANGO_SETTINGS_MODULE')
self.save_warnings_state()
warnings.filterwarnings('ignore', category=DeprecationWarning, module='django.core.management')
def tearDown(self):
self.restore_warnings_state()
if self.original_value:
os.environ['DJANGO_SETTINGS_MODULE'] = self.original_value
elif 'DJANGO_SETTINGS_MODULE' in os.environ:
del(os.environ['DJANGO_SETTINGS_MODULE'])
def test_env_var_used(self):
"""
If the environment variable is set, do not ignore it. However, the
kwarg original_settings_path takes precedence.
This tests both plus the default (neither set).
"""
from django.core.management import setup_environ
# whatever was already there
original_module = os.environ.get(
'DJANGO_SETTINGS_MODULE',
'the default'
)
# environment variable set by user
user_override = 'custom.settings'
# optional argument to setup_environ
orig_path = 'original.path'
# expect default
setup_environ(global_settings)
self.assertEqual(
os.environ.get('DJANGO_SETTINGS_MODULE'),
original_module
)
# override with environment variable
os.environ['DJANGO_SETTINGS_MODULE'] = user_override
setup_environ(global_settings)
self.assertEqual(
os.environ.get('DJANGO_SETTINGS_MODULE'),
user_override
)
# pass in original_settings_path (should take precedence)
os.environ['DJANGO_SETTINGS_MODULE'] = user_override
setup_environ(global_settings, original_settings_path = orig_path)
self.assertEqual(
os.environ.get('DJANGO_SETTINGS_MODULE'),
orig_path
)
| waseem18/oh-mainline | vendor/packages/Django/tests/regressiontests/settings_tests/tests.py | Python | agpl-3.0 | 12,386 | 0.00113 |
# This file is part of the Shapy Project.
# Licensing information can be found in the LICENSE file.
# (C) 2015 The Shapy Team. All rights reserved.
import StringIO
from pyrr.objects import Quaternion, Matrix44, Vector3, Vector4
class Scene(object):
"""Class representing a whole scene."""
class Object(object):
"""Class representing an object in a scene."""
def __init__(self, data={}):
"""Initializes an empty object."""
# Name of the object.
self.id = data.get('id', 'unnamed')
# Translation vector.
self.tx = data.get('tx', 0.0)
self.ty = data.get('ty', 0.0)
self.tz = data.get('tz', 0.0)
# Scaling vector.
self.sx = data.get('sx', 1.0)
self.sy = data.get('sy', 1.0)
self.sz = data.get('sz', 1.0)
# Rotation quaternion.
self.rx = data.get('rx', 0.0)
self.ry = data.get('ry', 0.0)
self.rz = data.get('rz', 0.0)
self.rw = data.get('rw', 0.0)
# Map of vertices.
self.verts = dict(
(int(k), (v[0], v[1], v[2]))
for k, v in (data['verts'] or {}).iteritems()
)
# Map of edges.
self.edges = dict(
(int(k), (v[0], v[1]))
for k, v in (data['edges'] or {}).iteritems()
)
# Map of UV points.
self.uvPoints = dict(
(int(k), (v[0], v[1]))
for k, v in (data['uvPoints'] or {}).iteritems()
)
# Map of UV edges.
self.uvEdges = dict(
(int(k), (v[0], v[1]))
for k, v in (data['uvEdges'] or {}).iteritems()
)
# Map of faces.
self.faces = dict(
(int(k), (v[0], v[1], v[2], v[3], v[4], v[5]))
for k, v in (data['faces'] or {}).iteritems()
)
# Model matrix.
q = Quaternion()
q.x = self.rx
q.y = self.ry
q.z = self.rz
q.w = self.rw
trans = Matrix44.from_translation([self.tx, self.ty, self.tz])
scale = Matrix44([
[self.sx, 0, 0, 0],
[0, self.sy, 0, 0],
[0, 0, self.sz, 0],
[0, 0, 0, 1]
])
self.model = trans * q * scale
@property
def __dict__(self):
"""Converts the object to a serializable dictionary."""
return {
'tx': self.tx, 'ty': self.ty, 'tz': self.tz,
'sx': self.sx, 'sy': self.sy, 'sz': self.sz,
'rx': self.rx, 'ry': self.ry, 'rz': self.rz, 'rw': self.rw
}
def __init__(self, name, data={}):
"""Initializes an empty scene."""
self.objects = dict(
(k, Scene.Object(v)) for k, v in (data['objects'] or {}).iteritems())
@property
def __dict__(self):
"""Converts the scene to a serializable dictionary."""
return {
'objects': dict((k, v.__dict__) for k, v in self.objects.iteritems())
}
def to_stl(self):
"""Converts the scene to STL format."""
s = StringIO.StringIO()
for id, obj in self.objects.iteritems():
print >>s, 'solid %s' % obj.id
for _, v in obj.faces.iteritems():
e0 = obj.edges[abs(v[0])]
e1 = obj.edges[abs(v[1])]
e2 = obj.edges[abs(v[2])]
v0 = obj.verts[e0[0] if v[0] >= 0 else e0[1]]
v1 = obj.verts[e1[0] if v[1] >= 0 else e1[1]]
v2 = obj.verts[e2[0] if v[2] >= 0 else e2[1]]
v0 = obj.model * Vector4([v0[0], v0[1], v0[2], 1.0])
v1 = obj.model * Vector4([v1[0], v1[1], v1[2], 1.0])
v2 = obj.model * Vector4([v2[0], v2[1], v2[2], 1.0])
a = v1 - v0
b = v2 - v0
n = Vector3([a.x, a.y, a.z]).cross(Vector3([b.x, b.y, b.z]))
n.normalise()
print >>s, 'facet normal %f %f %f' % (n.x, n.y, n.z)
print >>s, 'outer loop'
print >>s, 'vertex %f %f %f' % (v0.x, v0.y, v0.z)
print >>s, 'vertex %f %f %f' % (v1.x, v1.y, v1.z)
print >>s, 'vertex %f %f %f' % (v2.x, v2.y, v2.z)
print >>s, 'end loop'
print >>s, 'endsolid %s' % obj.id
return s.getvalue()
def to_obj(self):
"""Converts the scene to wavefront obj format."""
s = StringIO.StringIO()
for id, obj in self.objects.iteritems():
print >>s, 'o "%s"' % id
vmap = {}
i = 1
for k, v in obj.verts.iteritems():
v = obj.model * Vector4([float(v[0]), float(v[1]), float(v[2]), 1.])
vmap[k] = i
i += 1
print >>s, 'v %f %f %f' % (v.x, v.y, v.z)
uvmap = {}
i = 1
for k, v in obj.uvPoints.iteritems():
uvmap[k] = i
i += 1
print >>s, 'vt %f %f' % v
for _, v in obj.faces.iteritems():
e0 = obj.edges[abs(v[0])]
e1 = obj.edges[abs(v[1])]
e2 = obj.edges[abs(v[2])]
v0 = vmap[e0[0] if v[0] >= 0 else e0[1]]
v1 = vmap[e1[0] if v[1] >= 0 else e1[1]]
v2 = vmap[e2[0] if v[2] >= 0 else e2[1]]
ue0 = obj.uvEdges[abs(v[3])]
ue1 = obj.uvEdges[abs(v[4])]
ue2 = obj.uvEdges[abs(v[5])]
uv0 = uvmap[ue0[0] if v[3] >= 0 else ue0[1]]
uv1 = uvmap[ue1[0] if v[4] >= 0 else ue1[1]]
uv2 = uvmap[ue2[0] if v[5] >= 0 else ue2[1]]
print >>s, 'f %d/%d %d/%d %d/%d' % (v0, uv0, v1, uv1, v2, uv2)
return s.getvalue() | ICShapy/shapy | shapy/scene.py | Python | mit | 5,133 | 0.011884 |
__author__ = 'Cosmin Popescu'
| vim-scripts/Vim-SQL-Workbench | resources/py/lib/__init__.py | Python | gpl-3.0 | 30 | 0 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from urbansim.configs.hlcm_estimation_config import HLCMEstimationConfig
from psrc_parcel.configs.baseline_estimation import BaselineEstimation
from opus_core.session_configuration import SessionConfiguration
from opus_core.store.attribute_cache import AttributeCache
from my_estimation_config import my_configuration
class HlcmParcelEstimation(BaselineEstimation): # comment out for urbansim.configs.hlcm_estimation_config
#class HlcmParcelEstimation(HLCMEstimationConfig): # comment out for psrc_parcel.configs.baseline_estimation
def update_config(self):
# HLCMEstimationConfig.update_config(self) # comment out for psrc_parcel.configs.baseline_estimation
#
self.replace(my_configuration)
estimate_config = {}
# estimate_config["export_estimation_data"]=True
# estimate_config["estimation_data_file_name"]="/tmp/HLCM_building_estimate_data"
# estimate_config["use_biogeme_data_format"]=True
# estimate_config["weights_for_estimation_string"]= "has_eg_1_units=building.residential_units>=1" #"psrc.parcel.residential_units_when_has_eg_1_surveyed_households_and_is_in_county_033"
#"sampling_filter=(building.disaggregate(building_type.building_type_name)=='single_family_residential') + (building.disaggregate(building_type.building_type_name)=='multi_family_residential') + (building.disaggregate(building_type.building_type_name)=='condo_residential')"
#"has_eg_1_units=urbansim.building.residential_units>=1"
# estimate_config["stratum"] = "psrc.parcel.is_in_city_seattle" #"psrc.parcel.stratify_by_is_in_city_seattle_and_is_single_family_unit"
# estimate_config["sample_size_from_each_stratum"] = 5
# estimate_config["sample_size_from_chosen_stratum"] = 4
# estimate_config["include_chosen_choice"] = True
estimate_config['wesml_sampling_correction_variable'] = 'psrc_parcel.building.wesml_sampling_correction_variable'
#estimate_config['submodel_string'] = "None"
# self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]['sample_size_locations'] = 30
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]['sampler']="'opus_core.samplers.weighted_sampler'"#"'opus_core.samplers.stratified_sampler'" #
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["estimate_config"] = estimate_config
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["estimation_weight_string"] = "'has_eg_1_units=building.residential_units>=1'"
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["capacity_string"] = "'has_eg_1_units=building.residential_units>=1'"
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["number_of_agents_string"] = "'(building.building_id < 0).astype(int32)'"
# self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["estimation_weight_string"] = "'urbansim_parcel.building.vacant_residential_units'"
# self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["estimation_weight_string"] = "'psrc_parcel.building.residential_units'"
#{"weights_for_estimation_string":"psrc.parcel.residential_units_when_has_eg_1_surveyed_households_and_is_in_county_033"}
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["location_set"] = "building"
#self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["location_id_string"] = "'household.parcel_id'"
# self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]['submodel_string'] = "'psrc.household.number_of_nonhome_based_workers'"
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]["variable_package"]="'urbansim_parcel'"
self["models_configuration"]["household_location_choice_model"]["controller"]["prepare_for_estimate"]["arguments"]["join_datasets"] = 'True'
self["models_configuration"]["household_location_choice_model"]["controller"]["prepare_for_estimate"]["arguments"]["index_to_unplace"] = 'None'
self["models_configuration"]["household_location_choice_model"]["controller"]["prepare_for_estimate"]["arguments"]["filter"] = "'household.move == 1'"#None #"'psrc.household.customized_filter'"
self["models_configuration"]["household_location_choice_model"]["controller"]["init"]["arguments"]['filter'] = "'urbansim_parcel.building.is_residential'"
# self["datasets_to_preload"].merge({"tour":{}, "person":{}})
# self["datasets_to_cache_after_each_model"] += ["person"]
self["models"] = [
# {"household_relocation_model": ["run"]},
# {"tour_schedule_model": ["run"]},
{"household_location_choice_model": ["estimate"]}
]
if __name__ == '__main__':
from my_estimation_config import my_configuration
from urbansim.estimation.estimator import Estimator
from urbansim.estimation.estimator import update_controller_by_specification_from_module
from opus_core.simulation_state import SimulationState
from opus_core.store.attribute_cache import AttributeCache
run_configuration = HlcmParcelEstimation()
run_configuration.update_config()
run_configuration = update_controller_by_specification_from_module(
run_configuration, "household_location_choice_model",
"inprocess.bhylee.hlcm_parcel_specification")
er = Estimator(run_configuration, save_estimation_results=False)
er.estimate()
# er.create_prediction_success_table()
# er.create_prediction_success_table(choice_geography_id="area_type_id=building.disaggregate(zone.area_type_id, intermediates=[parcel])" )
# er.create_prediction_success_table(choice_geography_id="building_type_id=building.building_type_id" )
# er.create_prediction_success_table(choice_geography_id="large_area_id=building.disaggregate(faz.large_area_id, intermediates=[zone, parcel])" )
# er.reestimate("hlcm_parcel_specification") | christianurich/VIBe2UrbanSim | 3rdparty/opus/src/inprocess/bhylee/hlcm_parcel_estimation.py | Python | gpl-2.0 | 6,635 | 0.008742 |
"""
**GitHubAccount**
Represents an account at GitHub.
- Matches a link that looks like it goes to a GitHub profile page.
- Returns the "your site" URL from the user's GitHub profile.
- Use on the command line: ``github:username``.
"""
from __future__ import print_function, division
import re
import requests
from lxml import html
from . import Account
_URL_RE = re.compile(r'https?://(www.)?github.com/(?P<username>\w+)/?\Z')
class GitHubAccount(Account):
def __init__(self, username=None, url=None, **_):
if username is not None:
self._username = username
elif url is not None:
match = _URL_RE.match(url)
if match:
self._username = match.group('username')
else:
raise ValueError('No username match.')
else:
raise ValueError('No usable parameters!')
def expand(self, info):
# Load their profile page.
url = 'https://github.com/%s' % self._username
page = requests.get(url)
tree = html.fromstring(page.text)
# Save info
info['usernames'] = self._username
for span in tree.xpath(r'//*[@itemprop="name"]'):
info['name'] = span.text_content().strip()
for span in tree.xpath(r'//*[@itemprop="worksFor"]'):
info['employer'] = span.text_content().strip()
for span in tree.xpath(r'//*[@itemprop="homeLocation"]'):
info['location'] = span.text_content().strip()
# Search for a website!
for anchor in tree.xpath(r'//a[contains(@class,"url")]'):
yield {'url': anchor.attrib['href']}
for anchor in tree.xpath(r'//a[contains(@class, "email")]'):
yield {'url': anchor.attrib['href']}
@staticmethod
def match(**options):
return (
'url' in options
and _URL_RE.match(options['url'])
)
@staticmethod
def shortname():
return 'github'
def __str__(self):
return 'GitHubAccount(username=%r)' % self._username
def __hash__(self):
return hash(self._username)
def __eq__(self, other):
return type(other) is GitHubAccount and self._username == other._username
| brenns10/social | social/accounts/github.py | Python | bsd-3-clause | 2,234 | 0.000895 |
# -*- coding: UTF-8 -*-
import pytest
from mcm.comparators import UniqueKeyComparator, SingleElementComparator, OrderedComparator
from mcm.datastructures import CmdPathRow
@pytest.fixture
def compare_data(request):
single = {
'wanted':CmdPathRow({"primary-ntp":"1.1.1.1"}),
'present':CmdPathRow({"primary-ntp":"213.222.193.35"}),
'difference':CmdPathRow({"primary-ntp":"1.1.1.1"}),
}
default = {
'wanted':CmdPathRow({'name':'admin', 'group':'read'}),
'present':CmdPathRow({'name':'admin', 'group':'full', '.id':'*2'}),
'extra':CmdPathRow({'name':'operator', 'group':'read', '.id':'*3'}),
'difference':CmdPathRow({'group':'read', '.id':'*2'}),
}
if 'single' in request.cls.__name__.lower():
return single
else:
return default
class Test_SingleComparator:
def setup(self):
self.comparator = SingleElementComparator()
def test_difference_in_SET(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],))
assert SET == (compare_data['difference'],)
def test_empty_SET_when_same_data(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['wanted'],))
assert SET == tuple()
def test_empty_SET_when_empty_wanted(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=tuple(), present=(compare_data['present'],))
assert SET == tuple()
def test_empty_ADD(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],))
assert ADD == tuple()
def test_empty_DEL(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],))
assert DEL == tuple()
class Test_OrderedComparator:
def setup(self):
self.comparator = OrderedComparator()
def test_extra_in_DEL(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],compare_data['extra']))
assert DEL == (compare_data['extra'],)
def test_present_in_DEL_when_empty_wanted(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(), present=(compare_data['present'], compare_data['extra']))
assert DEL == (compare_data['present'], compare_data['extra'])
def test_empty_ADD_when_empty_wanted(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(), present=(compare_data['present'],))
assert ADD == tuple()
def test_difference_in_SET(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],))
assert SET == (compare_data['difference'],)
class Test_UniqueKeyComparator:
def setup(self):
self.comparator = UniqueKeyComparator( keys=('name',) )
def test_extra_in_DEL(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'], compare_data['extra']))
assert DEL == (compare_data['extra'],)
def test_present_in_DEL_when_empty_wanted(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(), present=(compare_data['present'], compare_data['extra']))
# compare sets instead of tuples. order in witch objects exist in DEL does not matter
assert set(DEL) == set((compare_data['present'], compare_data['extra']))
def test_compare_returns_difference_in_SET(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=(compare_data['present'],compare_data['extra']))
assert SET == (compare_data['difference'],)
def test_wanted_in_ADD(self, compare_data):
ADD, SET, DEL = self.comparator.compare(wanted=(compare_data['wanted'],), present=())
assert ADD == (compare_data['wanted'],)
| luqasz/mcm | tests/integration/test_compare.py | Python | gpl-2.0 | 4,109 | 0.010465 |
"""
Module that handles the loading of parsers
written by Glenn De Backer < glenn at simplicity dot be>
License: GPLv2
"""
import glob
import os
class LoaderParsers(object):
""" Parsers loader class """
def __init__(self):
""" Default constructor """
self.available_parsers = {}
self.get_local_parsers()
def validate_parser(self, parser_class):
""" validates parsers """
class_properties_methods = parser_class.__dict__
# check if class has certain methods
if not "define_document_grammar" in class_properties_methods:
return False
if not "parse" in class_properties_methods:
return False
return True
def load_parser(self, py_filename):
""" Load parser """
# create names
parser_name = os.path.splitext(py_filename)[0]
parser_class_name = "%sParser" % parser_name.title()
module_name = "modules.parsers.%s" % parser_name
# load class dynamically
mod = __import__(module_name, fromlist=[parser_class_name])
parser_class = getattr(mod, parser_class_name)
# check if parser is valid class
is_valid_parser_class = self.validate_parser(parser_class)
if is_valid_parser_class:
# store class object in dictionary available_parsers
self.available_parsers[parser_name] = parser_class()
else:
raise Exception("Parser %s is invalid parser" % parser_name)
def get_local_parsers(self):
""" Get parsers """
for py_file_path in glob.glob("modules/parsers/*.py"):
# get basename
python_file = os.path.basename(py_file_path)
# skip init python file
if python_file != "__init__.py":
self.load_parser(python_file)
def get_parsers(self):
""" Get available parsers """
return self.available_parsers
def get_parsers_names(self):
""" Get parser names """
return self.available_parsers.keys()
| simplicitylab/doc2source | modules/loader_parsers.py | Python | gpl-2.0 | 2,064 | 0.001453 |
#!/usr/bin/env python3
#
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Final all .whl files in a directory, and make a index.html page
# in PEP503 (https://www.python.org/dev/peps/pep-0503/) format
import argparse
import datetime
import email
import hashlib
import html
import logging
import os
import sys
import zipfile
parser = argparse.ArgumentParser()
parser.add_argument('toplevel', help="directory to index")
parser.add_argument('-d', '--debug', dest="debug", action='store_true')
parser.add_argument('-o', '--output', dest="output",
default='index.html', help="Output filename, - for stdout")
args = parser.parse_args()
level = logging.DEBUG if args.debug else logging.INFO
logging.basicConfig(level=level)
class NotAWheelException(Exception):
pass
class NoMetadataException(Exception):
pass
class NoRequirementsException(Exception):
pass
class BadFormatException(Exception):
pass
def get_requirements(filename):
# This is an implementation of the description on finding
# requirements from a wheel provided by chrahunt at:
# https://github.com/pypa/pip/issues/7586#issuecomment-573534655
with zipfile.ZipFile(filename) as zip:
metadata = None
names = zip.namelist()
for name in names:
if name.endswith('.dist-info/METADATA'):
metadata = zip.open(name)
# finish loop and sanity check we got the right one?
break
if not metadata:
return NoMetadataException
parsed = email.message_from_binary_file(metadata)
requirements = parsed.get_all('Requires-Python')
if not requirements:
raise NoRequirementsException
if len(requirements) > 1:
print("Multiple requirements headers found?")
raise BadFormatException
return html.escape(requirements[0])
def get_sha256(filename):
sha256 = hashlib.sha256()
with open(filename, "rb") as f:
for b in iter(lambda: f.read(4096), b''):
sha256.update(b)
return(sha256.hexdigest())
def create_index(path, files):
project = os.path.basename(path)
output = '''<html>
<head>
<title>%s</title>
</head>
<body>
<ul>
''' % (project)
for f in files:
f_full = os.path.join(path, f)
requirements = ''
try:
logging.debug("Checking for requirements of : %s" % f_full)
requirements = get_requirements(f_full)
logging.debug("requirements are: %s" % requirements)
# NOTE(ianw): i'm not really sure if any of these should be
# terminal, as it would mean pip can't read the file anyway. Just
# log for now.
except NoMetadataException:
logging.debug("no metadata")
pass
except NoRequirementsException:
logging.debug("no python requirements")
pass
except BadFormatException:
logging.debug("Could not open")
pass
sha256 = get_sha256(f_full)
logging.debug("sha256 for %s: %s" % (f_full, sha256))
output += ' <li><a href="%s#sha256=%s"' % (f, sha256)
if requirements:
output += ' data-requires-python="%s" ' % (requirements)
output += '>%s</a></li>\n' % (f)
output += ''' </ul>
</body>
</html>
'''
now = datetime.datetime.now()
output += '<!-- last update: %s -->\n' % now.isoformat()
return output
logging.debug("Building indexes from: %s" % args.toplevel)
for root, dirs, files in os.walk(args.toplevel):
# sanity check we are only called from leaf directories by the
# driver script
if dirs:
print("This should only be called from leaf directories")
sys.exit(1)
logging.debug("Processing %s" % root)
output = create_index(root, files)
logging.debug("Final output write")
if args.output == '-':
out_file = sys.stdout
else:
out_path = os.path.join(root, args.output)
logging.debug("Writing index file: %s" % out_path)
out_file = open(out_path, "w")
out_file.write(output)
logging.debug("Done!")
| openstack-infra/project-config | roles/copy-wheels/files/wheel-indexer.py | Python | apache-2.0 | 4,718 | 0 |
import py
html = py.xml.html
class my(html):
"a custom style"
class body(html.body):
style = html.Style(font_size = "120%")
class h2(html.h2):
style = html.Style(background = "grey")
class p(html.p):
style = html.Style(font_weight="bold")
doc = my.html(
my.head(),
my.body(
my.h2("hello world"),
my.p("bold as bold can")
)
)
print doc.unicode(indent=2)
| youtube/cobalt | third_party/web_platform_tests/tools/py/doc/example/genhtmlcss.py | Python | bsd-3-clause | 426 | 0.014085 |
# -*- coding: UTF-8 -*-
logger.info("Loading 15 objects to table cal_recurrentevent...")
# fields: id, start_date, start_time, end_date, end_time, name, user, every_unit, every, monday, tuesday, wednesday, thursday, friday, saturday, sunday, max_events, event_type, description
loader.save(create_cal_recurrentevent(1,date(2013,1,1),None,None,None,['Neujahr', "Jour de l'an", "New Year's Day"],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(2,date(2013,5,1),None,None,None,['Tag der Arbeit', 'Premier Mai', "International Workers' Day"],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(3,date(2013,7,21),None,None,None,['Nationalfeiertag', 'F\xeate nationale', 'National Day'],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(4,date(2013,8,15),None,None,None,['Mari\xe4 Himmelfahrt', 'Assomption de Marie', 'Assumption of Mary'],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(5,date(2013,10,31),None,None,None,['Allerseelen', 'Comm\xe9moration des fid\xe8les d\xe9funts', "All Souls' Day"],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(6,date(2013,11,1),None,None,None,['Allerheiligen', 'Toussaint', "All Saints' Day"],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(7,date(2013,11,11),None,None,None,['Waffenstillstand', 'Armistice', 'Armistice with Germany'],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(8,date(2013,12,25),None,None,None,['Weihnachten', 'No\xebl', 'Christmas'],None,u'Y',1,True,True,True,True,True,True,True,None,1,u''))
loader.save(create_cal_recurrentevent(9,date(2013,3,31),None,None,None,['Ostersonntag', 'P\xe2ques', 'Easter sunday'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(10,date(2013,4,1),None,None,None,['Ostermontag', 'Lundi de P\xe2ques', 'Easter monday'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(11,date(2013,5,9),None,None,None,['Christi Himmelfahrt', 'Ascension', 'Ascension of Jesus'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(12,date(2013,5,20),None,None,None,['Pfingsten', 'Pentec\xf4te', 'Pentecost'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(13,date(2013,3,29),None,None,None,['Karfreitag', 'Vendredi Saint', 'Good Friday'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(14,date(2013,2,13),None,None,None,['Aschermittwoch', 'Mercredi des Cendres', 'Ash Wednesday'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.save(create_cal_recurrentevent(15,date(2013,2,11),None,None,None,['Rosenmontag', 'Lundi de carnaval', 'Rosenmontag'],None,u'E',1,False,False,False,False,False,False,False,None,1,u''))
loader.flush_deferred_objects()
| lino-framework/welfare | lino_welfare/projects/gerd/tests/dumps/18.8.0/cal_recurrentevent.py | Python | agpl-3.0 | 3,162 | 0.099937 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-06-16 19:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('silo', '0013_deletedsilos'),
]
operations = [
migrations.CreateModel(
name='FormulaColumnMapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mapping', models.TextField()),
('operation', models.TextField()),
('column_name', models.TextField()),
('silo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='silo.Silo')),
],
),
]
| mercycorps/TolaTables | silo/migrations/0014_formulacolumnmapping.py | Python | gpl-2.0 | 815 | 0.002454 |
from django.conf import settings
from django.conf.urls import url
from .views import get_summoner_v3, live_match, test_something, live_match_detail, FrontendAppView, ApiLiveMatch, ChampionInfoView
urlpatterns = [
url(r'^summoner/', get_summoner_v3, name='summoner_lookup'),
url(r'^live/$', live_match, name='live_match'),
url(r'^live/([a-zA-Z0-9]+)/(.+)/$', live_match_detail, name='live-match-detail'),
url(r'^api/live/([a-zA-Z0-9]+)/(.+)/$', ApiLiveMatch.as_view(), name='api-live-match'),
url(r'api/champions/$', ChampionInfoView.as_view(), name='api-champion-info'),
url(r'^summonerprofile/', get_summoner_v3, name='summoner_profile'),
url(r'test/', test_something, name='test'),
url(r'^', FrontendAppView.as_view()),
] | belleandindygames/league | league/champ_chooser/urls.py | Python | mit | 756 | 0.006614 |
import pygame, sys, os, random
from classes import *
from pygame.locals import *
blocksFile = "blocks.txt"
thisBlock = ""
allBlocks = []
boardWidth = 15
boardHeight = 20
gameOver = False
# Make all the blocks which are in file "blocks.txt"
file = open(blocksFile, "r")
while file:
line = file.readline()
if line.find("END") >= 0:
break
if line.find("/") >= 0:
allBlocks.append(blockStyle(thisBlock))
thisBlock = ""
continue
thisBlock = thisBlock + line
# Make board
gameBoard = board(boardWidth, boardHeight)
# All pygame init
pygame.init()
gameWindow = pygame.display.set_mode((640, 480))
pygame.display.set_caption('PyTetris')
clock = pygame.time.Clock()
playerBlock = block(boardWidth, boardHeight, allBlocks[random.randrange(len(allBlocks))].getStyle(), gameBoard)
pygame.time.Clock()
pygame.time.set_timer(pygame.USEREVENT + 1, 150)
pygame.time.set_timer(pygame.USEREVENT + 2, 1000)
#Game loop
while gameOver == False:
clock.tick(60)
for event in pygame.event.get():
if event.type == pygame.QUIT:
gameOver = True
elif event.type == KEYDOWN and event.key == K_ESCAPE:
gameOver = True
elif event.type == pygame.USEREVENT + 1:
playerBlock.handlePlayerInput()
elif event.type == pygame.USEREVENT + 2:
playerBlock.updatePlayer()
if playerBlock.isDown == True:
playerBlock.changeStyle(allBlocks[random.randrange(len(allBlocks))].getStyle())
gameWindow.fill((0,0,0))
gameBoard.drawBoard()
gameBoard.update()
playerBlock.drawBlock()
pygame.display.flip()
pygame.quit()
| MadMac/PyTetris | src/main/main.py | Python | mit | 1,650 | 0.004848 |
"""Resets the password for virtual machine. The virtual machine must be in a "Stopped" state and the template must already support this feature for this command to take effect. [async]"""
from baseCmd import *
from baseResponse import *
class resetPasswordForVirtualMachineCmd (baseCmd):
typeInfo = {}
def __init__(self):
self.isAsync = "true"
"""The ID of the virtual machine"""
"""Required"""
self.id = None
self.typeInfo['id'] = 'uuid'
self.required = ["id", ]
class resetPasswordForVirtualMachineResponse (baseResponse):
typeInfo = {}
def __init__(self):
"""the ID of the virtual machine"""
self.id = None
self.typeInfo['id'] = 'string'
"""the account associated with the virtual machine"""
self.account = None
self.typeInfo['account'] = 'string'
"""the number of cpu this virtual machine is running with"""
self.cpunumber = None
self.typeInfo['cpunumber'] = 'integer'
"""the speed of each cpu"""
self.cpuspeed = None
self.typeInfo['cpuspeed'] = 'integer'
"""the amount of the vm's CPU currently used"""
self.cpuused = None
self.typeInfo['cpuused'] = 'string'
"""the date when this virtual machine was created"""
self.created = None
self.typeInfo['created'] = 'date'
"""Vm details in key/value pairs."""
self.details = None
self.typeInfo['details'] = 'map'
"""the read (io) of disk on the vm"""
self.diskioread = None
self.typeInfo['diskioread'] = 'long'
"""the write (io) of disk on the vm"""
self.diskiowrite = None
self.typeInfo['diskiowrite'] = 'long'
"""the read (bytes) of disk on the vm"""
self.diskkbsread = None
self.typeInfo['diskkbsread'] = 'long'
"""the write (bytes) of disk on the vm"""
self.diskkbswrite = None
self.typeInfo['diskkbswrite'] = 'long'
"""the ID of the disk offering of the virtual machine"""
self.diskofferingid = None
self.typeInfo['diskofferingid'] = 'string'
"""the name of the disk offering of the virtual machine"""
self.diskofferingname = None
self.typeInfo['diskofferingname'] = 'string'
"""user generated name. The name of the virtual machine is returned if no displayname exists."""
self.displayname = None
self.typeInfo['displayname'] = 'string'
"""an optional field whether to the display the vm to the end user or not."""
self.displayvm = None
self.typeInfo['displayvm'] = 'boolean'
"""the name of the domain in which the virtual machine exists"""
self.domain = None
self.typeInfo['domain'] = 'string'
"""the ID of the domain in which the virtual machine exists"""
self.domainid = None
self.typeInfo['domainid'] = 'string'
"""the virtual network for the service offering"""
self.forvirtualnetwork = None
self.typeInfo['forvirtualnetwork'] = 'boolean'
"""the group name of the virtual machine"""
self.group = None
self.typeInfo['group'] = 'string'
"""the group ID of the virtual machine"""
self.groupid = None
self.typeInfo['groupid'] = 'string'
"""Os type ID of the virtual machine"""
self.guestosid = None
self.typeInfo['guestosid'] = 'string'
"""true if high-availability is enabled, false otherwise"""
self.haenable = None
self.typeInfo['haenable'] = 'boolean'
"""the ID of the host for the virtual machine"""
self.hostid = None
self.typeInfo['hostid'] = 'string'
"""the name of the host for the virtual machine"""
self.hostname = None
self.typeInfo['hostname'] = 'string'
"""the hypervisor on which the template runs"""
self.hypervisor = None
self.typeInfo['hypervisor'] = 'string'
"""instance name of the user vm; this parameter is returned to the ROOT admin only"""
self.instancename = None
self.typeInfo['instancename'] = 'string'
"""true if vm contains XS tools inorder to support dynamic scaling of VM cpu/memory."""
self.isdynamicallyscalable = None
self.typeInfo['isdynamicallyscalable'] = 'boolean'
"""an alternate display text of the ISO attached to the virtual machine"""
self.isodisplaytext = None
self.typeInfo['isodisplaytext'] = 'string'
"""the ID of the ISO attached to the virtual machine"""
self.isoid = None
self.typeInfo['isoid'] = 'string'
"""the name of the ISO attached to the virtual machine"""
self.isoname = None
self.typeInfo['isoname'] = 'string'
"""ssh key-pair"""
self.keypair = None
self.typeInfo['keypair'] = 'string'
"""the memory allocated for the virtual machine"""
self.memory = None
self.typeInfo['memory'] = 'integer'
"""the name of the virtual machine"""
self.name = None
self.typeInfo['name'] = 'string'
"""the incoming network traffic on the vm"""
self.networkkbsread = None
self.typeInfo['networkkbsread'] = 'long'
"""the outgoing network traffic on the host"""
self.networkkbswrite = None
self.typeInfo['networkkbswrite'] = 'long'
"""OS type id of the vm"""
self.ostypeid = None
self.typeInfo['ostypeid'] = 'long'
"""the password (if exists) of the virtual machine"""
self.password = None
self.typeInfo['password'] = 'string'
"""true if the password rest feature is enabled, false otherwise"""
self.passwordenabled = None
self.typeInfo['passwordenabled'] = 'boolean'
"""the project name of the vm"""
self.project = None
self.typeInfo['project'] = 'string'
"""the project id of the vm"""
self.projectid = None
self.typeInfo['projectid'] = 'string'
"""public IP address id associated with vm via Static nat rule"""
self.publicip = None
self.typeInfo['publicip'] = 'string'
"""public IP address id associated with vm via Static nat rule"""
self.publicipid = None
self.typeInfo['publicipid'] = 'string'
"""device ID of the root volume"""
self.rootdeviceid = None
self.typeInfo['rootdeviceid'] = 'long'
"""device type of the root volume"""
self.rootdevicetype = None
self.typeInfo['rootdevicetype'] = 'string'
"""the ID of the service offering of the virtual machine"""
self.serviceofferingid = None
self.typeInfo['serviceofferingid'] = 'string'
"""the name of the service offering of the virtual machine"""
self.serviceofferingname = None
self.typeInfo['serviceofferingname'] = 'string'
"""State of the Service from LB rule"""
self.servicestate = None
self.typeInfo['servicestate'] = 'string'
"""the state of the virtual machine"""
self.state = None
self.typeInfo['state'] = 'string'
"""an alternate display text of the template for the virtual machine"""
self.templatedisplaytext = None
self.typeInfo['templatedisplaytext'] = 'string'
"""the ID of the template for the virtual machine. A -1 is returned if the virtual machine was created from an ISO file."""
self.templateid = None
self.typeInfo['templateid'] = 'string'
"""the name of the template for the virtual machine"""
self.templatename = None
self.typeInfo['templatename'] = 'string'
"""the user's ID who deployed the virtual machine"""
self.userid = None
self.typeInfo['userid'] = 'string'
"""the user's name who deployed the virtual machine"""
self.username = None
self.typeInfo['username'] = 'string'
"""the vgpu type used by the virtual machine"""
self.vgpu = None
self.typeInfo['vgpu'] = 'string'
"""the ID of the availablility zone for the virtual machine"""
self.zoneid = None
self.typeInfo['zoneid'] = 'string'
"""the name of the availability zone for the virtual machine"""
self.zonename = None
self.typeInfo['zonename'] = 'string'
"""list of affinity groups associated with the virtual machine"""
self.affinitygroup = []
"""the list of nics associated with vm"""
self.nic = []
"""list of security groups associated with the virtual machine"""
self.securitygroup = []
"""the list of resource tags associated with vm"""
self.tags = []
"""the ID of the latest async job acting on this object"""
self.jobid = None
self.typeInfo['jobid'] = ''
"""the current status of the latest async job acting on this object"""
self.jobstatus = None
self.typeInfo['jobstatus'] = ''
class affinitygroup:
def __init__(self):
""""the ID of the affinity group"""
self.id = None
""""the account owning the affinity group"""
self.account = None
""""the description of the affinity group"""
self.description = None
""""the domain name of the affinity group"""
self.domain = None
""""the domain ID of the affinity group"""
self.domainid = None
""""the name of the affinity group"""
self.name = None
""""the project name of the affinity group"""
self.project = None
""""the project ID of the affinity group"""
self.projectid = None
""""the type of the affinity group"""
self.type = None
""""virtual machine IDs associated with this affinity group"""
self.virtualmachineIds = None
class nic:
def __init__(self):
""""the ID of the nic"""
self.id = None
""""the broadcast uri of the nic"""
self.broadcasturi = None
""""device id for the network when plugged into the virtual machine"""
self.deviceid = None
""""the gateway of the nic"""
self.gateway = None
""""the IPv6 address of network"""
self.ip6address = None
""""the cidr of IPv6 network"""
self.ip6cidr = None
""""the gateway of IPv6 network"""
self.ip6gateway = None
""""the ip address of the nic"""
self.ipaddress = None
""""true if nic is default, false otherwise"""
self.isdefault = None
""""the isolation uri of the nic"""
self.isolationuri = None
""""true if nic is default, false otherwise"""
self.macaddress = None
""""the netmask of the nic"""
self.netmask = None
""""the ID of the corresponding network"""
self.networkid = None
""""the name of the corresponding network"""
self.networkname = None
""""the Secondary ipv4 addr of nic"""
self.secondaryip = None
""""the traffic type of the nic"""
self.traffictype = None
""""the type of the nic"""
self.type = None
""""Id of the vm to which the nic belongs"""
self.virtualmachineid = None
class tags:
def __init__(self):
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
class egressrule:
def __init__(self):
""""account owning the security group rule"""
self.account = None
""""the CIDR notation for the base IP address of the security group rule"""
self.cidr = None
""""the ending IP of the security group rule"""
self.endport = None
""""the code for the ICMP message response"""
self.icmpcode = None
""""the type of the ICMP message response"""
self.icmptype = None
""""the protocol of the security group rule"""
self.protocol = None
""""the id of the security group rule"""
self.ruleid = None
""""security group name"""
self.securitygroupname = None
""""the starting IP of the security group rule"""
self.startport = None
""""the list of resource tags associated with the rule"""
self.tags = []
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
class tags:
def __init__(self):
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
class tags:
def __init__(self):
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
class ingressrule:
def __init__(self):
""""account owning the security group rule"""
self.account = None
""""the CIDR notation for the base IP address of the security group rule"""
self.cidr = None
""""the ending IP of the security group rule"""
self.endport = None
""""the code for the ICMP message response"""
self.icmpcode = None
""""the type of the ICMP message response"""
self.icmptype = None
""""the protocol of the security group rule"""
self.protocol = None
""""the id of the security group rule"""
self.ruleid = None
""""security group name"""
self.securitygroupname = None
""""the starting IP of the security group rule"""
self.startport = None
""""the list of resource tags associated with the rule"""
self.tags = []
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
class tags:
def __init__(self):
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
class tags:
def __init__(self):
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
class securitygroup:
def __init__(self):
""""the ID of the security group"""
self.id = None
""""the account owning the security group"""
self.account = None
""""the description of the security group"""
self.description = None
""""the domain name of the security group"""
self.domain = None
""""the domain ID of the security group"""
self.domainid = None
""""the name of the security group"""
self.name = None
""""the project name of the group"""
self.project = None
""""the project id of the group"""
self.projectid = None
""""the number of virtualmachines associated with this securitygroup"""
self.virtualmachinecount = None
""""the list of virtualmachine ids associated with this securitygroup"""
self.virtualmachineids = None
""""the list of egress rules associated with the security group"""
self.egressrule = []
""""account owning the security group rule"""
self.account = None
""""the CIDR notation for the base IP address of the security group rule"""
self.cidr = None
""""the ending IP of the security group rule"""
self.endport = None
""""the code for the ICMP message response"""
self.icmpcode = None
""""the type of the ICMP message response"""
self.icmptype = None
""""the protocol of the security group rule"""
self.protocol = None
""""the id of the security group rule"""
self.ruleid = None
""""security group name"""
self.securitygroupname = None
""""the starting IP of the security group rule"""
self.startport = None
""""the list of resource tags associated with the rule"""
self.tags = []
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
""""the list of ingress rules associated with the security group"""
self.ingressrule = []
""""account owning the security group rule"""
self.account = None
""""the CIDR notation for the base IP address of the security group rule"""
self.cidr = None
""""the ending IP of the security group rule"""
self.endport = None
""""the code for the ICMP message response"""
self.icmpcode = None
""""the type of the ICMP message response"""
self.icmptype = None
""""the protocol of the security group rule"""
self.protocol = None
""""the id of the security group rule"""
self.ruleid = None
""""security group name"""
self.securitygroupname = None
""""the starting IP of the security group rule"""
self.startport = None
""""the list of resource tags associated with the rule"""
self.tags = []
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
""""the list of resource tags associated with the rule"""
self.tags = []
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
""""the ID of the latest async job acting on this object"""
self.jobid = None
""""the current status of the latest async job acting on this object"""
self.jobstatus = None
class tags:
def __init__(self):
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
| MissionCriticalCloud/marvin | marvin/cloudstackAPI/resetPasswordForVirtualMachine.py | Python | apache-2.0 | 24,253 | 0.00099 |
"""
Java
====
Installs Java, currently restricted to version 7.
**Fabric environment:**
.. code-block:: yaml
blueprints:
- blues.java
"""
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo
from . import debian
__all__ = ['setup']
@task
def setup():
"""
Install Java
"""
install()
def install():
with sudo():
lbs_release = debian.lbs_release()
if lbs_release == '12.04':
debian.add_apt_ppa('webupd8team/java')
debian.debconf_set_selections('shared/accepted-oracle-license-v1-1 select true',
'shared/accepted-oracle-license-v1-1 seen true')
package = 'oracle-java7-installer'
elif lbs_release >= '16.04':
package = 'default-jdk'
elif lbs_release >= '14.04':
package = 'openjdk-7-jdk'
else:
package = 'java7-jdk'
if package != 'default-jdk':
info('Install Java 7 JDK')
else:
info('Install default Java JDK')
debian.apt_get('install', package)
| 5monkeys/blues | blues/java.py | Python | mit | 1,149 | 0.001741 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import operator
import shutil
import sys
import unittest
import six
from base64 import b64encode
import logging
import pkg_resources
import tg
import mock
from tg import tmpl_context as c, app_globals as g
from datadiff.tools import assert_equal
from nose.tools import assert_in, assert_less, assert_less_equal
from ming.orm import FieldProperty, Mapper
from ming.orm import ThreadLocalORMSession
from testfixtures import LogCapture
from alluratest.controller import setup_basic_test, setup_global_objects, TestController
from allura import model as M
from allura.command.taskd import TaskdCommand
from allura.lib import helpers as h
from allura.lib import search
from allura.lib.exceptions import CompoundError
from allura.lib.mail_util import MAX_MAIL_LINE_OCTETS
from allura.tasks import event_tasks
from allura.tasks import index_tasks
from allura.tasks import mail_tasks
from allura.tasks import notification_tasks
from allura.tasks import repo_tasks
from allura.tasks import export_tasks
from allura.tasks import admin_tasks
from allura.tests import decorators as td
from allura.lib.decorators import event_handler, task
class TestRepoTasks(unittest.TestCase):
@mock.patch('allura.tasks.repo_tasks.c.app')
@mock.patch('allura.tasks.repo_tasks.g.post_event')
def test_clone_posts_event_on_failure(self, post_event, app):
fake_source_url = 'fake_source_url'
fake_traceback = 'fake_traceback'
app.repo.init_as_clone.side_effect = Exception(fake_traceback)
repo_tasks.clone(None, None, fake_source_url)
assert_equal(post_event.call_args[0][0], 'repo_clone_task_failed')
assert_equal(post_event.call_args[0][1], fake_source_url)
assert_equal(post_event.call_args[0][2], None)
# ignore args[3] which is a traceback string
@mock.patch('allura.tasks.repo_tasks.session', autospec=True)
@mock.patch.object(M, 'MergeRequest')
def test_merge(self, MR, session):
mr = mock.Mock(_id='_id',
activity_name='merge req', activity_url='/fake/url', activity_extras={}, node_id=None)
MR.query.get.return_value = mr
repo_tasks.merge(mr._id)
mr.app.repo.merge.assert_called_once_with(mr)
assert_equal(mr.status, 'merged')
session.assert_called_once_with(mr)
session.return_value.flush.assert_called_once_with(mr)
@mock.patch.object(M, 'MergeRequest')
def test_can_merge(self, MR):
mr = M.MergeRequest(_id='_id')
MR.query.get.return_value = mr
repo_tasks.can_merge(mr._id)
mr.app.repo.can_merge.assert_called_once_with(mr)
val = mr.app.repo.can_merge.return_value
mr.set_can_merge_cache.assert_called_once_with(val)
# used in test_post_event_from_within_task below
@task
def _task_that_creates_event(event_name,):
g.post_event(event_name)
# event does not get flushed to db right away (at end of task, ming middleware will flush it)
assert not M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=[event_name])
class TestEventTasks(unittest.TestCase):
def setUp(self):
setup_basic_test()
setup_global_objects()
self.called_with = []
def test_fire_event(self):
event_tasks.event('my_event', self, 1, 2, a=5)
assert self.called_with == [((1, 2), {'a': 5})], self.called_with
def test_post_event_explicit_flush(self):
g.post_event('my_event1', flush_immediately=True)
assert M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=['my_event1'])
g.post_event('my_event2', flush_immediately=False)
assert not M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=['my_event2'])
ThreadLocalORMSession.flush_all()
assert M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=['my_event2'])
def test_post_event_from_script(self):
# simulate post_event being called from a paster script command:
with mock.patch.dict(tg.request.environ, PATH_INFO='--script--'):
g.post_event('my_event3')
# event task is flushed to db right away:
assert M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=['my_event3'])
def test_post_event_from_within_task(self):
# instead of M.MonQTask.run_ready() run real 'taskd' so we get all the setup we need
taskd = TaskdCommand('taskd')
taskd.parse_args([pkg_resources.resource_filename('allura', '../test.ini')])
taskd.keep_running = True
taskd.restart_when_done = False
_task_that_creates_event.post('my_event4')
with mock.patch('allura.command.taskd.setproctitle') as setproctitle:
def stop_taskd_after_this_task(*args):
taskd.keep_running = False
setproctitle.side_effect = stop_taskd_after_this_task # avoid proc title change; useful hook to stop taskd
taskd.worker()
# after the initial task is done, the event task has been persisted:
assert M.MonQTask.query.get(task_name='allura.tasks.event_tasks.event', args=['my_event4'])
def test_compound_error(self):
t = raise_exc.post()
with LogCapture(level=logging.ERROR) as l, \
mock.patch.dict(tg.config, {'monq.raise_errors': False}): # match normal non-test behavior
t()
# l.check() would be nice, but string is too detailed to check
assert_equal(l.records[0].name, 'allura.model.monq_model')
msg = l.records[0].getMessage()
assert_in("AssertionError('assert 0'", msg)
assert_in("AssertionError('assert 5'", msg)
assert_in(' on job <MonQTask ', msg)
assert_in(' (error) P:10 allura.tests.test_tasks.raise_exc ', msg)
for x in range(10):
assert ('assert %d' % x) in t.result
class TestIndexTasks(unittest.TestCase):
def setUp(self):
setup_basic_test()
setup_global_objects()
def test_add_projects(self):
g.solr.db.clear()
old_solr_size = len(g.solr.db)
projects = M.Project.query.find().all()
index_tasks.add_projects.post([p._id for p in projects])
M.MonQTask.run_ready()
new_solr_size = len(g.solr.db)
assert old_solr_size + len(projects) == new_solr_size
@td.with_wiki
def test_del_projects(self):
projects = M.Project.query.find().all()
index_tasks.add_projects([p._id for p in projects])
with mock.patch('allura.tasks.index_tasks.g.solr') as solr:
index_tasks.del_projects([p.index_id() for p in projects])
assert solr.delete.call_count, 1
for project in projects:
assert project.index_id() in solr.delete.call_args[1]['q']
@td.with_wiki
def test_add_artifacts(self):
from allura.lib.search import find_shortlinks
with mock.patch('allura.lib.search.find_shortlinks') as find_slinks:
find_slinks.side_effect = lambda s: find_shortlinks(s)
old_shortlinks = M.Shortlink.query.find().count()
old_solr_size = len(g.solr.db)
artifacts = [_TestArtifact() for x in range(5)]
for i, a in enumerate(artifacts):
a._shorthand_id = 't%d' % i
a.text = 'This is a reference to [t3]'
arefs = [M.ArtifactReference.from_artifact(a) for a in artifacts]
ref_ids = [r._id for r in arefs]
M.artifact_orm_session.flush()
index_tasks.add_artifacts(ref_ids)
new_shortlinks = M.Shortlink.query.find().count()
new_solr_size = len(g.solr.db)
assert old_shortlinks + \
5 == new_shortlinks, 'Shortlinks not created'
assert old_solr_size + \
5 == new_solr_size, "Solr additions didn't happen"
M.main_orm_session.flush()
M.main_orm_session.clear()
t3 = _TestArtifact.query.get(_shorthand_id='t3')
assert len(t3.backrefs) == 5, t3.backrefs
assert_equal(find_slinks.call_args_list,
[mock.call(a.index().get('text')) for a in artifacts])
@td.with_wiki
@mock.patch('allura.tasks.index_tasks.g.solr')
def test_del_artifacts(self, solr):
old_shortlinks = M.Shortlink.query.find().count()
artifacts = [_TestArtifact(_shorthand_id='ta_%s' % x)
for x in range(5)]
M.artifact_orm_session.flush()
arefs = [M.ArtifactReference.from_artifact(a) for a in artifacts]
ref_ids = [r._id for r in arefs]
M.artifact_orm_session.flush()
index_tasks.add_artifacts(ref_ids)
M.main_orm_session.flush()
M.main_orm_session.clear()
new_shortlinks = M.Shortlink.query.find().count()
assert old_shortlinks + 5 == new_shortlinks, 'Shortlinks not created'
assert solr.add.call_count == 1
sort_key = operator.itemgetter('id')
assert_equal(
sorted(solr.add.call_args[0][0], key=sort_key),
sorted((ref.artifact.solarize() for ref in arefs),
key=sort_key))
index_tasks.del_artifacts(ref_ids)
M.main_orm_session.flush()
M.main_orm_session.clear()
new_shortlinks = M.Shortlink.query.find().count()
assert old_shortlinks == new_shortlinks, 'Shortlinks not deleted'
solr_query = 'id:({})'.format(' || '.join(ref_ids))
solr.delete.assert_called_once_with(q=solr_query)
class TestMailTasks(unittest.TestCase):
def setUp(self):
setup_basic_test()
setup_global_objects()
# these tests go down through the mail_util.SMTPClient.sendmail method
# since usage is generally through the task, and not using mail_util
# directly
def test_send_email_ascii_with_user_lookup(self):
c.user = M.User.by_username('test-admin')
with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
mail_tasks.sendmail(
fromaddr=str(c.user._id),
destinations=[str(c.user._id)],
text='This is a test',
reply_to=g.noreply,
subject='Test subject',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 1)
return_path, rcpts, body = _client.sendmail.call_args[0]
body = body.split('\n')
assert_equal(rcpts, [c.user.get_pref('email_address')])
assert_in('Reply-To: %s' % g.noreply, body)
assert_in('From: "Test Admin" <test-admin@users.localhost>', body)
assert_in('Subject: Test subject', body)
# plain
assert_in('This is a test', body)
# html
assert_in(
'<div class="markdown_content"><p>This is a test</p></div>', body)
def test_send_email_nonascii(self):
with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
mail_tasks.sendmail(
fromaddr='"По" <foo@bar.com>',
destinations=['blah@blah.com'],
text='Громады стройные теснятся',
reply_to=g.noreply,
subject='По оживлённым берегам',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 1)
return_path, rcpts, body = _client.sendmail.call_args[0]
body = body.split('\n')
assert_equal(rcpts, ['blah@blah.com'])
assert_in('Reply-To: %s' % g.noreply, body)
# The address portion must not be encoded, only the name portion can be.
# Also py2 and py3 vary in handling of double-quote separators when the name portion is encoded
unquoted_cyrillic_No = '=?utf-8?b?0J/Qvg==?=' # По
quoted_cyrillic_No = '=?utf-8?b?ItCf0L4i?=' # "По"
assert (f'From: {quoted_cyrillic_No} <foo@bar.com>' in body or
f'From: {unquoted_cyrillic_No} <foo@bar.com>' in body), body
assert_in(
'Subject: =?utf-8?b?0J/QviDQvtC20LjQstC70ZHQvdC90YvQvCDQsdC10YDQtdCz0LDQvA==?=', body)
assert_in('Content-Type: text/plain; charset="utf-8"', body)
assert_in('Content-Transfer-Encoding: base64', body)
assert_in(six.ensure_text(b64encode('Громады стройные теснятся'.encode())), body)
def test_send_email_with_disabled_user(self):
c.user = M.User.by_username('test-admin')
c.user.disabled = True
destination_user = M.User.by_username('test-user-1')
destination_user.preferences['email_address'] = 'user1@mail.com'
ThreadLocalORMSession.flush_all()
with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
mail_tasks.sendmail(
fromaddr=str(c.user._id),
destinations=[str(destination_user._id)],
text='This is a test',
reply_to=g.noreply,
subject='Test subject',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 1)
return_path, rcpts, body = _client.sendmail.call_args[0]
body = body.split('\n')
assert_in('From: %s' % g.noreply, body)
def test_send_email_with_disabled_destination_user(self):
c.user = M.User.by_username('test-admin')
destination_user = M.User.by_username('test-user-1')
destination_user.preferences['email_address'] = 'user1@mail.com'
destination_user.disabled = True
ThreadLocalORMSession.flush_all()
with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
mail_tasks.sendmail(
fromaddr=str(c.user._id),
destinations=[str(destination_user._id)],
text='This is a test',
reply_to=g.noreply,
subject='Test subject',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 0)
def test_sendsimplemail_with_disabled_user(self):
c.user = M.User.by_username('test-admin')
with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
mail_tasks.sendsimplemail(
fromaddr=str(c.user._id),
toaddr='test@mail.com',
text='This is a test',
reply_to=g.noreply,
subject='Test subject',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 1)
return_path, rcpts, body = _client.sendmail.call_args[0]
body = body.split('\n')
assert_in('From: "Test Admin" <test-admin@users.localhost>', body)
c.user.disabled = True
ThreadLocalORMSession.flush_all()
mail_tasks.sendsimplemail(
fromaddr=str(c.user._id),
toaddr='test@mail.com',
text='This is a test',
reply_to=g.noreply,
subject='Test subject',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 2)
return_path, rcpts, body = _client.sendmail.call_args[0]
body = body.split('\n')
assert_in('From: %s' % g.noreply, body)
def test_email_sender_to_headers(self):
c.user = M.User.by_username('test-admin')
with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
mail_tasks.sendsimplemail(
fromaddr=str(c.user._id),
toaddr='test@mail.com',
text='This is a test',
reply_to=g.noreply,
subject='Test subject',
sender='tickets@test.p.domain.net',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 1)
return_path, rcpts, body = _client.sendmail.call_args[0]
body = body.split('\n')
assert_in('From: "Test Admin" <test-admin@users.localhost>', body)
assert_in('Sender: tickets@test.p.domain.net', body)
assert_in('To: test@mail.com', body)
_client.reset_mock()
mail_tasks.sendmail(
fromaddr=str(c.user._id),
destinations=[str(c.user._id)],
text='This is a test',
reply_to='123@tickets.test.p.domain.net',
subject='Test subject',
sender='tickets@test.p.domain.net',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 1)
return_path, rcpts, body = _client.sendmail.call_args[0]
body = body.split('\n')
assert_in('From: "Test Admin" <test-admin@users.localhost>', body)
assert_in('Sender: tickets@test.p.domain.net', body)
assert_in('To: 123@tickets.test.p.domain.net', body)
def test_email_references_header(self):
c.user = M.User.by_username('test-admin')
with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
mail_tasks.sendsimplemail(
fromaddr=str(c.user._id),
toaddr='test@mail.com',
text='This is a test',
reply_to=g.noreply,
subject='Test subject',
references=['a', 'b', 'c'],
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 1)
return_path, rcpts, body = _client.sendmail.call_args[0]
body = body.split('\n')
assert_in('From: "Test Admin" <test-admin@users.localhost>', body)
assert_in('References: <a> <b> <c>', body)
_client.reset_mock()
mail_tasks.sendmail(
fromaddr=str(c.user._id),
destinations=[str(c.user._id)],
text='This is a test',
reply_to=g.noreply,
subject='Test subject',
references='ref',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 1)
return_path, rcpts, body = _client.sendmail.call_args[0]
body = body.split('\n')
assert_in('From: "Test Admin" <test-admin@users.localhost>', body)
assert_in('References: <ref>', body)
def test_cc(self):
c.user = M.User.by_username('test-admin')
with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
mail_tasks.sendsimplemail(
fromaddr=str(c.user._id),
toaddr='test@mail.com',
text='This is a test',
reply_to=g.noreply,
subject='Test subject',
cc='someone@example.com',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 1)
return_path, rcpts, body = _client.sendmail.call_args[0]
assert_in('CC: someone@example.com', body)
assert_in('someone@example.com', rcpts)
def test_fromaddr_objectid_not_str(self):
c.user = M.User.by_username('test-admin')
with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
mail_tasks.sendsimplemail(
fromaddr=c.user._id,
toaddr='test@mail.com',
text='This is a test',
reply_to=g.noreply,
subject='Test subject',
message_id=h.gen_message_id())
assert_equal(_client.sendmail.call_count, 1)
return_path, rcpts, body = _client.sendmail.call_args[0]
assert_in('From: "Test Admin" <test-admin@users.localhost>', body)
def test_send_email_long_lines_use_quoted_printable(self):
with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
mail_tasks.sendsimplemail(
fromaddr='"По" <foo@bar.com>',
toaddr='blah@blah.com',
text=('0123456789' * 100) + '\n\n' + ('Громады стро ' * 100),
reply_to=g.noreply,
subject='По оживлённым берегам',
references=['foo@example.com'] * 100, # needs to handle really long headers as well
message_id=h.gen_message_id())
return_path, rcpts, body = _client.sendmail.call_args[0]
body = body.split('\n')
for line in body:
assert_less_equal(len(line), MAX_MAIL_LINE_OCTETS)
# plain text
assert_in('012345678901234567890123456789012345678901234567890123456789012345678901234=', body)
assert_in('=D0=93=D1=80=D0=BE=D0=BC=D0=B0=D0=B4=D1=8B =D1=81=D1=82=D1=80=D0=BE =D0=93=', body)
# html
assert_in('<div class=3D"markdown_content"><p>0123456789012345678901234567890123456789=', body)
assert_in('<p>=D0=93=D1=80=D0=BE=D0=BC=D0=B0=D0=B4=D1=8B =D1=81=D1=82=D1=80=D0=BE =D0=', body)
@td.with_wiki
def test_receive_email_ok(self):
c.user = M.User.by_username('test-admin')
import forgewiki
with mock.patch.object(forgewiki.wiki_main.ForgeWikiApp, 'handle_message') as f:
mail_tasks.route_email(
'0.0.0.0', c.user.email_addresses[0],
['Page@wiki.test.p.in.localhost'],
'This is a mail message')
args, kwargs = f.call_args
assert args[0] == 'Page'
assert len(args) == 2
@td.with_tool('test', 'Tickets', 'bugs')
def test_receive_autoresponse(self):
message = '''Date: Wed, 30 Oct 2013 01:38:40 -0700
From: <test-admin@domain.net>
To: <1@bugs.test.p.in.localhost>
Message-ID: <super-unique-id>
Subject: Not here Re: Message notification
Precedence: bulk
X-Autoreply: yes
Auto-Submitted: auto-replied
I'm not here'''
import forgetracker
c.user = M.User.by_username('test-admin')
with mock.patch.object(forgetracker.tracker_main.ForgeTrackerApp, 'handle_message') as hm:
mail_tasks.route_email(
'0.0.0.0',
c.user.email_addresses[0],
['1@bugs.test.p.in.localhost'],
message)
assert_equal(hm.call_count, 0)
@td.with_tool('test', 'Tickets', 'bugs')
def test_email_posting_disabled(self):
message = 'Hello, world!'
import forgetracker
c.user = M.User.by_username('test-admin')
with mock.patch.object(forgetracker.tracker_main.ForgeTrackerApp, 'handle_message') as hm:
c.app.config.options = {'AllowEmailPosting': False}
mail_tasks.route_email(
'0.0.0.0',
c.user.email_addresses[0],
['1@bugs.test.p.in.localhost'],
message)
assert_equal(hm.call_count, 0)
class TestUserNotificationTasks(TestController):
def setUp(self):
super().setUp()
self.setup_with_tools()
@td.with_wiki
def setup_with_tools(self):
pass
def test_send_usermentions_notification(self):
c.user = M.User.by_username('test-admin')
test_user = M.User.by_username('test-user-1')
test_user.set_pref('mention_notifications', True)
M.MonQTask.query.remove()
d = dict(title='foo', text='Hey @test-user-1!')
self.app.post('/wiki/foo/update', params=d)
M.MonQTask.run_ready()
# check email notification
tasks = M.MonQTask.query.find(
dict(task_name='allura.tasks.mail_tasks.sendsimplemail')).all()
assert_equal(len(tasks), 1)
assert_equal(tasks[0].kwargs['subject'],
'[test:wiki] Your name was mentioned')
assert_equal(tasks[0].kwargs['toaddr'], 'test-user-1@allura.local')
assert_equal(tasks[0].kwargs['reply_to'], g.noreply)
text = tasks[0].kwargs['text']
assert_in('Your name was mentioned at [foo]', text)
assert_in('by Test Admin', text)
assert_in('auth/subscriptions#notifications', text)
class TestNotificationTasks(unittest.TestCase):
def setUp(self):
setup_basic_test()
setup_global_objects()
def test_delivers_messages(self):
with mock.patch.object(M.Mailbox, 'deliver') as deliver:
with mock.patch.object(M.Mailbox, 'fire_ready') as fire_ready:
notification_tasks.notify('42', ['52'], 'none')
assert deliver.called_with('42', ['52'], 'none')
assert fire_ready.called_with()
@event_handler('my_event')
def _my_event(event_type, testcase, *args, **kwargs):
testcase.called_with.append((args, kwargs))
@task
def raise_exc():
errs = []
for x in range(10):
try:
assert False, 'assert %d' % x
except Exception:
errs.append(sys.exc_info())
raise CompoundError(*errs)
class _TestArtifact(M.Artifact):
_shorthand_id = FieldProperty(str)
text = FieldProperty(str)
def url(self):
return ''
def shorthand_id(self):
return getattr(self, '_shorthand_id', self._id)
def index(self):
return dict(
super().index(),
text=self.text)
class TestExportTasks(unittest.TestCase):
def setUp(self):
setup_basic_test()
setup_global_objects()
project = M.Project.query.get(shortname='test')
shutil.rmtree(project.bulk_export_path(tg.config['bulk_export_path']), ignore_errors=True)
def tearDown(self):
project = M.Project.query.get(shortname='test')
shutil.rmtree(project.bulk_export_path(tg.config['bulk_export_path']), ignore_errors=True)
def test_bulk_export_filter_exportable(self):
exportable = mock.Mock(exportable=True)
not_exportable = mock.Mock(exportable=False)
BE = export_tasks.BulkExport()
self.assertEqual(
BE.filter_exportable([None, exportable, not_exportable]), [exportable])
def test_bulk_export_filter_successful(self):
BE = export_tasks.BulkExport()
self.assertEqual(
BE.filter_successful(['foo', None, '0']), ['foo', '0'])
@mock.patch('allura.tasks.export_tasks.shutil')
@mock.patch('allura.tasks.export_tasks.zipdir')
@mock.patch.dict(tg.config, {'bulk_export_filename': '{project}.zip'})
@td.with_wiki
def test_bulk_export(self, zipdir, shutil):
M.MonQTask.query.remove()
export_tasks.bulk_export(['wiki'])
temp = '/tmp/bulk_export/p/test/test'
zipfn = '/tmp/bulk_export/p/test/test.zip'
zipdir.assert_called_with(temp, zipfn)
shutil.rmtree.assert_called_once_with(six.ensure_binary(temp))
# check notification
tasks = M.MonQTask.query.find(
dict(task_name='allura.tasks.mail_tasks.sendsimplemail')).all()
assert_equal(len(tasks), 1)
assert_equal(tasks[0].kwargs['subject'],
'Bulk export for project test completed')
assert_equal(tasks[0].kwargs['fromaddr'], '"Allura" <noreply@localhost>')
assert_equal(tasks[0].kwargs['reply_to'], g.noreply)
text = tasks[0].kwargs['text']
assert_in('The bulk export for project test is completed.', text)
assert_in('The following tools were exported:\n- wiki', text)
assert_in('Sample instructions for test', text)
def test_bulk_export_status(self):
assert_equal(c.project.bulk_export_status(), None)
export_tasks.bulk_export.post(['wiki'])
assert_equal(c.project.bulk_export_status(), 'busy')
class TestAdminTasks(unittest.TestCase):
def test_install_app_docstring(self):
assert_in('ep_name, mount_point=None', admin_tasks.install_app.__doc__)
Mapper.compile_all()
| apache/allura | Allura/allura/tests/test_tasks.py | Python | apache-2.0 | 28,836 | 0.001149 |
00000 0 output/setDirected.py.err
13678 1 output/setDirected.py.out
| Conedy/Conedy | testing/network/expected/sum_setDirected.py | Python | gpl-2.0 | 76 | 0 |
"""
Decorators
"""
from __future__ import unicode_literals
from functools import wraps
from django.http import HttpResponseBadRequest
from django.utils.decorators import available_attrs
from django_ajax.shortcuts import render_to_json
def ajax(function=None, mandatory=True, **ajax_kwargs):
"""
Decorator who guesses the user response type and translates to a serialized
JSON response. Usage::
@ajax
def my_view(request):
do_something()
# will send {'status': 200, 'statusText': 'OK', 'content': null}
@ajax
def my_view(request):
return {'key': 'value'}
# will send {'status': 200, 'statusText': 'OK',
'content': {'key': 'value'}}
@ajax
def my_view(request):
return HttpResponse('<h1>Hi!</h1>')
# will send {'status': 200, 'statusText': 'OK',
'content': '<h1>Hi!</h1>'}
@ajax
def my_view(request):
return redirect('home')
# will send {'status': 302, 'statusText': 'FOUND', 'content': '/'}
# combination with others decorators:
@ajax
@login_required
@require_POST
def my_view(request):
pass
# if request user is not authenticated then the @login_required
# decorator redirect to login page.
# will send {'status': 302, 'statusText': 'FOUND',
'content': '/login'}
# if request method is 'GET' then the @require_POST decorator return
# a HttpResponseNotAllowed response.
# will send {'status': 405, 'statusText': 'METHOD NOT ALLOWED',
'content': null}
"""
def decorator(func):
@wraps(func, assigned=available_attrs(func))
def inner(request, *args, **kwargs):
if mandatory and not request.is_ajax():
return HttpResponseBadRequest()
if request.is_ajax():
# return json response
try:
return render_to_json(func(request, *args, **kwargs), **ajax_kwargs)
except Exception as exception:
return render_to_json(exception)
else:
# return standard response
return func(request, *args, **kwargs)
return inner
if function:
return decorator(function)
return decorator
| furious-luke/django-ajax | django_ajax/decorators.py | Python | mit | 2,485 | 0.000805 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteEntityType
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow
# [START dialogflow_v2beta1_generated_EntityTypes_DeleteEntityType_sync]
from google.cloud import dialogflow_v2beta1
def sample_delete_entity_type():
# Create a client
client = dialogflow_v2beta1.EntityTypesClient()
# Initialize request argument(s)
request = dialogflow_v2beta1.DeleteEntityTypeRequest(
name="name_value",
)
# Make the request
client.delete_entity_type(request=request)
# [END dialogflow_v2beta1_generated_EntityTypes_DeleteEntityType_sync]
| googleapis/python-dialogflow | samples/generated_samples/dialogflow_v2beta1_generated_entity_types_delete_entity_type_sync.py | Python | apache-2.0 | 1,440 | 0.000694 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import HTMLParser
import smtplib, quopri
from frappe import msgprint, throw, _
from frappe.email.smtp import SMTPServer, get_outgoing_email_account
from frappe.email.email_body import get_email, get_formatted_html
from frappe.utils.verified_command import get_signed_params, verify_request
from html2text import html2text
from frappe.utils import get_url, nowdate, encode, now_datetime, add_days, split_emails, cstr, cint
from rq.timeouts import JobTimeoutException
from frappe.utils.scheduler import log
class EmailLimitCrossedError(frappe.ValidationError): pass
def send(recipients=None, sender=None, subject=None, message=None, reference_doctype=None,
reference_name=None, unsubscribe_method=None, unsubscribe_params=None, unsubscribe_message=None,
attachments=None, reply_to=None, cc=[], message_id=None, in_reply_to=None, send_after=None,
expose_recipients=None, send_priority=1, communication=None, now=False, read_receipt=None,
queue_separately=False, is_notification=False, add_unsubscribe_link=1):
"""Add email to sending queue (Email Queue)
:param recipients: List of recipients.
:param sender: Email sender.
:param subject: Email subject.
:param message: Email message.
:param reference_doctype: Reference DocType of caller document.
:param reference_name: Reference name of caller document.
:param send_priority: Priority for Email Queue, default 1.
:param unsubscribe_method: URL method for unsubscribe. Default is `/api/method/frappe.email.queue.unsubscribe`.
:param unsubscribe_params: additional params for unsubscribed links. default are name, doctype, email
:param attachments: Attachments to be sent.
:param reply_to: Reply to be captured here (default inbox)
:param in_reply_to: Used to send the Message-Id of a received email back as In-Reply-To.
:param send_after: Send this email after the given datetime. If value is in integer, then `send_after` will be the automatically set to no of days from current date.
:param communication: Communication link to be set in Email Queue record
:param now: Send immediately (don't send in the background)
:param queue_separately: Queue each email separately
:param is_notification: Marks email as notification so will not trigger notifications from system
:param add_unsubscribe_link: Send unsubscribe link in the footer of the Email, default 1.
"""
if not unsubscribe_method:
unsubscribe_method = "/api/method/frappe.email.queue.unsubscribe"
if not recipients and not cc:
return
if isinstance(recipients, basestring):
recipients = split_emails(recipients)
if isinstance(cc, basestring):
cc = split_emails(cc)
if isinstance(send_after, int):
send_after = add_days(nowdate(), send_after)
email_account = get_outgoing_email_account(True, append_to=reference_doctype)
if not sender or sender == "Administrator":
sender = email_account.default_sender
check_email_limit(recipients)
formatted = get_formatted_html(subject, message, email_account=email_account)
try:
text_content = html2text(formatted)
except HTMLParser.HTMLParseError:
text_content = "See html attachment"
if reference_doctype and reference_name:
unsubscribed = [d.email for d in frappe.db.get_all("Email Unsubscribe", "email",
{"reference_doctype": reference_doctype, "reference_name": reference_name})]
unsubscribed += [d.email for d in frappe.db.get_all("Email Unsubscribe", "email",
{"global_unsubscribe": 1})]
else:
unsubscribed = []
recipients = [r for r in list(set(recipients)) if r and r not in unsubscribed]
email_content = formatted
email_text_context = text_content
if add_unsubscribe_link and reference_doctype and (unsubscribe_message or reference_doctype=="Newsletter") and add_unsubscribe_link==1:
unsubscribe_link = get_unsubscribe_message(unsubscribe_message, expose_recipients)
email_content = email_content.replace("<!--unsubscribe link here-->", unsubscribe_link.html)
email_text_context += unsubscribe_link.text
# add to queue
add(recipients, sender, subject,
formatted=email_content,
text_content=email_text_context,
reference_doctype=reference_doctype,
reference_name=reference_name,
attachments=attachments,
reply_to=reply_to,
cc=cc,
message_id=message_id,
in_reply_to=in_reply_to,
send_after=send_after,
send_priority=send_priority,
email_account=email_account,
communication=communication,
add_unsubscribe_link=add_unsubscribe_link,
unsubscribe_method=unsubscribe_method,
unsubscribe_params=unsubscribe_params,
expose_recipients=expose_recipients,
read_receipt=read_receipt,
queue_separately=queue_separately,
is_notification = is_notification,
now=now)
def add(recipients, sender, subject, **kwargs):
"""Add to Email Queue"""
if kwargs.get('queue_separately') or len(recipients) > 20:
email_queue = None
for r in recipients:
if not email_queue:
email_queue = get_email_queue([r], sender, subject, **kwargs)
if kwargs.get('now'):
email_queue(email_queue.name, now=True)
else:
duplicate = email_queue.get_duplicate([r])
duplicate.insert(ignore_permissions=True)
if kwargs.get('now'):
send_one(duplicate.name, now=True)
frappe.db.commit()
else:
email_queue = get_email_queue(recipients, sender, subject, **kwargs)
if kwargs.get('now'):
send_one(email_queue.name, now=True)
def get_email_queue(recipients, sender, subject, **kwargs):
'''Make Email Queue object'''
e = frappe.new_doc('Email Queue')
e.priority = kwargs.get('send_priority')
try:
mail = get_email(recipients,
sender=sender,
subject=subject,
formatted=kwargs.get('formatted'),
text_content=kwargs.get('text_content'),
attachments=kwargs.get('attachments'),
reply_to=kwargs.get('reply_to'),
cc=kwargs.get('cc'),
email_account=kwargs.get('email_account'),
expose_recipients=kwargs.get('expose_recipients'))
mail.set_message_id(kwargs.get('message_id'),kwargs.get('is_notification'))
if kwargs.get('read_receipt'):
mail.msg_root["Disposition-Notification-To"] = sender
if kwargs.get('in_reply_to'):
mail.set_in_reply_to(kwargs.get('in_reply_to'))
e.message_id = mail.msg_root["Message-Id"].strip(" <>")
e.message = cstr(mail.as_string())
e.sender = mail.sender
except frappe.InvalidEmailAddressError:
# bad Email Address - don't add to queue
frappe.log_error('Invalid Email ID Sender: {0}, Recipients: {1}'.format(mail.sender,
', '.join(mail.recipients)), 'Email Not Sent')
e.set_recipients(recipients + kwargs.get('cc', []))
e.reference_doctype = kwargs.get('reference_doctype')
e.reference_name = kwargs.get('reference_name')
e.add_unsubscribe_link = kwargs.get("add_unsubscribe_link")
e.unsubscribe_method = kwargs.get('unsubscribe_method')
e.unsubscribe_params = kwargs.get('unsubscribe_params')
e.expose_recipients = kwargs.get('expose_recipients')
e.communication = kwargs.get('communication')
e.send_after = kwargs.get('send_after')
e.show_as_cc = ",".join(kwargs.get('cc', []))
e.insert(ignore_permissions=True)
return e
def check_email_limit(recipients):
# if using settings from site_config.json, check email limit
# No limit for own email settings
smtp_server = SMTPServer()
if (smtp_server.email_account
and getattr(smtp_server.email_account, "from_site_config", False)
or frappe.flags.in_test):
monthly_email_limit = frappe.conf.get('limits', {}).get('emails')
if frappe.flags.in_test:
monthly_email_limit = 500
if not monthly_email_limit:
return
# get count of mails sent this month
this_month = get_emails_sent_this_month()
if (this_month + len(recipients)) > monthly_email_limit:
throw(_("Cannot send this email. You have crossed the sending limit of {0} emails for this month.").format(monthly_email_limit),
EmailLimitCrossedError)
def get_emails_sent_this_month():
return frappe.db.sql("""select count(name) from `tabEmail Queue` where
status='Sent' and MONTH(creation)=MONTH(CURDATE())""")[0][0]
def get_unsubscribe_message(unsubscribe_message, expose_recipients):
if not unsubscribe_message:
unsubscribe_message = _("Unsubscribe from this list")
html = """<div style="margin: 15px auto; padding: 0px 7px; text-align: center; color: #8d99a6;">
<!--cc message-->
<p style="margin: 15px auto;">
<a href="<!--unsubscribe url-->" style="color: #8d99a6; text-decoration: underline;
target="_blank">{unsubscribe_message}
</a>
</p>
</div>""".format(unsubscribe_message=unsubscribe_message)
if expose_recipients == "footer":
text = "\n<!--cc message-->"
else:
text = ""
text += "\n\n{unsubscribe_message}: <!--unsubscribe url-->\n".format(unsubscribe_message=unsubscribe_message)
return frappe._dict({
"html": html,
"text": text
})
def get_unsubcribed_url(reference_doctype, reference_name, email, unsubscribe_method, unsubscribe_params):
params = {"email": email.encode("utf-8"),
"doctype": reference_doctype.encode("utf-8"),
"name": reference_name.encode("utf-8")}
if unsubscribe_params:
params.update(unsubscribe_params)
query_string = get_signed_params(params)
# for test
frappe.local.flags.signed_query_string = query_string
return get_url(unsubscribe_method + "?" + get_signed_params(params))
@frappe.whitelist(allow_guest=True)
def unsubscribe(doctype, name, email):
# unsubsribe from comments and communications
if not verify_request():
return
try:
frappe.get_doc({
"doctype": "Email Unsubscribe",
"email": email,
"reference_doctype": doctype,
"reference_name": name
}).insert(ignore_permissions=True)
except frappe.DuplicateEntryError:
frappe.db.rollback()
else:
frappe.db.commit()
return_unsubscribed_page(email, doctype, name)
def return_unsubscribed_page(email, doctype, name):
frappe.respond_as_web_page(_("Unsubscribed"),
_("{0} has left the conversation in {1} {2}").format(email, _(doctype), name),
indicator_color='green')
def flush(from_test=False):
"""flush email queue, every time: called from scheduler"""
# additional check
cache = frappe.cache()
check_email_limit([])
auto_commit = not from_test
if frappe.are_emails_muted():
msgprint(_("Emails are muted"))
from_test = True
smtpserver = SMTPServer()
make_cache_queue()
for i in xrange(cache.llen('cache_email_queue')):
email = cache.lpop('cache_email_queue')
if cint(frappe.defaults.get_defaults().get("hold_queue"))==1:
break
if email:
send_one(email, smtpserver, auto_commit, from_test=from_test)
# NOTE: removing commit here because we pass auto_commit
# finally:
# frappe.db.commit()
def make_cache_queue():
'''cache values in queue before sendign'''
cache = frappe.cache()
emails = frappe.db.sql('''select
name
from
`tabEmail Queue`
where
(status='Not Sent' or status='Partially Sent') and
(send_after is null or send_after < %(now)s)
order
by priority desc, creation asc
limit 500''', { 'now': now_datetime() })
# reset value
cache.delete_value('cache_email_queue')
for e in emails:
cache.rpush('cache_email_queue', e[0])
def send_one(email, smtpserver=None, auto_commit=True, now=False, from_test=False):
'''Send Email Queue with given smtpserver'''
email = frappe.db.sql('''select
name, status, communication, message, sender, reference_doctype,
reference_name, unsubscribe_param, unsubscribe_method, expose_recipients,
show_as_cc, add_unsubscribe_link
from
`tabEmail Queue`
where
name=%s
for update''', email, as_dict=True)[0]
recipients_list = frappe.db.sql('''select name, recipient, status from
`tabEmail Queue Recipient` where parent=%s''',email.name,as_dict=1)
if frappe.are_emails_muted():
frappe.msgprint(_("Emails are muted"))
return
if cint(frappe.defaults.get_defaults().get("hold_queue"))==1 :
return
if email.status not in ('Not Sent','Partially Sent') :
# rollback to release lock and return
frappe.db.rollback()
return
frappe.db.sql("""update `tabEmail Queue` set status='Sending', modified=%s where name=%s""",
(now_datetime(), email.name), auto_commit=auto_commit)
if email.communication:
frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit)
try:
if not frappe.flags.in_test:
if not smtpserver: smtpserver = SMTPServer()
smtpserver.setup_email_account(email.reference_doctype)
for recipient in recipients_list:
if recipient.status != "Not Sent":
continue
message = prepare_message(email, recipient.recipient, recipients_list)
if not frappe.flags.in_test:
smtpserver.sess.sendmail(email.sender, recipient.recipient, encode(message))
recipient.status = "Sent"
frappe.db.sql("""update `tabEmail Queue Recipient` set status='Sent', modified=%s where name=%s""",
(now_datetime(), recipient.name), auto_commit=auto_commit)
#if all are sent set status
if any("Sent" == s.status for s in recipients_list):
frappe.db.sql("""update `tabEmail Queue` set status='Sent', modified=%s where name=%s""",
(now_datetime(), email.name), auto_commit=auto_commit)
else:
frappe.db.sql("""update `tabEmail Queue` set status='Error', error=%s
where name=%s""", ("No recipients to send to", email.name), auto_commit=auto_commit)
if frappe.flags.in_test:
frappe.flags.sent_mail = message
return
if email.communication:
frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit)
except (smtplib.SMTPServerDisconnected,
smtplib.SMTPConnectError,
smtplib.SMTPHeloError,
smtplib.SMTPAuthenticationError,
JobTimeoutException):
# bad connection/timeout, retry later
if any("Sent" == s.status for s in recipients_list):
frappe.db.sql("""update `tabEmail Queue` set status='Partially Sent', modified=%s where name=%s""",
(now_datetime(), email.name), auto_commit=auto_commit)
else:
frappe.db.sql("""update `tabEmail Queue` set status='Not Sent', modified=%s where name=%s""",
(now_datetime(), email.name), auto_commit=auto_commit)
if email.communication:
frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit)
# no need to attempt further
return
except Exception, e:
frappe.db.rollback()
if any("Sent" == s.status for s in recipients_list):
frappe.db.sql("""update `tabEmail Queue` set status='Partially Errored', error=%s where name=%s""",
(unicode(e), email.name), auto_commit=auto_commit)
else:
frappe.db.sql("""update `tabEmail Queue` set status='Error', error=%s
where name=%s""", (unicode(e), email.name), auto_commit=auto_commit)
if email.communication:
frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit)
if now:
raise e
else:
# log to Error Log
log('frappe.email.queue.flush', unicode(e))
def prepare_message(email, recipient, recipients_list):
message = email.message
if email.add_unsubscribe_link and email.reference_doctype: # is missing the check for unsubscribe message but will not add as there will be no unsubscribe url
unsubscribe_url = get_unsubcribed_url(email.reference_doctype, email.reference_name, recipient,
email.unsubscribe_method, email.unsubscribe_params)
message = message.replace("<!--unsubscribe url-->", quopri.encodestring(unsubscribe_url))
if email.expose_recipients == "header":
pass
else:
if email.expose_recipients == "footer":
if isinstance(email.show_as_cc, basestring):
email.show_as_cc = email.show_as_cc.split(",")
email_sent_to = [r.recipient for r in recipients_list]
email_sent_cc = ", ".join([e for e in email_sent_to if e in email.show_as_cc])
email_sent_to = ", ".join([e for e in email_sent_to if e not in email.show_as_cc])
if email_sent_cc:
email_sent_message = _("This email was sent to {0} and copied to {1}").format(email_sent_to,email_sent_cc)
else:
email_sent_message = _("This email was sent to {0}").format(email_sent_to)
message = message.replace("<!--cc message-->", quopri.encodestring(email_sent_message))
message = message.replace("<!--recipient-->", recipient)
return message
def clear_outbox():
"""Remove low priority older than 31 days in Outbox and expire mails not sent for 7 days.
Called daily via scheduler."""
frappe.db.sql("""delete q, r from `tabEmail Queue` as q, `tabEmail Queue Recipient` as r where q.name = r.parent and q.priority=0 and
datediff(now(), q.modified) > 31""")
frappe.db.sql("""update `tabEmail Queue` as q, `tabEmail Queue Recipient` as r set q.status='Expired', r.status='Expired'
where q.name = r.parent and datediff(curdate(), q.modified) > 7 and q.status='Not Sent' and r.status='Not Sent'""")
| rohitwaghchaure/frappe | frappe/email/queue.py | Python | mit | 16,756 | 0.028408 |
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import optparse
from optparse import OptionGroup
import sys
import urllib2
import time
import json
import base64
import xml
import xml.etree.ElementTree as ET
import os
import logging
logger = logging.getLogger('AmbariConfig')
HTTP_PROTOCOL = 'http'
HTTPS_PROTOCOL = 'https'
SET_ACTION = 'set'
GET_ACTION = 'get'
DELETE_ACTION = 'delete'
GET_REQUEST_TYPE = 'GET'
PUT_REQUEST_TYPE = 'PUT'
# JSON Keywords
PROPERTIES = 'properties'
ATTRIBUTES = 'properties_attributes'
CLUSTERS = 'Clusters'
DESIRED_CONFIGS = 'desired_configs'
TYPE = 'type'
TAG = 'tag'
ITEMS = 'items'
TAG_PREFIX = 'version'
CLUSTERS_URL = '/api/v1/clusters/{0}'
DESIRED_CONFIGS_URL = CLUSTERS_URL + '?fields=Clusters/desired_configs'
CONFIGURATION_URL = CLUSTERS_URL + '/configurations?type={1}&tag={2}'
FILE_FORMAT = \
"""
"properties": {
"key1": "value1"
"key2": "value2"
},
"properties_attributes": {
"attribute": {
"key1": "value1"
"key2": "value2"
}
}
"""
class UsageException(Exception):
pass
def api_accessor(host, login, password, protocol, port):
def do_request(api_url, request_type=GET_REQUEST_TYPE, request_body=''):
try:
url = '{0}://{1}:{2}{3}'.format(protocol, host, port, api_url)
admin_auth = base64.encodestring('%s:%s' % (login, password)).replace('\n', '')
request = urllib2.Request(url)
request.add_header('Authorization', 'Basic %s' % admin_auth)
request.add_header('X-Requested-By', 'ambari')
request.add_data(request_body)
request.get_method = lambda: request_type
response = urllib2.urlopen(request)
response_body = response.read()
except Exception as exc:
raise Exception('Problem with accessing api. Reason: {0}'.format(exc))
return response_body
return do_request
def get_config_tag(cluster, config_type, accessor):
response = accessor(DESIRED_CONFIGS_URL.format(cluster))
try:
desired_tags = json.loads(response)
current_config_tag = desired_tags[CLUSTERS][DESIRED_CONFIGS][config_type][TAG]
except Exception as exc:
raise Exception('"{0}" not found in server response. Response:\n{1}'.format(config_type, response))
return current_config_tag
def create_new_desired_config(cluster, config_type, properties, attributes, accessor):
new_tag = TAG_PREFIX + str(int(time.time() * 1000000))
new_config = {
CLUSTERS: {
DESIRED_CONFIGS: {
TYPE: config_type,
TAG: new_tag,
PROPERTIES: properties
}
}
}
if len(attributes.keys()) > 0:
new_config[CLUSTERS][DESIRED_CONFIGS][ATTRIBUTES] = attributes
request_body = json.dumps(new_config)
new_file = 'doSet_{0}.json'.format(new_tag)
logger.info('### PUTting json into: {0}'.format(new_file))
output_to_file(new_file)(new_config)
accessor(CLUSTERS_URL.format(cluster), PUT_REQUEST_TYPE, request_body)
logger.info('### NEW Site:{0}, Tag:{1}'.format(config_type, new_tag))
def get_current_config(cluster, config_type, accessor):
config_tag = get_config_tag(cluster, config_type, accessor)
logger.info("### on (Site:{0}, Tag:{1})".format(config_type, config_tag))
response = accessor(CONFIGURATION_URL.format(cluster, config_type, config_tag))
config_by_tag = json.loads(response)
current_config = config_by_tag[ITEMS][0]
return current_config[PROPERTIES], current_config.get(ATTRIBUTES, {})
def update_config(cluster, config_type, config_updater, accessor):
properties, attributes = config_updater(cluster, config_type, accessor)
create_new_desired_config(cluster, config_type, properties, attributes, accessor)
def update_specific_property(config_name, config_value):
def update(cluster, config_type, accessor):
properties, attributes = get_current_config(cluster, config_type, accessor)
properties[config_name] = config_value
return properties, attributes
return update
def update_from_xml(config_file):
def update(cluster, config_type, accessor):
return read_xml_data_to_map(config_file)
return update
# Used DOM parser to read data into a map
def read_xml_data_to_map(path):
configurations = {}
properties_attributes = {}
tree = ET.parse(path)
root = tree.getroot()
for properties in root.getiterator('property'):
name = properties.find('name')
value = properties.find('value')
final = properties.find('final')
if name != None:
name_text = name.text if name.text else ""
else:
logger.warn("No name is found for one of the properties in {0}, ignoring it".format(path))
continue
if value != None:
value_text = value.text if value.text else ""
else:
logger.warn("No value is found for \"{0}\" in {1}, using empty string for it".format(name_text, path))
value_text = ""
if final != None:
final_text = final.text if final.text else ""
properties_attributes[name_text] = final_text
configurations[name_text] = value_text
return configurations, {"final" : properties_attributes}
def update_from_file(config_file):
def update(cluster, config_type, accessor):
try:
with open(config_file) as in_file:
file_content = in_file.read()
except Exception as e:
raise Exception('Cannot find file "{0}" to PUT'.format(config_file))
try:
file_properties = json.loads(file_content)
except Exception as e:
raise Exception('File "{0}" should be in the following JSON format ("properties_attributes" is optional):\n{1}'.format(config_file, FILE_FORMAT))
new_properties = file_properties.get(PROPERTIES, {})
new_attributes = file_properties.get(ATTRIBUTES, {})
logger.info('### PUTting file: "{0}"'.format(config_file))
return new_properties, new_attributes
return update
def delete_specific_property(config_name):
def update(cluster, config_type, accessor):
properties, attributes = get_current_config(cluster, config_type, accessor)
properties.pop(config_name, None)
for attribute_values in attributes.values():
attribute_values.pop(config_name, None)
return properties, attributes
return update
def output_to_file(filename):
def output(config):
with open(filename, 'w') as out_file:
json.dump(config, out_file, indent=2)
return output
def output_to_console(config):
print json.dumps(config, indent=2)
def get_config(cluster, config_type, accessor, output):
properties, attributes = get_current_config(cluster, config_type, accessor)
config = {PROPERTIES: properties}
if len(attributes.keys()) > 0:
config[ATTRIBUTES] = attributes
output(config)
def set_properties(cluster, config_type, args, accessor):
logger.info('### Performing "set":')
if len(args) == 1:
config_file = args[0]
root, ext = os.path.splitext(config_file)
if ext == ".xml":
updater = update_from_xml(config_file)
elif ext == ".json":
updater = update_from_file(config_file)
else:
logger.error("File extension {0} doesn't supported".format(ext))
return -1
logger.info('### from file {0}'.format(config_file))
else:
config_name = args[0]
config_value = args[1]
updater = update_specific_property(config_name, config_value)
logger.info('### new property - "{0}":"{1}"'.format(config_name, config_value))
update_config(cluster, config_type, updater, accessor)
return 0
def delete_properties(cluster, config_type, args, accessor):
logger.info('### Performing "delete":')
if len(args) == 0:
logger.error("Not enough arguments. Expected config key.")
return -1
config_name = args[0]
logger.info('### on property "{0}"'.format(config_name))
update_config(cluster, config_type, delete_specific_property(config_name), accessor)
return 0
def get_properties(cluster, config_type, args, accessor):
logger.info("### Performing \"get\" content:")
if len(args) > 0:
filename = args[0]
output = output_to_file(filename)
logger.info('### to file "{0}"'.format(filename))
else:
output = output_to_console
get_config(cluster, config_type, accessor, output)
return 0
def main():
parser = optparse.OptionParser(usage="usage: %prog [options]")
login_options_group = OptionGroup(parser, "To specify credentials please use \"-e\" OR \"-u\" and \"-p'\"")
login_options_group.add_option("-u", "--user", dest="user", default="admin", help="Optional user ID to use for authentication. Default is 'admin'")
login_options_group.add_option("-p", "--password", dest="password", default="admin", help="Optional password to use for authentication. Default is 'admin'")
login_options_group.add_option("-e", "--credentials-file", dest="credentials_file", help="Optional file with user credentials separated by new line.")
parser.add_option_group(login_options_group)
parser.add_option("-t", "--port", dest="port", default="8080", help="Optional port number for Ambari server. Default is '8080'. Provide empty string to not use port.")
parser.add_option("-s", "--protocol", dest="protocol", default="http", help="Optional support of SSL. Default protocol is 'http'")
parser.add_option("-a", "--action", dest="action", help="Script action: <get>, <set>, <delete>")
parser.add_option("-l", "--host", dest="host", help="Server external host name")
parser.add_option("-n", "--cluster", dest="cluster", help="Name given to cluster. Ex: 'c1'")
parser.add_option("-c", "--config-type", dest="config_type", help="One of the various configuration types in Ambari. Ex: core-site, hdfs-site, mapred-queue-acls, etc.")
config_options_group = OptionGroup(parser, "To specify property(s) please use \"-f\" OR \"-k\" and \"-v'\"")
config_options_group.add_option("-f", "--file", dest="file", help="File where entire configurations are saved to, or read from. Supported extensions (.xml, .json>)")
config_options_group.add_option("-k", "--key", dest="key", help="Key that has to be set or deleted. Not necessary for 'get' action.")
config_options_group.add_option("-v", "--value", dest="value", help="Optional value to be set. Not necessary for 'get' or 'delete' actions.")
parser.add_option_group(config_options_group)
(options, args) = parser.parse_args()
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setLevel(logging.INFO)
stdout_handler.setFormatter(formatter)
logger.addHandler(stdout_handler)
# options with default value
if not options.credentials_file and (not options.user or not options.password):
parser.error("You should use option (-e) to set file with Ambari user credentials OR use (-u) username and (-p) password")
if options.credentials_file:
if os.path.isfile(options.credentials_file):
try:
with open(options.credentials_file) as credentials_file:
file_content = credentials_file.read()
login_lines = filter(None, file_content.splitlines())
if len(login_lines) == 2:
user = login_lines[0]
password = login_lines[1]
else:
logger.error("Incorrect content of {0} file. File should contain Ambari username and password separated by new line.".format(options.credentials_file))
return -1
except Exception as e:
logger.error("You don't have permissions to {0} file".format(options.credentials_file))
return -1
else:
logger.error("File {0} doesn't exist or you don't have permissions.".format(options.credentials_file))
return -1
else:
user = options.user
password = options.password
port = options.port
protocol = options.protocol
#options without default value
if None in [options.action, options.host, options.cluster, options.config_type]:
parser.error("One of required options is not passed")
action = options.action
host = options.host
cluster = options.cluster
config_type = options.config_type
accessor = api_accessor(host, user, password, protocol, port)
if action == SET_ACTION:
if not options.file and (not options.key or not options.value):
parser.error("You should use option (-f) to set file where entire configurations are saved OR (-k) key and (-v) value for one property")
if options.file:
action_args = [options.file]
else:
action_args = [options.key, options.value]
return set_properties(cluster, config_type, action_args, accessor)
elif action == GET_ACTION:
if options.file:
action_args = [options.file]
else:
action_args = []
return get_properties(cluster, config_type, action_args, accessor)
elif action == DELETE_ACTION:
if not options.key:
parser.error("You should use option (-k) to set property name witch will be deleted")
else:
action_args = [options.key]
return delete_properties(cluster, config_type, action_args, accessor)
else:
logger.error('Action "{0}" is not supported. Supported actions: "get", "set", "delete".'.format(action))
return -1
if __name__ == "__main__":
try:
sys.exit(main())
except (KeyboardInterrupt, EOFError):
print("\nAborting ... Keyboard Interrupt.")
sys.exit(1)
| arenadata/ambari | ambari-server/src/main/resources/scripts/configs.py | Python | apache-2.0 | 13,921 | 0.014726 |
""" Profile module
"""
from operator import itemgetter
from cProfile import Profile
from SpeedIT.ProjectErr import Err
from SpeedIT.Utils import (
format_time,
get_table_rst_formatted_lines
)
def _profile_it(func, func_positional_arguments, func_keyword_arguments, name, profileit__max_slashes_fileinfo, profileit__repeat):
""" Returns a dictionary with the profile result: the function runs only once.
.. note:: excludes a couple of not relative functions/methods
- excludes: profiler.enable()
- exclude: profiler.disable()
- exclude: cProfile.Profile.runcall()
Args:
func (function):
func_positional_arguments (list): positional arguments for the function
func_keyword_arguments (dict): any keyword arguments for the function
name (str): the name used for the output `name` part
profileit__max_slashes_fileinfo (int): to adjust max path levels in the profile info
profileit__repeat (int): how often the function is repeated: the result will be the sum of all: similar to the code below
.. code-block:: python
for repeat in range(profileit__repeat):
profiler.enable()
profiler.runcall(func, *func_positional_arguments, **func_keyword_arguments)
profiler.disable()
Returns:
tuple: format: (summary_dict, table): table = list_of_dictionaries (sorted profile result lines dict)
"""
profiler = Profile()
for repeat in range(profileit__repeat):
profiler.enable()
profiler.runcall(func, *func_positional_arguments, **func_keyword_arguments)
profiler.disable()
profiler.create_stats()
total_calls = 0
primitive_calls = 0
total_time = 0
table = []
for func_tmp, (cc, nc, tt, ct, callers) in profiler.stats.items():
temp_dict = {
'number_of_calls': '{:,}'.format(cc) if cc == nc else '{:,}/{:,}'.format(cc, nc),
'func_time': tt, 'func_cumulative_time': ct
}
if func_tmp[0] == '~':
# exclude the profiler.enable()/disable() functions
if '_lsprof.Profiler' in func_tmp[2]:
continue
else:
temp_dict['func_txt'] = func_tmp[2]
else:
# exclude: cProfile.py runcall()
if func_tmp[2] == 'runcall':
if 'cProfile' in func_tmp[0]:
continue
# adjust path levels
temp_path_file_ect = func_tmp[0]
temp_slashes = temp_path_file_ect.count('/')
if temp_slashes > profileit__max_slashes_fileinfo:
temp_dict['func_txt'] = '{}:{}({})'.format(temp_path_file_ect.split('/', temp_slashes - profileit__max_slashes_fileinfo)[-1], func_tmp[1], func_tmp[2])
else:
temp_dict['func_txt'] = '{}:{}({})'.format(temp_path_file_ect, func_tmp[1], func_tmp[2])
table.append(temp_dict)
total_calls += nc
primitive_calls += cc
total_time += tt
if ("jprofile", 0, "profiler") in callers:
raise Err('ERROR NOT SURE WHAT To DO HERE: SEE pstate.py: get_top_level_stats()', func)
summary_dict = {
'name': name,
'total_calls': total_calls,
'primitive_calls': primitive_calls,
'total_time': total_time
}
return summary_dict, table
def speedit_profile(func_dict, use_func_name=True, output_in_sec=False, profileit__max_slashes_fileinfo=2, profileit__repeat=1):
""" Returns one txt string for: table format is conform with reStructuredText
Args:
func_dict (dict): mapping function names to functions
value format: tuple (function, list_of_positional_arguments, dictionary_of_keyword_arguments)
use_func_name (bool): if True the function name will be used in the output `name` if False the `func_dict key` will be used in the the output `name`
output_in_sec (int): if true the output is keep in seconds if false it is transformed to:
second (s)
millisecond (ms) One thousandth of one second
microsecond (µs) One millionth of one second
nanosecond (ns) One billionth of one second
profileit__max_slashes_fileinfo (int): to adjust max path levels in the profile info
profileit__repeat (int): how often the function is repeated: the result will be the sum of all: similar to the code below
.. code-block:: python
for repeat in range(profileit__repeat):
profiler.enable()
profiler.runcall(func, *func_positional_arguments, **func_keyword_arguments)
profiler.disable()
Returns:
str: ready to print or write to file: table format is conform with reStructuredText
- rank: starts with the part which takes the longest
- compare: % of the total execution time
- func_time: the total time spent in the given function (and excluding time made in calls to sub-functions)
- number_of_calls: the number of calls
- func_txt: provides the respective data of each function
"""
all_final_lines = []
for func_name, (function_, func_positional_arguments, func_keyword_arguments) in sorted(func_dict.items()):
if use_func_name:
name = getattr(function_, "__name__", function_)
else:
name = func_name
summary_dict, table = _profile_it(function_, func_positional_arguments, func_keyword_arguments, name, profileit__max_slashes_fileinfo, profileit__repeat)
table = sorted(table, key=itemgetter('func_time'), reverse=True)
compare_reference = summary_dict['total_time']
if compare_reference == 0:
# add ranking ect...
for idx, dict_ in enumerate(table):
dict_['compare'] = 'TOO-FAST-NOT-MEASURED'
dict_['rank'] = '{:,}'.format(idx + 1)
if output_in_sec:
dict_['func_time'] = '{:.11f}'.format(dict_['func_time'])
else:
dict_['func_time'] = format_time(dict_['func_time'])
else:
# add ranking ect...
for idx, dict_ in enumerate(table):
dict_['compare'] = '{:,.3f}'.format((dict_['func_time'] * 100.0) / compare_reference)
dict_['rank'] = '{:,}'.format(idx + 1)
if output_in_sec:
dict_['func_time'] = '{:.11f}'.format(dict_['func_time'])
else:
dict_['func_time'] = format_time(dict_['func_time'])
header_mapping = [
('rank', 'rank'),
('compare %', 'compare'),
('func_time', 'func_time'),
('number_of_calls', 'number_of_calls'),
('func_txt', 'func_txt')
]
# add Title Summary
if output_in_sec:
title_line = '`ProfileIT` name: <{}> profileit__repeat: <{}> || total_calls: <{}> primitive_calls: <{}> total_time: <{:.11f}>'.format(summary_dict['name'], profileit__repeat, summary_dict['total_calls'], summary_dict['primitive_calls'], summary_dict['total_time'])
else:
title_line = '`ProfileIT` name: <{}> profileit__repeat: <{}> || total_calls: <{}> primitive_calls: <{}> total_time: <{}>'.format(summary_dict['name'], profileit__repeat, summary_dict['total_calls'], summary_dict['primitive_calls'], format_time(summary_dict['total_time']))
all_final_lines.extend(get_table_rst_formatted_lines(table, header_mapping, title_line))
all_final_lines.extend([
'',
'',
])
return '\n'.join(all_final_lines)
| peter1000/SpeedIT | SpeedIT/ProfileIT.py | Python | bsd-3-clause | 7,430 | 0.012115 |
##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
import GafferScene
Gaffer.Metadata.registerNode(
GafferScene.SceneSwitch,
"description",
"""
Chooses between multiple input scene, passing through the
chosen input to the output.
""",
plugs = {
"index" : [
"description",
"""
The index of the input which is passed through. A value
of 0 chooses the first input, 1 the second and so on. Values
larger than the number of available inputs wrap back around to
the beginning.
"""
]
}
)
GafferUI.PlugValueWidget.registerCreator( GafferScene.SceneSwitch, "in[0-9]*", None )
| goddardl/gaffer | python/GafferSceneUI/SceneSwitchUI.py | Python | bsd-3-clause | 2,393 | 0.009611 |
########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import logging
import mock
import sys
import os
import shutil
import tempfile
import unittest
from os.path import dirname
import testtools
from mock import patch, MagicMock
from cloudify_rest_client.exceptions import CloudifyClientError
from cloudify.utils import create_temp_folder
from cloudify.decorators import operation
from cloudify.manager import NodeInstance
from cloudify.workflows import local
from cloudify import constants, state, context, exceptions, conflict_handlers
import cloudify.tests as tests_path
from cloudify.test_utils import workflow_test
class CloudifyContextTest(testtools.TestCase):
file_server_process = None
@classmethod
def setUpClass(cls):
state.current_ctx.set(context.CloudifyContext({}), {})
resources_path = os.path.join(dirname(tests_path.__file__))
from cloudify.tests.file_server import FileServer
from cloudify.tests.file_server import PORT
cls.file_server_process = FileServer(resources_path)
cls.file_server_process.start()
os.environ[constants.MANAGER_FILE_SERVER_URL_KEY] = \
"http://localhost:{0}".format(PORT)
_, os.environ[constants.LOCAL_REST_CERT_FILE_KEY] = tempfile.mkstemp()
cls.context = context.CloudifyContext({
'blueprint_id': '',
'tenant': {'name': 'default_tenant'}
})
# the context logger will try to publish messages to rabbit, which is
# not available here. instead, we redirect the output to stdout.
cls.redirect_log_to_stdout(cls.context.logger)
@classmethod
def tearDownClass(cls):
cls.file_server_process.stop()
state.current_ctx.clear()
def setup_tenant_context(self):
self.context = context.CloudifyContext(
{'blueprint_id': 'test_blueprint',
'tenant': {'name': 'default_tenant'}})
self.redirect_log_to_stdout(self.context.logger)
@staticmethod
def redirect_log_to_stdout(logger):
stdout_log_handler = logging.StreamHandler(sys.stdout)
stdout_log_handler.setLevel(logging.DEBUG)
logger.handlers = [stdout_log_handler]
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_get_resource(self, _):
resource = self.context.get_resource(
resource_path='for_test_bp_resource.txt')
self.assertEquals(resource, 'Hello from test')
def test_get_deployment_resource_priority_over_blueprint_resource(self):
deployment_context_mock = MagicMock()
deployment_context_mock.id = 'dep1'
self.context.deployment = deployment_context_mock
resource = self.context.get_resource(resource_path='for_test.txt')
self.assertEquals(resource, 'belongs to dep1')
def test_get_deployment_resource_no_blueprint_resource(self):
deployment_context_mock = MagicMock()
deployment_context_mock.id = 'dep1'
self.context.deployment = deployment_context_mock
resource = self.context.get_resource(
resource_path='for_test_only_dep.txt')
self.assertEquals(resource, 'belongs to dep1')
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_download_resource(self, _):
resource_path = self.context.download_resource(
resource_path='for_test.txt')
self.assertIsNotNone(resource_path)
self.assertTrue(os.path.exists(resource_path))
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_download_blueprint_from_tenant(self, _):
self.setup_tenant_context()
resource_path = self.context.download_resource(
resource_path='blueprint.yaml')
self.assertIsNotNone(resource_path)
self.assertTrue(os.path.exists(resource_path))
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_download_resource_to_specific_file(self, _):
target_path = "{0}/for_test_custom.log".format(create_temp_folder())
resource_path = self.context.download_resource(
resource_path='for_test.txt',
target_path=target_path)
self.assertEqual(target_path, resource_path)
self.assertTrue(os.path.exists(resource_path))
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_download_resource_to_non_writable_location(self, _):
self.assertRaises(IOError, self.context.download_resource,
'for_test.txt',
'/non-existing-folder')
@mock.patch('cloudify.manager.get_rest_client', return_value=MagicMock())
def test_get_non_existing_resource(self, _):
self.assertRaises(exceptions.HttpException, self.context.get_resource,
'non_existing.log')
def test_ctx_instance_in_relationship(self):
ctx = context.CloudifyContext({
'node_id': 'node-instance-id',
'related': {
'node_id': 'related-instance-id',
'is_target': True
},
'relationships': ['related-instance-id']
})
self.assertEqual('node-instance-id', ctx.source.instance.id)
self.assertEqual('related-instance-id', ctx.target.instance.id)
e = self.assertRaises(exceptions.NonRecoverableError,
lambda: ctx.node)
self.assertIn('ctx.node/ctx.instance can only be used in a '
'node-instance context but used in a '
'relationship-instance context.', str(e))
e = self.assertRaises(exceptions.NonRecoverableError,
lambda: ctx.instance)
self.assertIn('ctx.node/ctx.instance can only be used in a '
'node-instance context but used in a '
'relationship-instance context.', str(e))
def test_source_target_not_in_relationship(self):
ctx = context.CloudifyContext({})
e = self.assertRaises(exceptions.NonRecoverableError,
lambda: ctx.source)
self.assertIn('ctx.source/ctx.target can only be used in a '
'relationship-instance context but used in a '
'deployment context.', str(e))
e = self.assertRaises(exceptions.NonRecoverableError,
lambda: ctx.target)
self.assertIn('ctx.source/ctx.target can only be used in a '
'relationship-instance context but used in a '
'deployment context.', str(e))
def test_ctx_type(self):
ctx = context.CloudifyContext({})
self.assertEqual(constants.DEPLOYMENT, ctx.type)
ctx = context.CloudifyContext({'node_id': 'node-instance-id'})
self.assertEqual(constants.NODE_INSTANCE, ctx.type)
ctx = context.CloudifyContext({
'node_id': 'node-instance-id',
'related': {
'node_id': 'related-instance-id',
'is_target': True
},
'relationships': ['related-instance-id']
})
self.assertEqual(constants.RELATIONSHIP_INSTANCE, ctx.type)
class NodeContextTests(testtools.TestCase):
test_blueprint_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"resources/blueprints/test-context-node.yaml")
@workflow_test(blueprint_path=test_blueprint_path,
resources_to_copy=[
'resources/blueprints/execute_operation_workflow.yaml'])
def test_node_type(self, cfy_local):
cfy_local.execute('execute_operation', parameters={
'operation': 'test.interface.create',
'nodes': ['node1', 'node2'],
'testing': 'test_node_type'})
expected = {
'node1': ['test.node1.type', ['test.node1.type']],
'node2': [
'test.node2.type',
['test.node1.type', 'test.node2.type']]}
for node in ['node1', 'node2']:
instance = cfy_local.storage.get_node_instances(node_id=node)[0]
self.assertEqual(expected[node][0],
instance.runtime_properties['type'])
self.assertEqual(expected[node][1],
instance.runtime_properties['type_hierarchy'])
class PluginContextTests(testtools.TestCase):
# workdir is tested separately for local and remote workflows
def setUp(self):
super(PluginContextTests, self).setUp()
self.plugin_name = 'test_plugin'
self.plugin_pacakge_name = 'test-plugin'
self.plugin_pacakge_version = '0.1.1'
self.deployment_id = 'test_deployment'
self.tenant_name = 'default_tenant'
self.ctx = context.CloudifyContext({
'deployment_id': self.deployment_id,
'tenant': {'name': self.tenant_name},
'plugin': {
'name': self.plugin_name,
'package_name': self.plugin_pacakge_name,
'package_version': self.plugin_pacakge_version
}
})
self.test_prefix = tempfile.mkdtemp(prefix='context-plugin-test-')
self.addCleanup(lambda: shutil.rmtree(self.test_prefix,
ignore_errors=True))
def test_attributes(self):
self.assertEqual(self.ctx.plugin.name, self.plugin_name)
self.assertEqual(self.ctx.plugin.package_name,
self.plugin_pacakge_name)
self.assertEqual(self.ctx.plugin.package_version,
self.plugin_pacakge_version)
def test_prefix_from_wagon(self):
expected_prefix = os.path.join(
self.test_prefix,
'plugins',
self.tenant_name,
'{0}-{1}'.format(self.plugin_pacakge_name,
self.plugin_pacakge_version))
os.makedirs(expected_prefix)
with patch('sys.prefix', self.test_prefix):
self.assertEqual(self.ctx.plugin.prefix, expected_prefix)
def test_prefix_from_source(self):
expected_prefix = os.path.join(
self.test_prefix,
'plugins',
self.tenant_name,
'{0}-{1}'.format(self.deployment_id,
self.plugin_name))
os.makedirs(expected_prefix)
with patch('sys.prefix', self.test_prefix):
self.assertEqual(self.ctx.plugin.prefix, expected_prefix)
def test_fallback_prefix(self):
self.assertEqual(self.ctx.plugin.prefix, sys.prefix)
class GetResourceTemplateTests(testtools.TestCase):
def __init__(self, *args, **kwargs):
super(GetResourceTemplateTests, self).__init__(*args, **kwargs)
self.blueprint_resources_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"resources/blueprints/resources")
self.blueprint_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"resources/blueprints/test-get-resource-template.yaml")
def setUp(self):
super(GetResourceTemplateTests, self).setUp()
def _assert_rendering(self, env, download,
rendered, should_fail_rendering):
instance = env.storage.get_node_instances(node_id='node1')[0]
resource = instance.runtime_properties['resource']
if not should_fail_rendering:
if download:
with open(resource, 'r') as f:
rendered_resource = f.read()
else:
rendered_resource = resource
if rendered == 'normal':
expected_resource_path = \
os.path.join(self.blueprint_resources_path,
'rendered_template.conf')
elif rendered == 'extended':
expected_resource_path = \
os.path.join(self.blueprint_resources_path,
'extended_rendered_template.conf')
else:
expected_resource_path = \
os.path.join(self.blueprint_resources_path,
'for_template_rendering_tests.conf')
with open(expected_resource_path, 'r') as f:
expected = f.read()
self.assertEqual(expected, rendered_resource)
else:
self.assertEqual('failed', resource)
def _generic_get_download_template_test(self,
parameters,
download=False,
rendered='normal',
should_fail_rendering=False):
env = local.init_env(self.blueprint_path)
updated_params = {'nodes': ['node1']}
updated_params.update(parameters)
env.execute('execute_operation',
parameters=updated_params)
self._assert_rendering(env, download,
rendered, should_fail_rendering)
def test_get_resource_template_with_ctx(self):
self._generic_get_download_template_test({
'operation': 'get_template',
'testing': 'get_resource_with_ctx'
}, rendered='false', should_fail_rendering=True)
def test_get_resource_no_template(self):
self._generic_get_download_template_test({
'operation': 'get_template',
'testing': 'get_resource_no_template'
})
def test_get_resource_empty_template_variables(self):
self._generic_get_download_template_test({
'operation': 'get_template',
'testing': 'get_resource_empty_template'
})
def test_get_resource(self):
self._generic_get_download_template_test({
'operation': 'get_template',
'testing': 'get_resource'
}, rendered='extended')
def test_download_resource_template_with_ctx(self):
self._generic_get_download_template_test(
dict(operation='download_template',
testing='download_resource_with_ctx'),
download=True,
rendered='false',
should_fail_rendering=True)
def test_download_resource_no_template(self):
self._generic_get_download_template_test(
dict(operation='download_template',
testing='download_resource_no_template'),
download=True)
def test_download_resource_empty_template_variables(self):
self._generic_get_download_template_test(
dict(operation='download_template',
testing='download_resource_empty_template'),
download=True)
def test_download_resource(self):
self._generic_get_download_template_test(
dict(operation='download_template',
testing='download_resource'),
download=True,
rendered='extended')
def _context_with_endpoint(endpoint, **kwargs):
"""Get a NodeInstanceContext with the passed stub data."""
context_kwargs = {
'context': {'node_id': 'node_id'},
'endpoint': endpoint,
'node': None,
'modifiable': True
}
context_kwargs.update(kwargs)
return context.NodeInstanceContext(**context_kwargs)
class TestPropertiesRefresh(testtools.TestCase):
def test_refresh_fetches(self):
"""Refreshing a node instance fetches new properties."""
# first .get_node_instances call returns an instance with value=1
# next call returns one with value=2
instances = [
NodeInstance('id', 'node_id', {'value': 1}),
NodeInstance('id', 'node_id', {'value': 2})
]
ep = mock.Mock(**{
'get_node_instance.side_effect': instances
})
ctx = _context_with_endpoint(ep)
self.assertEqual(1, ctx.runtime_properties['value'])
ctx.refresh()
self.assertEqual(2, ctx.runtime_properties['value'])
def test_cant_refresh_dirty(self):
"""Refreshing a dirty instance throws instead of overwriting data."""
instance = NodeInstance('id', 'node_id', {'value': 1})
ep = mock.Mock(**{
'get_node_instance.return_value': instance
})
ctx = _context_with_endpoint(ep)
ctx.runtime_properties['value'] += 5
try:
ctx.refresh()
except exceptions.NonRecoverableError as e:
self.assertIn('dirty', str(e))
else:
self.fail('NonRecoverableError was not thrown')
self.assertEqual(
6, ctx.runtime_properties['value'],
"Instance properties were overwritten, losing local changes.")
def test_force_overwrites_dirty(self):
"""Force-refreshing a dirty instance overwrites local changes."""
def get_instance(endpoint):
# we'll be mutating the instance properties, so make sure
# we return a new object every time - otherwise the properties
# would've been overwritten with the same object, not with fresh
# values.
return NodeInstance('id', 'node_id', {'value': 1})
ep = mock.Mock(**{
'get_node_instance.side_effect': get_instance
})
ctx = _context_with_endpoint(ep)
ctx.runtime_properties['value'] += 5
ctx.refresh(force=True)
self.assertEqual(
1, ctx.runtime_properties['value'],
"Instance properties were not overwritten but force was used")
class TestPropertiesUpdate(testtools.TestCase):
ERR_CONFLICT = CloudifyClientError('conflict', status_code=409)
def test_update(self):
""".update() without a handler sends the changed runtime properties."""
def mock_update(instance):
self.assertEqual({'foo': 42}, instance.runtime_properties)
instance = NodeInstance('id', 'node_id')
ep = mock.Mock(**{
'get_node_instance.return_value': instance,
'update_node_instance.side_effect': mock_update
})
ctx = _context_with_endpoint(ep)
ctx.runtime_properties['foo'] = 42
ctx.update()
ep.update_node_instance.assert_called_once_with(instance)
def test_update_conflict_no_handler(self):
"""Version conflict without a handler function aborts the operation."""
instance = NodeInstance('id', 'node_id')
ep = mock.Mock(**{
'get_node_instance.return_value': instance,
'update_node_instance.side_effect': self.ERR_CONFLICT
})
ctx = _context_with_endpoint(ep)
ctx.runtime_properties['foo'] = 42
try:
ctx.update()
except CloudifyClientError as e:
self.assertEqual(409, e.status_code)
else:
self.fail('ctx.update() has hidden the 409 error')
def test_update_conflict_simple_handler(self):
"""On a conflict, the handler will be called until it succeeds.
The simple handler function in this test will just increase the
runtime property value by 1 each call. When the value reaches 5,
the mock update method will at last allow it to save.
"""
# each next call of the mock .get_node_instance will return subsequent
# instances: each time the runtime property is changed
instances = [NodeInstance('id', 'node_id', {'value': i})
for i in range(5)]
def mock_update(instance):
if instance.runtime_properties.get('value', 0) < 5:
raise self.ERR_CONFLICT
ep = mock.Mock(**{
'get_node_instance.side_effect': instances,
'update_node_instance.side_effect': mock_update
})
ctx = _context_with_endpoint(ep)
ctx.runtime_properties['value'] = 1
def _handler(previous, next_props):
# the "previous" argument is always the props as they were before
# .update() was called
self.assertEqual(previous, {'value': 1})
return {'value': next_props['value'] + 1}
handler = mock.Mock(side_effect=_handler) # Mock() for recording calls
ctx.update(handler)
self.assertEqual(5, len(handler.mock_calls))
self.assertEqual(5, len(ep.update_node_instance.mock_calls))
class TestPropertiesUpdateDefaultMergeHandler(unittest.TestCase):
ERR_CONFLICT = CloudifyClientError('conflict', status_code=409)
def test_merge_handler_noconflict(self):
"""The merge builtin handler adds properties that are not present.
If a property was added locally, but isn't in the storage version,
it can be added.
"""
instance = NodeInstance('id', 'node_id', {'value': 1})
def mock_update(instance):
# we got both properties merged - the locally added one
# and the server one
self.assertEqual({'othervalue': 1, 'value': 1},
instance.runtime_properties)
ep = mock.Mock(**{
'get_node_instance.return_value': instance,
'update_node_instance.side_effect': mock_update
})
ctx = _context_with_endpoint(ep)
ctx.runtime_properties['othervalue'] = 1
ctx.update(conflict_handlers.simple_merge_handler)
ep.update_node_instance.assert_called_once_with(instance)
def test_merge_handler_repeated_property(self):
"""Merge handler won't overwrite already existing properties.
First fetch returns value=1; locally change that to 2 and try to
update. However server says that's a conflict, and now says value=5.
Merge handler decides it can't merge and errors out.
"""
instance = NodeInstance('id', 'node_id', {'value': 1})
ep = mock.Mock(**{
'get_node_instance.return_value': instance,
'update_node_instance.side_effect': self.ERR_CONFLICT
})
ctx = _context_with_endpoint(ep)
ctx.runtime_properties['value'] = 2
# in the meantime, server's version changed! value is now 5
ep.get_node_instance.return_value = NodeInstance('id', 'node_id',
{'value': 5})
try:
ctx.update(conflict_handlers.simple_merge_handler)
except ValueError:
pass
else:
self.fail('merge handler should fail to merge repeated properties')
self.assertEqual(1, len(ep.update_node_instance.mock_calls))
def test_merge_handler_conflict_resolved(self):
"""Merge handler can resolve conflicts, adding new properties.
First fetch returns instance without the 'value' property.
Handler adds the locally-added 'othervalue' and tries updating.
That's a conflict, because now the server version has the 'value'
property. Handler refetches, and is able to merge.
"""
instances = [NodeInstance('id', 'node_id'),
NodeInstance('id', 'node_id', {'value': 1})]
def mock_update(instance):
if 'value' not in instance.runtime_properties:
raise self.ERR_CONFLICT
self.assertEqual({'othervalue': 1, 'value': 1},
instance.runtime_properties)
ep = mock.Mock(**{
'get_node_instance.side_effect': instances,
'update_node_instance.side_effect': mock_update
})
ctx = _context_with_endpoint(ep)
ctx.runtime_properties['othervalue'] = 1
# at this point we don't know about the 'value' property yet
ctx.update(conflict_handlers.simple_merge_handler)
self.assertEqual(2, len(ep.update_node_instance.mock_calls))
@operation
def get_template(ctx, testing, **_):
resource = 'empty'
rendering_tests_demo_conf = 'resources/for_template_rendering_tests.conf'
if testing == 'get_resource_with_ctx':
try:
resource = ctx.get_resource_and_render(
rendering_tests_demo_conf,
template_variables={'ctx': ctx})
except exceptions.NonRecoverableError:
print 'caught expected exception'
resource = 'failed'
if testing == 'get_resource_no_template':
resource = ctx.get_resource_and_render(rendering_tests_demo_conf)
if testing == 'get_resource_empty_template':
resource = ctx.get_resource_and_render(rendering_tests_demo_conf,
template_variables={})
if testing == 'get_resource':
resource = ctx.get_resource_and_render(
rendering_tests_demo_conf,
template_variables={'key': 'value'})
ctx.instance.runtime_properties['resource'] = resource
@operation
def download_template(ctx, testing, **_):
resource = 'empty'
rendering_tests_demo_conf = 'resources/for_template_rendering_tests.conf'
if testing == 'download_resource_with_ctx':
try:
resource = ctx.download_resource_and_render(
rendering_tests_demo_conf,
template_variables={'ctx': ctx})
except exceptions.NonRecoverableError:
print 'caught expected exception'
resource = 'failed'
if testing == 'download_resource_no_template':
resource = ctx.download_resource_and_render(
rendering_tests_demo_conf)
if testing == 'download_resource_empty_template':
resource = ctx.download_resource_and_render(
rendering_tests_demo_conf,
template_variables={})
if testing == 'download_resource':
resource = ctx.download_resource_and_render(
rendering_tests_demo_conf,
template_variables={'key': 'value'})
ctx.instance.runtime_properties['resource'] = resource
@operation
def get_node_type(ctx, **kwargs):
ctx.instance.runtime_properties['type'] = ctx.node.type
ctx.instance.runtime_properties['type_hierarchy'] = ctx.node.type_hierarchy
| cloudify-cosmo/cloudify-plugins-common | cloudify/tests/test_context.py | Python | apache-2.0 | 26,770 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
from __future__ import (unicode_literals, absolute_import,
division, print_function)
import logging
from django.core.management.base import BaseCommand
from optparse import make_option
from py3compat import PY2
from snisi_core.models.Entities import AdministrativeEntity as AEntity
if PY2:
import unicodecsv as csv
else:
import csv
logger = logging.getLogger(__name__)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-f',
help='CSV file',
action='store',
dest='filename'),
)
def handle(self, *args, **options):
headers = ['name', 'region', 'cercle_commune', 'commune_quartier']
f = open(options.get('filename'), 'w')
csv_writer = csv.DictWriter(f, fieldnames=headers)
csv_writer.writeheader()
csv_writer.writerow({
'name': "label",
'region': "Région",
'cercle_commune': "Cercle",
'commune_quartier': "Commune",
})
for region in AEntity.objects.filter(type__slug='region'):
logger.info(region)
is_bko = region.name == 'BAMAKO'
for cercle in AEntity.objects.filter(parent=region):
logger.info(cercle)
for commune in AEntity.objects.filter(parent=cercle):
logger.info(commune)
if not is_bko:
csv_writer.writerow({
'name': "choice_label",
'region': region.name,
'cercle_commune': cercle.name,
'commune_quartier': commune.name
})
continue
for vfq in AEntity.objects.filter(parent=commune):
for v in (region, cercle, commune, vfq):
if not len(v.name.strip()):
continue
csv_writer.writerow({
'name': "choice_label",
'region': region.name,
'cercle_commune': commune.name,
'commune_quartier': vfq.name
})
f.close()
| yeleman/snisi | snisi_maint/management/commands/entities_to_cascades.py | Python | mit | 2,414 | 0 |
#!/usr/bin/env python
# Copyright 2015 Jason Edelman <jason@networktocode.com>
# Network to Code, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DOCUMENTATION = '''
---
module: ntc_reboot
short_description: Reboot a network device.
description:
- Reboot a network device, optionally on a timer.
- Supported platforms include Cisco Nexus switches with NX-API, Cisco IOS switches or routers, Arista switches with eAPI.
Notes:
- The timer is only supported for IOS devices.
author: Jason Edelman (@jedelman8)
version_added: 1.9.2
requirements:
- pyntc
options:
platform:
description:
- Switch platform
required: true
choices: ['cisco_nxos_nxapi', 'arista_eos_eapi', 'cisco_ios']
timer:
description:
- Time in minutes after which the device will be rebooted.
required: false
default: null
confirm:
description:
- Safeguard boolean. Set to true if you're sure you want to reboot.
required: false
default: false
host:
description:
- Hostame or IP address of switch.
required: true
username:
description:
- Username used to login to the target device
required: true
password:
description:
- Password used to login to the target device
required: true
secret:
description:
- Enable secret for devices connecting over SSH.
required: false
transport:
description:
- Transport protocol for API-based devices.
required: false
default: https
choices: ['http', 'https']
port:
description:
- TCP/UDP port to connect to target device. If omitted standard port numbers will be used.
80 for HTTP; 443 for HTTPS; 22 for SSH.
required: false
default: null
ntc_host:
description:
- The name of a host as specified in an NTC configuration file.
required: false
default: null
ntc_conf_file:
description:
- The path to a local NTC configuration file. If omitted, and ntc_host is specified,
the system will look for a file given by the path in the environment variable PYNTC_CONF,
and then in the users home directory for a file called .ntc.conf.
required: false
default: null
'''
EXAMPLES = '''
- ntc_reboot:
platform: cisco_nxos_nxapi
confirm: true
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
transport: http
- ntc_reboot:
ntc_host: n9k1
ntc_conf_file: .ntc.conf
confirm: true
- ntc_file_copy:
platform: arista_eos_eapi
confirm: true
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
- ntc_file_copy:
platform: cisco_ios
confirm: true
timer: 5
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
secret: "{{ secret }}"
'''
RETURN = '''
rebooted:
description: Whether the device was instructed to reboot.
returned: success
type: boolean
sample: true
'''
try:
HAS_PYNTC = True
from pyntc import ntc_device, ntc_device_by_name
except ImportError:
HAS_PYNTC = False
PLATFORM_NXAPI = 'cisco_nxos_nxapi'
PLATFORM_IOS = 'cisco_ios_ssh'
PLATFORM_EAPI = 'arista_eos_eapi'
PLATFORM_JUNOS = 'juniper_junos_netconf'
def main():
module = AnsibleModule(
argument_spec=dict(
platform=dict(choices=[PLATFORM_NXAPI, PLATFORM_IOS, PLATFORM_EAPI, PLATFORM_JUNOS],
required=False),
host=dict(required=False),
username=dict(required=False, type='str'),
password=dict(required=False, type='str'),
secret=dict(required=False),
transport=dict(required=False, choices=['http', 'https']),
port=dict(required=False, type='int'),
ntc_host=dict(required=False),
ntc_conf_file=dict(required=False),
confirm=dict(required=False, default=False, type='bool', choices=BOOLEANS),
timer=dict(requred=False, type='int'),
),
mutually_exclusive=[['host', 'ntc_host'],
['ntc_host', 'secret'],
['ntc_host', 'transport'],
['ntc_host', 'port'],
['ntc_conf_file', 'secret'],
['ntc_conf_file', 'transport'],
['ntc_conf_file', 'port'],
],
required_one_of=[['host', 'ntc_host']],
required_together=[['host', 'username', 'password', 'platform']],
supports_check_mode=False
)
if not HAS_PYNTC:
module.fail_json(msg='pyntc Python library not found.')
platform = module.params['platform']
host = module.params['host']
username = module.params['username']
password = module.params['password']
ntc_host = module.params['ntc_host']
ntc_conf_file = module.params['ntc_conf_file']
transport = module.params['transport']
port = module.params['port']
secret = module.params['secret']
if ntc_host is not None:
device = ntc_device_by_name(ntc_host, ntc_conf_file)
else:
kwargs = {}
if transport is not None:
kwargs['transport'] = transport
if port is not None:
kwargs['port'] = port
if secret is not None:
kwargs['secret'] = secret
device_type = platform
device = ntc_device(device_type, host, username, password, **kwargs)
confirm = module.params['confirm']
timer = module.params['timer']
if not confirm:
module.fail_json(msg='confirm must be set to true for this module to work.')
supported_timer_platforms = [PLATFORM_IOS, PLATFORM_JUNOS]
if timer is not None \
and device.device_type not in supported_timer_platforms:
module.fail_json(msg='Timer parameter not supported on platform %s.' % platform)
device.open()
changed = False
rebooted = False
if timer is not None:
device.reboot(confirm=True, timer=timer)
else:
device.reboot(confirm=True)
changed = True
rebooted = True
device.close()
module.exit_json(changed=changed, rebooted=rebooted)
from ansible.module_utils.basic import *
main()
| joergullrich/virl-lab | library/ntc_reboot.py | Python | gpl-3.0 | 6,997 | 0.001572 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import jsonfield.fields
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Payload',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('payload', jsonfield.fields.JSONField()),
('http_error', models.IntegerField()),
('messages', jsonfield.fields.JSONField()),
('date', models.DateTimeField(default=django.utils.timezone.now)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('github_full_name', models.CharField(max_length=200)),
('trigger_word', models.CharField(default=b'Headsup', max_length=100)),
('case_sensitive_trigger_word', models.BooleanField(default=False)),
('github_webhook_secret', models.CharField(max_length=100)),
('send_to', models.TextField()),
('send_cc', models.TextField(null=True, blank=True)),
('send_bcc', models.TextField(null=True, blank=True)),
('cc_commit_author', models.BooleanField(default=False)),
('on_tag_only', models.BooleanField(default=False)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('creator', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='payload',
name='project',
field=models.ForeignKey(to='base.Project', null=True),
),
]
| peterbe/headsupper | headsupper/base/migrations/0001_initial.py | Python | mpl-2.0 | 2,093 | 0.002389 |
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import Table, Column, Integer, ForeignKey
from sqlalchemy.orm import relationship
from models.base import Base, get_or_create
db = SQLAlchemy()
class Group(Base):
__tablename__ = 'group'
id = db.Column(db.String(50), unique=True, primary_key=True)
subscribers = db.relationship('GroupSubscription', back_populates='group')
members = db.relationship('GroupMembership', back_populates='group')
def __init__(self, id):
self.id = id
@property
def name(self):
return self.id | blackmad/snippets | models/group.py | Python | apache-2.0 | 556 | 0.017986 |
# wikirandom.py: Functions for downloading random articles from Wikipedia
#
# Copyright (C) 2010 Matthew D. Hoffman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, urllib2, re, string, time, threading
def get_random_wikipedia_article():
"""
Downloads a randomly selected Wikipedia article (via
http://en.wikipedia.org/wiki/Special:Random) and strips out (most
of) the formatting, links, etc.
This function is a bit simpler and less robust than the code that
was used for the experiments in "Online VB for LDA."
"""
failed = True
while failed:
articletitle = None
failed = False
try:
req = urllib2.Request('http://en.wikipedia.org/wiki/Special:Random',
None, { 'User-Agent' : 'x'})
f = urllib2.urlopen(req)
while not articletitle:
line = f.readline()
result = re.search(r'title="Edit this page" href="/w/index.php\?title=(.*)\&action=edit" /\>', line)
if (result):
articletitle = result.group(1)
break
elif (len(line) < 1):
sys.exit(1)
req = urllib2.Request('http://en.wikipedia.org/w/index.php?title=Special:Export/%s&action=submit' \
% (articletitle),
None, { 'User-Agent' : 'x'})
f = urllib2.urlopen(req)
all = f.read()
except (urllib2.HTTPError, urllib2.URLError):
print 'oops. there was a failure downloading %s. retrying...' \
% articletitle
failed = True
continue
print 'downloaded %s. parsing...' % articletitle
try:
all = re.search(r'<text.*?>(.*)</text', all, flags=re.DOTALL).group(1)
all = re.sub(r'\n', ' ', all)
all = re.sub(r'\{\{.*?\}\}', r'', all)
all = re.sub(r'\[\[Category:.*', '', all)
all = re.sub(r'==\s*[Ss]ource\s*==.*', '', all)
all = re.sub(r'==\s*[Rr]eferences\s*==.*', '', all)
all = re.sub(r'==\s*[Ee]xternal [Ll]inks\s*==.*', '', all)
all = re.sub(r'==\s*[Ee]xternal [Ll]inks and [Rr]eferences==\s*', '', all)
all = re.sub(r'==\s*[Ss]ee [Aa]lso\s*==.*', '', all)
all = re.sub(r'http://[^\s]*', '', all)
all = re.sub(r'\[\[Image:.*?\]\]', '', all)
all = re.sub(r'Image:.*?\|', '', all)
all = re.sub(r'\[\[.*?\|*([^\|]*?)\]\]', r'\1', all)
all = re.sub(r'\<.*?>', '', all)
except:
# Something went wrong, try again. (This is bad coding practice.)
print 'oops. there was a failure parsing %s. retrying...' \
% articletitle
failed = True
continue
return(all, articletitle)
class WikiThread(threading.Thread):
articles = list()
articlenames = list()
lock = threading.Lock()
def run(self):
(article, articlename) = get_random_wikipedia_article()
WikiThread.lock.acquire()
WikiThread.articles.append(article)
WikiThread.articlenames.append(articlename)
WikiThread.lock.release()
def get_random_wikipedia_articles(n):
"""
Downloads n articles in parallel from Wikipedia and returns lists
of their names and contents. Much faster than calling
get_random_wikipedia_article() serially.
"""
maxthreads = 8
WikiThread.articles = list()
WikiThread.articlenames = list()
wtlist = list()
for i in range(0, n, maxthreads):
'''
YEGIN: commented out for test
'''
# print 'downloaded %d/%d articles...' % (i, n)
for j in range(i, min(i+maxthreads, n)):
wtlist.append(WikiThread())
wtlist[len(wtlist)-1].start()
for j in range(i, min(i+maxthreads, n)):
wtlist[j].join()
# '''
# YEGIN: added for test
# '''
# print WikiThread.articles
# print WikiThread.articlenames
return (WikiThread.articles, WikiThread.articlenames)
if __name__ == '__main__':
t0 = time.time()
(articles, articlenames) = get_random_wikipedia_articles(1)
for i in range(0, len(articles)):
print articlenames[i]
t1 = time.time()
print 'took %f' % (t1 - t0)
| ygenc/onlineLDA | wikirandom.py | Python | gpl-3.0 | 5,021 | 0.00478 |
# Copyright 2017 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import nova.conf
hyperv_opts = [
cfg.IntOpt('evacuate_task_state_timeout',
default=600,
help='Number of seconds to wait for an instance to be '
'evacuated during host maintenance.'),
cfg.IntOpt('cluster_event_check_interval',
deprecated_for_removal=True,
deprecated_since="5.0.1",
default=2),
cfg.BoolOpt('instance_automatic_shutdown',
default=False,
help='Automatically shutdown instances when the host is '
'shutdown. By default, instances will be saved, which '
'adds a disk overhead. Changing this option will not '
'affect existing instances.'),
cfg.IntOpt('instance_live_migration_timeout',
default=300,
min=0,
help='Number of seconds to wait for an instance to be '
'live migrated (Only applies to clustered instances '
'for the moment).'),
cfg.IntOpt('max_failover_count',
default=1,
min=1,
help="The maximum number of failovers that can occur in the "
"failover_period timeframe per VM. Once a VM's number "
"failover reaches this number, the VM will simply end up "
"in a Failed state."),
cfg.IntOpt('failover_period',
default=6,
min=1,
help="The number of hours in which the max_failover_count "
"number of failovers can occur."),
cfg.BoolOpt('recreate_ports_on_failover',
default=True,
help="When enabled, the ports will be recreated for failed "
"over instances. This ensures that we're not left with "
"a stale port."),
cfg.BoolOpt('auto_failback',
default=True,
help="Allow the VM the failback to its original host once it "
"is available."),
cfg.BoolOpt('force_destroy_instances',
default=False,
help="If this option is enabled, instance destroy requests "
"are executed immediately, regardless of instance "
"pending tasks. In some situations, the destroy "
"operation will fail (e.g. due to file locks), "
"requiring subsequent retries."),
cfg.BoolOpt('move_disks_on_cold_migration',
default=True,
help="Move the instance files to the instance dir configured "
"on the destination host. You may consider disabling "
"this when using multiple CSVs or shares and you wish "
"the source location to be preserved."),
]
coordination_opts = [
cfg.StrOpt('backend_url',
default='file:///C:/OpenStack/Lock',
help='The backend URL to use for distributed coordination.'),
]
CONF = nova.conf.CONF
CONF.register_opts(coordination_opts, 'coordination')
CONF.register_opts(hyperv_opts, 'hyperv')
def list_opts():
return [('coordination', coordination_opts),
('hyperv', hyperv_opts)]
| openstack/compute-hyperv | compute_hyperv/nova/conf.py | Python | apache-2.0 | 3,933 | 0 |
import unittest
import os
from bubi.mapper import DefaultMapper
class DefaultMapperTestCase(unittest.TestCase):
def setUp(self):
self.mapper = DefaultMapper(colorize=False)
self.color_mapper = DefaultMapper(colorize=True)
# test the map method without color
def test_mapping(self):
self.assertEquals(self.mapper.map('a'), '+')
self.assertEquals(self.mapper.map('z'), '+')
self.assertEquals(self.mapper.map('0'), '-')
self.assertEquals(self.mapper.map('9'), '-')
self.assertEquals(self.mapper.map('\n'), '\n')
self.assertEquals(self.mapper.map('@'), '.')
# test the map method with color
def test_color_mapping(self):
self.assertEquals(self.color_mapper.map('a'), '\033[32m+\033[0m')
self.assertEquals(self.color_mapper.map('z'), '\033[32m+\033[0m')
self.assertEquals(self.color_mapper.map('0'), '\033[31m-\033[0m')
self.assertEquals(self.color_mapper.map('9'), '\033[31m-\033[0m')
self.assertEquals(self.color_mapper.map('\n'), '\n')
self.assertEquals(self.color_mapper.map('@'), '.')
| be-ndee/bubi-lang | tests/test_default_mapper.py | Python | mit | 1,127 | 0 |
# Copyright 2009-2015 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from requestbuilder import Arg
from requestbuilder.response import PaginatedResponse
from euca2ools.commands.iam import IAMRequest, arg_account_name
from euca2ools.commands.iam.getaccountpolicy import GetAccountPolicy
class ListAccountPolicies(IAMRequest):
DESCRIPTION = ('[Eucalyptus only] List one or all policies '
'policies attached to an account')
ARGS = [arg_account_name(help='''name or ID of the account owning
the policies to list (required)'''),
Arg('-p', '--policy-name', metavar='POLICY', route_to=None,
help='display a specific policy'),
Arg('-v', '--verbose', action='store_true', route_to=None,
help='''display the contents of the resulting policies (in
addition to their names)'''),
Arg('--pretty-print', action='store_true', route_to=None,
help='''when printing the contents of policies, reformat them
for easier reading''')]
LIST_TAGS = ['PolicyNames']
def main(self):
return PaginatedResponse(self, (None,), ('PolicyNames',))
def prepare_for_page(self, page):
# Pages are defined by markers
self.params['Marker'] = page
def get_next_page(self, response):
if response.get('IsTruncated') == 'true':
return response['Marker']
def print_result(self, result):
if self.args.get('policy_name'):
# Look for the specific policy the user asked for
for policy_name in result.get('PolicyNames', []):
if policy_name == self.args['policy_name']:
if self.args['verbose']:
self.print_policy(policy_name)
else:
print policy_name
break
else:
for policy_name in result.get('PolicyNames', []):
print policy_name
if self.args['verbose']:
self.print_policy(policy_name)
def print_policy(self, policy_name):
req = GetAccountPolicy(
service=self.service, AccountName=self.args['AccountName'],
PolicyName=policy_name, pretty_print=self.args['pretty_print'])
response = req.main()
req.print_result(response)
| vasiliykochergin/euca2ools | euca2ools/commands/iam/listaccountpolicies.py | Python | bsd-2-clause | 3,688 | 0 |
'''Base module to handle the collection and the output of statistical data.'''
import logging
import time
import multiprocessing as mp
import queue
from collections import Counter
log = logging.getLogger(__name__)
current_milli_time = lambda: int(round(time.time() * 1000))
def is_number(val):
'''Function to check if the value is a number.'''
try:
float(val)
return True
except ValueError:
return False
class Logstats(object):
'''This class briges the data in input (provided by the `stats` param)
to a generic output (`log`, by default).
'''
def __init__(self, msg=None, emit_func=None, logger=log, level='INFO',
timeout=1, queue=None):
'''Initialize the instance.
If `emit_func` is defined, `logger` and `level` are ignored.
Keyword arguments:
stats -- a dict-like object storing values to output
msg -- a string to use to format `stats` (by default it outputs a
list of comma separated values)
emit_func -- a function to emit the formatted output
(default: logging.log)
logger -- the logger to use to log the formatted output (default:
a `log` instance
level -- the log level (default: INFO)
'''
self.stats = Counter()
self.msg = msg
self.logger = logger
self.level = level
self.old_stats = {}
self.emit_func = emit_func
self.last = current_milli_time()
self.timeout = timeout
self.queue = queue
self.main_queue = None
if not logger.isEnabledFor(logging.getLevelName(level)):
logger.warning('Logger is not enabled to log at level {}.'.format(level))
def __getitem__(self, key):
return self.stats[key]
def __setitem__(self, key, val):
self.stats[key] = val
def update(self, *args, **kwargs):
self.stats.update(*args, **kwargs)
def _get_speed(self, new, old, delta):
return int(round(float((new - old)) / (delta / 1e3)))
def _consume_queue(self):
if self.main_queue:
while True:
try:
self.stats.update(self.main_queue.get_nowait())
except queue.Empty:
return
def get_stats(self, delta):
self._consume_queue()
stats = self.stats
if hasattr(self.stats, '__call__'):
stats = self.stats(delta)
else:
stats = stats.copy()
speed = dict(('{}.speed'.format(k),
self._get_speed(stats[k],
self.old_stats.get(k, 0),
delta))
for k in stats if is_number(stats[k]))
self.old_stats = stats
stats.update(speed)
return stats
def get_child(self):
if not self.main_queue:
self.main_queue = mp.Queue()
return Logstats(queue=self.main_queue)
def format_msg(self, stats):
if self.msg:
msg = self.msg.format(**stats)
else:
msg = ', '.join('{}: {}'.format(k, stats[k])
for k in sorted(stats))
return msg
def emit(self, msg):
if self.emit_func:
self.emit_func(msg)
else:
self.logger.log(getattr(logging, self.level), msg)
def __call__(self):
if self.queue:
self.queue.put(self.stats)
self.stats = Counter()
else:
delta = current_milli_time() - self.last
stats = self.get_stats(delta)
if stats:
self.emit(self.format_msg(stats))
self.last = current_milli_time()
| vrde/logstats | logstats/base.py | Python | mit | 3,761 | 0.000532 |
# Message Analyzer
# Demonstrates the len() function and the in operator
message = input("Enter a message: ")
print("\nThe length of your message is:", len(message))
print("\nThe most common letter in the English language, 'e',")
if "e" in message:
print("is in your message.")
else:
print("is not in your message.")
input("\n\nPress the enter key to exit.")
| rob-nn/python | first_book/message_analyzer.py | Python | gpl-2.0 | 365 | 0.005479 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.