[Dataset columns: text (string) | repo_name (string) | path (string) | language (string) | license (string) | size (int64) | score (float64)]
# standard library
from functools import reduce
import imghdr
### IMPORT THE APPLICABLE SETTINGS SET IN manage.py ###
from manage import USED_SETTINGS
import importlib
used_settings = importlib.import_module(USED_SETTINGS)
settings_media_root = used_settings.MEDIA_ROOT
settings_media_url = used_settings.MEDIA_URL
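# (Assumption for illustration: USED_SETTINGS holds a dotted module path such as
#  'ticketsystem.settings' - hypothetical name - so importlib can load it like any settings module.)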
### REGULAR IMPORTS ###
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.db.models import Q
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import Group
from django.contrib.auth import authenticate, login, logout
from django.forms.models import model_to_dict
from django.utils import timezone
from django.core.mail import send_mail, get_connection
from django.http.response import HttpResponseNotAllowed
from tickets.models import Ticket, Measures
from tickets.forms import (
EnterTicketForm, LoginForm, DetailForm,
    EditableDataForm, ClosingDataForm, SearchForm,
ClosedDataForm, CompactMeasureForm, MeasureForm
)
from tickets.field_constants import FieldConstants
# local constants
LOGIN_URL = '/tickets/login/'
STDRT_REDIRECT_URL = '/tickets/overview/'
# view function for user login
'''
#parameter: HttpRequest request
#URL:'tickets/login'
'''
def login_user(request):
# renewal of session expiration
# request.session.set_expiry(settings.COOKIE_EXP_AGE)
# initialize variables error and login_user
error = False
logged_in_user = None
infomsg = ''
# if form is submitted in a post request
if request.method == 'POST':
form = LoginForm(request.POST)
# if POST data is valid in LoginForm
if form.is_valid():
# logout currently logged in user
if request.user.is_authenticated:
logout(request)
# get user name and password from POST data and try to authenticate user
username = request.POST['username']
password = request.POST['password']
user = authenticate(request, username=username, password=password)
# if user is authenticated: login user
if user is not None:
login(request, user)
# if the login was redirected with parameter 'next' (e.g. via @login_required decorator)
if request.GET.get('next'):
return HttpResponseRedirect(request.GET.get('next'))
# default redirect to /tickets/overview/
else:
return HttpResponseRedirect(STDRT_REDIRECT_URL)
# reset the form and set error to true
else:
error = True
form = LoginForm()
# if called normally (with GET-Request)
else:
# display currently logged in user, if existent
if request.user.is_authenticated:
logged_in_user = request.user
# set empty login form
form = LoginForm()
infomsg = 'Login erforderlich!'
return render(
request, 'ticket_login.djhtml',
{'form':form,
'error':error,
'login_user':logged_in_user,
'infomsg':infomsg}
)
# view function for logging a user out and redirecting to the login page
'''
#parameter: HttpRequest request
#URL:'tickets/logout'
'''
def logout_user(request):
if request.user.is_authenticated:
logout(request)
return HttpResponseRedirect('/tickets/login/')
# view function for creating a new ticket
'''
#lets the user choose sector and category
#and requires input for subject and description
#parameter: HttpRequest request
#URL:'tickets/enter'
'''
@login_required(login_url=LOGIN_URL)
def enter_ticket(request):
# init infomsg as empty string
infomsg = ''
if request.method == 'POST':
# set form as EnterTicketForm-Object with the POST-data
form = EnterTicketForm(request.POST, request.FILES)
# create an entry in the database with the entered data
if form.is_valid():
# get cleaned data and current system time
cd = form.cleaned_data
now = timezone.now()
# initialize img as empty string, fileErr as False
img = ''
fileErr = False
# check if an image file was uploaded and if so set img to the file
if request.FILES:
if imghdr.what(request.FILES['image']):
img = request.FILES['image']
# if a file was uploaded but is not recognized as an image file
else:
# form: form to be displayed for ticket entering; infomsg: displayed infomsg
infomsg = 'Dateifehler'
fileErr = True
return render(
request, 'ticket_enter.djhtml',
{'form':form,
'infomsg':infomsg,
'fileErr':fileErr}
)
cd['sector'] = Group.objects.get(name=cd['sector'])
# initialize ticket object t with form data
# ticket id increments automatically
            # fields (apart from closingdatetime) mustn't be NULL -> initialized with '' (empty String)
t = Ticket(sector=cd['sector'], category=cd['category'],
subject=cd['subject'], description=cd['description'],
creationdatetime=now, status='open',
# TODO:get username from form/request-data?
creator=request.user.username,
responsible_person=None,
comment='', keywords='',
image=img
)
# save data set to database
t.save()
# reset form and display thank-you-message
infomsg = 'Ticket erfolgreich erstellt!'
form = EnterTicketForm()
#if requested with GET-Method
else:
# initialize empty form
form = EnterTicketForm()
# form: form to be displayed for ticket entering; infomsg: displayed infomsg
return render(
request, 'ticket_enter.djhtml',
{'form':form,
'infomsg':infomsg}
)
# view function for displaying a user's tickets
'''
#displays, on the left, a list of open tickets for all groups/sectors the user is in (NO responsible_person specified)
#and a list of open tickets for which the user is entered as responsible_person
#parameter: HttpRequest request
#URL:'tickets/overview'
'''
@login_required(login_url=LOGIN_URL)
def show_ticket_list(request):
# renewal of session expiration
# request.session.set_expiry(COOKIE_EXP_AGE)
# build list of all groups the user is part of
groups = []
for group in request.user.groups.all():
groups.append(group)
    # search for open tickets assigned to the requesting user (responsible_person)
query_user = Q(status='open') & Q(responsible_person=request.user)
tickets_user = Ticket.objects.filter(query_user)
# get column headings/names from Ticket model
labels_dict = FieldConstants.TICKET_FIELD_LABELS
    # search for open, unassigned tickets in the groups the user is part of
query_group = Q(status='open') & Q(responsible_person=None) & Q(sector__in=groups)
tickets_group = Ticket.objects.filter(query_group)
# initialize infomsg and set it according to GET['status']
infomsg = ''
    if request.GET.get('status'):
if request.GET['status'] == 'closed':
infomsg = 'Ticket abgeschlossen!'
# return the template with the fetched data on display
return render(
request, 'ticket_overview.djhtml',
{'tickets_group':tickets_group,
'tickets_user':tickets_user,
'labels_dict':labels_dict,
'infomsg':infomsg}
)
# view function for viewing a ticket's data
'''
#submit options for:
#back to overview, change to editing, change to closing the ticket(redirect)
#parameter: HttpRequest request, ticketid (\d{1,4} -> 4 digits from urls.py)
#URL:'tickets/<ticketid>/'
'''
@login_required(login_url=LOGIN_URL)
def show_ticket_detail(request, ticketid):
# renewal of session expiration
# request.session.set_expiry(COOKIE_EXP_AGE)
if request.method == 'GET':
# query for ticket with given id
try:
ticket = Ticket.objects.get(ticketid=str(ticketid))
# catch possible exceptions
except Exception as e:
if isinstance(e, Ticket.DoesNotExist):
return render(
request, 'ticket_error.djhtml',
{'errormsg':'No Ticket found for this ID'}
)
elif isinstance(e, Ticket.MultipleObjectsReturned):
return render(
request, 'ticket_error.djhtml',
{'errormsg':'More than one ticket found for this ID'}
)
else:
return render(
request, 'ticket_error.djhtml',
                    {'errormsg':'An unknown error occurred'}
)
else:
            # convert ticket to dictionary with its data
            ticket_dict = model_to_dict(ticket)
            # set sector to String representation in ticket_dict
            ticket_dict['sector'] = ticket.sector
# build list of all groups the user is part of
groups = []
for group in request.user.groups.all():
groups.append(group)
# if user is ticket creator or has permissions to change tickets
if (ticket.sector in groups and
(request.user.username == ticket_dict['creator']
or request.user.has_perm('tickets.change_ticket'))
):
# store if the ticket is already closed
if ticket_dict['status'] == 'closed':
closed = True
else:
closed = False
detailform = DetailForm(initial=ticket_dict)
if closed:
editform = ClosedDataForm(initial=ticket_dict)
else:
editform = EditableDataForm(initial=ticket_dict)
image = ticket_dict['image']
# build list of headers for compact display of measures linked to this ticket
headers = []
for key in FieldConstants.COMPACT_MEASURE_FIELD_LABELS:
headers.append(FieldConstants.COMPACT_MEASURE_FIELD_LABELS[key])
# build list of compact forms displayed as rows for measures linked to this ticket
measures = []
ticket_measures = Measures.objects.filter(ticket=ticket)
for measure in ticket_measures:
measures.append(CompactMeasureForm(initial=model_to_dict(measure)))
#initialize infomsg and set it according to GET['status']
infomsg = ''
                if request.GET.get('status'):
if request.GET['status'] == 'added':
infomsg = 'Maßnahme hinzugefügt!'
return render(
request, 'ticket_detail.djhtml',
{'infomsg':infomsg,
'detailform':detailform,
'editform':editform,
'hasImage':image,
'editable':False,
'is_Form':False,
'headers':headers,
'measures':measures,
'closed':closed}
)
# if user doesn't have permission to view/change ticket data, display error page with according message
else:
return render(
request, 'ticket_error.djhtml',
{'errormsg':'Sie haben keinen Zugriff auf das Ticket!'}
)
# deny any request method except GET
else:
# send response for 405: Method not allowed
        return HttpResponseNotAllowed(['GET'])
# view function for editing a ticket's data
'''
#lets the user enter data for status, comment and keywords
#submit options for:
#back to overview, takeover(declare yourself responsible),
#save the currently entered data, closing the ticket(redirect)
#parameter: HttpRequest request, ticketid (\d{1,4} -> 4 digits from urls.py)
#URL:'tickets/<ticketid>/edit'
'''
@login_required(login_url=LOGIN_URL)
def edit_ticket_detail(request, ticketid):
# renewal of session expiration
# request.session.set_expiry(COOKIE_EXP_AGE)
# query for ticket with given id, catch possible exceptions
try:
ticket = Ticket.objects.get(ticketid=str(ticketid))
# catch possible exceptions
except Exception as e:
if isinstance(e, Ticket.DoesNotExist):
return render(
request, 'ticket_error.djhtml',
{'errormsg':'No Ticket found for this ID'}
)
elif isinstance(e, Ticket.MultipleObjectsReturned):
return render(
request, 'ticket_error.djhtml',
{'errormsg':'More than one ticket found for this ID'}
)
else:
return render(
request, 'ticket_error.djhtml',
                {'errormsg':'An unknown error occurred'}
)
else:
# build list of all groups the user is part of
groups = []
for group in request.user.groups.all():
groups.append(group)
# if user has permissions to change tickets and no other user is responsible for the ticket
if (ticket.sector in groups and
request.user.has_perm('tickets.change_ticket') and
ticket.responsible_person in [None, request.user]):
            # convert ticket to dictionary with its data
            ticket_dict = model_to_dict(ticket)
            # set sector to String representation in ticket_dict
            ticket_dict['sector'] = ticket.sector
# if ticket is closed redirect to detail view; prevents navigation to edit template via entering url
if ticket_dict['status'] == 'closed':
                return HttpResponseRedirect('/tickets/' + str(ticket_dict['ticketid']) + '/')
# build list of headers for compact display of measures linked to this ticket
headers = []
for key in FieldConstants.COMPACT_MEASURE_FIELD_LABELS:
headers.append(FieldConstants.COMPACT_MEASURE_FIELD_LABELS[key])
# GET request, display of input fields (with current data)
if request.method == 'GET':
detailform = DetailForm(initial=ticket_dict)
editform = EditableDataForm(initial=ticket_dict)
# build list of compact forms displayed as rows for measures linked to this ticket
measures = []
ticket_measures = Measures.objects.filter(ticket=ticket)
for measure in ticket_measures:
measures.append(CompactMeasureForm(initial=model_to_dict(measure)))
image = ticket_dict['image']
return render(
request, 'ticket_edit.djhtml',
{'detailform':detailform,
'editform':editform,
'hasImage':image,
'editable':True,
'is_Form':True,
'headers':headers,
'measures':measures}
)
# POST request, form was submitted, data will be validated and database updated (if input correct)
elif request.method == 'POST':
infomsg = ''
# when editing is canceled (button 'Übersicht' clicked) -> redirect
if 'cancel' in request.POST:
return HttpResponseRedirect('/tickets/overview/')
# when button 'To Details' is clicked -> redirect
elif 'back' in request.POST:
return HttpResponseRedirect('/tickets/' + ticketid + '/')
# when button 'New Measure...' was clicked -> redirect
elif 'addmeasure' in request.POST:
return HttpResponseRedirect('/tickets/' + ticketid + '/add_measure/')
# redirect to closing for when button 'Abschließen' is clicked
elif 'close' in request.POST:
return HttpResponseRedirect('/tickets/' + ticketid + '/close/')
# change responsible person to currently logged in user
elif 'takeover' in request.POST:
                    if ticket.responsible_person is None:
Ticket.objects.filter(ticketid=str(ticketid)).update(responsible_person=request.user)
infomsg = 'Ticket übernommen'
elif ticket.responsible_person != request.user:
infomsg = 'Ticketübernahme nicht möglich'
# 'refresh' ticket-object after updating in db
ticket = Ticket.objects.get(ticketid=str(ticketid))
                    # convert ticket to dictionary with its data
                    ticket_dict = model_to_dict(ticket)
                    ticket_dict['sector'] = ticket.sector
detailform = DetailForm(initial=ticket_dict)
editform = EditableDataForm(initial=ticket_dict)
image = ticket_dict['image']
# build list of compact forms displayed as rows for measures linked to this ticket
measures = []
ticket_measures = Measures.objects.filter(ticket=ticket)
for measure in ticket_measures:
measures.append(CompactMeasureForm(initial=model_to_dict(measure)))
return render(
request, 'ticket_edit.djhtml',
{'infomsg':infomsg,
'editform':editform,
'detailform':detailform,
'hasImage':image,
'editable':True,
'is_Form':True,
'headers': headers,
'measures': measures}
)
# check input data and update database when button 'Speichern'/'Save' is clicked
elif 'confirm' in request.POST:
# init form with POST data
editform = EditableDataForm(request.POST)
ticket_dict = model_to_dict(ticket)
ticket_dict['sector'] = ticket.sector
detailform = DetailForm(initial=ticket_dict)
ticket_measures = Measures.objects.filter(ticket=ticket)
measures = []
for measure in ticket_measures:
measures.append(CompactMeasureForm(initial=model_to_dict(measure)))
# check user input for validity
if editform.is_valid():
# get cleaned data and update ticket in database
cd = editform.cleaned_data
Ticket.objects.filter(ticketid=str(ticketid)).update(
status=cd['status'],
comment=cd['comment'],
keywords=cd['keywords'],
priority=cd['priority']
)
infomsg = 'Änderungen gespeichert!'
else:
infomsg = 'Fehlerhafte Eingabe(n)'
image = ticket_dict['image']
# build list of compact forms displayed as rows for measures linked to this ticket
measures = []
ticket_measures = Measures.objects.filter(ticket=ticket)
for measure in ticket_measures:
measures.append(CompactMeasureForm(initial=model_to_dict(measure)))
return render(
request, 'ticket_edit.djhtml',
{'infomsg':infomsg,
'detailform':detailform,
'editform':editform,
'hasImage':image,
'editable':True,
'is_Form':True,
'headers':headers,
'measures':measures}
)
#deny any request method except GET and POST
else:
# send response for 405: Method not allowed
                return HttpResponseNotAllowed(['GET', 'POST'])
# if user mustn't edit tickets or another user is specified as responsible_person
else:
# display error template with error description
if not request.user.has_perm('tickets.change_ticket'):
errormsg = 'Sie haben nicht die Berechtigung Tickets zu bearbeiten!'
            elif ticket.responsible_person is not None and \
ticket.responsible_person != request.user:
errormsg = 'Für dieses Ticket ist ein anderer Benutzer verantwortlich!'
else:
errormsg = 'Unbekannter Fehler bei Ticketbearbeitung (in tickets.views.edit_ticket_detail())'
return render(
request, 'ticket_error.djhtml',
{'errormsg': errormsg}
)
# view function for adding a measure to a given ticket
'''
#lets the user enter data for short and full description and the measure's result
#additionally the user has to choose the category of the solution (unsuccessful, partly, temporary, solution)
#submit option for adding the measure -> validates the data and either
#creates the measure in the database and returns to ticket details
#or displays errors in the form's fields
#parameter: HttpRequest request, ticketid (\d{1,4} -> 4 digits from urls.py)
#URL:'tickets/<ticketid>/add_measure'
'''
@login_required(login_url=LOGIN_URL)
def add_measure(request, ticketid):
# query for ticket with given id
try:
ticket = Ticket.objects.get(ticketid=str(ticketid))
# catch possible exceptions
except Exception as e:
if isinstance(e, Ticket.DoesNotExist):
return render(
request, 'ticket_error.djhtml',
                {'errormsg':'No ticket found for this ID!'}
)
elif isinstance(e, Ticket.MultipleObjectsReturned):
return render(
request, 'ticket_error.djhtml',
                {'errormsg':'More than one ticket found for this ID!'}
)
else:
return render(
request, 'ticket_error.djhtml',
{'errormsg':'Unknown error in views.add_measure'}
)
# if correct ticket was found
else:
# build list of all groups the user is part of
groups = []
for group in request.user.groups.all():
groups.append(group)
# if user has permissions to change tickets and no other user is responsible for the ticket
if (ticket.sector in groups and
request.user.has_perm('tickets.change_ticket') and
ticket.responsible_person in [None, request.user]):
# GET request display ticket_close template for user input
if request.method == 'GET':
return render(
request, 'measure_add.djhtml',
{'measureform': MeasureForm(initial={'ticketid':ticket.ticketid})}
)
elif request.method == 'POST':
if 'add' in request.POST:
                # add ticketid via a mutable copy of request.POST, since the field is only used for display in the form
POST = request.POST.copy()
POST['ticketid'] = ticket.ticketid
measureform = MeasureForm(POST)
if measureform.is_valid():
measure_cd = measureform.cleaned_data
Measures.objects.create(
ticket=ticket,
creationdatetime=timezone.now(),
shortdsc=measure_cd['shortdsc'],
dsc=measure_cd['dsc'],
result=measure_cd['result'],
isSolution=measure_cd['isSolution']
)
return HttpResponseRedirect('/tickets/' + str(ticket.ticketid) + '/?status=added')
else:
return render(
request, 'measure_add.djhtml',
{'measureform': measureform,
'infomsg':'Eingaben fehlerhaft'}
)
elif 'cancel' in request.POST:
return HttpResponseRedirect('/tickets/' + str(ticket.ticketid) + '/')
else:
                return HttpResponseNotAllowed(['GET', 'POST'])
# if user mustn't edit measures or another user is specified as responsible_person
else:
# display error template with error description
if not request.user.has_perm('tickets.change_ticket'):
errormsg = 'Sie haben nicht die Berechtigung Tickets zu bearbeiten (Maßnahmen hinzuzufügen)!'
            elif ticket.responsible_person is not None and \
ticket.responsible_person != request.user:
errormsg = 'Für dieses Ticket ist ein anderer Benutzer verantwortlich!'
else:
errormsg = 'Unbekannter Fehler bei Ticketbearbeitung (in views.add_measure)'
return render(
request, 'ticket_error.djhtml',
{'errormsg': errormsg}
)
# view function for editing specific data of an already existing measure
'''
#lets the user enter data for short and full description and the measure's result
#additionally the user has to choose the category of the solution (unsuccessful, partly, temporary, solution)
#submit option for saving the changes, cancel option for returning to ticket details
#either updates the measure in the database and re-displays it with the new data
#or displays the measure and errors in the form's fields
#parameter: HttpRequest request, measureid (\d{1,4} -> 4 digits from urls.py)
#URL:'tickets/measures/<measureid>'
'''
@login_required(login_url=LOGIN_URL)
def edit_measure(request, measureid):
# query for measure with given id
try:
measure = Measures.objects.get(measureid=str(measureid))
# catch possible exceptions
except Exception as e:
if isinstance(e, Measures.DoesNotExist):
return render(
request, 'ticket_error.djhtml',
{'errormsg':'No measure found!'}
)
elif isinstance(e, Measures.MultipleObjectsReturned):
return render(
request, 'ticket_error.djhtml',
{'errormsg':'Multiple measures found under unique ID!'}
)
else:
return render(
request, 'ticket_error.djhtml',
{'errormsg':'Unknown error in views.edit_measure!'}
)
# if correct measure was found
else:
# get the ticket to which this measure belongs
ticket = Ticket.objects.get(ticketid=measure.ticket.ticketid)
# build list of all groups the user is part of
groups = []
for group in request.user.groups.all():
groups.append(group)
# if user has permissions to change tickets and no other user is responsible for the ticket
if (ticket.sector in groups and
ticket.responsible_person in [None, request.user]):
# set fields as editable if user has the corresponding permissions
if request.user.has_perm('tickets.change_measures'):
editable = True
else:
editable = False
# display the measure in a MeasureForm with the according template
if request.method == 'GET':
measure_dict = model_to_dict(measure)
measure_dict['ticketid'] = ticket.ticketid
measureform = MeasureForm(initial=measure_dict)
return render(
request, 'measure_edit.djhtml',
{'measureform':measureform,
'editable':editable}
)
# if the form was submitted via http-POST-Request
elif request.method == 'POST':
# if cancelled, redirect to ticket details,
if 'cancel' in request.POST:
return HttpResponseRedirect('/tickets/' + str(ticket.ticketid) + '/')
# if confirmed, check the data for validity and save the changes or display the form with error messages for the input
elif 'confirm' in request.POST:
# add ticketid via a mutable copy of the post data (read only in form)
POST = request.POST.copy()
POST['ticketid'] = measure.ticket.ticketid
measureform = MeasureForm(POST)
# check input validity
if measureform.is_valid():
# get cleaned data and update changes to the corresponding fields
measureform_cd = measureform.cleaned_data
Measures.objects.filter(measureid=str(measure.measureid)).update(
shortdsc=measureform_cd['shortdsc'],
dsc=measureform_cd['dsc'],
result=measureform_cd['result'],
isSolution=measureform_cd['isSolution']
)
# 'refresh' measure object, create a new MeasureForm with the new data
measure = Measures.objects.get(measureid=str(measure.measureid))
measure_dict = model_to_dict(measure)
measure_dict['ticketid'] = measure.ticket.ticketid
measureform = MeasureForm(initial=measure_dict)
# set infomsg to 'saved changes!'
infomsg = 'Änderungen gespeichert!'
else:
# set infomsg to 'faulty input!'
infomsg = 'Fehlerhafte Eingaben!'
# render and return the according template with measureform (with new data OR error messages for faulty input)
return render(
request, 'measure_edit.djhtml',
{'measureform':measureform,
'infomsg':infomsg,
'editable':editable}
)
#deny any request method except GET and POST
else:
                return HttpResponseNotAllowed(['GET', 'POST'])
# if user mustn't edit measures or another user is specified as responsible_person
else:
# display error template with an error description
if not request.user.has_perm('tickets.change_measures'):
errormsg = 'Sie haben nicht die Berechtigung Maßnahmen zu bearbeiten!'
            elif ticket.responsible_person is not None and \
ticket.responsible_person != request.user:
errormsg = 'Für das Ticket ist ein anderer Benutzer verantwortlich!'
else:
errormsg = 'Unbekannter Fehler bei der Bearbeitung (in tickets.views.edit_measure())'
return render(
request, 'ticket_error.djhtml',
{'errormsg': errormsg}
)
# view function for closing a ticket
'''
#lets the user enter data for comment and keywords
#additional submit options for redirecting to the ticket overview and ticket editing
#submit option for closing the ticket -> validates the data and either
#updates the database and returns to the overview with a message
#or displays errors in the closing form's fields
#parameter: HttpRequest request, ticketid (\d{1,4} -> 4 digits from urls.py)
#URL:'tickets/<ticketid>/close'
'''
@login_required(login_url=LOGIN_URL)
def close_ticket(request, ticketid):
# renewal of session expiration
# request.session.set_expiry(COOKIE_EXP_AGE)
# query for ticket with given id
try:
ticket = Ticket.objects.get(ticketid=str(ticketid))
# catch possible exceptions
except Exception as e:
if isinstance(e, Ticket.DoesNotExist):
return render(
request, 'ticket_error.djhtml',
{'errormsg':'No Ticket found!'}
)
elif isinstance(e, Ticket.MultipleObjectsReturned):
return render(
request, 'ticket_error.djhtml',
{'errormsg':'Multiple tickets found for unique ID!'}
)
else:
return render(
request, 'ticket_error.djhtml',
{'errormsg':'Unknown error in views.close_ticket!'}
)
# if correct ticket was found
else:
# build list of all groups the user is part of
groups = []
for group in request.user.groups.all():
groups.append(group)
# if user has permissions to change tickets and no other user is responsible for the ticket
if (ticket.sector in groups and
request.user.has_perm('tickets.change_ticket') and
ticket.responsible_person in [None, request.user]):
            # convert ticket to dictionary with its data
            ticket_dict = model_to_dict(ticket)
            # set sector to String representation in ticket_dict
            ticket_dict['sector'] = ticket.sector
# if ticket is closed redirect to detail view; prevents navigation to edit template via entering url
if ticket_dict['status'] == 'closed':
                return HttpResponseRedirect('/tickets/' + str(ticket_dict['ticketid']) + '/')
# build list of headers for display of measures linked to this ticket
headers = []
for key in FieldConstants.COMPACT_MEASURE_FIELD_LABELS:
headers.append(FieldConstants.COMPACT_MEASURE_FIELD_LABELS[key])
# GET request display ticket_close template for user input
if request.method == 'GET':
# convert ticket to dictionary, for display set status to closed ('Abgeschlossen')
ticket_dict['status'] = 'Abgeschlossen'
ticket_dict['sector'] = ticket.sector
# build list of compact forms displayed as rows for measures linked to this ticket
measures = []
ticket_measures = Measures.objects.filter(ticket=ticket)
for measure in ticket_measures:
measures.append(CompactMeasureForm(initial=model_to_dict(measure)))
detailform = DetailForm(initial=ticket_dict)
closeform = ClosingDataForm(initial=ticket_dict)
image = ticket_dict['image']
return render(
request, 'ticket_close.djhtml',
{'detailform':detailform,
'editform':closeform,
'hasImage':image,
'editable':True,
'is_Form':True,
'headers':headers,
'measures': measures}
)
# POST request check form data for validity and update database if form is correct
elif request.method == 'POST':
# if button for overview is clicked -> redirect
if 'cancel' in request.POST:
return HttpResponseRedirect('/tickets/overview/')
# if button for editing is clicked -> redirect to editing form
elif 'edit' in request.POST:
return HttpResponseRedirect('/tickets/' + ticketid + '/edit/')
# when button 'New Measure...' was clicked -> redirect
elif 'addmeasure' in request.POST:
return HttpResponseRedirect('/tickets/' + ticketid + '/add_measure/')
# if button for closing the ticket is clicked -> check input, update db
elif 'close' in request.POST:
# init form object with POST data
closeform = ClosingDataForm(request.POST)
# if the data is valid, update ticket in database with entered data
if closeform.is_valid():
Ticket.objects.filter(ticketid=str(ticketid)).update(
comment=closeform.cleaned_data['comment'],
keywords=closeform.cleaned_data['keywords'],
closingdatetime=timezone.now(),
workinghours=closeform.cleaned_data['workinghours'],
priority='low',
status='closed',
responsible_person=request.user
)
ticket = Ticket.objects.get(ticketid=str(ticketid))
ticket_dict = model_to_dict(ticket)
ticket_dict['responsible_person'] = request.user.username
sendTicketCloseMail(ticket_dict)
return HttpResponseRedirect('/tickets/overview/?status=closed')
# if data is invalid, display the current template with an additional error messages
else:
ticket_dict = model_to_dict(ticket)
ticket_dict['sector'] = ticket.sector
detailform = DetailForm(initial=ticket_dict)
image = ticket_dict['image']
# build list of compact forms displayed as rows for measures linked to this ticket
measures = []
ticket_measures = Measures.objects.filter(ticket=ticket)
for measure in ticket_measures:
measures.append(CompactMeasureForm(initial=model_to_dict(measure)))
return render(
request, 'ticket_close.djhtml',
{'detailform':detailform,
'editform':closeform,
'hasImage':image,
'editable':True,
'is_Form':True,
'measures':measures,
'headers':headers}
)
# deny any request method except GET and POST
else:
# send response for 405: Method not allowed
                return HttpResponseNotAllowed(['GET', 'POST'])
# if user mustn't edit tickets or another user is specified as responsible_person
else:
# display error template with error description
if not request.user.has_perm('tickets.change_ticket'):
errormsg = 'Sie haben nicht die Berechtigung Tickets zu bearbeiten!'
            elif ticket.responsible_person is not None and \
ticket.responsible_person != request.user:
errormsg = 'Für dieses Ticket ist ein anderer Benutzer verantwortlich!'
else:
                errormsg = 'Unbekannter Fehler bei Ticketbearbeitung (in tickets.views.close_ticket())'
return render(
request, 'ticket_error.djhtml',
{'errormsg': errormsg}
)
'''
# function which sends a mail to ticket_dict['creator']
# informing the creator that the ticket with ID ticket_dict['ticketid'] has
# been closed by user ticket_dict['responsible_person']
# url: NONE (separated for convenience)
'''
def sendTicketCloseMail(ticket_dict):
subject = 'Ihr Ticket #' + str(ticket_dict['ticketid']) + ' wurde abgeschlossen'
message = 'Das von Ihnen erstellte Ticket mit der ID ' + str(ticket_dict['ticketid']) + \
' wurde vom Benutzer ' + ticket_dict['responsible_person'] + ' abgeschlossen!'
receiver = [ticket_dict['creator'] + '@rgoebel.de']
    # use the console e-mail backend, so the mail is written to stdout instead of being sent
    con = get_connection('django.core.mail.backends.console.EmailBackend')
send_mail(subject, message, 'ticket@rgoebel.de', receiver, connection=con)
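# Example (hypothetical values): sendTicketCloseMail({'ticketid': 17, 'creator': 'mmuster',
# 'responsible_person': 'admin'}) would write a mail addressed to mmuster@rgoebel.de
# to the console via the backend configured above.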
# view function for ticket search
'''
#searches for tickets which match user-entered criteria and
#returns a template with all results shown
#parameter: HttpRequest request
#URL:'tickets/search'
'''
@login_required(login_url=LOGIN_URL)
def search_tickets(request):
# renewal of session expiration
# request.session.set_expiry(COOKIE_EXP_AGE)
if request.method == 'GET':
# initialize searchform with GET data
searchform = SearchForm(request.GET)
# if entered data is valid, build a query and query the db for tickets
if searchform.is_valid():
searchterms = searchform.cleaned_data
query_dict = {}
# check all fields/keys for data entered, adjust keys depending on
# the field's properties (full text, choice, char...?)
# save the adjusted key-value pairs in query_dict
for key in searchterms:
if searchterms[key] != '' and searchterms[key] is not None:
#########################
# TODO: full text will only work with MySQL (or postgreSQL);
# full text indices must be configured directly in db manager
#########################
# append '__search' -> full text search for these fields
if key == 'description' or key == 'comment':
query_key = key + '__search'
                    # append '__contains' -> SQL LIKE '%...%' for non-choice fields
                    elif key != 'sector' and key != 'category' and key != 'status':
                        query_key = key + '__contains'
                    # else: key stays unchanged -> plain equality ('=') in SQL
                    else:
                        query_key = key
                    # for the sector field get the id of the chosen group name from Django's Group table
                    if key == 'sector':
                        searchterms[key] = Group.objects.get(name=searchterms[key]).id
                    query_dict[query_key] = searchterms[key]
            # build the query from the entered data via functools.reduce and '&' on Q objects
            # one-liner form of the version using a single Q object (see alternatives at the end of this file)
query = reduce(lambda q, key: q & Q(**{key: query_dict[key]}), query_dict, Q())
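            # Illustration (hypothetical values): for query_dict == {'status': 'open', 'subject__contains': 'Drucker'}
            # the reduce call above builds Q() & Q(status='open') & Q(subject__contains='Drucker'),
            # i.e. roughly WHERE status = 'open' AND subject LIKE '%Drucker%' in SQL.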
tickets = Ticket.objects.filter(query)
# init label_dict from FieldConstants.TICKET_FIELD_LABELS
labels_dict = FieldConstants.TICKET_FIELD_LABELS
# generate list from query results
results = []
for ticket in tickets:
ticket_dict = model_to_dict(ticket)
                # replace the value for 'sector' with the corresponding group's name (instead of the primary key from the group table)
ticket_dict['sector'] = ticket.sector.name
for key, value in ticket_dict.items():
if value is None:
ticket_dict[key] = ''
#check if an image for the ticket exists and display 'Ja/Nein' ('Yes/No') accordingly
if ticket_dict['image'] != '':
ticket_dict['image'] = 'Ja'
else:
ticket_dict['image'] = 'Nein'
results.append(ticket_dict)
# return ticket search template with searchform and result list
return render(
request, 'ticket_search.djhtml',
{'searchform':searchform,
'results':results,
'labels_dict':labels_dict}
)
else:
return render(
request, 'ticket_error.djhtml',
{'errormsg':'Searchform input invalid!'}
)
# deny any request method except GET
else:
# send response for 405: Method not allowed
        return HttpResponseNotAllowed(['GET'])
# view function for ticket image display in a specific template
'''
#displays the appended/uploaded file for the given ticketid
#if no such ticket exists, the error template will be rendered and returned instead
#parameters: HttpRequest request, ticketid
#URL:'tickets/<ticketid>/image'
'''
@login_required(login_url=LOGIN_URL)
def show_ticket_image(request, ticketid):
try:
ticket = Ticket.objects.get(ticketid=str(ticketid))
    except Exception:
return render(
request, 'ticket_error.djhtml',
{'errormsg':'Kein Ticket mit dieser ID!'}
)
else:
if ticket.image:
return render(
request, 'ticket_image.djhtml',
{'ticketid':str(ticketid),
'url':ticket.image.url}
)
else:
return render(
request, 'ticket_image.djhtml',
{'ticketid':str(ticketid)}
)
# view function for displaying a specific image
'''
#the image to be displayed is fetched via MEDIA_ROOT
#a HttpResponse with the image data and content_type is returned
#if an exception is raised (by open()): render and return error template (w/ message)
#parameters: HttpRequest request, imgname
'''
@login_required(login_url=LOGIN_URL)
def get_ticket_image(request, imgname):
    try:
        # open the uploaded file read-only; the with-block makes sure it is closed again
        with open(settings_media_root + 'uploads/' + imgname, 'rb') as img:
            imgtype = imghdr.what(img)
            return HttpResponse(img.read(), content_type='image/' + imgtype)
    except Exception:
errormsg = 'Fehler: Bild konnte nicht geöffnet werden'
return render(
request, 'ticket_error.djhtml',
{'errormsg': errormsg}
)
########################################
# OTHER VERSIONS OF BUILDING A QUERY FROM MULTIPLE CONDITIONS (I.E. IN search_ticket(request))
# TODO: Remove comments in final version
# Version with list of Q objects
# querylist=[]
# #add a new Q object for each key in query_dict
# for key in query_dict:
# #initialize the Q object via unpacking (**) of a dictionary
# #here it's exactly 1 keyword argument (key = value)
# querylist.append(Q(**{key:query_dict[key]}))
#
#
# #combines all Q-objects in querylist with AND (operator.and_),
# #queries the database and stores the results in tickets
# #see: https://docs.python.org/3/library/functools.html#functools.reduce
# tickets = Ticket.objects.filter(reduce(operator.and_, querylist))
# Version with one Q object which is built from all query conditions
# query = Q()
#
# for key in query_dict:
# query &= Q(**{key:query_dict[key]})
[repo: fagusMcFagel/ticketsystem | path: ticketsystem/tickets/views.py | language: Python | license: mit | size: 49,979 | score: 0.008066]
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import division
from __future__ import unicode_literals
from pyLibrary.debugs.logs import Log
from pyLibrary.dot import wrap
from pyLibrary.queries.containers.list_usingSQLite import Table_usingSQLite
from pyLibrary.queries.expressions import NullOp
from pyLibrary.testing.fuzzytestcase import FuzzyTestCase
class TestContainer(FuzzyTestCase):
def test_assumption(self):
table = Table_usingSQLite("test_table")
collection = {}
uid = table.next_uid()
ok, required_changes = table.flatten({"a": 1, "b": "v"}, uid, collection)
table.change_schema(required_changes)
uid = table.next_uid()
ok, required_changes = table.flatten({"a": None, "b": "v"}, uid, collection)
uid = table.next_uid()
ok, required_changes = table.flatten({"a": 1, "b": None}, uid, collection)
table._insert(collection)
result = table.db.query('SELECT coalesce("a.$number", "b.$string"), length(coalesce("a.$number", "b.$string")) FROM '+table.name)
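        # coalesce() returns the first non-NULL of the two columns, so the three inserted rows
        # yield 1.0, 'v' and 1.0; SQLite's length() of those values is 3, 1 and 3 respectively.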
self.assertEqual(result, {"data": [(1.0, 3), ('v', 1), (1.0, 3)]})
def test_flatten_inner(self):
table = Table_usingSQLite("test_table")
collection = {}
uid = table.next_uid() # 1
ok, required_changes = table.flatten({"a": 0}, uid, collection)
self.assertEqual(ok, False)
self.assertEqual(required_changes, [{"add": {"name": "a", "type": "number", "nested_path": NullOp()}}])
self.assertEqual(collection, {
".": {
"rows": [{"__id__": 1, "a.$number": 0}],
"active_columns": [{"es_column": "a.$number"}]
}
})
table.change_schema(required_changes)
table._insert(collection)
collection = {}
uid = table.next_uid() # 2
ok, required_changes = table.flatten({"a": {"b": 0}}, uid, collection)
self.assertEqual(ok, False)
self.assertEqual(required_changes, [
{"add": {"name": "a", "type": "object", "nested_path": NullOp()}},
{"add": {"name": "a.b", "type": "number", "nested_path": NullOp()}}
])
self.assertEqual(collection, {
".": {
"rows": [{"__id__": 2, "a.$object": ".", "a.b.$number": 0}],
"active_columns": {wrap({"es_column": "a.b.$number"}), wrap({"es_column": "a.$object"})}
}
})
table.change_schema(required_changes)
table._insert(collection)
collection = {}
uid = table.next_uid() # 3
ok, required_changes = table.flatten({"a": {"b": [0, 1]}}, uid, collection)
self.assertEqual(ok, False)
self.assertEqual(required_changes, [{
"add": {"name": "a.b", "type": "nested", "nested_path": NullOp()}
}])
self.assertEqual(collection, {
".": {
"rows": [
{"__id__": 3, "a.$object": "."}
],
"active_columns": {wrap({"es_column": "a.$object"}), wrap({"es_column": "a.b.$object"})}
},
"a.b": {
"rows":[
{"__id__": 4, "__parent__": 3, "__order__": 0, "a.b.$number": 0},
{"__id__": 5, "__parent__": 3, "__order__": 1, "a.b.$number": 1}
],
"active_columns": {wrap({"es_column": "a.b.$number"})}
}
})
table.change_schema(required_changes)
table._insert(collection)
collection = {}
uid = table.next_uid() # 6
ok, required_changes = table.flatten({"a": {"b": "value"}}, uid, collection)
self.assertEqual(ok, False)
self.assertEqual(required_changes, [{
"add": {"name": "a.b", "type": "string", "nested_path": "a.b"}
}])
self.assertEqual(collection, {
".": {
"rows": [
{"__id__": 6, "a.b.$object": ".", "a.$object": "."}
],
"active_columns": {wrap({"es_column": "a.b.$object"}), wrap({"es_column": "a.$object"})}
},
"a.b": {
"rows": [
{"__id__": 7, "__parent__": 6, "__order__": 0, "a.b.$string": "value"}
],
"active_columns": {wrap({"es_column": "a.b.$string"})}
}
})
table.change_schema(required_changes)
table._insert(collection)
# VERIFY CONTENT OF TABLE
result = table.db.query('SELECT * FROM "test_table.a.b" ORDER BY __id__')
self.assertEqual(result, {"data": [
(2, 2, 0, 0, None),
(4, 3, 0, 0, None),
(5, 3, 1, 1, None),
(7, 6, 0, None, 'value')
]})
# VIEW METADATA
command = 'PRAGMA table_info("test_table")'
Log.note("Metadata\n{{meta|json|indent}}", meta=table.db.query(command))
# VIEW METADATA
command = 'PRAGMA table_info("test_table.a.b")'
Log.note("Metadata\n{{meta|json|indent}}", meta=table.db.query(command))
# VERIFY PULLING DATA
result = table.query({"from": table.name})
self.assertEqual(result, {"data": [
{"a": 0},
{"a": {"b": 0}},
{"a": {"b": [0, 1]}},
{"a": {"b": "value"}}
]})
Log.note("{{result}}", result=result)
[repo: klahnakoski/TestFailures | path: pyLibrary/queries/containers/tests/test_container.py | language: Python | license: mpl-2.0 | size: 5,642 | score: 0.00319]
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import webbrowser
import datetime
import socket
import os
import re
import os.path
import shutil
import shutil_custom
shutil.copyfile = shutil_custom.copyfile_custom
from threading import Lock
import sys
from github import Github
from sickbeard import metadata
from sickbeard import providers
from sickbeard.providers.generic import GenericProvider
from sickbeard.providers import btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, animenzb, bluetigers, cpasbien, fnt, xthor, torrentbytes, \
frenchtorrentdb, freshontv, titansoftv, libertalia, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \
scenetime, btdigg, strike, transmitthenet, tvchaosuk, bitcannon
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
naming_ep_type
from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
subtitles, traktChecker
from sickbeard import db
from sickbeard import helpers
from sickbeard import scheduler
from sickbeard import search_queue
from sickbeard import show_queue
from sickbeard import logger
from sickbeard import naming
from sickbeard import dailysearcher
from sickbeard.indexers.indexer_api import indexerApi
from sickbeard.indexers.indexer_exceptions import indexer_shownotfound, indexer_showincomplete, indexer_exception, indexer_error, \
indexer_episodenotfound, indexer_attributenotfound, indexer_seasonnotfound, indexer_userabort, indexerExcepts
from sickbeard.common import SD
from sickbeard.common import SKIPPED
from sickbeard.common import WANTED
from sickbeard.databases import mainDB, cache_db, failed_db
from sickrage.helper.exceptions import ex
from sickrage.system.Shutdown import Shutdown
from configobj import ConfigObj
import requests
requests.packages.urllib3.disable_warnings()
PID = None
CFG = None
CONFIG_FILE = None
# This is the version of the config we EXPECT to find
CONFIG_VERSION = 7
# Default encryption version (0 for None)
ENCRYPTION_VERSION = 0
ENCRYPTION_SECRET = None
PROG_DIR = '.'
MY_FULLNAME = None
MY_NAME = None
MY_ARGS = []
SYS_ENCODING = ''
DATA_DIR = ''
CREATEPID = False
PIDFILE = ''
DAEMON = None
NO_RESIZE = False
# system events
events = None
# github
gh = None
# schedulers
dailySearchScheduler = None
backlogSearchScheduler = None
showUpdateScheduler = None
versionCheckScheduler = None
showQueueScheduler = None
searchQueueScheduler = None
properFinderScheduler = None
autoPostProcesserScheduler = None
subtitlesFinderScheduler = None
traktCheckerScheduler = None
showList = None
loadingShowList = None
providerList = []
newznabProviderList = []
torrentRssProviderList = []
metadata_provider_dict = {}
NEWEST_VERSION = None
NEWEST_VERSION_STRING = None
VERSION_NOTIFY = False
AUTO_UPDATE = False
NOTIFY_ON_UPDATE = False
CUR_COMMIT_HASH = None
BRANCH = ''
GIT_RESET = True
GIT_REMOTE = ''
GIT_REMOTE_URL = ''
CUR_COMMIT_BRANCH = ''
GIT_ORG = 'SiCKRAGETV'
GIT_REPO = 'SickRage'
GIT_USERNAME = None
GIT_PASSWORD = None
GIT_PATH = None
GIT_AUTOISSUES = False
GIT_NEWVER = False
DEVELOPER = False
NEWS_URL = 'http://sickragetv.github.io/sickrage-news/news.md'
NEWS_LAST_READ = None
NEWS_LATEST = None
NEWS_UNREAD = 0
INIT_LOCK = Lock()
started = False
ACTUAL_LOG_DIR = None
LOG_DIR = None
LOG_NR = 5
LOG_SIZE = 1048576
SOCKET_TIMEOUT = None
WEB_PORT = None
WEB_LOG = None
WEB_ROOT = None
WEB_USERNAME = None
WEB_PASSWORD = None
WEB_HOST = None
WEB_IPV6 = None
WEB_COOKIE_SECRET = None
WEB_USE_GZIP = True
DOWNLOAD_URL = None
HANDLE_REVERSE_PROXY = False
PROXY_SETTING = None
PROXY_INDEXERS = True
SSL_VERIFY = True
LOCALHOST_IP = None
CPU_PRESET = None
ANON_REDIRECT = None
API_KEY = None
API_ROOT = None
ENABLE_HTTPS = False
HTTPS_CERT = None
HTTPS_KEY = None
INDEXER_DEFAULT_LANGUAGE = None
EP_DEFAULT_DELETED_STATUS = None
LAUNCH_BROWSER = False
CACHE_DIR = None
ACTUAL_CACHE_DIR = None
ROOT_DIRS = None
TRASH_REMOVE_SHOW = False
TRASH_ROTATE_LOGS = False
SORT_ARTICLE = False
DEBUG = False
DISPLAY_ALL_SEASONS = True
DEFAULT_PAGE = 'home'
USE_LISTVIEW = False
METADATA_KODI = None
METADATA_KODI_12PLUS = None
METADATA_MEDIABROWSER = None
METADATA_PS3 = None
METADATA_WDTV = None
METADATA_TIVO = None
METADATA_MEDE8ER = None
QUALITY_DEFAULT = None
STATUS_DEFAULT = None
STATUS_DEFAULT_AFTER = None
FLATTEN_FOLDERS_DEFAULT = False
SUBTITLES_DEFAULT = False
INDEXER_DEFAULT = None
INDEXER_TIMEOUT = None
SCENE_DEFAULT = False
ANIME_DEFAULT = False
ARCHIVE_DEFAULT = False
PROVIDER_ORDER = []
NAMING_MULTI_EP = False
NAMING_ANIME_MULTI_EP = False
NAMING_PATTERN = None
NAMING_ABD_PATTERN = None
NAMING_CUSTOM_ABD = False
NAMING_SPORTS_PATTERN = None
NAMING_CUSTOM_SPORTS = False
NAMING_ANIME_PATTERN = None
NAMING_CUSTOM_ANIME = False
NAMING_FORCE_FOLDERS = False
NAMING_STRIP_YEAR = False
NAMING_ANIME = None
USE_NZBS = False
USE_TORRENTS = False
NZB_METHOD = None
NZB_DIR = None
USENET_RETENTION = None
TORRENT_METHOD = None
TORRENT_DIR = None
DOWNLOAD_PROPERS = False
CHECK_PROPERS_INTERVAL = None
ALLOW_HIGH_PRIORITY = False
SAB_FORCED = False
RANDOMIZE_PROVIDERS = False
AUTOPOSTPROCESSER_FREQUENCY = None
DAILYSEARCH_FREQUENCY = None
UPDATE_FREQUENCY = None
BACKLOG_FREQUENCY = None
SHOWUPDATE_HOUR = None
DEFAULT_AUTOPOSTPROCESSER_FREQUENCY = 10
DEFAULT_DAILYSEARCH_FREQUENCY = 40
DEFAULT_BACKLOG_FREQUENCY = 21
DEFAULT_UPDATE_FREQUENCY = 1
DEFAULT_SHOWUPDATE_HOUR = 3
MIN_AUTOPOSTPROCESSER_FREQUENCY = 1
MIN_DAILYSEARCH_FREQUENCY = 10
MIN_BACKLOG_FREQUENCY = 10
MIN_UPDATE_FREQUENCY = 1
BACKLOG_DAYS = 7
ADD_SHOWS_WO_DIR = False
CREATE_MISSING_SHOW_DIRS = False
RENAME_EPISODES = False
AIRDATE_EPISODES = False
FILE_TIMESTAMP_TIMEZONE = None
PROCESS_AUTOMATICALLY = False
NO_DELETE = False
KEEP_PROCESSED_DIR = False
PROCESS_METHOD = None
DELRARCONTENTS = False
MOVE_ASSOCIATED_FILES = False
POSTPONE_IF_SYNC_FILES = True
NFO_RENAME = True
TV_DOWNLOAD_DIR = None
UNPACK = False
SKIP_REMOVED_FILES = False
NZBS = False
NZBS_UID = None
NZBS_HASH = None
WOMBLE = False
BINSEARCH = False
OMGWTFNZBS = False
OMGWTFNZBS_USERNAME = None
OMGWTFNZBS_APIKEY = None
NEWZBIN = False
NEWZBIN_USERNAME = None
NEWZBIN_PASSWORD = None
SAB_USERNAME = None
SAB_PASSWORD = None
SAB_APIKEY = None
SAB_CATEGORY = None
SAB_CATEGORY_ANIME = None
SAB_HOST = ''
NZBGET_USERNAME = None
NZBGET_PASSWORD = None
NZBGET_CATEGORY = None
NZBGET_CATEGORY_ANIME = None
NZBGET_HOST = None
NZBGET_USE_HTTPS = False
NZBGET_PRIORITY = 100
TORRENT_USERNAME = None
TORRENT_PASSWORD = None
TORRENT_HOST = ''
TORRENT_PATH = ''
TORRENT_SEED_TIME = None
TORRENT_PAUSED = False
TORRENT_HIGH_BANDWIDTH = False
TORRENT_LABEL = ''
TORRENT_LABEL_ANIME = ''
TORRENT_VERIFY_CERT = False
TORRENT_RPCURL = 'transmission'
TORRENT_AUTH_TYPE = 'none'
USE_KODI = False
KODI_ALWAYS_ON = True
KODI_NOTIFY_ONSNATCH = False
KODI_NOTIFY_ONDOWNLOAD = False
KODI_NOTIFY_ONSUBTITLEDOWNLOAD = False
KODI_UPDATE_LIBRARY = False
KODI_UPDATE_FULL = False
KODI_UPDATE_ONLYFIRST = False
KODI_HOST = ''
KODI_USERNAME = None
KODI_PASSWORD = None
USE_PLEX = False
PLEX_NOTIFY_ONSNATCH = False
PLEX_NOTIFY_ONDOWNLOAD = False
PLEX_NOTIFY_ONSUBTITLEDOWNLOAD = False
PLEX_UPDATE_LIBRARY = False
PLEX_SERVER_HOST = None
PLEX_SERVER_TOKEN = None
PLEX_HOST = None
PLEX_USERNAME = None
PLEX_PASSWORD = None
USE_PLEX_CLIENT = False
PLEX_CLIENT_USERNAME = None
PLEX_CLIENT_PASSWORD = None
USE_EMBY = False
EMBY_HOST = None
EMBY_APIKEY = None
USE_GROWL = False
GROWL_NOTIFY_ONSNATCH = False
GROWL_NOTIFY_ONDOWNLOAD = False
GROWL_NOTIFY_ONSUBTITLEDOWNLOAD = False
GROWL_HOST = ''
GROWL_PASSWORD = None
USE_FREEMOBILE = False
FREEMOBILE_NOTIFY_ONSNATCH = False
FREEMOBILE_NOTIFY_ONDOWNLOAD = False
FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD = False
FREEMOBILE_ID = ''
FREEMOBILE_APIKEY = ''
USE_PROWL = False
PROWL_NOTIFY_ONSNATCH = False
PROWL_NOTIFY_ONDOWNLOAD = False
PROWL_NOTIFY_ONSUBTITLEDOWNLOAD = False
PROWL_API = None
PROWL_PRIORITY = 0
USE_TWITTER = False
TWITTER_NOTIFY_ONSNATCH = False
TWITTER_NOTIFY_ONDOWNLOAD = False
TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = False
TWITTER_USERNAME = None
TWITTER_PASSWORD = None
TWITTER_PREFIX = None
TWITTER_DMTO = None
TWITTER_USEDM = False
USE_BOXCAR = False
BOXCAR_NOTIFY_ONSNATCH = False
BOXCAR_NOTIFY_ONDOWNLOAD = False
BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD = False
BOXCAR_USERNAME = None
BOXCAR_PASSWORD = None
BOXCAR_PREFIX = None
USE_BOXCAR2 = False
BOXCAR2_NOTIFY_ONSNATCH = False
BOXCAR2_NOTIFY_ONDOWNLOAD = False
BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD = False
BOXCAR2_ACCESSTOKEN = None
USE_PUSHOVER = False
PUSHOVER_NOTIFY_ONSNATCH = False
PUSHOVER_NOTIFY_ONDOWNLOAD = False
PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = False
PUSHOVER_USERKEY = None
PUSHOVER_APIKEY = None
PUSHOVER_DEVICE = None
PUSHOVER_SOUND = None
USE_LIBNOTIFY = False
LIBNOTIFY_NOTIFY_ONSNATCH = False
LIBNOTIFY_NOTIFY_ONDOWNLOAD = False
LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = False
USE_NMJ = False
NMJ_HOST = None
NMJ_DATABASE = None
NMJ_MOUNT = None
ANIMESUPPORT = False
USE_ANIDB = False
ANIDB_USERNAME = None
ANIDB_PASSWORD = None
ANIDB_USE_MYLIST = False
ADBA_CONNECTION = None
ANIME_SPLIT_HOME = False
USE_SYNOINDEX = False
USE_NMJv2 = False
NMJv2_HOST = None
NMJv2_DATABASE = None
NMJv2_DBLOC = None
USE_SYNOLOGYNOTIFIER = False
SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = False
SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = False
SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = False
USE_TRAKT = False
TRAKT_USERNAME = None
TRAKT_ACCESS_TOKEN = None
TRAKT_REFRESH_TOKEN = None
TRAKT_REMOVE_WATCHLIST = False
TRAKT_REMOVE_SERIESLIST = False
TRAKT_REMOVE_SHOW_FROM_SICKRAGE = False
TRAKT_SYNC_WATCHLIST = False
TRAKT_METHOD_ADD = None
TRAKT_START_PAUSED = False
TRAKT_USE_RECOMMENDED = False
TRAKT_SYNC = False
TRAKT_SYNC_REMOVE = False
TRAKT_DEFAULT_INDEXER = None
TRAKT_TIMEOUT = None
TRAKT_BLACKLIST_NAME = None
USE_PYTIVO = False
PYTIVO_NOTIFY_ONSNATCH = False
PYTIVO_NOTIFY_ONDOWNLOAD = False
PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD = False
PYTIVO_UPDATE_LIBRARY = False
PYTIVO_HOST = ''
PYTIVO_SHARE_NAME = ''
PYTIVO_TIVO_NAME = ''
USE_NMA = False
NMA_NOTIFY_ONSNATCH = False
NMA_NOTIFY_ONDOWNLOAD = False
NMA_NOTIFY_ONSUBTITLEDOWNLOAD = False
NMA_API = None
NMA_PRIORITY = 0
USE_PUSHALOT = False
PUSHALOT_NOTIFY_ONSNATCH = False
PUSHALOT_NOTIFY_ONDOWNLOAD = False
PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = False
PUSHALOT_AUTHORIZATIONTOKEN = None
USE_PUSHBULLET = False
PUSHBULLET_NOTIFY_ONSNATCH = False
PUSHBULLET_NOTIFY_ONDOWNLOAD = False
PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = False
PUSHBULLET_API = None
PUSHBULLET_DEVICE = None
USE_EMAIL = False
EMAIL_NOTIFY_ONSNATCH = False
EMAIL_NOTIFY_ONDOWNLOAD = False
EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD = False
EMAIL_HOST = None
EMAIL_PORT = 25
EMAIL_TLS = False
EMAIL_USER = None
EMAIL_PASSWORD = None
EMAIL_FROM = None
EMAIL_LIST = None
GUI_NAME = None
HOME_LAYOUT = None
HISTORY_LAYOUT = None
HISTORY_LIMIT = 0
DISPLAY_SHOW_SPECIALS = False
COMING_EPS_LAYOUT = None
COMING_EPS_DISPLAY_PAUSED = False
COMING_EPS_SORT = None
COMING_EPS_MISSED_RANGE = None
FUZZY_DATING = False
TRIM_ZERO = False
DATE_PRESET = None
TIME_PRESET = None
TIME_PRESET_W_SECONDS = None
TIMEZONE_DISPLAY = None
THEME_NAME = None
POSTER_SORTBY = None
POSTER_SORTDIR = None
FILTER_ROW = True
USE_SUBTITLES = False
SUBTITLES_LANGUAGES = []
SUBTITLES_DIR = ''
SUBTITLES_SERVICES_LIST = []
SUBTITLES_SERVICES_ENABLED = []
SUBTITLES_HISTORY = False
EMBEDDED_SUBTITLES_ALL = False
SUBTITLES_HEARING_IMPAIRED = False
SUBTITLES_FINDER_FREQUENCY = 1
SUBTITLES_MULTI = False
SUBTITLES_EXTRA_SCRIPTS = []
ADDIC7ED_USER = None
ADDIC7ED_PASS = None
OPENSUBTITLES_USER = None
OPENSUBTITLES_PASS = None
LEGENDASTV_USER = None
LEGENDASTV_PASS = None
USE_FAILED_DOWNLOADS = False
DELETE_FAILED = False
EXTRA_SCRIPTS = []
IGNORE_WORDS = "german,french,core2hd,dutch,swedish,reenc,MrLss"
REQUIRE_WORDS = ""
IGNORED_SUBS_LIST = "dk,fin,heb,kor,nor,nordic,pl,swe"
SYNC_FILES = "!sync,lftp-pget-status,part,bts,!qb"
CALENDAR_UNPROTECTED = False
NO_RESTART = False
TMDB_API_KEY = 'edc5f123313769de83a71e157758030b'
#TRAKT_API_KEY = 'd4161a7a106424551add171e5470112e4afdaf2438e6ef2fe0548edc75924868'
TRAKT_API_KEY = '5c65f55e11d48c35385d9e8670615763a605fad28374c8ae553a7b7a50651ddd'
TRAKT_API_SECRET = 'b53e32045ac122a445ef163e6d859403301ffe9b17fb8321d428531b69022a82'
TRAKT_PIN_URL = 'https://trakt.tv/pin/4562'
TRAKT_OAUTH_URL = 'https://trakt.tv/'
TRAKT_API_URL = 'https://api-v2launch.trakt.tv/'
FANART_API_KEY = '9b3afaf26f6241bdb57d6cc6bd798da7'
SHOWS_RECENT = []
__INITIALIZED__ = False
NEWZNAB_DATA = None
def get_backlog_cycle_time():
cycletime = DAILYSEARCH_FREQUENCY * 2 + 7
return max([cycletime, 720])
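# Illustration: with the default DAILYSEARCH_FREQUENCY of 40 the cycle time would be
# 40 * 2 + 7 = 87, so the enforced minimum of 720 is returned instead.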
def initialize(consoleLogging=True):
with INIT_LOCK:
global BRANCH, GIT_RESET, GIT_REMOTE, GIT_REMOTE_URL, CUR_COMMIT_HASH, CUR_COMMIT_BRANCH, GIT_NEWVER, ACTUAL_LOG_DIR, LOG_DIR, LOG_NR, LOG_SIZE, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, ENCRYPTION_SECRET, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, WEB_COOKIE_SECRET, WEB_USE_GZIP, API_KEY, API_ROOT, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, RANDOMIZE_PROVIDERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, SAB_FORCED, TORRENT_METHOD, \
SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_CATEGORY_ANIME, SAB_HOST, \
NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_CATEGORY_ANIME, NZBGET_PRIORITY, NZBGET_HOST, NZBGET_USE_HTTPS, backlogSearchScheduler, \
TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_SEED_TIME, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, TORRENT_LABEL_ANIME, TORRENT_VERIFY_CERT, TORRENT_RPCURL, TORRENT_AUTH_TYPE, \
USE_KODI, KODI_ALWAYS_ON, KODI_NOTIFY_ONSNATCH, KODI_NOTIFY_ONDOWNLOAD, KODI_NOTIFY_ONSUBTITLEDOWNLOAD, KODI_UPDATE_FULL, KODI_UPDATE_ONLYFIRST, \
KODI_UPDATE_LIBRARY, KODI_HOST, KODI_USERNAME, KODI_PASSWORD, BACKLOG_FREQUENCY, \
USE_TRAKT, TRAKT_USERNAME, TRAKT_ACCESS_TOKEN, TRAKT_REFRESH_TOKEN, TRAKT_REMOVE_WATCHLIST, TRAKT_SYNC_WATCHLIST, TRAKT_REMOVE_SHOW_FROM_SICKRAGE, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktCheckerScheduler, TRAKT_USE_RECOMMENDED, TRAKT_SYNC, TRAKT_SYNC_REMOVE, TRAKT_DEFAULT_INDEXER, TRAKT_REMOVE_SERIESLIST, TRAKT_TIMEOUT, TRAKT_BLACKLIST_NAME, \
USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_UPDATE_LIBRARY, USE_PLEX_CLIENT, PLEX_CLIENT_USERNAME, PLEX_CLIENT_PASSWORD, \
PLEX_SERVER_HOST, PLEX_SERVER_TOKEN, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, DEFAULT_BACKLOG_FREQUENCY, MIN_BACKLOG_FREQUENCY, SKIP_REMOVED_FILES, \
USE_EMBY, EMBY_HOST, EMBY_APIKEY, \
showUpdateScheduler, __INITIALIZED__, INDEXER_DEFAULT_LANGUAGE, EP_DEFAULT_DELETED_STATUS, LAUNCH_BROWSER, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, SORT_ARTICLE, showList, loadingShowList, \
NEWZNAB_DATA, NZBS, NZBS_UID, NZBS_HASH, INDEXER_DEFAULT, INDEXER_TIMEOUT, USENET_RETENTION, TORRENT_DIR, \
QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, STATUS_DEFAULT, STATUS_DEFAULT_AFTER, \
GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD, USE_FREEMOBILE, FREEMOBILE_ID, FREEMOBILE_APIKEY, FREEMOBILE_NOTIFY_ONSNATCH, FREEMOBILE_NOTIFY_ONDOWNLOAD, FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD, \
USE_GROWL, GROWL_HOST, GROWL_PASSWORD, USE_PROWL, PROWL_NOTIFY_ONSNATCH, PROWL_NOTIFY_ONDOWNLOAD, PROWL_NOTIFY_ONSUBTITLEDOWNLOAD, PROWL_API, PROWL_PRIORITY, PROG_DIR, \
USE_PYTIVO, PYTIVO_NOTIFY_ONSNATCH, PYTIVO_NOTIFY_ONDOWNLOAD, PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD, PYTIVO_UPDATE_LIBRARY, PYTIVO_HOST, PYTIVO_SHARE_NAME, PYTIVO_TIVO_NAME, \
USE_NMA, NMA_NOTIFY_ONSNATCH, NMA_NOTIFY_ONDOWNLOAD, NMA_NOTIFY_ONSUBTITLEDOWNLOAD, NMA_API, NMA_PRIORITY, \
USE_PUSHALOT, PUSHALOT_NOTIFY_ONSNATCH, PUSHALOT_NOTIFY_ONDOWNLOAD, PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHALOT_AUTHORIZATIONTOKEN, \
USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \
versionCheckScheduler, VERSION_NOTIFY, AUTO_UPDATE, NOTIFY_ON_UPDATE, PROCESS_AUTOMATICALLY, NO_DELETE, UNPACK, CPU_PRESET, \
KEEP_PROCESSED_DIR, PROCESS_METHOD, DELRARCONTENTS, TV_DOWNLOAD_DIR, MIN_DAILYSEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY, DEFAULT_SHOWUPDATE_HOUR, MIN_UPDATE_FREQUENCY, UPDATE_FREQUENCY, \
showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, TIMEZONE_DISPLAY, \
NAMING_PATTERN, NAMING_MULTI_EP, NAMING_ANIME_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_SPORTS_PATTERN, NAMING_CUSTOM_SPORTS, NAMING_ANIME_PATTERN, NAMING_CUSTOM_ANIME, NAMING_STRIP_YEAR, \
RENAME_EPISODES, AIRDATE_EPISODES, FILE_TIMESTAMP_TIMEZONE, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \
WOMBLE, BINSEARCH, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList, \
EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, DAILYSEARCH_FREQUENCY, TWITTER_DMTO, TWITTER_USEDM, \
USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \
USE_BOXCAR2, BOXCAR2_ACCESSTOKEN, BOXCAR2_NOTIFY_ONDOWNLOAD, BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR2_NOTIFY_ONSNATCH, \
USE_PUSHOVER, PUSHOVER_USERKEY, PUSHOVER_APIKEY, PUSHOVER_DEVICE, PUSHOVER_NOTIFY_ONDOWNLOAD, PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHOVER_NOTIFY_ONSNATCH, PUSHOVER_SOUND, \
USE_LIBNOTIFY, LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD, USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, USE_NMJv2, NMJv2_HOST, NMJv2_DATABASE, NMJv2_DBLOC, USE_SYNOINDEX, \
USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \
USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \
USE_LISTVIEW, METADATA_KODI, METADATA_KODI_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \
NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, SYNC_FILES, POSTPONE_IF_SYNC_FILES, dailySearchScheduler, NFO_RENAME, \
GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DISPLAY_FILESIZE, FUZZY_DATING, TRIM_ZERO, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, THEME_NAME, FILTER_ROW, \
POSTER_SORTBY, POSTER_SORTDIR, HISTORY_LIMIT, CREATE_MISSING_SHOW_DIRS, ADD_SHOWS_WO_DIR, \
METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, IGNORED_SUBS_LIST, REQUIRE_WORDS, CALENDAR_UNPROTECTED, NO_RESTART, \
USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, SUBTITLES_MULTI, EMBEDDED_SUBTITLES_ALL, SUBTITLES_EXTRA_SCRIPTS, subtitlesFinderScheduler, \
SUBTITLES_HEARING_IMPAIRED, ADDIC7ED_USER, ADDIC7ED_PASS, LEGENDASTV_USER, LEGENDASTV_PASS, OPENSUBTITLES_USER, OPENSUBTITLES_PASS, \
USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, DEFAULT_PAGE, PROXY_SETTING, PROXY_INDEXERS, \
AUTOPOSTPROCESSER_FREQUENCY, SHOWUPDATE_HOUR, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \
ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \
ANIME_SPLIT_HOME, SCENE_DEFAULT, ARCHIVE_DEFAULT, DOWNLOAD_URL, BACKLOG_DAYS, GIT_ORG, GIT_REPO, GIT_USERNAME, GIT_PASSWORD, \
GIT_AUTOISSUES, DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_URL, NEWS_LAST_READ, NEWS_LATEST, NEWS_UNREAD, SHOWS_RECENT
if __INITIALIZED__:
return False
CheckSection(CFG, 'General')
CheckSection(CFG, 'Blackhole')
CheckSection(CFG, 'Newzbin')
CheckSection(CFG, 'SABnzbd')
CheckSection(CFG, 'NZBget')
CheckSection(CFG, 'KODI')
CheckSection(CFG, 'PLEX')
CheckSection(CFG, 'Emby')
CheckSection(CFG, 'Growl')
CheckSection(CFG, 'Prowl')
CheckSection(CFG, 'Twitter')
CheckSection(CFG, 'Boxcar')
CheckSection(CFG, 'Boxcar2')
CheckSection(CFG, 'NMJ')
CheckSection(CFG, 'NMJv2')
CheckSection(CFG, 'Synology')
CheckSection(CFG, 'SynologyNotifier')
CheckSection(CFG, 'pyTivo')
CheckSection(CFG, 'NMA')
CheckSection(CFG, 'Pushalot')
CheckSection(CFG, 'Pushbullet')
CheckSection(CFG, 'Subtitles')
        # needs to be read before any password settings
ENCRYPTION_VERSION = check_setting_int(CFG, 'General', 'encryption_version', 0)
ENCRYPTION_SECRET = check_setting_str(CFG, 'General', 'encryption_secret', helpers.generateCookieSecret(), censor_log=True)
GIT_AUTOISSUES = bool(check_setting_int(CFG, 'General', 'git_autoissues', 0))
# git login info
GIT_USERNAME = check_setting_str(CFG, 'General', 'git_username', '')
GIT_PASSWORD = check_setting_str(CFG, 'General', 'git_password', '', censor_log=True)
GIT_NEWVER = bool(check_setting_int(CFG, 'General', 'git_newver', 0))
DEVELOPER = bool(check_setting_int(CFG, 'General', 'developer', 0))
# debugging
DEBUG = bool(check_setting_int(CFG, 'General', 'debug', 0))
DEFAULT_PAGE = check_setting_str(CFG, 'General', 'default_page', 'home')
if DEFAULT_PAGE not in ('home', 'schedule', 'history', 'news', 'IRC'):
DEFAULT_PAGE = 'home'
ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs')
LOG_DIR = os.path.normpath(os.path.join(DATA_DIR, ACTUAL_LOG_DIR))
        LOG_NR = check_setting_int(CFG, 'General', 'log_nr', 5)  # Default to 5 backup files (sickrage.log.x)
        LOG_SIZE = check_setting_int(CFG, 'General', 'log_size', 1048576)  # Default to max 1MB per logfile
fileLogging = True
if not helpers.makeDir(LOG_DIR):
sys.stderr.write("!!! No log folder, logging to screen only!\n")
fileLogging = False
# init logging
logger.initLogging(consoleLogging=consoleLogging, fileLogging=fileLogging, debugLogging=DEBUG)
# github api
try:
gh = Github(user_agent="SiCKRAGE").get_organization(GIT_ORG).get_repo(GIT_REPO)
except Exception as e:
gh = None
            logger.log('Unable to set up GitHub properly. GitHub will not be available. Error: %s' % ex(e), logger.WARNING)
# git reset on update
GIT_RESET = bool(check_setting_int(CFG, 'General', 'git_reset', 1))
# current git branch
BRANCH = check_setting_str(CFG, 'General', 'branch', '')
# git_remote
GIT_REMOTE = check_setting_str(CFG, 'General', 'git_remote', 'origin')
GIT_REMOTE_URL = check_setting_str(CFG, 'General', 'git_remote_url',
'https://github.com/%s/%s.git' % (GIT_ORG, GIT_REPO))
# current commit hash
CUR_COMMIT_HASH = check_setting_str(CFG, 'General', 'cur_commit_hash', '')
# current commit branch
CUR_COMMIT_BRANCH = check_setting_str(CFG, 'General', 'cur_commit_branch', '')
ACTUAL_CACHE_DIR = check_setting_str(CFG, 'General', 'cache_dir', 'cache')
        # fix configs where an older bug saved the literal string 'None' as the cache dir
if ACTUAL_CACHE_DIR == 'None':
ACTUAL_CACHE_DIR = 'cache'
# unless they specify, put the cache dir inside the data dir
if not os.path.isabs(ACTUAL_CACHE_DIR):
CACHE_DIR = os.path.join(DATA_DIR, ACTUAL_CACHE_DIR)
else:
CACHE_DIR = ACTUAL_CACHE_DIR
if not helpers.makeDir(CACHE_DIR):
logger.log(u"!!! Creating local cache dir failed, using system default", logger.ERROR)
CACHE_DIR = None
# Check if we need to perform a restore of the cache folder
try:
restoreDir = os.path.join(DATA_DIR, 'restore')
if os.path.exists(restoreDir) and os.path.exists(os.path.join(restoreDir, 'cache')):
def restoreCache(srcDir, dstDir):
def path_leaf(path):
head, tail = os.path.split(path)
return tail or os.path.basename(head)
try:
if os.path.isdir(dstDir):
bakFilename = '{0}-{1}'.format(path_leaf(dstDir), datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d_%H%M%S'))
shutil.move(dstDir, os.path.join(os.path.dirname(dstDir), bakFilename))
shutil.move(srcDir, dstDir)
logger.log(u"Restore: restoring cache successful", logger.INFO)
except Exception as e:
logger.log(u"Restore: restoring cache failed: {0}".format(str(e)), logger.ERROR)
restoreCache(os.path.join(restoreDir, 'cache'), CACHE_DIR)
except Exception as e:
logger.log(u"Restore: restoring cache failed: {0}".format(ex(e)), logger.ERROR)
finally:
if os.path.exists(os.path.join(DATA_DIR, 'restore')):
try:
shutil.rmtree(os.path.join(DATA_DIR, 'restore'))
except Exception as e:
logger.log(u"Restore: Unable to remove the restore directory: {0}".format(ex(e)), logger.ERROR)
for cleanupDir in ['mako', 'sessions', 'indexers']:
try:
shutil.rmtree(os.path.join(CACHE_DIR, cleanupDir))
except Exception as e:
logger.log(u"Restore: Unable to remove the cache/{0} directory: {1}".format(cleanupDir, ex(e)), logger.WARNING)
GUI_NAME = check_setting_str(CFG, 'GUI', 'gui_name', 'slick')
THEME_NAME = check_setting_str(CFG, 'GUI', 'theme_name', 'dark')
SOCKET_TIMEOUT = check_setting_int(CFG, 'General', 'socket_timeout', 30)
socket.setdefaulttimeout(SOCKET_TIMEOUT)
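        # web interface settings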
try:
WEB_PORT = check_setting_int(CFG, 'General', 'web_port', 8081)
except Exception:
WEB_PORT = 8081
if WEB_PORT < 21 or WEB_PORT > 65535:
WEB_PORT = 8081
WEB_HOST = check_setting_str(CFG, 'General', 'web_host', '0.0.0.0')
WEB_IPV6 = bool(check_setting_int(CFG, 'General', 'web_ipv6', 0))
WEB_ROOT = check_setting_str(CFG, 'General', 'web_root', '').rstrip("/")
WEB_LOG = bool(check_setting_int(CFG, 'General', 'web_log', 0))
WEB_USERNAME = check_setting_str(CFG, 'General', 'web_username', '', censor_log=True)
WEB_PASSWORD = check_setting_str(CFG, 'General', 'web_password', '', censor_log=True)
WEB_COOKIE_SECRET = check_setting_str(CFG, 'General', 'web_cookie_secret', helpers.generateCookieSecret(), censor_log=True)
if not WEB_COOKIE_SECRET:
WEB_COOKIE_SECRET = helpers.generateCookieSecret()
WEB_USE_GZIP = bool(check_setting_int(CFG, 'General', 'web_use_gzip', 1))
SSL_VERIFY = bool(check_setting_int(CFG, 'General', 'ssl_verify', 1))
INDEXER_DEFAULT_LANGUAGE = check_setting_str(CFG, 'General', 'indexerDefaultLang', 'en')
EP_DEFAULT_DELETED_STATUS = check_setting_int(CFG, 'General', 'ep_default_deleted_status', 6)
LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
DOWNLOAD_URL = check_setting_str(CFG, 'General', 'download_url', "")
LOCALHOST_IP = check_setting_str(CFG, 'General', 'localhost_ip', '')
CPU_PRESET = check_setting_str(CFG, 'General', 'cpu_preset', 'NORMAL')
ANON_REDIRECT = check_setting_str(CFG, 'General', 'anon_redirect', 'http://dereferer.org/?')
PROXY_SETTING = check_setting_str(CFG, 'General', 'proxy_setting', '')
PROXY_INDEXERS = bool(check_setting_int(CFG, 'General', 'proxy_indexers', 1))
        # guard against a malformed anon redirect URL breaking outbound links: disable it unless it ends with '?'
if not ANON_REDIRECT.endswith('?'):
ANON_REDIRECT = ''
TRASH_REMOVE_SHOW = bool(check_setting_int(CFG, 'General', 'trash_remove_show', 0))
TRASH_ROTATE_LOGS = bool(check_setting_int(CFG, 'General', 'trash_rotate_logs', 0))
SORT_ARTICLE = bool(check_setting_int(CFG, 'General', 'sort_article', 0))
API_KEY = check_setting_str(CFG, 'General', 'api_key', '', censor_log=True)
ENABLE_HTTPS = bool(check_setting_int(CFG, 'General', 'enable_https', 0))
HTTPS_CERT = check_setting_str(CFG, 'General', 'https_cert', 'server.crt')
HTTPS_KEY = check_setting_str(CFG, 'General', 'https_key', 'server.key')
HANDLE_REVERSE_PROXY = bool(check_setting_int(CFG, 'General', 'handle_reverse_proxy', 0))
ROOT_DIRS = check_setting_str(CFG, 'General', 'root_dirs', '')
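        # expected format (per the validation regex below): '<default index>|<path>|<path>...', e.g. '0|/mnt/tv'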
if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', ROOT_DIRS):
ROOT_DIRS = ''
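        # defaults applied when new shows are added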
QUALITY_DEFAULT = check_setting_int(CFG, 'General', 'quality_default', SD)
STATUS_DEFAULT = check_setting_int(CFG, 'General', 'status_default', SKIPPED)
STATUS_DEFAULT_AFTER = check_setting_int(CFG, 'General', 'status_default_after', WANTED)
VERSION_NOTIFY = bool(check_setting_int(CFG, 'General', 'version_notify', 1))
AUTO_UPDATE = bool(check_setting_int(CFG, 'General', 'auto_update', 0))
NOTIFY_ON_UPDATE = bool(check_setting_int(CFG, 'General', 'notify_on_update', 1))
FLATTEN_FOLDERS_DEFAULT = bool(check_setting_int(CFG, 'General', 'flatten_folders_default', 0))
INDEXER_DEFAULT = check_setting_int(CFG, 'General', 'indexer_default', 0)
INDEXER_TIMEOUT = check_setting_int(CFG, 'General', 'indexer_timeout', 20)
ANIME_DEFAULT = bool(check_setting_int(CFG, 'General', 'anime_default', 0))
SCENE_DEFAULT = bool(check_setting_int(CFG, 'General', 'scene_default', 0))
ARCHIVE_DEFAULT = bool(check_setting_int(CFG, 'General', 'archive_default', 0))
PROVIDER_ORDER = check_setting_str(CFG, 'General', 'provider_order', '').split()
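        # episode and file naming settings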
NAMING_PATTERN = check_setting_str(CFG, 'General', 'naming_pattern', 'Season %0S/%SN - S%0SE%0E - %EN')
NAMING_ABD_PATTERN = check_setting_str(CFG, 'General', 'naming_abd_pattern', '%SN - %A.D - %EN')
NAMING_CUSTOM_ABD = bool(check_setting_int(CFG, 'General', 'naming_custom_abd', 0))
NAMING_SPORTS_PATTERN = check_setting_str(CFG, 'General', 'naming_sports_pattern', '%SN - %A-D - %EN')
NAMING_ANIME_PATTERN = check_setting_str(CFG, 'General', 'naming_anime_pattern',
'Season %0S/%SN - S%0SE%0E - %EN')
NAMING_ANIME = check_setting_int(CFG, 'General', 'naming_anime', 3)
NAMING_CUSTOM_SPORTS = bool(check_setting_int(CFG, 'General', 'naming_custom_sports', 0))
NAMING_CUSTOM_ANIME = bool(check_setting_int(CFG, 'General', 'naming_custom_anime', 0))
NAMING_MULTI_EP = check_setting_int(CFG, 'General', 'naming_multi_ep', 1)
NAMING_ANIME_MULTI_EP = check_setting_int(CFG, 'General', 'naming_anime_multi_ep', 1)
NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
NAMING_STRIP_YEAR = bool(check_setting_int(CFG, 'General', 'naming_strip_year', 0))
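        # NZB / torrent download settings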
USE_NZBS = bool(check_setting_int(CFG, 'General', 'use_nzbs', 0))
USE_TORRENTS = bool(check_setting_int(CFG, 'General', 'use_torrents', 1))
NZB_METHOD = check_setting_str(CFG, 'General', 'nzb_method', 'blackhole')
if NZB_METHOD not in ('blackhole', 'sabnzbd', 'nzbget'):
NZB_METHOD = 'blackhole'
TORRENT_METHOD = check_setting_str(CFG, 'General', 'torrent_method', 'blackhole')
if TORRENT_METHOD not in ('blackhole', 'utorrent', 'transmission', 'deluge', 'deluged', 'download_station', 'rtorrent', 'qbittorrent'):
TORRENT_METHOD = 'blackhole'
DOWNLOAD_PROPERS = bool(check_setting_int(CFG, 'General', 'download_propers', 1))
CHECK_PROPERS_INTERVAL = check_setting_str(CFG, 'General', 'check_propers_interval', '')
if CHECK_PROPERS_INTERVAL not in ('15m', '45m', '90m', '4h', 'daily'):
CHECK_PROPERS_INTERVAL = 'daily'
RANDOMIZE_PROVIDERS = bool(check_setting_int(CFG, 'General', 'randomize_providers', 0))
ALLOW_HIGH_PRIORITY = bool(check_setting_int(CFG, 'General', 'allow_high_priority', 1))
SKIP_REMOVED_FILES = bool(check_setting_int(CFG, 'General', 'skip_removed_files', 0))
USENET_RETENTION = check_setting_int(CFG, 'General', 'usenet_retention', 500)
AUTOPOSTPROCESSER_FREQUENCY = check_setting_int(CFG, 'General', 'autopostprocesser_frequency',
DEFAULT_AUTOPOSTPROCESSER_FREQUENCY)
if AUTOPOSTPROCESSER_FREQUENCY < MIN_AUTOPOSTPROCESSER_FREQUENCY:
AUTOPOSTPROCESSER_FREQUENCY = MIN_AUTOPOSTPROCESSER_FREQUENCY
DAILYSEARCH_FREQUENCY = check_setting_int(CFG, 'General', 'dailysearch_frequency',
DEFAULT_DAILYSEARCH_FREQUENCY)
if DAILYSEARCH_FREQUENCY < MIN_DAILYSEARCH_FREQUENCY:
DAILYSEARCH_FREQUENCY = MIN_DAILYSEARCH_FREQUENCY
MIN_BACKLOG_FREQUENCY = get_backlog_cycle_time()
BACKLOG_FREQUENCY = check_setting_int(CFG, 'General', 'backlog_frequency', DEFAULT_BACKLOG_FREQUENCY)
if BACKLOG_FREQUENCY < MIN_BACKLOG_FREQUENCY:
BACKLOG_FREQUENCY = MIN_BACKLOG_FREQUENCY
UPDATE_FREQUENCY = check_setting_int(CFG, 'General', 'update_frequency', DEFAULT_UPDATE_FREQUENCY)
if UPDATE_FREQUENCY < MIN_UPDATE_FREQUENCY:
UPDATE_FREQUENCY = MIN_UPDATE_FREQUENCY
SHOWUPDATE_HOUR = check_setting_int(CFG, 'General', 'showupdate_hour', DEFAULT_SHOWUPDATE_HOUR)
if SHOWUPDATE_HOUR > 23:
SHOWUPDATE_HOUR = 0
elif SHOWUPDATE_HOUR < 0:
SHOWUPDATE_HOUR = 0
BACKLOG_DAYS = check_setting_int(CFG, 'General', 'backlog_days', 7)
NEWS_LAST_READ = check_setting_str(CFG, 'General', 'news_last_read', '1970-01-01')
NEWS_LATEST = NEWS_LAST_READ
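        # blackhole directories and post-processing settings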
NZB_DIR = check_setting_str(CFG, 'Blackhole', 'nzb_dir', '')
TORRENT_DIR = check_setting_str(CFG, 'Blackhole', 'torrent_dir', '')
TV_DOWNLOAD_DIR = check_setting_str(CFG, 'General', 'tv_download_dir', '')
PROCESS_AUTOMATICALLY = bool(check_setting_int(CFG, 'General', 'process_automatically', 0))
NO_DELETE = bool(check_setting_int(CFG, 'General', 'no_delete', 0))
UNPACK = bool(check_setting_int(CFG, 'General', 'unpack', 0))
RENAME_EPISODES = bool(check_setting_int(CFG, 'General', 'rename_episodes', 1))
AIRDATE_EPISODES = bool(check_setting_int(CFG, 'General', 'airdate_episodes', 0))
FILE_TIMESTAMP_TIMEZONE = check_setting_str(CFG, 'General', 'file_timestamp_timezone', 'network')
KEEP_PROCESSED_DIR = bool(check_setting_int(CFG, 'General', 'keep_processed_dir', 1))
PROCESS_METHOD = check_setting_str(CFG, 'General', 'process_method', 'copy' if KEEP_PROCESSED_DIR else 'move')
DELRARCONTENTS = bool(check_setting_int(CFG, 'General', 'del_rar_contents', 0))
MOVE_ASSOCIATED_FILES = bool(check_setting_int(CFG, 'General', 'move_associated_files', 0))
POSTPONE_IF_SYNC_FILES = bool(check_setting_int(CFG, 'General', 'postpone_if_sync_files', 1))
SYNC_FILES = check_setting_str(CFG, 'General', 'sync_files', SYNC_FILES)
NFO_RENAME = bool(check_setting_int(CFG, 'General', 'nfo_rename', 1))
CREATE_MISSING_SHOW_DIRS = bool(check_setting_int(CFG, 'General', 'create_missing_show_dirs', 0))
ADD_SHOWS_WO_DIR = bool(check_setting_int(CFG, 'General', 'add_shows_wo_dir', 0))
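        # indexer and download client credentials (NZBs.org, Newzbin, SABnzbd, NZBget, torrent clients)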
NZBS = bool(check_setting_int(CFG, 'NZBs', 'nzbs', 0))
NZBS_UID = check_setting_str(CFG, 'NZBs', 'nzbs_uid', '', censor_log=True)
NZBS_HASH = check_setting_str(CFG, 'NZBs', 'nzbs_hash', '', censor_log=True)
NEWZBIN = bool(check_setting_int(CFG, 'Newzbin', 'newzbin', 0))
NEWZBIN_USERNAME = check_setting_str(CFG, 'Newzbin', 'newzbin_username', '', censor_log=True)
NEWZBIN_PASSWORD = check_setting_str(CFG, 'Newzbin', 'newzbin_password', '', censor_log=True)
SAB_USERNAME = check_setting_str(CFG, 'SABnzbd', 'sab_username', '', censor_log=True)
SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '', censor_log=True)
SAB_APIKEY = check_setting_str(CFG, 'SABnzbd', 'sab_apikey', '', censor_log=True)
SAB_CATEGORY = check_setting_str(CFG, 'SABnzbd', 'sab_category', 'tv')
SAB_CATEGORY_ANIME = check_setting_str(CFG, 'SABnzbd', 'sab_category_anime', 'anime')
SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '')
SAB_FORCED = bool(check_setting_int(CFG, 'SABnzbd', 'sab_forced', 0))
NZBGET_USERNAME = check_setting_str(CFG, 'NZBget', 'nzbget_username', 'nzbget', censor_log=True)
NZBGET_PASSWORD = check_setting_str(CFG, 'NZBget', 'nzbget_password', 'tegbzn6789', censor_log=True)
NZBGET_CATEGORY = check_setting_str(CFG, 'NZBget', 'nzbget_category', 'tv')
NZBGET_CATEGORY_ANIME = check_setting_str(CFG, 'NZBget', 'nzbget_category_anime', 'anime')
NZBGET_HOST = check_setting_str(CFG, 'NZBget', 'nzbget_host', '')
NZBGET_USE_HTTPS = bool(check_setting_int(CFG, 'NZBget', 'nzbget_use_https', 0))
NZBGET_PRIORITY = check_setting_int(CFG, 'NZBget', 'nzbget_priority', 100)
TORRENT_USERNAME = check_setting_str(CFG, 'TORRENT', 'torrent_username', '', censor_log=True)
TORRENT_PASSWORD = check_setting_str(CFG, 'TORRENT', 'torrent_password', '', censor_log=True)
TORRENT_HOST = check_setting_str(CFG, 'TORRENT', 'torrent_host', '')
TORRENT_PATH = check_setting_str(CFG, 'TORRENT', 'torrent_path', '')
TORRENT_SEED_TIME = check_setting_int(CFG, 'TORRENT', 'torrent_seed_time', 0)
TORRENT_PAUSED = bool(check_setting_int(CFG, 'TORRENT', 'torrent_paused', 0))
TORRENT_HIGH_BANDWIDTH = bool(check_setting_int(CFG, 'TORRENT', 'torrent_high_bandwidth', 0))
TORRENT_LABEL = check_setting_str(CFG, 'TORRENT', 'torrent_label', '')
TORRENT_LABEL_ANIME = check_setting_str(CFG, 'TORRENT', 'torrent_label_anime', '')
TORRENT_VERIFY_CERT = bool(check_setting_int(CFG, 'TORRENT', 'torrent_verify_cert', 0))
TORRENT_RPCURL = check_setting_str(CFG, 'TORRENT', 'torrent_rpcurl', 'transmission')
TORRENT_AUTH_TYPE = check_setting_str(CFG, 'TORRENT', 'torrent_auth_type', '')
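        # notifier settings, one block per notifier section in the config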
USE_KODI = bool(check_setting_int(CFG, 'KODI', 'use_kodi', 0))
KODI_ALWAYS_ON = bool(check_setting_int(CFG, 'KODI', 'kodi_always_on', 1))
KODI_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'KODI', 'kodi_notify_onsnatch', 0))
KODI_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'KODI', 'kodi_notify_ondownload', 0))
KODI_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'KODI', 'kodi_notify_onsubtitledownload', 0))
KODI_UPDATE_LIBRARY = bool(check_setting_int(CFG, 'KODI', 'kodi_update_library', 0))
KODI_UPDATE_FULL = bool(check_setting_int(CFG, 'KODI', 'kodi_update_full', 0))
KODI_UPDATE_ONLYFIRST = bool(check_setting_int(CFG, 'KODI', 'kodi_update_onlyfirst', 0))
KODI_HOST = check_setting_str(CFG, 'KODI', 'kodi_host', '')
KODI_USERNAME = check_setting_str(CFG, 'KODI', 'kodi_username', '', censor_log=True)
KODI_PASSWORD = check_setting_str(CFG, 'KODI', 'kodi_password', '', censor_log=True)
USE_PLEX = bool(check_setting_int(CFG, 'Plex', 'use_plex', 0))
PLEX_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Plex', 'plex_notify_onsnatch', 0))
PLEX_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Plex', 'plex_notify_ondownload', 0))
PLEX_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Plex', 'plex_notify_onsubtitledownload', 0))
PLEX_UPDATE_LIBRARY = bool(check_setting_int(CFG, 'Plex', 'plex_update_library', 0))
PLEX_SERVER_HOST = check_setting_str(CFG, 'Plex', 'plex_server_host', '')
PLEX_SERVER_TOKEN = check_setting_str(CFG, 'Plex', 'plex_server_token', '')
PLEX_HOST = check_setting_str(CFG, 'Plex', 'plex_host', '')
PLEX_USERNAME = check_setting_str(CFG, 'Plex', 'plex_username', '', censor_log=True)
PLEX_PASSWORD = check_setting_str(CFG, 'Plex', 'plex_password', '', censor_log=True)
USE_PLEX_CLIENT = bool(check_setting_int(CFG, 'Plex', 'use_plex_client', 0))
PLEX_CLIENT_USERNAME = check_setting_str(CFG, 'Plex', 'plex_client_username', '', censor_log=True)
PLEX_CLIENT_PASSWORD = check_setting_str(CFG, 'Plex', 'plex_client_password', '', censor_log=True)
USE_EMBY = bool(check_setting_int(CFG, 'Emby', 'use_emby', 0))
EMBY_HOST = check_setting_str(CFG, 'Emby', 'emby_host', '')
EMBY_APIKEY = check_setting_str(CFG, 'Emby', 'emby_apikey', '')
USE_GROWL = bool(check_setting_int(CFG, 'Growl', 'use_growl', 0))
GROWL_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Growl', 'growl_notify_onsnatch', 0))
GROWL_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Growl', 'growl_notify_ondownload', 0))
GROWL_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Growl', 'growl_notify_onsubtitledownload', 0))
GROWL_HOST = check_setting_str(CFG, 'Growl', 'growl_host', '')
GROWL_PASSWORD = check_setting_str(CFG, 'Growl', 'growl_password', '', censor_log=True)
USE_FREEMOBILE = bool(check_setting_int(CFG, 'FreeMobile', 'use_freemobile', 0))
FREEMOBILE_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'FreeMobile', 'freemobile_notify_onsnatch', 0))
FREEMOBILE_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'FreeMobile', 'freemobile_notify_ondownload', 0))
FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'FreeMobile', 'freemobile_notify_onsubtitledownload', 0))
FREEMOBILE_ID = check_setting_str(CFG, 'FreeMobile', 'freemobile_id', '')
FREEMOBILE_APIKEY = check_setting_str(CFG, 'FreeMobile', 'freemobile_apikey', '')
USE_PROWL = bool(check_setting_int(CFG, 'Prowl', 'use_prowl', 0))
PROWL_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Prowl', 'prowl_notify_onsnatch', 0))
PROWL_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Prowl', 'prowl_notify_ondownload', 0))
PROWL_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Prowl', 'prowl_notify_onsubtitledownload', 0))
PROWL_API = check_setting_str(CFG, 'Prowl', 'prowl_api', '', censor_log=True)
PROWL_PRIORITY = check_setting_str(CFG, 'Prowl', 'prowl_priority', "0")
USE_TWITTER = bool(check_setting_int(CFG, 'Twitter', 'use_twitter', 0))
TWITTER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_onsnatch', 0))
TWITTER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_ondownload', 0))
TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
check_setting_int(CFG, 'Twitter', 'twitter_notify_onsubtitledownload', 0))
TWITTER_USERNAME = check_setting_str(CFG, 'Twitter', 'twitter_username', '', censor_log=True)
TWITTER_PASSWORD = check_setting_str(CFG, 'Twitter', 'twitter_password', '', censor_log=True)
TWITTER_PREFIX = check_setting_str(CFG, 'Twitter', 'twitter_prefix', GIT_REPO)
TWITTER_DMTO = check_setting_str(CFG, 'Twitter', 'twitter_dmto', '')
TWITTER_USEDM = bool(check_setting_int(CFG, 'Twitter', 'twitter_usedm', 0))
USE_BOXCAR = bool(check_setting_int(CFG, 'Boxcar', 'use_boxcar', 0))
BOXCAR_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Boxcar', 'boxcar_notify_onsnatch', 0))
BOXCAR_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Boxcar', 'boxcar_notify_ondownload', 0))
BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Boxcar', 'boxcar_notify_onsubtitledownload', 0))
BOXCAR_USERNAME = check_setting_str(CFG, 'Boxcar', 'boxcar_username', '', censor_log=True)
USE_BOXCAR2 = bool(check_setting_int(CFG, 'Boxcar2', 'use_boxcar2', 0))
BOXCAR2_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Boxcar2', 'boxcar2_notify_onsnatch', 0))
BOXCAR2_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Boxcar2', 'boxcar2_notify_ondownload', 0))
BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Boxcar2', 'boxcar2_notify_onsubtitledownload', 0))
BOXCAR2_ACCESSTOKEN = check_setting_str(CFG, 'Boxcar2', 'boxcar2_accesstoken', '', censor_log=True)
USE_PUSHOVER = bool(check_setting_int(CFG, 'Pushover', 'use_pushover', 0))
PUSHOVER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsnatch', 0))
PUSHOVER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_ondownload', 0))
PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0))
PUSHOVER_USERKEY = check_setting_str(CFG, 'Pushover', 'pushover_userkey', '', censor_log=True)
PUSHOVER_APIKEY = check_setting_str(CFG, 'Pushover', 'pushover_apikey', '', censor_log=True)
PUSHOVER_DEVICE = check_setting_str(CFG, 'Pushover', 'pushover_device', '')
PUSHOVER_SOUND = check_setting_str(CFG, 'Pushover', 'pushover_sound', 'pushover')
USE_LIBNOTIFY = bool(check_setting_int(CFG, 'Libnotify', 'use_libnotify', 0))
LIBNOTIFY_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsnatch', 0))
LIBNOTIFY_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_ondownload', 0))
LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsubtitledownload', 0))
USE_NMJ = bool(check_setting_int(CFG, 'NMJ', 'use_nmj', 0))
NMJ_HOST = check_setting_str(CFG, 'NMJ', 'nmj_host', '')
NMJ_DATABASE = check_setting_str(CFG, 'NMJ', 'nmj_database', '')
NMJ_MOUNT = check_setting_str(CFG, 'NMJ', 'nmj_mount', '')
USE_NMJv2 = bool(check_setting_int(CFG, 'NMJv2', 'use_nmjv2', 0))
NMJv2_HOST = check_setting_str(CFG, 'NMJv2', 'nmjv2_host', '')
NMJv2_DATABASE = check_setting_str(CFG, 'NMJv2', 'nmjv2_database', '')
NMJv2_DBLOC = check_setting_str(CFG, 'NMJv2', 'nmjv2_dbloc', '')
USE_SYNOINDEX = bool(check_setting_int(CFG, 'Synology', 'use_synoindex', 0))
USE_SYNOLOGYNOTIFIER = bool(check_setting_int(CFG, 'SynologyNotifier', 'use_synologynotifier', 0))
SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = bool(
check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsnatch', 0))
SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = bool(
check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_ondownload', 0))
SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsubtitledownload', 0))
USE_TRAKT = bool(check_setting_int(CFG, 'Trakt', 'use_trakt', 0))
TRAKT_USERNAME = check_setting_str(CFG, 'Trakt', 'trakt_username', '', censor_log=True)
TRAKT_ACCESS_TOKEN = check_setting_str(CFG, 'Trakt', 'trakt_access_token', '', censor_log=True)
TRAKT_REFRESH_TOKEN = check_setting_str(CFG, 'Trakt', 'trakt_refresh_token', '', censor_log=True)
TRAKT_REMOVE_WATCHLIST = bool(check_setting_int(CFG, 'Trakt', 'trakt_remove_watchlist', 0))
TRAKT_REMOVE_SERIESLIST = bool(check_setting_int(CFG, 'Trakt', 'trakt_remove_serieslist', 0))
TRAKT_REMOVE_SHOW_FROM_SICKRAGE = bool(check_setting_int(CFG, 'Trakt', 'trakt_remove_show_from_sickrage', 0))
TRAKT_SYNC_WATCHLIST = bool(check_setting_int(CFG, 'Trakt', 'trakt_sync_watchlist', 0))
TRAKT_METHOD_ADD = check_setting_int(CFG, 'Trakt', 'trakt_method_add', 0)
TRAKT_START_PAUSED = bool(check_setting_int(CFG, 'Trakt', 'trakt_start_paused', 0))
TRAKT_USE_RECOMMENDED = bool(check_setting_int(CFG, 'Trakt', 'trakt_use_recommended', 0))
TRAKT_SYNC = bool(check_setting_int(CFG, 'Trakt', 'trakt_sync', 0))
TRAKT_SYNC_REMOVE = bool(check_setting_int(CFG, 'Trakt', 'trakt_sync_remove', 0))
TRAKT_DEFAULT_INDEXER = check_setting_int(CFG, 'Trakt', 'trakt_default_indexer', 1)
TRAKT_TIMEOUT = check_setting_int(CFG, 'Trakt', 'trakt_timeout', 30)
TRAKT_BLACKLIST_NAME = check_setting_str(CFG, 'Trakt', 'trakt_blacklist_name', '')
USE_PYTIVO = bool(check_setting_int(CFG, 'pyTivo', 'use_pytivo', 0))
PYTIVO_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'pyTivo', 'pytivo_notify_onsnatch', 0))
PYTIVO_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'pyTivo', 'pytivo_notify_ondownload', 0))
PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'pyTivo', 'pytivo_notify_onsubtitledownload', 0))
PYTIVO_UPDATE_LIBRARY = bool(check_setting_int(CFG, 'pyTivo', 'pyTivo_update_library', 0))
PYTIVO_HOST = check_setting_str(CFG, 'pyTivo', 'pytivo_host', '')
PYTIVO_SHARE_NAME = check_setting_str(CFG, 'pyTivo', 'pytivo_share_name', '')
PYTIVO_TIVO_NAME = check_setting_str(CFG, 'pyTivo', 'pytivo_tivo_name', '')
USE_NMA = bool(check_setting_int(CFG, 'NMA', 'use_nma', 0))
NMA_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'NMA', 'nma_notify_onsnatch', 0))
NMA_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'NMA', 'nma_notify_ondownload', 0))
NMA_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'NMA', 'nma_notify_onsubtitledownload', 0))
NMA_API = check_setting_str(CFG, 'NMA', 'nma_api', '', censor_log=True)
NMA_PRIORITY = check_setting_str(CFG, 'NMA', 'nma_priority', "0")
USE_PUSHALOT = bool(check_setting_int(CFG, 'Pushalot', 'use_pushalot', 0))
PUSHALOT_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsnatch', 0))
PUSHALOT_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_ondownload', 0))
PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsubtitledownload', 0))
PUSHALOT_AUTHORIZATIONTOKEN = check_setting_str(CFG, 'Pushalot', 'pushalot_authorizationtoken', '', censor_log=True)
USE_PUSHBULLET = bool(check_setting_int(CFG, 'Pushbullet', 'use_pushbullet', 0))
PUSHBULLET_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsnatch', 0))
PUSHBULLET_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_ondownload', 0))
PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsubtitledownload', 0))
PUSHBULLET_API = check_setting_str(CFG, 'Pushbullet', 'pushbullet_api', '', censor_log=True)
PUSHBULLET_DEVICE = check_setting_str(CFG, 'Pushbullet', 'pushbullet_device', '')
USE_EMAIL = bool(check_setting_int(CFG, 'Email', 'use_email', 0))
EMAIL_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Email', 'email_notify_onsnatch', 0))
EMAIL_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Email', 'email_notify_ondownload', 0))
EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Email', 'email_notify_onsubtitledownload', 0))
EMAIL_HOST = check_setting_str(CFG, 'Email', 'email_host', '')
EMAIL_PORT = check_setting_int(CFG, 'Email', 'email_port', 25)
EMAIL_TLS = bool(check_setting_int(CFG, 'Email', 'email_tls', 0))
EMAIL_USER = check_setting_str(CFG, 'Email', 'email_user', '', censor_log=True)
EMAIL_PASSWORD = check_setting_str(CFG, 'Email', 'email_password', '', censor_log=True)
EMAIL_FROM = check_setting_str(CFG, 'Email', 'email_from', '')
EMAIL_LIST = check_setting_str(CFG, 'Email', 'email_list', '')
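        # subtitle search settings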
USE_SUBTITLES = bool(check_setting_int(CFG, 'Subtitles', 'use_subtitles', 0))
SUBTITLES_LANGUAGES = check_setting_str(CFG, 'Subtitles', 'subtitles_languages', '').split(',')
if SUBTITLES_LANGUAGES[0] == '':
SUBTITLES_LANGUAGES = []
SUBTITLES_DIR = check_setting_str(CFG, 'Subtitles', 'subtitles_dir', '')
SUBTITLES_SERVICES_LIST = check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_LIST', '').split(',')
SUBTITLES_SERVICES_ENABLED = [int(x) for x in
check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_ENABLED', '').split('|')
if x]
SUBTITLES_DEFAULT = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_default', 0))
SUBTITLES_HISTORY = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_history', 0))
EMBEDDED_SUBTITLES_ALL = bool(check_setting_int(CFG, 'Subtitles', 'embedded_subtitles_all', 0))
SUBTITLES_HEARING_IMPAIRED = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_hearing_impaired', 0))
SUBTITLES_FINDER_FREQUENCY = check_setting_int(CFG, 'Subtitles', 'subtitles_finder_frequency', 1)
SUBTITLES_MULTI = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_multi', 1))
SUBTITLES_EXTRA_SCRIPTS = [x.strip() for x in check_setting_str(CFG, 'Subtitles', 'subtitles_extra_scripts', '').split('|') if
x.strip()]
ADDIC7ED_USER = check_setting_str(CFG, 'Subtitles', 'addic7ed_username', '', censor_log=True)
ADDIC7ED_PASS = check_setting_str(CFG, 'Subtitles', 'addic7ed_password', '', censor_log=True)
LEGENDASTV_USER = check_setting_str(CFG, 'Subtitles', 'legendastv_username', '', censor_log=True)
LEGENDASTV_PASS = check_setting_str(CFG, 'Subtitles', 'legendastv_password', '', censor_log=True)
OPENSUBTITLES_USER = check_setting_str(CFG, 'Subtitles', 'opensubtitles_username', '', censor_log=True)
OPENSUBTITLES_PASS = check_setting_str(CFG, 'Subtitles', 'opensubtitles_password', '', censor_log=True)
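        # failed download handling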
USE_FAILED_DOWNLOADS = bool(check_setting_int(CFG, 'FailedDownloads', 'use_failed_downloads', 0))
DELETE_FAILED = bool(check_setting_int(CFG, 'FailedDownloads', 'delete_failed', 0))
GIT_PATH = check_setting_str(CFG, 'General', 'git_path', '')
IGNORE_WORDS = check_setting_str(CFG, 'General', 'ignore_words', IGNORE_WORDS)
REQUIRE_WORDS = check_setting_str(CFG, 'General', 'require_words', REQUIRE_WORDS)
IGNORED_SUBS_LIST = check_setting_str(CFG, 'General', 'ignored_subs_list', IGNORED_SUBS_LIST)
CALENDAR_UNPROTECTED = bool(check_setting_int(CFG, 'General', 'calendar_unprotected', 0))
NO_RESTART = bool(check_setting_int(CFG, 'General', 'no_restart', 0))
EXTRA_SCRIPTS = [x.strip() for x in check_setting_str(CFG, 'General', 'extra_scripts', '').split('|') if
x.strip()]
USE_LISTVIEW = bool(check_setting_int(CFG, 'General', 'use_listview', 0))
ANIMESUPPORT = False
USE_ANIDB = bool(check_setting_int(CFG, 'ANIDB', 'use_anidb', 0))
ANIDB_USERNAME = check_setting_str(CFG, 'ANIDB', 'anidb_username', '', censor_log=True)
ANIDB_PASSWORD = check_setting_str(CFG, 'ANIDB', 'anidb_password', '', censor_log=True)
ANIDB_USE_MYLIST = bool(check_setting_int(CFG, 'ANIDB', 'anidb_use_mylist', 0))
ANIME_SPLIT_HOME = bool(check_setting_int(CFG, 'ANIME', 'anime_split_home', 0))
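        # metadata provider configs: pipe-separated 0/1 flags, parsed by each provider's set_config() further below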
METADATA_KODI = check_setting_str(CFG, 'General', 'metadata_kodi', '0|0|0|0|0|0|0|0|0|0')
METADATA_KODI_12PLUS = check_setting_str(CFG, 'General', 'metadata_kodi_12plus', '0|0|0|0|0|0|0|0|0|0')
METADATA_MEDIABROWSER = check_setting_str(CFG, 'General', 'metadata_mediabrowser', '0|0|0|0|0|0|0|0|0|0')
METADATA_PS3 = check_setting_str(CFG, 'General', 'metadata_ps3', '0|0|0|0|0|0|0|0|0|0')
METADATA_WDTV = check_setting_str(CFG, 'General', 'metadata_wdtv', '0|0|0|0|0|0|0|0|0|0')
METADATA_TIVO = check_setting_str(CFG, 'General', 'metadata_tivo', '0|0|0|0|0|0|0|0|0|0')
METADATA_MEDE8ER = check_setting_str(CFG, 'General', 'metadata_mede8er', '0|0|0|0|0|0|0|0|0|0')
HOME_LAYOUT = check_setting_str(CFG, 'GUI', 'home_layout', 'poster')
HISTORY_LAYOUT = check_setting_str(CFG, 'GUI', 'history_layout', 'detailed')
HISTORY_LIMIT = check_setting_str(CFG, 'GUI', 'history_limit', '100')
DISPLAY_SHOW_SPECIALS = bool(check_setting_int(CFG, 'GUI', 'display_show_specials', 1))
COMING_EPS_LAYOUT = check_setting_str(CFG, 'GUI', 'coming_eps_layout', 'banner')
COMING_EPS_DISPLAY_PAUSED = bool(check_setting_int(CFG, 'GUI', 'coming_eps_display_paused', 0))
COMING_EPS_SORT = check_setting_str(CFG, 'GUI', 'coming_eps_sort', 'date')
COMING_EPS_MISSED_RANGE = check_setting_int(CFG, 'GUI', 'coming_eps_missed_range', 7)
FUZZY_DATING = bool(check_setting_int(CFG, 'GUI', 'fuzzy_dating', 0))
TRIM_ZERO = bool(check_setting_int(CFG, 'GUI', 'trim_zero', 0))
DATE_PRESET = check_setting_str(CFG, 'GUI', 'date_preset', '%x')
TIME_PRESET_W_SECONDS = check_setting_str(CFG, 'GUI', 'time_preset', '%I:%M:%S %p')
TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S", u"")
TIMEZONE_DISPLAY = check_setting_str(CFG, 'GUI', 'timezone_display', 'local')
POSTER_SORTBY = check_setting_str(CFG, 'GUI', 'poster_sortby', 'name')
POSTER_SORTDIR = check_setting_int(CFG, 'GUI', 'poster_sortdir', 1)
FILTER_ROW = bool(check_setting_int(CFG, 'GUI', 'filter_row', 1))
DISPLAY_ALL_SEASONS = bool(check_setting_int(CFG, 'General', 'display_all_seasons', 1))
# initialize NZB and TORRENT providers
providerList = providers.makeProviderList()
NEWZNAB_DATA = check_setting_str(CFG, 'Newznab', 'newznab_data', '')
newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
TORRENTRSS_DATA = check_setting_str(CFG, 'TorrentRss', 'torrentrss_data', '')
torrentRssProviderList = providers.getTorrentRssProviderList(TORRENTRSS_DATA)
        # dynamically load torrent provider settings
for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if
curProvider.providerType == GenericProvider.TORRENT]:
curTorrentProvider.enabled = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID(), 0))
if hasattr(curTorrentProvider, 'api_key'):
curTorrentProvider.api_key = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_api_key', '', censor_log=True)
if hasattr(curTorrentProvider, 'hash'):
curTorrentProvider.hash = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_hash', '', censor_log=True)
if hasattr(curTorrentProvider, 'digest'):
curTorrentProvider.digest = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_digest', '', censor_log=True)
if hasattr(curTorrentProvider, 'username'):
curTorrentProvider.username = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_username', '', censor_log=True)
if hasattr(curTorrentProvider, 'password'):
curTorrentProvider.password = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_password', '', censor_log=True)
if hasattr(curTorrentProvider, 'passkey'):
curTorrentProvider.passkey = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_passkey', '', censor_log=True)
if hasattr(curTorrentProvider, 'proxy'):
curTorrentProvider.proxy.enabled = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_proxy', 0))
if hasattr(curTorrentProvider.proxy, 'url'):
curTorrentProvider.proxy.url = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_proxy_url', '')
if hasattr(curTorrentProvider, 'confirmed'):
curTorrentProvider.confirmed = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_confirmed', 1))
if hasattr(curTorrentProvider, 'ranked'):
curTorrentProvider.ranked = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_ranked', 1))
if hasattr(curTorrentProvider, 'engrelease'):
curTorrentProvider.engrelease = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_engrelease', 0))
if hasattr(curTorrentProvider, 'sorting'):
curTorrentProvider.sorting = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_sorting','seeders')
if hasattr(curTorrentProvider, 'options'):
curTorrentProvider.options = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_options', '')
if hasattr(curTorrentProvider, 'ratio'):
curTorrentProvider.ratio = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_ratio', '')
if hasattr(curTorrentProvider, 'minseed'):
curTorrentProvider.minseed = check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_minseed', 1)
if hasattr(curTorrentProvider, 'minleech'):
curTorrentProvider.minleech = check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_minleech', 0)
if hasattr(curTorrentProvider, 'freeleech'):
curTorrentProvider.freeleech = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_freeleech', 0))
if hasattr(curTorrentProvider, 'search_mode'):
curTorrentProvider.search_mode = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_search_mode',
'eponly')
if hasattr(curTorrentProvider, 'search_fallback'):
curTorrentProvider.search_fallback = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_search_fallback',
0))
if hasattr(curTorrentProvider, 'enable_daily'):
curTorrentProvider.enable_daily = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_enable_daily',
1))
if hasattr(curTorrentProvider, 'enable_backlog'):
curTorrentProvider.enable_backlog = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_enable_backlog',
curTorrentProvider.supportsBacklog))
if hasattr(curTorrentProvider, 'cat'):
curTorrentProvider.cat = check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_cat', 0)
if hasattr(curTorrentProvider, 'subtitle'):
curTorrentProvider.subtitle = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_subtitle', 0))
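        # dynamically load NZB provider settings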
for curNzbProvider in [curProvider for curProvider in providers.sortedProviderList() if
curProvider.providerType == GenericProvider.NZB]:
curNzbProvider.enabled = bool(
check_setting_int(CFG, curNzbProvider.getID().upper(), curNzbProvider.getID(), 0))
if hasattr(curNzbProvider, 'api_key'):
curNzbProvider.api_key = check_setting_str(CFG, curNzbProvider.getID().upper(),
curNzbProvider.getID() + '_api_key', '', censor_log=True)
if hasattr(curNzbProvider, 'username'):
curNzbProvider.username = check_setting_str(CFG, curNzbProvider.getID().upper(),
curNzbProvider.getID() + '_username', '', censor_log=True)
if hasattr(curNzbProvider, 'search_mode'):
curNzbProvider.search_mode = check_setting_str(CFG, curNzbProvider.getID().upper(),
curNzbProvider.getID() + '_search_mode',
'eponly')
if hasattr(curNzbProvider, 'search_fallback'):
curNzbProvider.search_fallback = bool(check_setting_int(CFG, curNzbProvider.getID().upper(),
curNzbProvider.getID() + '_search_fallback',
0))
if hasattr(curNzbProvider, 'enable_daily'):
curNzbProvider.enable_daily = bool(check_setting_int(CFG, curNzbProvider.getID().upper(),
curNzbProvider.getID() + '_enable_daily',
1))
if hasattr(curNzbProvider, 'enable_backlog'):
curNzbProvider.enable_backlog = bool(check_setting_int(CFG, curNzbProvider.getID().upper(),
curNzbProvider.getID() + '_enable_backlog',
curNzbProvider.supportsBacklog))
if not os.path.isfile(CONFIG_FILE):
logger.log(u"Unable to find '" + CONFIG_FILE + "', all settings will be default!", logger.DEBUG)
save_config()
# initialize the main SB database
myDB = db.DBConnection()
db.upgradeDatabase(myDB, mainDB.InitialSchema)
# initialize the cache database
myDB = db.DBConnection('cache.db')
db.upgradeDatabase(myDB, cache_db.InitialSchema)
# initialize the failed downloads database
myDB = db.DBConnection('failed.db')
db.upgradeDatabase(myDB, failed_db.InitialSchema)
# fix up any db problems
myDB = db.DBConnection()
db.sanityCheckDatabase(myDB, mainDB.MainSanityCheck)
# migrate the config if it needs it
migrator = ConfigMigrator(CFG)
migrator.migrate_config()
# initialize metadata_providers
metadata_provider_dict = metadata.get_metadata_generator_dict()
for cur_metadata_tuple in [(METADATA_KODI, metadata.kodi),
(METADATA_KODI_12PLUS, metadata.kodi_12plus),
(METADATA_MEDIABROWSER, metadata.mediabrowser),
(METADATA_PS3, metadata.ps3),
(METADATA_WDTV, metadata.wdtv),
(METADATA_TIVO, metadata.tivo),
(METADATA_MEDE8ER, metadata.mede8er),
]:
(cur_metadata_config, cur_metadata_class) = cur_metadata_tuple
tmp_provider = cur_metadata_class.metadata_class()
tmp_provider.set_config(cur_metadata_config)
metadata_provider_dict[tmp_provider.name] = tmp_provider
# initialize schedulers
# updaters
versionCheckScheduler = scheduler.Scheduler(versionChecker.CheckVersion(),
cycleTime=datetime.timedelta(hours=UPDATE_FREQUENCY),
threadName="CHECKVERSION",
silent=False)
showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName="SHOWQUEUE")
showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
cycleTime=datetime.timedelta(hours=1),
threadName="SHOWUPDATER",
start_time=datetime.time(hour=SHOWUPDATE_HOUR))
# searchers
searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName="SEARCHQUEUE")
# TODO: update_interval should take last daily/backlog times into account!
update_interval = datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY)
dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(),
cycleTime=update_interval,
threadName="DAILYSEARCHER",
run_delay=update_interval)
update_interval = datetime.timedelta(minutes=BACKLOG_FREQUENCY)
backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
cycleTime=update_interval,
threadName="BACKLOG",
run_delay=update_interval)
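        # proper finder: use the configured interval when valid, otherwise fall back to an hourly check starting at 1 AM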
search_intervals = {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60}
if CHECK_PROPERS_INTERVAL in search_intervals:
update_interval = datetime.timedelta(minutes=search_intervals[CHECK_PROPERS_INTERVAL])
run_at = None
else:
update_interval = datetime.timedelta(hours=1)
run_at = datetime.time(hour=1) # 1 AM
properFinderScheduler = scheduler.Scheduler(properFinder.ProperFinder(),
cycleTime=update_interval,
threadName="FINDPROPERS",
start_time=run_at,
run_delay=update_interval)
# processors
autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
cycleTime=datetime.timedelta(
minutes=AUTOPOSTPROCESSER_FREQUENCY),
threadName="POSTPROCESSER",
silent=not PROCESS_AUTOMATICALLY)
traktCheckerScheduler = scheduler.Scheduler(traktChecker.TraktChecker(),
cycleTime=datetime.timedelta(hours=1),
threadName="TRAKTCHECKER",
silent=not USE_TRAKT)
subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
threadName="FINDSUBTITLES",
silent=not USE_SUBTITLES)
showList = []
loadingShowList = {}
__INITIALIZED__ = True
return True
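# start all scheduler threads; initialize() must have completed first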
def start():
global __INITIALIZED__, backlogSearchScheduler, \
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
subtitlesFinderScheduler, USE_SUBTITLES, traktCheckerScheduler, \
dailySearchScheduler, events, started
with INIT_LOCK:
if __INITIALIZED__:
            # start the system events queue
events.start()
# start the daily search scheduler
dailySearchScheduler.enable = True
dailySearchScheduler.start()
# start the backlog scheduler
backlogSearchScheduler.enable = True
backlogSearchScheduler.start()
# start the show updater
showUpdateScheduler.enable = True
showUpdateScheduler.start()
# start the version checker
versionCheckScheduler.enable = True
versionCheckScheduler.start()
            # start the show queue checker
showQueueScheduler.enable = True
showQueueScheduler.start()
# start the search queue checker
searchQueueScheduler.enable = True
searchQueueScheduler.start()
# start the proper finder
if DOWNLOAD_PROPERS:
properFinderScheduler.silent = False
properFinderScheduler.enable = True
else:
properFinderScheduler.enable = False
properFinderScheduler.silent = True
properFinderScheduler.start()
# start the post processor
if PROCESS_AUTOMATICALLY:
autoPostProcesserScheduler.silent = False
autoPostProcesserScheduler.enable = True
else:
autoPostProcesserScheduler.enable = False
autoPostProcesserScheduler.silent = True
autoPostProcesserScheduler.start()
# start the subtitles finder
if USE_SUBTITLES:
subtitlesFinderScheduler.silent = False
subtitlesFinderScheduler.enable = True
else:
subtitlesFinderScheduler.enable = False
subtitlesFinderScheduler.silent = True
subtitlesFinderScheduler.start()
# start the trakt checker
if USE_TRAKT:
traktCheckerScheduler.silent = False
traktCheckerScheduler.enable = True
else:
traktCheckerScheduler.enable = False
traktCheckerScheduler.silent = True
traktCheckerScheduler.start()
started = True
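# stop all scheduler threads, waiting up to 10 seconds for each to exit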
def halt():
global __INITIALIZED__, backlogSearchScheduler, \
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
subtitlesFinderScheduler, traktCheckerScheduler, \
dailySearchScheduler, events, started
with INIT_LOCK:
if __INITIALIZED__:
logger.log(u"Aborting all threads")
events.stop.set()
logger.log(u"Waiting for the EVENTS thread to exit")
try:
events.join(10)
except Exception:
pass
dailySearchScheduler.stop.set()
logger.log(u"Waiting for the DAILYSEARCH thread to exit")
try:
dailySearchScheduler.join(10)
except Exception:
pass
backlogSearchScheduler.stop.set()
logger.log(u"Waiting for the BACKLOG thread to exit")
try:
backlogSearchScheduler.join(10)
except Exception:
pass
showUpdateScheduler.stop.set()
logger.log(u"Waiting for the SHOWUPDATER thread to exit")
try:
showUpdateScheduler.join(10)
except Exception:
pass
versionCheckScheduler.stop.set()
logger.log(u"Waiting for the VERSIONCHECKER thread to exit")
try:
versionCheckScheduler.join(10)
except Exception:
pass
showQueueScheduler.stop.set()
logger.log(u"Waiting for the SHOWQUEUE thread to exit")
try:
showQueueScheduler.join(10)
except Exception:
pass
searchQueueScheduler.stop.set()
logger.log(u"Waiting for the SEARCHQUEUE thread to exit")
try:
searchQueueScheduler.join(10)
except Exception:
pass
autoPostProcesserScheduler.stop.set()
logger.log(u"Waiting for the POSTPROCESSER thread to exit")
try:
autoPostProcesserScheduler.join(10)
except Exception:
pass
traktCheckerScheduler.stop.set()
logger.log(u"Waiting for the TRAKTCHECKER thread to exit")
try:
traktCheckerScheduler.join(10)
except Exception:
pass
properFinderScheduler.stop.set()
logger.log(u"Waiting for the PROPERFINDER thread to exit")
try:
properFinderScheduler.join(10)
except Exception:
pass
subtitlesFinderScheduler.stop.set()
logger.log(u"Waiting for the SUBTITLESFINDER thread to exit")
try:
subtitlesFinderScheduler.join(10)
except Exception:
pass
if ADBA_CONNECTION:
ADBA_CONNECTION.logout()
logger.log(u"Waiting for the ANIDB CONNECTION thread to exit")
try:
ADBA_CONNECTION.join(10)
except Exception:
pass
__INITIALIZED__ = False
started = False
def sig_handler(signum=None, frame=None):
    if signum is not None:
logger.log(u"Signal %i caught, saving and exiting..." % int(signum))
Shutdown.stop(PID)
def saveAll():
global showList
# write all shows
logger.log(u"Saving all shows to the database")
for show in showList:
show.saveToDB()
# save config
logger.log(u"Saving config file to disk")
save_config()
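# soft restart halts, saves and re-initializes in-process; otherwise a RESTART system event is queued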
def restart(soft=True):
if soft:
halt()
saveAll()
logger.log(u"Re-initializing all data")
initialize()
else:
events.put(events.SystemEvent.RESTART)
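# serialize the current in-memory settings back to CONFIG_FILE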
def save_config():
new_config = ConfigObj()
new_config.filename = CONFIG_FILE
    # For password settings: include the word `password` in the item name and store the value wrapped with helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)
new_config['General'] = {}
new_config['General']['git_autoissues'] = int(GIT_AUTOISSUES)
new_config['General']['git_username'] = GIT_USERNAME
new_config['General']['git_password'] = helpers.encrypt(GIT_PASSWORD, ENCRYPTION_VERSION)
new_config['General']['git_reset'] = int(GIT_RESET)
new_config['General']['branch'] = BRANCH
new_config['General']['git_remote'] = GIT_REMOTE
new_config['General']['git_remote_url'] = GIT_REMOTE_URL
new_config['General']['cur_commit_hash'] = CUR_COMMIT_HASH
new_config['General']['cur_commit_branch'] = CUR_COMMIT_BRANCH
new_config['General']['git_newver'] = int(GIT_NEWVER)
new_config['General']['config_version'] = CONFIG_VERSION
new_config['General']['encryption_version'] = int(ENCRYPTION_VERSION)
new_config['General']['encryption_secret'] = ENCRYPTION_SECRET
new_config['General']['log_dir'] = ACTUAL_LOG_DIR if ACTUAL_LOG_DIR else 'Logs'
new_config['General']['log_nr'] = int(LOG_NR)
new_config['General']['log_size'] = int(LOG_SIZE)
new_config['General']['socket_timeout'] = SOCKET_TIMEOUT
new_config['General']['web_port'] = WEB_PORT
new_config['General']['web_host'] = WEB_HOST
new_config['General']['web_ipv6'] = int(WEB_IPV6)
new_config['General']['web_log'] = int(WEB_LOG)
new_config['General']['web_root'] = WEB_ROOT
new_config['General']['web_username'] = WEB_USERNAME
new_config['General']['web_password'] = helpers.encrypt(WEB_PASSWORD, ENCRYPTION_VERSION)
new_config['General']['web_cookie_secret'] = WEB_COOKIE_SECRET
new_config['General']['web_use_gzip'] = int(WEB_USE_GZIP)
new_config['General']['ssl_verify'] = int(SSL_VERIFY)
new_config['General']['download_url'] = DOWNLOAD_URL
new_config['General']['localhost_ip'] = LOCALHOST_IP
new_config['General']['cpu_preset'] = CPU_PRESET
new_config['General']['anon_redirect'] = ANON_REDIRECT
new_config['General']['api_key'] = API_KEY
new_config['General']['debug'] = int(DEBUG)
new_config['General']['default_page'] = DEFAULT_PAGE
new_config['General']['enable_https'] = int(ENABLE_HTTPS)
new_config['General']['https_cert'] = HTTPS_CERT
new_config['General']['https_key'] = HTTPS_KEY
new_config['General']['handle_reverse_proxy'] = int(HANDLE_REVERSE_PROXY)
new_config['General']['use_nzbs'] = int(USE_NZBS)
new_config['General']['use_torrents'] = int(USE_TORRENTS)
new_config['General']['nzb_method'] = NZB_METHOD
new_config['General']['torrent_method'] = TORRENT_METHOD
new_config['General']['usenet_retention'] = int(USENET_RETENTION)
new_config['General']['autopostprocesser_frequency'] = int(AUTOPOSTPROCESSER_FREQUENCY)
new_config['General']['dailysearch_frequency'] = int(DAILYSEARCH_FREQUENCY)
new_config['General']['backlog_frequency'] = int(BACKLOG_FREQUENCY)
new_config['General']['update_frequency'] = int(UPDATE_FREQUENCY)
new_config['General']['showupdate_hour'] = int(SHOWUPDATE_HOUR)
new_config['General']['download_propers'] = int(DOWNLOAD_PROPERS)
new_config['General']['randomize_providers'] = int(RANDOMIZE_PROVIDERS)
new_config['General']['check_propers_interval'] = CHECK_PROPERS_INTERVAL
new_config['General']['allow_high_priority'] = int(ALLOW_HIGH_PRIORITY)
new_config['General']['skip_removed_files'] = int(SKIP_REMOVED_FILES)
new_config['General']['quality_default'] = int(QUALITY_DEFAULT)
new_config['General']['status_default'] = int(STATUS_DEFAULT)
new_config['General']['status_default_after'] = int(STATUS_DEFAULT_AFTER)
new_config['General']['flatten_folders_default'] = int(FLATTEN_FOLDERS_DEFAULT)
new_config['General']['indexer_default'] = int(INDEXER_DEFAULT)
new_config['General']['indexer_timeout'] = int(INDEXER_TIMEOUT)
new_config['General']['anime_default'] = int(ANIME_DEFAULT)
new_config['General']['scene_default'] = int(SCENE_DEFAULT)
new_config['General']['archive_default'] = int(ARCHIVE_DEFAULT)
new_config['General']['provider_order'] = ' '.join(PROVIDER_ORDER)
new_config['General']['version_notify'] = int(VERSION_NOTIFY)
new_config['General']['auto_update'] = int(AUTO_UPDATE)
new_config['General']['notify_on_update'] = int(NOTIFY_ON_UPDATE)
new_config['General']['naming_strip_year'] = int(NAMING_STRIP_YEAR)
new_config['General']['naming_pattern'] = NAMING_PATTERN
new_config['General']['naming_custom_abd'] = int(NAMING_CUSTOM_ABD)
new_config['General']['naming_abd_pattern'] = NAMING_ABD_PATTERN
new_config['General']['naming_custom_sports'] = int(NAMING_CUSTOM_SPORTS)
new_config['General']['naming_sports_pattern'] = NAMING_SPORTS_PATTERN
new_config['General']['naming_custom_anime'] = int(NAMING_CUSTOM_ANIME)
new_config['General']['naming_anime_pattern'] = NAMING_ANIME_PATTERN
new_config['General']['naming_multi_ep'] = int(NAMING_MULTI_EP)
new_config['General']['naming_anime_multi_ep'] = int(NAMING_ANIME_MULTI_EP)
new_config['General']['naming_anime'] = int(NAMING_ANIME)
new_config['General']['indexerDefaultLang'] = INDEXER_DEFAULT_LANGUAGE
new_config['General']['ep_default_deleted_status'] = int(EP_DEFAULT_DELETED_STATUS)
new_config['General']['launch_browser'] = int(LAUNCH_BROWSER)
new_config['General']['trash_remove_show'] = int(TRASH_REMOVE_SHOW)
new_config['General']['trash_rotate_logs'] = int(TRASH_ROTATE_LOGS)
new_config['General']['sort_article'] = int(SORT_ARTICLE)
new_config['General']['proxy_setting'] = PROXY_SETTING
new_config['General']['proxy_indexers'] = int(PROXY_INDEXERS)
new_config['General']['use_listview'] = int(USE_LISTVIEW)
new_config['General']['metadata_kodi'] = METADATA_KODI
new_config['General']['metadata_kodi_12plus'] = METADATA_KODI_12PLUS
new_config['General']['metadata_mediabrowser'] = METADATA_MEDIABROWSER
new_config['General']['metadata_ps3'] = METADATA_PS3
new_config['General']['metadata_wdtv'] = METADATA_WDTV
new_config['General']['metadata_tivo'] = METADATA_TIVO
new_config['General']['metadata_mede8er'] = METADATA_MEDE8ER
new_config['General']['backlog_days'] = int(BACKLOG_DAYS)
new_config['General']['cache_dir'] = ACTUAL_CACHE_DIR if ACTUAL_CACHE_DIR else 'cache'
new_config['General']['root_dirs'] = ROOT_DIRS if ROOT_DIRS else ''
new_config['General']['tv_download_dir'] = TV_DOWNLOAD_DIR
new_config['General']['keep_processed_dir'] = int(KEEP_PROCESSED_DIR)
new_config['General']['process_method'] = PROCESS_METHOD
new_config['General']['del_rar_contents'] = int(DELRARCONTENTS)
new_config['General']['move_associated_files'] = int(MOVE_ASSOCIATED_FILES)
new_config['General']['sync_files'] = SYNC_FILES
new_config['General']['postpone_if_sync_files'] = int(POSTPONE_IF_SYNC_FILES)
new_config['General']['nfo_rename'] = int(NFO_RENAME)
new_config['General']['process_automatically'] = int(PROCESS_AUTOMATICALLY)
new_config['General']['no_delete'] = int(NO_DELETE)
new_config['General']['unpack'] = int(UNPACK)
new_config['General']['rename_episodes'] = int(RENAME_EPISODES)
new_config['General']['airdate_episodes'] = int(AIRDATE_EPISODES)
new_config['General']['file_timestamp_timezone'] = FILE_TIMESTAMP_TIMEZONE
new_config['General']['create_missing_show_dirs'] = int(CREATE_MISSING_SHOW_DIRS)
new_config['General']['add_shows_wo_dir'] = int(ADD_SHOWS_WO_DIR)
new_config['General']['extra_scripts'] = '|'.join(EXTRA_SCRIPTS)
new_config['General']['git_path'] = GIT_PATH
new_config['General']['ignore_words'] = IGNORE_WORDS
new_config['General']['require_words'] = REQUIRE_WORDS
new_config['General']['ignored_subs_list'] = IGNORED_SUBS_LIST
new_config['General']['calendar_unprotected'] = int(CALENDAR_UNPROTECTED)
new_config['General']['no_restart'] = int(NO_RESTART)
new_config['General']['developer'] = int(DEVELOPER)
new_config['General']['display_all_seasons'] = int(DISPLAY_ALL_SEASONS)
new_config['General']['news_last_read'] = NEWS_LAST_READ
new_config['Blackhole'] = {}
new_config['Blackhole']['nzb_dir'] = NZB_DIR
new_config['Blackhole']['torrent_dir'] = TORRENT_DIR
# dynamically save provider settings
for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if
curProvider.providerType == GenericProvider.TORRENT]:
new_config[curTorrentProvider.getID().upper()] = {}
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID()] = int(curTorrentProvider.enabled)
if hasattr(curTorrentProvider, 'digest'):
new_config[curTorrentProvider.getID().upper()][
curTorrentProvider.getID() + '_digest'] = curTorrentProvider.digest
if hasattr(curTorrentProvider, 'hash'):
new_config[curTorrentProvider.getID().upper()][
curTorrentProvider.getID() + '_hash'] = curTorrentProvider.hash
if hasattr(curTorrentProvider, 'api_key'):
new_config[curTorrentProvider.getID().upper()][
curTorrentProvider.getID() + '_api_key'] = curTorrentProvider.api_key
if hasattr(curTorrentProvider, 'username'):
new_config[curTorrentProvider.getID().upper()][
curTorrentProvider.getID() + '_username'] = curTorrentProvider.username
if hasattr(curTorrentProvider, 'password'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_password'] = helpers.encrypt(
curTorrentProvider.password, ENCRYPTION_VERSION)
if hasattr(curTorrentProvider, 'passkey'):
new_config[curTorrentProvider.getID().upper()][
curTorrentProvider.getID() + '_passkey'] = curTorrentProvider.passkey
if hasattr(curTorrentProvider, 'confirmed'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_confirmed'] = int(
curTorrentProvider.confirmed)
if hasattr(curTorrentProvider, 'ranked'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_ranked'] = int(
curTorrentProvider.ranked)
if hasattr(curTorrentProvider, 'engrelease'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_engrelease'] = int(
curTorrentProvider.engrelease)
if hasattr(curTorrentProvider, 'sorting'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_sorting'] = curTorrentProvider.sorting
if hasattr(curTorrentProvider, 'ratio'):
new_config[curTorrentProvider.getID().upper()][
curTorrentProvider.getID() + '_ratio'] = curTorrentProvider.ratio
if hasattr(curTorrentProvider, 'minseed'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_minseed'] = int(
curTorrentProvider.minseed)
if hasattr(curTorrentProvider, 'minleech'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_minleech'] = int(
curTorrentProvider.minleech)
if hasattr(curTorrentProvider, 'options'):
new_config[curTorrentProvider.getID().upper()][
curTorrentProvider.getID() + '_options'] = curTorrentProvider.options
if hasattr(curTorrentProvider, 'proxy'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_proxy'] = int(
curTorrentProvider.proxy.enabled)
if hasattr(curTorrentProvider.proxy, 'url'):
new_config[curTorrentProvider.getID().upper()][
curTorrentProvider.getID() + '_proxy_url'] = curTorrentProvider.proxy.url
if hasattr(curTorrentProvider, 'freeleech'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_freeleech'] = int(
curTorrentProvider.freeleech)
if hasattr(curTorrentProvider, 'search_mode'):
new_config[curTorrentProvider.getID().upper()][
curTorrentProvider.getID() + '_search_mode'] = curTorrentProvider.search_mode
if hasattr(curTorrentProvider, 'search_fallback'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_search_fallback'] = int(
curTorrentProvider.search_fallback)
if hasattr(curTorrentProvider, 'enable_daily'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_enable_daily'] = int(
curTorrentProvider.enable_daily)
if hasattr(curTorrentProvider, 'enable_backlog'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_enable_backlog'] = int(
curTorrentProvider.enable_backlog)
if hasattr(curTorrentProvider, 'cat'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_cat'] = int(
curTorrentProvider.cat)
if hasattr(curTorrentProvider, 'subtitle'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_subtitle'] = int(
curTorrentProvider.subtitle)
for curNzbProvider in [curProvider for curProvider in providers.sortedProviderList() if
curProvider.providerType == GenericProvider.NZB]:
new_config[curNzbProvider.getID().upper()] = {}
new_config[curNzbProvider.getID().upper()][curNzbProvider.getID()] = int(curNzbProvider.enabled)
if hasattr(curNzbProvider, 'api_key'):
new_config[curNzbProvider.getID().upper()][
curNzbProvider.getID() + '_api_key'] = curNzbProvider.api_key
if hasattr(curNzbProvider, 'username'):
new_config[curNzbProvider.getID().upper()][
curNzbProvider.getID() + '_username'] = curNzbProvider.username
if hasattr(curNzbProvider, 'search_mode'):
new_config[curNzbProvider.getID().upper()][
curNzbProvider.getID() + '_search_mode'] = curNzbProvider.search_mode
if hasattr(curNzbProvider, 'search_fallback'):
new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_search_fallback'] = int(
curNzbProvider.search_fallback)
if hasattr(curNzbProvider, 'enable_daily'):
new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_enable_daily'] = int(
curNzbProvider.enable_daily)
if hasattr(curNzbProvider, 'enable_backlog'):
new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_enable_backlog'] = int(
curNzbProvider.enable_backlog)
new_config['NZBs'] = {}
new_config['NZBs']['nzbs'] = int(NZBS)
new_config['NZBs']['nzbs_uid'] = NZBS_UID
new_config['NZBs']['nzbs_hash'] = NZBS_HASH
new_config['Newzbin'] = {}
new_config['Newzbin']['newzbin'] = int(NEWZBIN)
new_config['Newzbin']['newzbin_username'] = NEWZBIN_USERNAME
new_config['Newzbin']['newzbin_password'] = helpers.encrypt(NEWZBIN_PASSWORD, ENCRYPTION_VERSION)
new_config['SABnzbd'] = {}
new_config['SABnzbd']['sab_username'] = SAB_USERNAME
new_config['SABnzbd']['sab_password'] = helpers.encrypt(SAB_PASSWORD, ENCRYPTION_VERSION)
new_config['SABnzbd']['sab_apikey'] = SAB_APIKEY
new_config['SABnzbd']['sab_category'] = SAB_CATEGORY
new_config['SABnzbd']['sab_category_anime'] = SAB_CATEGORY_ANIME
new_config['SABnzbd']['sab_host'] = SAB_HOST
new_config['SABnzbd']['sab_forced'] = int(SAB_FORCED)
new_config['NZBget'] = {}
new_config['NZBget']['nzbget_username'] = NZBGET_USERNAME
new_config['NZBget']['nzbget_password'] = helpers.encrypt(NZBGET_PASSWORD, ENCRYPTION_VERSION)
new_config['NZBget']['nzbget_category'] = NZBGET_CATEGORY
new_config['NZBget']['nzbget_category_anime'] = NZBGET_CATEGORY_ANIME
new_config['NZBget']['nzbget_host'] = NZBGET_HOST
new_config['NZBget']['nzbget_use_https'] = int(NZBGET_USE_HTTPS)
new_config['NZBget']['nzbget_priority'] = NZBGET_PRIORITY
new_config['TORRENT'] = {}
new_config['TORRENT']['torrent_username'] = TORRENT_USERNAME
new_config['TORRENT']['torrent_password'] = helpers.encrypt(TORRENT_PASSWORD, ENCRYPTION_VERSION)
new_config['TORRENT']['torrent_host'] = TORRENT_HOST
new_config['TORRENT']['torrent_path'] = TORRENT_PATH
new_config['TORRENT']['torrent_seed_time'] = int(TORRENT_SEED_TIME)
new_config['TORRENT']['torrent_paused'] = int(TORRENT_PAUSED)
new_config['TORRENT']['torrent_high_bandwidth'] = int(TORRENT_HIGH_BANDWIDTH)
new_config['TORRENT']['torrent_label'] = TORRENT_LABEL
new_config['TORRENT']['torrent_label_anime'] = TORRENT_LABEL_ANIME
new_config['TORRENT']['torrent_verify_cert'] = int(TORRENT_VERIFY_CERT)
new_config['TORRENT']['torrent_rpcurl'] = TORRENT_RPCURL
new_config['TORRENT']['torrent_auth_type'] = TORRENT_AUTH_TYPE
new_config['KODI'] = {}
new_config['KODI']['use_kodi'] = int(USE_KODI)
new_config['KODI']['kodi_always_on'] = int(KODI_ALWAYS_ON)
new_config['KODI']['kodi_notify_onsnatch'] = int(KODI_NOTIFY_ONSNATCH)
new_config['KODI']['kodi_notify_ondownload'] = int(KODI_NOTIFY_ONDOWNLOAD)
new_config['KODI']['kodi_notify_onsubtitledownload'] = int(KODI_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['KODI']['kodi_update_library'] = int(KODI_UPDATE_LIBRARY)
new_config['KODI']['kodi_update_full'] = int(KODI_UPDATE_FULL)
new_config['KODI']['kodi_update_onlyfirst'] = int(KODI_UPDATE_ONLYFIRST)
new_config['KODI']['kodi_host'] = KODI_HOST
new_config['KODI']['kodi_username'] = KODI_USERNAME
new_config['KODI']['kodi_password'] = helpers.encrypt(KODI_PASSWORD, ENCRYPTION_VERSION)
new_config['Plex'] = {}
new_config['Plex']['use_plex'] = int(USE_PLEX)
new_config['Plex']['plex_notify_onsnatch'] = int(PLEX_NOTIFY_ONSNATCH)
new_config['Plex']['plex_notify_ondownload'] = int(PLEX_NOTIFY_ONDOWNLOAD)
new_config['Plex']['plex_notify_onsubtitledownload'] = int(PLEX_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Plex']['plex_update_library'] = int(PLEX_UPDATE_LIBRARY)
new_config['Plex']['plex_server_host'] = PLEX_SERVER_HOST
new_config['Plex']['plex_server_token'] = PLEX_SERVER_TOKEN
new_config['Plex']['plex_host'] = PLEX_HOST
new_config['Plex']['plex_username'] = PLEX_USERNAME
new_config['Plex']['plex_password'] = helpers.encrypt(PLEX_PASSWORD, ENCRYPTION_VERSION)
new_config['Emby'] = {}
new_config['Emby']['use_emby'] = int(USE_EMBY)
new_config['Emby']['emby_host'] = EMBY_HOST
new_config['Emby']['emby_apikey'] = EMBY_APIKEY
new_config['Growl'] = {}
new_config['Growl']['use_growl'] = int(USE_GROWL)
new_config['Growl']['growl_notify_onsnatch'] = int(GROWL_NOTIFY_ONSNATCH)
new_config['Growl']['growl_notify_ondownload'] = int(GROWL_NOTIFY_ONDOWNLOAD)
new_config['Growl']['growl_notify_onsubtitledownload'] = int(GROWL_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Growl']['growl_host'] = GROWL_HOST
new_config['Growl']['growl_password'] = helpers.encrypt(GROWL_PASSWORD, ENCRYPTION_VERSION)
new_config['FreeMobile'] = {}
new_config['FreeMobile']['use_freemobile'] = int(USE_FREEMOBILE)
new_config['FreeMobile']['freemobile_notify_onsnatch'] = int(FREEMOBILE_NOTIFY_ONSNATCH)
new_config['FreeMobile']['freemobile_notify_ondownload'] = int(FREEMOBILE_NOTIFY_ONDOWNLOAD)
new_config['FreeMobile']['freemobile_notify_onsubtitledownload'] = int(FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['FreeMobile']['freemobile_id'] = FREEMOBILE_ID
new_config['FreeMobile']['freemobile_apikey'] = FREEMOBILE_APIKEY
new_config['Prowl'] = {}
new_config['Prowl']['use_prowl'] = int(USE_PROWL)
new_config['Prowl']['prowl_notify_onsnatch'] = int(PROWL_NOTIFY_ONSNATCH)
new_config['Prowl']['prowl_notify_ondownload'] = int(PROWL_NOTIFY_ONDOWNLOAD)
new_config['Prowl']['prowl_notify_onsubtitledownload'] = int(PROWL_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Prowl']['prowl_api'] = PROWL_API
new_config['Prowl']['prowl_priority'] = PROWL_PRIORITY
new_config['Twitter'] = {}
new_config['Twitter']['use_twitter'] = int(USE_TWITTER)
new_config['Twitter']['twitter_notify_onsnatch'] = int(TWITTER_NOTIFY_ONSNATCH)
new_config['Twitter']['twitter_notify_ondownload'] = int(TWITTER_NOTIFY_ONDOWNLOAD)
new_config['Twitter']['twitter_notify_onsubtitledownload'] = int(TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Twitter']['twitter_username'] = TWITTER_USERNAME
new_config['Twitter']['twitter_password'] = helpers.encrypt(TWITTER_PASSWORD, ENCRYPTION_VERSION)
new_config['Twitter']['twitter_prefix'] = TWITTER_PREFIX
new_config['Twitter']['twitter_dmto'] = TWITTER_DMTO
new_config['Twitter']['twitter_usedm'] = int(TWITTER_USEDM)
new_config['Boxcar'] = {}
new_config['Boxcar']['use_boxcar'] = int(USE_BOXCAR)
new_config['Boxcar']['boxcar_notify_onsnatch'] = int(BOXCAR_NOTIFY_ONSNATCH)
new_config['Boxcar']['boxcar_notify_ondownload'] = int(BOXCAR_NOTIFY_ONDOWNLOAD)
new_config['Boxcar']['boxcar_notify_onsubtitledownload'] = int(BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Boxcar']['boxcar_username'] = BOXCAR_USERNAME
new_config['Boxcar2'] = {}
new_config['Boxcar2']['use_boxcar2'] = int(USE_BOXCAR2)
new_config['Boxcar2']['boxcar2_notify_onsnatch'] = int(BOXCAR2_NOTIFY_ONSNATCH)
new_config['Boxcar2']['boxcar2_notify_ondownload'] = int(BOXCAR2_NOTIFY_ONDOWNLOAD)
new_config['Boxcar2']['boxcar2_notify_onsubtitledownload'] = int(BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Boxcar2']['boxcar2_accesstoken'] = BOXCAR2_ACCESSTOKEN
new_config['Pushover'] = {}
new_config['Pushover']['use_pushover'] = int(USE_PUSHOVER)
new_config['Pushover']['pushover_notify_onsnatch'] = int(PUSHOVER_NOTIFY_ONSNATCH)
new_config['Pushover']['pushover_notify_ondownload'] = int(PUSHOVER_NOTIFY_ONDOWNLOAD)
new_config['Pushover']['pushover_notify_onsubtitledownload'] = int(PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Pushover']['pushover_userkey'] = PUSHOVER_USERKEY
new_config['Pushover']['pushover_apikey'] = PUSHOVER_APIKEY
new_config['Pushover']['pushover_device'] = PUSHOVER_DEVICE
new_config['Pushover']['pushover_sound'] = PUSHOVER_SOUND
new_config['Libnotify'] = {}
new_config['Libnotify']['use_libnotify'] = int(USE_LIBNOTIFY)
new_config['Libnotify']['libnotify_notify_onsnatch'] = int(LIBNOTIFY_NOTIFY_ONSNATCH)
new_config['Libnotify']['libnotify_notify_ondownload'] = int(LIBNOTIFY_NOTIFY_ONDOWNLOAD)
new_config['Libnotify']['libnotify_notify_onsubtitledownload'] = int(LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['NMJ'] = {}
new_config['NMJ']['use_nmj'] = int(USE_NMJ)
new_config['NMJ']['nmj_host'] = NMJ_HOST
new_config['NMJ']['nmj_database'] = NMJ_DATABASE
new_config['NMJ']['nmj_mount'] = NMJ_MOUNT
new_config['NMJv2'] = {}
new_config['NMJv2']['use_nmjv2'] = int(USE_NMJv2)
new_config['NMJv2']['nmjv2_host'] = NMJv2_HOST
new_config['NMJv2']['nmjv2_database'] = NMJv2_DATABASE
new_config['NMJv2']['nmjv2_dbloc'] = NMJv2_DBLOC
new_config['Synology'] = {}
new_config['Synology']['use_synoindex'] = int(USE_SYNOINDEX)
new_config['SynologyNotifier'] = {}
new_config['SynologyNotifier']['use_synologynotifier'] = int(USE_SYNOLOGYNOTIFIER)
new_config['SynologyNotifier']['synologynotifier_notify_onsnatch'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH)
new_config['SynologyNotifier']['synologynotifier_notify_ondownload'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD)
new_config['SynologyNotifier']['synologynotifier_notify_onsubtitledownload'] = int(
SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Trakt'] = {}
new_config['Trakt']['use_trakt'] = int(USE_TRAKT)
new_config['Trakt']['trakt_username'] = TRAKT_USERNAME
new_config['Trakt']['trakt_access_token'] = TRAKT_ACCESS_TOKEN
new_config['Trakt']['trakt_refresh_token'] = TRAKT_REFRESH_TOKEN
new_config['Trakt']['trakt_remove_watchlist'] = int(TRAKT_REMOVE_WATCHLIST)
new_config['Trakt']['trakt_remove_serieslist'] = int(TRAKT_REMOVE_SERIESLIST)
new_config['Trakt']['trakt_remove_show_from_sickrage'] = int(TRAKT_REMOVE_SHOW_FROM_SICKRAGE)
new_config['Trakt']['trakt_sync_watchlist'] = int(TRAKT_SYNC_WATCHLIST)
new_config['Trakt']['trakt_method_add'] = int(TRAKT_METHOD_ADD)
new_config['Trakt']['trakt_start_paused'] = int(TRAKT_START_PAUSED)
new_config['Trakt']['trakt_use_recommended'] = int(TRAKT_USE_RECOMMENDED)
new_config['Trakt']['trakt_sync'] = int(TRAKT_SYNC)
new_config['Trakt']['trakt_sync_remove'] = int(TRAKT_SYNC_REMOVE)
new_config['Trakt']['trakt_default_indexer'] = int(TRAKT_DEFAULT_INDEXER)
new_config['Trakt']['trakt_timeout'] = int(TRAKT_TIMEOUT)
new_config['Trakt']['trakt_blacklist_name'] = TRAKT_BLACKLIST_NAME
new_config['pyTivo'] = {}
new_config['pyTivo']['use_pytivo'] = int(USE_PYTIVO)
new_config['pyTivo']['pytivo_notify_onsnatch'] = int(PYTIVO_NOTIFY_ONSNATCH)
new_config['pyTivo']['pytivo_notify_ondownload'] = int(PYTIVO_NOTIFY_ONDOWNLOAD)
new_config['pyTivo']['pytivo_notify_onsubtitledownload'] = int(PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['pyTivo']['pyTivo_update_library'] = int(PYTIVO_UPDATE_LIBRARY)
new_config['pyTivo']['pytivo_host'] = PYTIVO_HOST
new_config['pyTivo']['pytivo_share_name'] = PYTIVO_SHARE_NAME
new_config['pyTivo']['pytivo_tivo_name'] = PYTIVO_TIVO_NAME
new_config['NMA'] = {}
new_config['NMA']['use_nma'] = int(USE_NMA)
new_config['NMA']['nma_notify_onsnatch'] = int(NMA_NOTIFY_ONSNATCH)
new_config['NMA']['nma_notify_ondownload'] = int(NMA_NOTIFY_ONDOWNLOAD)
new_config['NMA']['nma_notify_onsubtitledownload'] = int(NMA_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['NMA']['nma_api'] = NMA_API
new_config['NMA']['nma_priority'] = NMA_PRIORITY
new_config['Pushalot'] = {}
new_config['Pushalot']['use_pushalot'] = int(USE_PUSHALOT)
new_config['Pushalot']['pushalot_notify_onsnatch'] = int(PUSHALOT_NOTIFY_ONSNATCH)
new_config['Pushalot']['pushalot_notify_ondownload'] = int(PUSHALOT_NOTIFY_ONDOWNLOAD)
new_config['Pushalot']['pushalot_notify_onsubtitledownload'] = int(PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Pushalot']['pushalot_authorizationtoken'] = PUSHALOT_AUTHORIZATIONTOKEN
new_config['Pushbullet'] = {}
new_config['Pushbullet']['use_pushbullet'] = int(USE_PUSHBULLET)
new_config['Pushbullet']['pushbullet_notify_onsnatch'] = int(PUSHBULLET_NOTIFY_ONSNATCH)
new_config['Pushbullet']['pushbullet_notify_ondownload'] = int(PUSHBULLET_NOTIFY_ONDOWNLOAD)
new_config['Pushbullet']['pushbullet_notify_onsubtitledownload'] = int(PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Pushbullet']['pushbullet_api'] = PUSHBULLET_API
new_config['Pushbullet']['pushbullet_device'] = PUSHBULLET_DEVICE
new_config['Email'] = {}
new_config['Email']['use_email'] = int(USE_EMAIL)
new_config['Email']['email_notify_onsnatch'] = int(EMAIL_NOTIFY_ONSNATCH)
new_config['Email']['email_notify_ondownload'] = int(EMAIL_NOTIFY_ONDOWNLOAD)
new_config['Email']['email_notify_onsubtitledownload'] = int(EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Email']['email_host'] = EMAIL_HOST
new_config['Email']['email_port'] = int(EMAIL_PORT)
new_config['Email']['email_tls'] = int(EMAIL_TLS)
new_config['Email']['email_user'] = EMAIL_USER
new_config['Email']['email_password'] = helpers.encrypt(EMAIL_PASSWORD, ENCRYPTION_VERSION)
new_config['Email']['email_from'] = EMAIL_FROM
new_config['Email']['email_list'] = EMAIL_LIST
new_config['Newznab'] = {}
new_config['Newznab']['newznab_data'] = NEWZNAB_DATA
new_config['TorrentRss'] = {}
new_config['TorrentRss']['torrentrss_data'] = '!!!'.join([x.configStr() for x in torrentRssProviderList])
new_config['GUI'] = {}
new_config['GUI']['gui_name'] = GUI_NAME
new_config['GUI']['theme_name'] = THEME_NAME
new_config['GUI']['home_layout'] = HOME_LAYOUT
new_config['GUI']['history_layout'] = HISTORY_LAYOUT
new_config['GUI']['history_limit'] = HISTORY_LIMIT
new_config['GUI']['display_show_specials'] = int(DISPLAY_SHOW_SPECIALS)
new_config['GUI']['coming_eps_layout'] = COMING_EPS_LAYOUT
new_config['GUI']['coming_eps_display_paused'] = int(COMING_EPS_DISPLAY_PAUSED)
new_config['GUI']['coming_eps_sort'] = COMING_EPS_SORT
new_config['GUI']['coming_eps_missed_range'] = int(COMING_EPS_MISSED_RANGE)
new_config['GUI']['fuzzy_dating'] = int(FUZZY_DATING)
new_config['GUI']['trim_zero'] = int(TRIM_ZERO)
new_config['GUI']['date_preset'] = DATE_PRESET
new_config['GUI']['time_preset'] = TIME_PRESET_W_SECONDS
new_config['GUI']['timezone_display'] = TIMEZONE_DISPLAY
new_config['GUI']['poster_sortby'] = POSTER_SORTBY
new_config['GUI']['poster_sortdir'] = POSTER_SORTDIR
new_config['GUI']['filter_row'] = int(FILTER_ROW)
new_config['Subtitles'] = {}
new_config['Subtitles']['use_subtitles'] = int(USE_SUBTITLES)
new_config['Subtitles']['subtitles_languages'] = ','.join(SUBTITLES_LANGUAGES)
new_config['Subtitles']['SUBTITLES_SERVICES_LIST'] = ','.join(SUBTITLES_SERVICES_LIST)
new_config['Subtitles']['SUBTITLES_SERVICES_ENABLED'] = '|'.join([str(x) for x in SUBTITLES_SERVICES_ENABLED])
new_config['Subtitles']['subtitles_dir'] = SUBTITLES_DIR
new_config['Subtitles']['subtitles_default'] = int(SUBTITLES_DEFAULT)
new_config['Subtitles']['subtitles_history'] = int(SUBTITLES_HISTORY)
new_config['Subtitles']['embedded_subtitles_all'] = int(EMBEDDED_SUBTITLES_ALL)
new_config['Subtitles']['subtitles_hearing_impaired'] = int(SUBTITLES_HEARING_IMPAIRED)
new_config['Subtitles']['subtitles_finder_frequency'] = int(SUBTITLES_FINDER_FREQUENCY)
new_config['Subtitles']['subtitles_multi'] = int(SUBTITLES_MULTI)
new_config['Subtitles']['subtitles_extra_scripts'] = '|'.join(SUBTITLES_EXTRA_SCRIPTS)
new_config['Subtitles']['addic7ed_username'] = ADDIC7ED_USER
new_config['Subtitles']['addic7ed_password'] = helpers.encrypt(ADDIC7ED_PASS, ENCRYPTION_VERSION)
new_config['Subtitles']['legendastv_username'] = LEGENDASTV_USER
new_config['Subtitles']['legendastv_password'] = helpers.encrypt(LEGENDASTV_PASS, ENCRYPTION_VERSION)
new_config['Subtitles']['opensubtitles_username'] = OPENSUBTITLES_USER
new_config['Subtitles']['opensubtitles_password'] = helpers.encrypt(OPENSUBTITLES_PASS, ENCRYPTION_VERSION)
new_config['FailedDownloads'] = {}
new_config['FailedDownloads']['use_failed_downloads'] = int(USE_FAILED_DOWNLOADS)
new_config['FailedDownloads']['delete_failed'] = int(DELETE_FAILED)
new_config['ANIDB'] = {}
new_config['ANIDB']['use_anidb'] = int(USE_ANIDB)
new_config['ANIDB']['anidb_username'] = ANIDB_USERNAME
new_config['ANIDB']['anidb_password'] = helpers.encrypt(ANIDB_PASSWORD, ENCRYPTION_VERSION)
new_config['ANIDB']['anidb_use_mylist'] = int(ANIDB_USE_MYLIST)
new_config['ANIME'] = {}
new_config['ANIME']['anime_split_home'] = int(ANIME_SPLIT_HOME)
new_config.write()
def launchBrowser(protocol='http', startPort=None, web_root='/'):
if not startPort:
startPort = WEB_PORT
browserURL = '%s://localhost:%d%s/home/' % (protocol, startPort, web_root)
try:
webbrowser.open(browserURL, 2, 1)
except Exception:
try:
webbrowser.open(browserURL, 1, 1)
except Exception:
logger.log(u"Unable to launch a browser", logger.ERROR)
def getEpList(epIDs, showid=None):
if epIDs is None or len(epIDs) == 0:
return []
query = "SELECT * FROM tv_episodes WHERE indexerid in (%s)" % (",".join(['?'] * len(epIDs)),)
params = epIDs
if showid is not None:
query += " AND showid = ?"
params.append(showid)
myDB = db.DBConnection()
sqlResults = myDB.select(query, params)
epList = []
for curEp in sqlResults:
curShowObj = helpers.findCertainShow(showList, int(curEp["showid"]))
curEpObj = curShowObj.getEpisode(int(curEp["season"]), int(curEp["episode"]))
epList.append(curEpObj)
return epList
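# Minimal usage sketch, assuming an initialised SickRage environment; the port,
# web root and episode ids below are made-up placeholder values.
if __name__ == '__main__':
    launchBrowser(protocol='http', startPort=8081, web_root='/sickrage')
    example_eps = getEpList([257655, 257656], showid=75897)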
|
SerialShadow/SickRage
|
sickbeard/__init__.py
|
Python
|
gpl-3.0
| 114,709 | 0.005048 |
"""Utility code for constructing importers, etc."""
from ._bootstrap import MAGIC_NUMBER
from ._bootstrap import cache_from_source
from ._bootstrap import decode_source
from ._bootstrap import source_from_cache
from ._bootstrap import spec_from_loader
from ._bootstrap import spec_from_file_location
from ._bootstrap import _resolve_name
from ._bootstrap import _find_spec
from contextlib import contextmanager
import functools
import sys
import warnings
def resolve_name(name, package):
"""Resolve a relative module name to an absolute one."""
if not name.startswith('.'):
return name
elif not package:
raise ValueError('no package specified for {!r} (required for relative module names)'.format(name))
level = 0
for character in name:
if character != '.':
break
level += 1
return _resolve_name(name[level:], package, level)
def _find_spec_from_path(name, path=None):
"""Return the spec for the specified module.
First, sys.modules is checked to see if the module was already imported. If
so, then sys.modules[name].__spec__ is returned. If that happens to be
set to None, then ValueError is raised. If the module is not in
sys.modules, then sys.meta_path is searched for a suitable spec with the
value of 'path' given to the finders. None is returned if no spec could
be found.
Dotted names do not have their parent packages implicitly imported. You will
most likely need to explicitly import all parent packages in the proper
order for a submodule to get the correct spec.
"""
if name not in sys.modules:
return _find_spec(name, path)
else:
module = sys.modules[name]
if module is None:
return None
try:
spec = module.__spec__
except AttributeError:
raise ValueError('{}.__spec__ is not set'.format(name))
else:
if spec is None:
raise ValueError('{}.__spec__ is None'.format(name))
return spec
def find_spec(name, package=None):
"""Return the spec for the specified module.
First, sys.modules is checked to see if the module was already imported. If
so, then sys.modules[name].__spec__ is returned. If that happens to be
set to None, then ValueError is raised. If the module is not in
sys.modules, then sys.meta_path is searched for a suitable spec with the
value of 'path' given to the finders. None is returned if no spec could
be found.
If the name is for a submodule (contains a dot), the parent module is
automatically imported.
The name and package arguments work the same as importlib.import_module().
In other words, relative module names (with leading dots) work.
"""
fullname = resolve_name(name, package) if name.startswith('.') else name
if fullname not in sys.modules:
parent_name = fullname.rpartition('.')[0]
if parent_name:
# Use builtins.__import__() in case someone replaced it.
parent = __import__(parent_name, fromlist=['__path__'])
return _find_spec(fullname, parent.__path__)
else:
return _find_spec(fullname, None)
else:
module = sys.modules[fullname]
if module is None:
return None
try:
spec = module.__spec__
except AttributeError:
raise ValueError('{}.__spec__ is not set'.format(name))
else:
if spec is None:
raise ValueError('{}.__spec__ is None'.format(name))
return spec
@contextmanager
def _module_to_load(name):
is_reload = name in sys.modules
module = sys.modules.get(name)
if not is_reload:
# This must be done before open() is called as the 'io' module
# implicitly imports 'locale' and would otherwise trigger an
# infinite loop.
module = type(sys)(name)
# This must be done before putting the module in sys.modules
# (otherwise an optimization shortcut in import.c becomes wrong)
module.__initializing__ = True
sys.modules[name] = module
try:
yield module
except Exception:
if not is_reload:
try:
del sys.modules[name]
except KeyError:
pass
finally:
module.__initializing__ = False
def set_package(fxn):
"""Set __package__ on the returned module.
This function is deprecated.
"""
@functools.wraps(fxn)
def set_package_wrapper(*args, **kwargs):
warnings.warn('The import system now takes care of this automatically.',
DeprecationWarning, stacklevel=2)
module = fxn(*args, **kwargs)
if getattr(module, '__package__', None) is None:
module.__package__ = module.__name__
if not hasattr(module, '__path__'):
module.__package__ = module.__package__.rpartition('.')[0]
return module
return set_package_wrapper
def set_loader(fxn):
"""Set __loader__ on the returned module.
This function is deprecated.
"""
@functools.wraps(fxn)
def set_loader_wrapper(self, *args, **kwargs):
warnings.warn('The import system now takes care of this automatically.',
DeprecationWarning, stacklevel=2)
module = fxn(self, *args, **kwargs)
if getattr(module, '__loader__', None) is None:
module.__loader__ = self
return module
return set_loader_wrapper
def module_for_loader(fxn):
"""Decorator to handle selecting the proper module for loaders.
The decorated function is passed the module to use instead of the module
name. The module passed in to the function is either from sys.modules if
it already exists or is a new module. If the module is new, then __name__
is set to the first argument to the method, __loader__ is set to self, and
__package__ is set accordingly (if self.is_package() is defined) before it is
passed to the decorated function (if self.is_package() does not work for the
module, __package__ will be set post-load).
If an exception is raised and the decorator created the module it is
subsequently removed from sys.modules.
The decorator assumes that the decorated function takes the module name as
the second argument.
"""
warnings.warn('The import system now takes care of this automatically.',
DeprecationWarning, stacklevel=2)
@functools.wraps(fxn)
def module_for_loader_wrapper(self, fullname, *args, **kwargs):
with _module_to_load(fullname) as module:
module.__loader__ = self
try:
is_package = self.is_package(fullname)
except (ImportError, AttributeError):
pass
else:
if is_package:
module.__package__ = fullname
else:
module.__package__ = fullname.rpartition('.')[0]
# If __package__ was not set above, __import__() will do it later.
return fxn(self, module, *args, **kwargs)
return module_for_loader_wrapper
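# Minimal usage sketch for the helpers above; the dotted names are examples only.
if __name__ == '__main__':
    print(resolve_name('.tasks', 'myproject.app'))  # -> 'myproject.app.tasks' (hypothetical package)
    print(find_spec('os'))                          # spec of an already-importable stdlib module
    print(find_spec('.machinery', package='importlib'))  # relative name resolved against its package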
|
milinbhakta/flaskmaterialdesign
|
venv/Lib/importlib/util.py
|
Python
|
gpl-2.0
| 7,227 | 0.000692 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Methods for working with cecog
Copyright 2010 University of Dundee, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import os
import re
import sys
from omero.cli import BaseControl, CLI
import omero
import omero.constants
from omero.rtypes import rstring
class CecogControl(BaseControl):
"""CeCog integration plugin.
Provides actions for preparing data and otherwise integrating with Cecog. See
the Run_Cecog_4.1.py script.
"""
# [MetaMorph_PlateScanPackage]
# regex_subdirectories = re.compile('(?=[^_]).*?(?P<D>\d+).*?')
# regex_position = re.compile('P(?P<P>.+?)_')
# continuous_frames = 1
regex_token = re.compile(r'(?P<Token>.+)\.')
regex_time = re.compile(r'T(?P<T>\d+)')
regex_channel = re.compile(r'_C(?P<C>.+?)(_|$)')
regex_zslice = re.compile(r'_Z(?P<Z>\d+)')
def _configure(self, parser):
sub = parser.sub()
merge = parser.add(sub, self.merge, self.merge.__doc__)
merge.add_argument("path", help="Path to image files")
rois = parser.add(sub, self.rois, self.rois.__doc__)
rois.add_argument(
"-f", "--file", required=True, help="Details file to be parsed")
rois.add_argument(
"-i", "--image", required=True,
help="Image id which should have ids attached")
for x in (merge, rois):
x.add_login_arguments()
#
# Public methods
#
def merge(self, args):
"""Uses PIL to read multiple planes from a local folder.
Planes are combined and uploaded to OMERO as new images with additional T, C,
Z dimensions.
It should be run as a local script (not via scripting service) in order that
it has access to the local user's file system. Therefore EMAN2 or PIL needs to be
installed locally.
Example usage:
$ bin/omero cecog merge /Applications/CecogPackage/Data/Demo_data/0037/
Since this dir does not contain folders, this will upload images in '0037'
into a Dataset called Demo_data in a Project called 'Data'.
$ bin/omero cecog merge /Applications/CecogPackage/Data/Demo_data/
Since this dir does contain folders, this will look for images in all
subdirectories of 'Demo_data' and upload images into a Dataset called
Demo_data in a Project called 'Data'.
Images will be combined in Z, C and T according to the \
MetaMorph_PlateScanPackage naming convention.
E.g. tubulin_P0037_T00005_Cgfp_Z1_S1.tiff is Point 37, Timepoint 5, Channel \
gfp, Z 1. S?
see \
/Applications/CecogPackage/CecogAnalyzer.app/Contents/Resources/resources/\
naming_schemes.conf
"""
"""
Processes the command args, makes project and dataset then calls
uploadDirAsImages() to process and
upload the images to OMERO.
"""
from omero.rtypes import unwrap
from omero.util.script_utils import uploadDirAsImages
path = args.path
client = self.ctx.conn(args)
queryService = client.sf.getQueryService()
updateService = client.sf.getUpdateService()
pixelsService = client.sf.getPixelsService()
# if we don't have any folders in the 'dir' E.g.
# CecogPackage/Data/Demo_data/0037/
# then 'Demo_data' becomes a dataset
subDirs = []
for f in os.listdir(path):
fullpath = path + f
# process folders in root dir:
if os.path.isdir(fullpath):
subDirs.append(fullpath)
# get the dataset name and project name from path
if len(subDirs) == 0:
p = path[:-1] # will remove the last folder
p = os.path.dirname(p)
else:
if os.path.basename(path) == "":
p = path[:-1] # remove slash
datasetName = os.path.basename(p) # e.g. Demo_data
p = p[:-1]
p = os.path.dirname(p)
projectName = os.path.basename(p) # e.g. Data
self.ctx.err("Putting images in Project: %s Dataset: %s"
% (projectName, datasetName))
# create dataset
dataset = omero.model.DatasetI()
dataset.name = rstring(datasetName)
dataset = updateService.saveAndReturnObject(dataset)
# create project
project = omero.model.ProjectI()
project.name = rstring(projectName)
project = updateService.saveAndReturnObject(project)
# put dataset in project
link = omero.model.ProjectDatasetLinkI()
link.parent = omero.model.ProjectI(project.id.val, False)
link.child = omero.model.DatasetI(dataset.id.val, False)
updateService.saveAndReturnObject(link)
if len(subDirs) > 0:
for subDir in subDirs:
self.ctx.err("Processing images in %s" % subDir)
rv = uploadDirAsImages(client.sf, queryService, updateService,
pixelsService, subDir, dataset)
self.ctx.out("%s" % unwrap(rv))
# if there are no sub-directories, just put all the images in the dir
else:
self.ctx.err("Processing images in %s" % path)
rv = uploadDirAsImages(client.sf, queryService, updateService,
pixelsService, path, dataset)
self.ctx.out("%s" % unwrap(rv))
def rois(self, args):
"""Parses an object_details text file, as generated by CeCog Analyzer
and saves the data as ROIs on an Image in OMERO.
Text file is of the form:
frame objID classLabel className centerX centerY mean sd
1 10 6 lateana 1119 41 76.8253796095 \
54.9305640673
Example usage:
bin/omero cecog rois -f \
Data/Demo_output/analyzed/0037/statistics/P0037__object_details.txt -i 502
"""
"""
Processes the command args, parses the object_details.txt file and
creates ROIs on the image specified in OMERO
"""
from omero.util.script_utils import uploadCecogObjectDetails
filePath = args.file
imageId = args.image
if not os.path.exists(filePath):
self.ctx.die(654, "Could not find the object_details file at %s"
% filePath)
client = self.ctx.conn(args)
updateService = client.sf.getUpdateService()
ids = uploadCecogObjectDetails(updateService, imageId, filePath)
self.ctx.out("Rois created: %s" % len(ids))
try:
register("cecog", CecogControl, CecogControl.__doc__)
except NameError:
if __name__ == "__main__":
cli = CLI()
cli.register("cecog", CecogControl, CecogControl.__doc__)
cli.invoke(sys.argv[1:])
|
dominikl/openmicroscopy
|
components/tools/OmeroPy/src/omero/plugins/cecog.py
|
Python
|
gpl-2.0
| 6,684 | 0 |
# -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018 lemmsh, cervinko, OzzieIsaacs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals
from lxml import etree
from .constants import BookMeta
def get_fb2_info(tmp_file_path, original_file_extension):
ns = {
'fb': 'http://www.gribuser.ru/xml/fictionbook/2.0',
'l': 'http://www.w3.org/1999/xlink',
}
fb2_file = open(tmp_file_path)
tree = etree.fromstring(fb2_file.read().encode())
authors = tree.xpath('/fb:FictionBook/fb:description/fb:title-info/fb:author', namespaces=ns)
def get_author(element):
last_name = element.xpath('fb:last-name/text()', namespaces=ns)
if len(last_name):
last_name = last_name[0]
else:
last_name = u''
middle_name = element.xpath('fb:middle-name/text()', namespaces=ns)
if len(middle_name):
middle_name = middle_name[0]
else:
middle_name = u''
first_name = element.xpath('fb:first-name/text()', namespaces=ns)
if len(first_name):
first_name = first_name[0]
else:
first_name = u''
return (first_name + u' '
+ middle_name + u' '
+ last_name)
author = str(", ".join(map(get_author, authors)))
title = tree.xpath('/fb:FictionBook/fb:description/fb:title-info/fb:book-title/text()', namespaces=ns)
if len(title):
title = str(title[0])
else:
title = u''
description = tree.xpath('/fb:FictionBook/fb:description/fb:publish-info/fb:book-name/text()', namespaces=ns)
if len(description):
description = str(description[0])
else:
description = u''
return BookMeta(
file_path=tmp_file_path,
extension=original_file_extension,
title=title,
author=author,
cover=None,
description=description,
tags="",
series="",
series_id="",
languages="",
publisher="")
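# Minimal usage sketch: the file path is a placeholder; get_fb2_info() returns a
# BookMeta tuple whose title, author and description come from the FB2 metadata.
if __name__ == '__main__':
    meta = get_fb2_info('/tmp/example.fb2', '.fb2')
    print(meta.title, meta.author)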
|
idalin/calibre-web
|
cps/fb2.py
|
Python
|
gpl-3.0
| 2,739 | 0.00146 |
from datetime import date
from monthdelta import MonthDelta as monthdelta
from optparse import make_option
from django.core.management.base import NoArgsCommand, BaseCommand
import dateutil.parser
from django_pjm import models
class Command(BaseCommand):
help = "Imports PJM load values."
args = ''
option_list = BaseCommand.option_list + (
make_option('--start-date', default=None),
make_option('--end-date', default=None),
make_option('--zone', default=None),
make_option('--segments', default=None),
#make_option('--only-type', default=None),
#make_option('--auto-reprocess-days', default=0),
)
def handle(self, **options):
start_date = (options['start_date'] or '').strip()
if start_date:
start_date = dateutil.parser.parse(start_date)
start_date = date(start_date.year, start_date.month, start_date.day)
else:
start_date = date.today() - monthdelta(1)
end_date = (options['end_date'] or '').strip()
if end_date:
end_date = dateutil.parser.parse(end_date)
end_date = date(end_date.year, end_date.month, end_date.day)
else:
end_date = date.today()
segments = [_ for _ in options['segments'].split(',') if _.strip()]
while start_date <= end_date:
for segment in segments:
print 'Calculating for segment %s on start date %s.' % (segment, start_date)
models.Load.calculate(
year=start_date.year,
month=start_date.month,
zone=options['zone'],
segment=segment,
)
start_date += monthdelta(1)
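# Minimal invocation sketch (option values are placeholders): each month in the
# range is processed once per segment via models.Load.calculate(), e.g.
#
#   python manage.py import_pjm_loads --start-date=2014-01-01 \
#       --end-date=2014-06-01 --zone=PS --segments=RES,COM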
|
chrisspen/django-pjm
|
django_pjm/management/commands/import_pjm_loads.py
|
Python
|
mit
| 1,807 | 0.006641 |
####################################################################################################
#
# GroupedPurchaseOrder - A Django Application.
# Copyright (C) 2014 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
from django.core.urlresolvers import reverse, NoReverseMatch
from django.forms.utils import flatatt
from django.utils.html import escape, format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
####################################################################################################
from .html import join_text, merge_new_words, render_tag
####################################################################################################
#
# Notes:
# - How to concate in {% %} ? #deleteModal{{ supplier.pk }}
# - url 'suppliers.update' supplier.pk
#
####################################################################################################
####################################################################################################
def render_icon(icon, title=''):
"""Render a glyphicon.
"""
#? escape ?
# attrs = {'class': 'glyphicon glyphicon-{}'.format(icon)}
attrs = {'class': 'glyphicon glyphicon-' + icon}
if title:
attrs['title'] = _(title)
return format_html('<span{0}></span>', flatatt(attrs))
####################################################################################################
def render_button(content, icon=None, style='default', size='', href='', title='', button_class='', attrs=None):
"""Render a button with content
"""
# <button type="button" class="btn btn-default">Default</button>
# <button type="button" class="btn btn-primary">Primary</button>
# <button type="button" class="btn btn-success">Success</button>
# <button type="button" class="btn btn-info">Info</button>
# <button type="button" class="btn btn-warning">Warning</button>
# <button type="button" class="btn btn-danger">Danger</button>
# <button type="button" class="btn btn-link">Link</button>
#
# size : btn-lg, btn-sm, btn-xs
# <button type="button" class="btn btn-primary btn-lg">Large button</button>
#
# btn-block
# <button type="button" class="btn btn-primary btn-lg btn-block">Block level button</button>
# <button type="button" class="btn btn-default btn-lg btn-block">Block level button</button>
#
# active
# <button type="button" class="btn btn-primary btn-lg active">Primary button</button>
# <a href="#" class="btn btn-default btn-lg active" role="button">Link</a>
#
# disabled="disabled"
# <button type="button" class="btn btn-lg btn-primary" disabled="disabled">Primary button</button>
# <a href="#" class="btn btn-default btn-lg disabled" role="button">Link</a>
#
# <a class="btn btn-default" href="#" role="button">Link</a>
# <button class="btn btn-default" type="submit">Button</button>
# <input class="btn btn-default" type="button" value="Input">
# <input class="btn btn-default" type="submit" value="Submit">
if attrs is None:
attrs = {}
classes = ['btn']
button_styles = ('default', 'primary', 'success', 'info', 'warning', 'danger', 'link')
if style in button_styles:
classes.append('btn-' + style)
else:
raise ValueError('Parameter style must be {} ("{}" given)'.format(', '.join(button_styles), style))
# size = text_value(size).lower().strip()
if size:
if size == 'xs':
classes.append('btn-xs')
elif size == 'sm' or size == 'small':
classes.append('btn-sm')
elif size == 'lg' or size == 'large':
classes.append('btn-lg')
else:
raise ValueError('Parameter "size" should be "xs", "sm", "lg" or empty ("{}" given)'.format(size))
attrs['class'] = merge_new_words(button_class, classes)
if href:
try:
# current_app = context['request'].resolver_match.namespace
# viewname=viewname, args=view_args, kwargs=view_kwargs, current_app=current_app
url = reverse(href)
except NoReverseMatch:
url = href
attrs['href'] = url
tag = 'a'
else:
tag = 'button'
if title:
attrs['title'] = escape(_(title))
icon_content = render_icon(icon) if icon else ''
if content:
content = join_text((icon_content, escape(_(content))), separator=' ')
else:
content = icon_content
return render_tag(tag, mark_safe(content), attrs=attrs)
####################################################################################################
def render_icon_button(icon, **kwargs):
return render_button(None, icon=icon, **kwargs)
####################################################################################################
def render_modal_icon_button(icon, *args, **kwargs):
attrs = {'data-toggle':'modal', 'data-target':join_text(args)}
return render_button(None, icon=icon, attrs=attrs, **kwargs)
####################################################################################################
def render_dismiss_button(title, **kwargs):
attrs = {'type':'button', 'data-dismiss':'modal'}
return render_button(title, attrs=attrs, **kwargs)
####################################################################################################
def render_close_button(*args, **kwargs):
# '<button type="button" class="close" data-dismiss="modal">'
# '</button>'
attrs = {'type':'button', 'class':'close', 'data-dismiss':'modal'}
title = escape(_('Close'))
content = ('<span aria-hidden="true">×</span>'
'<span class="sr-only">{0}</span>'.format(title))
return render_tag('button', mark_safe(content), attrs=attrs)
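# Minimal usage sketch, assuming a configured Django environment; the icon
# names, titles and modal target are placeholder values.
if __name__ == '__main__':
    print(render_icon('pencil', title='Edit'))
    print(render_button('Save', icon='ok', style='primary', size='sm', title='Save changes'))
    print(render_modal_icon_button('trash', '#deleteModal42', style='danger'))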
####################################################################################################
#
# End
#
####################################################################################################
|
FabriceSalvaire/grouped-purchase-order
|
GroupedPurchaseOrder/bootstrap/components.py
|
Python
|
agpl-3.0
| 6,955 | 0.006039 |
import re
from django.core import urlresolvers
from django.db import IntegrityError
from cl.citations import find_citations, match_citations
from cl.custom_filters.templatetags.text_filters import best_case_name
from cl.search.models import Opinion, OpinionsCited
from celery import task
def get_document_citations(opinion):
"""Identify and return citations from the html or plain text of the
opinion.
"""
if opinion.html_columbia:
citations = find_citations.get_citations(opinion.html_columbia)
elif opinion.html_lawbox:
citations = find_citations.get_citations(opinion.html_lawbox)
elif opinion.html:
citations = find_citations.get_citations(opinion.html)
elif opinion.plain_text:
citations = find_citations.get_citations(opinion.plain_text,
html=False)
else:
citations = []
return citations
def create_cited_html(opinion, citations):
if any([opinion.html_columbia, opinion.html_lawbox, opinion.html]):
new_html = opinion.html_columbia or opinion.html_lawbox or opinion.html
for citation in citations:
new_html = re.sub(citation.as_regex(), citation.as_html(),
new_html)
elif opinion.plain_text:
inner_html = opinion.plain_text
for citation in citations:
repl = u'</pre>%s<pre class="inline">' % citation.as_html()
inner_html = re.sub(citation.as_regex(), repl, inner_html)
new_html = u'<pre class="inline">%s</pre>' % inner_html
return new_html.encode('utf-8')
@task
def update_document(opinion, index=True):
"""Get the citations for an item and save it and add it to the index if
requested."""
DEBUG = 0
if DEBUG >= 1:
print "%s at %s" % (
best_case_name(opinion.cluster),
urlresolvers.reverse(
'admin:search_opinioncluster_change',
args=(opinion.cluster.pk,),
)
)
citations = get_document_citations(opinion)
# List for tracking number of citation vs. name matches
matched_citations = []
# List used so we can do one simple update to the citing opinion.
opinions_cited = []
for citation in citations:
# Resource.org docs contain their own citation in the html text, which
# we don't want to include
if citation.base_citation() in opinion.cluster.citation_string:
continue
matches, is_citation_match = match_citations.match_citation(
citation,
citing_doc=opinion
)
# TODO: Figure out what to do if there's more than one
if len(matches) == 1:
matched_citations.append(is_citation_match)
match_id = matches[0]['id']
try:
matched_opinion = Opinion.objects.get(pk=match_id)
# Increase citation count for matched cluster if it hasn't
# already been cited by this opinion.
if matched_opinion not in opinion.opinions_cited.all():
matched_opinion.cluster.citation_count += 1
matched_opinion.cluster.save(index=index)
# Add citation match to the citing opinion's list of cases it
# cites. opinions_cited is a set so duplicates aren't an issue
opinions_cited.append(matched_opinion.pk)
# URL field will be used for generating inline citation html
citation.match_url = matched_opinion.cluster.get_absolute_url()
citation.match_id = matched_opinion.pk
except Opinion.DoesNotExist:
if DEBUG >= 2:
print "No Opinions returned for id %s" % match_id
continue
except Opinion.MultipleObjectsReturned:
if DEBUG >= 2:
print "Multiple Opinions returned for id %s" % match_id
continue
else:
#create_stub([citation])
if DEBUG >= 2:
# TODO: Don't print 1 line per citation. Save them in a list
# and print in a single line at the end.
print "No match found for citation %s" % citation.base_citation()
# Only update things if we found citations
if citations:
opinion.html_with_citations = create_cited_html(opinion, citations)
try:
OpinionsCited.objects.bulk_create([
OpinionsCited(citing_opinion_id=pk,
cited_opinion_id=opinion.pk) for
pk in opinions_cited
])
except IntegrityError as e:
# If bulk_create would create an item that already exists, it fails.
# In that case, do them one by one, skipping failing cases.
for pk in opinions_cited:
try:
cited = OpinionsCited(
citing_opinion_id=pk,
cited_opinion_id=opinion.pk,
)
cited.save()
except IntegrityError:
# We'll just skip the ones that already exist, but still do
# the others.
pass
if DEBUG >= 3:
print opinion.html_with_citations
# Update Solr if requested. In some cases we do it at the end for
# performance reasons.
opinion.save(index=index)
if DEBUG >= 1:
citation_matches = sum(matched_citations)
name_matches = len(matched_citations) - citation_matches
print " %d citations" % len(citations)
print " %d exact matches" % citation_matches
print " %d name matches" % name_matches
@task
def update_document_by_id(opinion_id):
"""This is not an OK way to do id-based tasks. Needs to be refactored."""
op = Opinion.objects.get(pk=opinion_id)
update_document(op)
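# Minimal usage sketch: both functions are Celery tasks, so they can either be
# queued for a worker or called synchronously; the opinion id is a placeholder.
#
#   update_document_by_id.delay(12345)   # enqueue for a worker
#   update_document_by_id(12345)         # run in-process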
|
brianwc/courtlistener
|
cl/citations/tasks.py
|
Python
|
agpl-3.0
| 5,964 | 0.000503 |
"""
The middlewares in this file do mobile detection, provide a user override,
and provide a cookie override. They must be used in the correct order.
MobileMiddleware must always come after any of the other middlewares in this
file in `settings.MIDDLEWARE_CLASSES`.
"""
import urllib
from warnings import warn
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
from django.utils import translation
from django.utils.encoding import smart_str
from fjord.base import urlresolvers
from fjord.base.browsers import parse_ua
from fjord.base.templatetags.jinja_helpers import urlparams
MOBILE_COOKIE = getattr(settings, 'MOBILE_COOKIE', 'mobile')
class UserAgentMiddleware(object):
"""Add ``request.BROWSER`` which has information from the User-Agent
``request.BROWSER`` has the following attributes:
- browser: The user's browser, eg: "Firefox".
- browser_version: The browser's version, eg: "14.0.1"
- platform: The general platform the user is using, eg "Windows".
- platform_version: The version of the platform, eg. "XP" or "10.6.2".
- mobile: If the client is using a mobile device. `True` or `False`.
Any of the above may be `None` if detection fails.
"""
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
request.BROWSER = parse_ua(ua)
class MobileQueryStringMiddleware(object):
"""
Add querystring override for mobile.
This allows the user to override mobile detection by setting
'mobile=1' in the querystring. This will persist in a cookie
that other the other middlewares in this file will respect.
"""
def process_request(self, request):
# The 'mobile' querystring overrides any prior MOBILE
# figuring and we put it in two places.
mobile_qs = request.GET.get('mobile', None)
if mobile_qs == '1':
request.MOBILE = True
request.MOBILE_SET_COOKIE = 'yes'
elif mobile_qs == '0':
request.MOBILE = False
request.MOBILE_SET_COOKIE = 'no'
class MobileMiddleware(object):
"""
Set request.MOBILE based on cookies and UA detection.
The set of rules to decide `request.MOBILE` is given below. If any rule
matches, the process stops.
1. If there is a variable `mobile` in the query string, `request.MOBILE`
is set accordingly.
2. If a cookie is set indicating a mobile preference, follow it.
3. If user agent parsing has already happened, trust its judgement about
mobile-ness. (i.e. `request.BROWSER.mobile`)
4. Otherwise, set `request.MOBILE` to True if the string "mobile" is in the
user agent (case insensitive), and False otherwise.
If there is a variable `request.MOBILE_SET_COOKIE`, it's value will be
stored in the mobile cookie.
"""
def process_request(self, request):
ua = request.META.get('HTTP_USER_AGENT', '')
mc = request.COOKIES.get(MOBILE_COOKIE)
if hasattr(request, 'MOBILE'):
# Our work here is done
return
if mc:
request.MOBILE = (mc == 'yes')
return
if hasattr(request, 'BROWSER'):
# UA Detection already figured this out.
request.MOBILE = request.BROWSER.mobile
return
# Make a guess based on UA if nothing else has figured it out.
request.MOBILE = ('mobile' in ua)
def process_response(self, request, response):
if hasattr(request, 'MOBILE_SET_COOKIE'):
cookie_value = request.MOBILE_SET_COOKIE
response.set_cookie(MOBILE_COOKIE, cookie_value)
return response
class LocaleURLMiddleware(object):
"""
1. Search for the locale.
2. Save it in the request.
3. Strip them from the URL.
"""
def __init__(self):
if not settings.USE_I18N or not settings.USE_L10N:
warn('USE_I18N or USE_L10N is False but LocaleURLMiddleware is '
'loaded. Consider removing fjord.base.middleware.'
'LocaleURLMiddleware from your MIDDLEWARE_CLASSES setting.')
self.exempt_urls = getattr(settings, 'FF_EXEMPT_LANG_PARAM_URLS', ())
def _is_lang_change(self, request):
"""Return True if the lang param is present and URL isn't exempt."""
if 'lang' not in request.GET:
return False
return not any(request.path.endswith(url) for url in self.exempt_urls)
def process_request(self, request):
prefixer = urlresolvers.Prefixer(request)
urlresolvers.set_url_prefix(prefixer)
full_path = prefixer.fix(prefixer.shortened_path)
if self._is_lang_change(request):
# Blank out the locale so that we can set a new one. Remove lang
# from the query params so we don't have an infinite loop.
prefixer.locale = ''
new_path = prefixer.fix(prefixer.shortened_path)
query = dict((smart_str(k), request.GET[k]) for k in request.GET)
query.pop('lang')
return HttpResponsePermanentRedirect(urlparams(new_path, **query))
if full_path != request.path:
query_string = request.META.get('QUERY_STRING', '')
full_path = urllib.quote(full_path.encode('utf-8'))
if query_string:
full_path = '%s?%s' % (full_path, query_string)
response = HttpResponsePermanentRedirect(full_path)
# Vary on Accept-Language if we changed the locale
old_locale = prefixer.locale
new_locale, _ = urlresolvers.split_path(full_path)
if old_locale != new_locale:
response['Vary'] = 'Accept-Language'
return response
request.path_info = '/' + prefixer.shortened_path
request.locale = prefixer.locale
translation.activate(prefixer.locale)
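# Minimal settings sketch: per the module docstring, MobileMiddleware must come
# after the other middlewares defined here, so a MIDDLEWARE_CLASSES entry could
# be ordered like this (dotted paths assumed from fjord/base/middleware.py):
#
# MIDDLEWARE_CLASSES = (
#     'fjord.base.middleware.UserAgentMiddleware',
#     'fjord.base.middleware.MobileQueryStringMiddleware',
#     'fjord.base.middleware.MobileMiddleware',
# )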
|
lgp171188/fjord
|
fjord/base/middleware.py
|
Python
|
bsd-3-clause
| 5,915 | 0 |
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
# @generated
#
__all__ = ['ttypes', 'constants']
|
facebook/fbthrift
|
thrift/compiler/test/fixtures/types/gen-py/include/__init__.py
|
Python
|
apache-2.0
| 147 | 0 |
'''manipulate ndarray list'''
from itertools import imap,starmap,izip
from operator import mul,add,sub
def check_list(v1,v2):
    '''check that the two arrays have the same length'''
if v1.size != v2.size:
        raise ValueError("the length of both arrays must be the same")
pass
def Add(v1,v2):
    '''add two arrays element-wise'''
check_list(v1,v2)
return v1.__add__(v2)
def Subtract(v1,v2):
'''subtract v2 from v1'''
check_list(v1,v2)
return v1.__sub__(v2)
def Product(v1,v2):
    '''return the element-wise product of two arrays'''
check_list(v1,v2)
return v1.__mul__(v2)
def Division(v1,v2):
    '''divide v1 by v2 element-wise; 1 is added to both v1 and v2 before dividing'''
check_list(v1,v2)
return (v1+1).__div__(v2+1)
def Average(v1,v2):
    '''return the element-wise arithmetic mean of two arrays'''
check_list(v1,v2)
return v1.__add__(v2)/2
def geometricMean(v1,v2):
    '''return the element-wise geometric mean of two arrays'''
check_list(v1,v2)
return (v1.__mul__(v2))**0.5
def Max(v1,v2):
    '''pairwise comparison of two arrays; return the larger of each pair of numbers'''
check_list(v1,v2)
return imap(max,izip(v1,v2))
def Min(v1,v2):
    '''pairwise comparison of two arrays; return the smaller of each pair of numbers'''
check_list(v1,v2)
return imap(min,izip(v1,v2))
def euclidean_distance(v1,v2):
'''return euclidean distance'''
check_list(v1,v2)
return (sum((v1.__sub__(v2))**2) / v1.size)**0.5
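# Example usage (a sketch, not part of the original module); the helpers above
# expect numpy arrays of equal size.
if __name__ == '__main__':
    from numpy import array
    a = array([1.0, 2.0, 3.0])
    b = array([4.0, 5.0, 6.0])
    print Add(a, b)                  # [ 5.  7.  9.]
    print Average(a, b)              # [ 2.5  3.5  4.5]
    print list(Max(a, b))            # [4.0, 5.0, 6.0]
    print euclidean_distance(a, b)   # 3.0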
|
dnanexus/rseqc
|
rseqc/lib/qcmodule/twoList.py
|
Python
|
gpl-3.0
| 2,098 | 0.039561 |
from .responses import QueueResponse, QueuesResponse
url_bases = [
"https?://(.*?)(queue|sqs)(.*?).amazonaws.com"
]
url_paths = {
'{0}/$': QueuesResponse().dispatch,
'{0}/(?P<account_id>\d+)/(?P<queue_name>[a-zA-Z0-9\-_]+)': QueueResponse().dispatch,
}
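# Illustrative example (not part of the original module): once the base is
# substituted into a path pattern, moto routes requests by regex match, e.g.:
#
#   >>> import re
#   >>> path = '{0}/(?P<account_id>\d+)/(?P<queue_name>[a-zA-Z0-9\-_]+)'.format(url_bases[0])
#   >>> m = re.match(path, 'https://queue.amazonaws.com/123456789012/my-queue')
#   >>> m.group('account_id'), m.group('queue_name')
#   ('123456789012', 'my-queue')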
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/moto/sqs/urls.py
|
Python
|
agpl-3.0
| 267 | 0.011236 |
#!/usr/bin/env python
###############################################################################
# $Id: ogr_sde.py 33793 2016-03-26 13:02:07Z goatbar $
#
# Project: GDAL/OGR Test Suite
# Purpose: Test OGR ArcSDE driver.
# Author: Howard Butler <hobu.inc@gmail.com>
#
###############################################################################
# Copyright (c) 2008, Howard Butler <hobu.inc@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import gdaltest
import ogrtest
from osgeo import ogr
from osgeo import osr
from osgeo import gdal
###############################################################################
# Open ArcSDE datasource.
sde_server = '172.16.1.193'
sde_port = '5151'
sde_db = 'sde'
sde_user = 'sde'
sde_password = 'sde'
gdaltest.sde_dr = None
try:
gdaltest.sde_dr = ogr.GetDriverByName( 'SDE' )
except:
pass
def ogr_sde_1():
"Test basic opening of a database"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password)
ds = ogr.Open(base)
if ds is None:
print("Could not open %s" % base)
gdaltest.sde_dr = None
return 'skip'
ds.Destroy()
ds = ogr.Open(base, update=1)
ds.Destroy()
return 'success'
def ogr_sde_2():
"Test creation of a layer"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password)
shp_ds = ogr.Open( 'data/poly.shp' )
gdaltest.shp_ds = shp_ds
shp_lyr = shp_ds.GetLayer(0)
ds = ogr.Open(base, update=1)
lyr = ds.CreateLayer( 'SDE.TPOLY' ,geom_type=ogr.wkbPolygon, srs=shp_lyr.GetSpatialRef(),options = [ 'OVERWRITE=YES' ] )
# lyr = ds.CreateLayer( 'SDE.TPOLY' ,geom_type=ogr.wkbPolygon)
ogrtest.quick_create_layer_def( lyr,
[ ('AREA', ogr.OFTReal),
('EAS_ID', ogr.OFTInteger),
('PRFEDEA', ogr.OFTString),
('WHEN', ogr.OFTDateTime) ] )
#######################################################
# Copy in poly.shp
dst_feat = ogr.Feature( feature_def = lyr.GetLayerDefn() )
feat = shp_lyr.GetNextFeature()
gdaltest.poly_feat = []
while feat is not None:
gdaltest.poly_feat.append( feat )
dst_feat.SetFrom( feat )
lyr.CreateFeature( dst_feat )
feat = shp_lyr.GetNextFeature()
dst_feat.Destroy()
return 'success'
def ogr_sde_3():
"Test basic version locking"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT' % (sde_server, sde_port, sde_db, sde_user, sde_password)
ds = ogr.Open(base, update=1)
ds2 = ogr.Open(base, update=1)
if ds2 is not None:
gdaltest.post_reason('A locked version was able to be opened')
return 'fail'
ds.Destroy()
return 'success'
def ogr_sde_4():
"Test basic version creation"
if gdaltest.sde_dr is None:
return 'skip'
version_name = 'TESTING'
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'TRUE' )
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password, version_name)
ds = ogr.Open(base, update=1)
ds.Destroy()
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'FALSE' )
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password, version_name)
ds = ogr.Open(base, update=1)
ds.Destroy()
return 'success'
def ogr_sde_5():
"Test versioned editing"
if gdaltest.sde_dr is None:
return 'skip'
version_name = 'TESTING'
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'TRUE' )
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password, version_name)
ds = ogr.Open(base, update=1)
l1 = ds.GetLayerByName('SDE.TPOLY')
f1 = l1.GetFeature(1)
f1.SetField("PRFEDEA",'SDE.TESTING')
l1.SetFeature(f1)
ds.Destroy()
del ds
default = 'DEFAULT'
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'FALSE' )
default = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password, default)
# print default
ds2 = ogr.Open(default, update=1)
l2 = ds2.GetLayerByName('SDE.TPOLY')
f2 = l2.GetFeature(1)
f2.SetField("PRFEDEA",'SDE.DEFAULT')
f2.SetField("WHEN", 2008, 3, 19, 16, 15, 00, 0)
l2.SetFeature(f2)
ds2.Destroy()
del ds2
ds3 = ogr.Open(base)
l3 = ds3.GetLayerByName('SDE.TPOLY')
f3 = l3.GetFeature(1)
if f3.GetField("PRFEDEA") != "SDE.TESTING":
gdaltest.post_reason('versioned editing failed for child version SDE.TESTING')
return 'fail'
ds3.Destroy()
del ds3
ds4 = ogr.Open(default)
l4 = ds4.GetLayerByName('SDE.TPOLY')
f4 = l4.GetFeature(1)
if f4.GetField("PRFEDEA") != "SDE.DEFAULT":
gdaltest.post_reason('versioned editing failed for parent version SDE.DEFAULT')
return 'fail'
idx = f4.GetFieldIndex('WHEN')
df = f4.GetField(idx)
if df != '2008/03/19 16:15:00':
gdaltest.post_reason("datetime handling did not work -- expected '2008/03/19 16:15:00' got '%s' "% df)
ds4.Destroy()
del ds4
return 'success'
def ogr_sde_6():
"Extent fetching"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT' % (
sde_server, sde_port, sde_db, sde_user, sde_password)
ds = ogr.Open(base, update=1)
l1 = ds.GetLayerByName('SDE.TPOLY')
extent = l1.GetExtent(force=0)
if extent != (0.0, 2147483645.0, 0.0, 2147483645.0):
gdaltest.post_reason("unforced extent did not equal expected value")
extent = l1.GetExtent(force=1)
if extent != (478316.0, 481645.0, 4762881.0, 4765611.0):
gdaltest.post_reason("forced extent did not equal expected value")
return 'success'
def ogr_sde_7():
"Bad layer test"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT' % (
sde_server, sde_port, sde_db, sde_user, sde_password)
ds = ogr.Open(base, update=1)
l1 = ds.GetLayerByName('SDE.TPOLY2')
if l1:
gdaltest.post_reason("we got a layer when we should not have")
ds.Destroy()
default = 'DEFAULT'
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'FALSE' )
default = 'SDE:%s,%s,%s,%s,%s,SDE.TPOLY,SDE.DEFAULT,%s' % (
sde_server, sde_port, sde_db, sde_user, sde_password, default)
ds = ogr.Open(default, update=1)
l1 = ds.GetLayerByName('SDE.TPOLY2')
if l1:
gdaltest.post_reason("we got a layer when we should not have")
ds.Destroy()
default = 'DEFAULT'
gdal.SetConfigOption( 'SDE_VERSIONOVERWRITE', 'FALSE' )
default = 'SDE:%s,%s,%s,%s,%s' % (
sde_server, sde_port, sde_db, sde_user, sde_password)
ds = ogr.Open(default)
l1 = ds.GetLayerByName('SDE.TPOLY2')
if l1:
gdaltest.post_reason("we got a layer when we should not have")
ds.Destroy()
return 'success'
def ogr_sde_8():
"Test spatial references"
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password)
shp_ds = ogr.Open( 'data/poly.shp' )
gdaltest.shp_ds = shp_ds
shp_lyr = shp_ds.GetLayer(0)
ref = osr.SpatialReference()
ref.ImportFromWkt('LOCAL_CS["IMAGE"]')
ds = ogr.Open(base, update=1)
lyr = ds.CreateLayer( 'SDE.TPOLY' ,geom_type=ogr.wkbPolygon, srs=ref,options = [ 'OVERWRITE=YES' ] )
ref.ImportFromEPSG(4326)
lyr = ds.CreateLayer( 'SDE.TPOLY' ,geom_type=ogr.wkbPolygon, srs=ref,options = [ 'OVERWRITE=YES' ] )
ogrtest.quick_create_layer_def( lyr,
[ ('AREA', ogr.OFTReal),
('EAS_ID', ogr.OFTInteger),
('PRFEDEA', ogr.OFTString),
('WHEN', ogr.OFTDateTime) ] )
#######################################################
# Copy in poly.shp
dst_feat = ogr.Feature( feature_def = lyr.GetLayerDefn() )
feat = shp_lyr.GetNextFeature()
gdaltest.poly_feat = []
while feat is not None:
gdaltest.poly_feat.append( feat )
dst_feat.SetFrom( feat )
lyr.CreateFeature( dst_feat )
feat = shp_lyr.GetNextFeature()
dst_feat.Destroy()
return 'success'
def ogr_sde_cleanup():
if gdaltest.sde_dr is None:
return 'skip'
base = 'SDE:%s,%s,%s,%s,%s' % (sde_server, sde_port, sde_db, sde_user, sde_password)
ds = ogr.Open(base, update=1)
ds.DeleteLayer('%s.%s'%(sde_user.upper(),'TPOLY'))
ds.Destroy()
return 'success'
gdaltest_list = [
ogr_sde_1,
ogr_sde_2,
ogr_sde_3,
ogr_sde_4,
ogr_sde_5,
ogr_sde_6,
ogr_sde_7,
ogr_sde_8,
ogr_sde_cleanup
]
if __name__ == '__main__':
gdaltest.setup_run( 'ogr_sde' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
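# Note (illustrative, not part of the original test suite): the datasource
# strings built above follow the OGR SDE pattern
#   SDE:server,port,database,user,password[,layer,parent_version[,child_version]]
# e.g. 'SDE:172.16.1.193,5151,sde,sde,sde,SDE.TPOLY,SDE.DEFAULT,TESTING'.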
|
nextgis-extra/tests
|
lib_gdal/ogr/ogr_sde.py
|
Python
|
gpl-2.0
| 10,401 | 0.011826 |
'''@file nnet.py
contains the functionality for a Kaldi style neural network'''
import shutil
import os
import itertools
import numpy as np
import tensorflow as tf
import classifiers.activation
from classifiers.dnn import DNN
from trainer import CrossEnthropyTrainer
from decoder import Decoder
class Nnet(object):
'''a class for a neural network that can be used together with Kaldi'''
def __init__(self, conf, input_dim, num_labels):
'''
Nnet constructor
Args:
conf: nnet configuration
input_dim: network input dimension
num_labels: number of target labels
'''
#get nnet structure configs
self.conf = dict(conf.items('nnet'))
#define location to save neural nets
self.conf['savedir'] = (conf.get('directories', 'expdir')
+ '/' + self.conf['name'])
if not os.path.isdir(self.conf['savedir']):
os.mkdir(self.conf['savedir'])
if not os.path.isdir(self.conf['savedir'] + '/training'):
os.mkdir(self.conf['savedir'] + '/training')
#compute the input_dimension of the spliced features
self.input_dim = input_dim * (2*int(self.conf['context_width']) + 1)
if self.conf['batch_norm'] == 'True':
activation = classifiers.activation.Batchnorm(None)
else:
activation = None
#create the activation function
if self.conf['nonlin'] == 'relu':
activation = classifiers.activation.TfActivation(activation,
tf.nn.relu)
elif self.conf['nonlin'] == 'sigmoid':
activation = classifiers.activation.TfActivation(activation,
tf.nn.sigmoid)
elif self.conf['nonlin'] == 'tanh':
activation = classifiers.activation.TfActivation(activation,
tf.nn.tanh)
elif self.conf['nonlin'] == 'linear':
activation = classifiers.activation.TfActivation(activation,
lambda(x): x)
else:
            raise Exception('unknown nonlinearity')
if self.conf['l2_norm'] == 'True':
activation = classifiers.activation.L2Norm(activation)
if float(self.conf['dropout']) < 1:
activation = classifiers.activation.Dropout(
activation, float(self.conf['dropout']))
self.weight_init = self.conf['weight_init']
#create a DNN
self.dnn = DNN(
num_labels, int(self.conf['num_hidden_layers']),
int(self.conf['num_hidden_units']), activation,
self.weight_init, int(self.conf['add_layer_period']) > 0)
def train(self, dispenser, dispenser_dev):
'''
Train the neural network
Args:
dispenser: a batchdispenser for training data
dispenser_dev: a batchdispenser for dev data
'''
#put the DNN in a training environment
epoch = int(self.conf['epoch'])
max_epoch = int(self.conf['max_epoch'])
halve_learning_rate = int(self.conf['halve_learning_rate'])
start_halving_impr = float(self.conf['start_halving_impr'])
end_halving_impr = float(self.conf['end_halving_impr'])
trainer = CrossEnthropyTrainer(
self.dnn, self.input_dim, dispenser.max_input_length,
dispenser.max_target_length,
float(self.conf['initial_learning_rate']),
float(self.conf['l1_penalty']),
float(self.conf['l2_penalty']),
float(self.conf['momentum']),
int(self.conf['minibatch_size']),
float(self.conf['clip_grad']))
#start the visualization if it is requested
if self.conf['visualise'] == 'True':
if os.path.isdir(self.conf['savedir'] + '/logdir'):
shutil.rmtree(self.conf['savedir'] + '/logdir')
trainer.start_visualization(self.conf['savedir'] + '/logdir')
#start a tensorflow session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True #pylint: disable=E1101
with tf.Session(graph=trainer.graph, config=config):
#initialise the trainer
trainer.initialize()
#load the neural net if the starting epoch is not 0
if (epoch > 0):
trainer.restore_trainer(self.conf['savedir'] + '/training/')
#do a validation step
validation_loss = trainer.evaluate(dispenser_dev)
print '======================================= validation loss at epoch %d is: %f =============================' % (epoch, validation_loss)
#start the training iteration
while (epoch < max_epoch):
#update the model
loss = trainer.update(dispenser)
#print the progress
print '======================================= training loss at epoch %d is : %f ==============================' %(epoch, loss)
#validate the model if required
current_loss = trainer.evaluate(dispenser_dev)
print '======================================= validation loss at epoch %d is: %f ==========================' % (epoch, current_loss)
epoch += 1
if halve_learning_rate == 0:
if current_loss < validation_loss:
if current_loss > (validation_loss - start_halving_impr):
halve_learning_rate = 1
trainer.halve_learning_rate()
print "================ begining to halve learning rate ================"
validation_loss = current_loss
pre_loss = loss
trainer.save_trainer(self.conf['savedir']
+ '/training/', 'iter_' + str(epoch) + '_tr'+str(loss)+'_cv'+str(validation_loss))
else:
print ('the validation loss is worse, returning to '
'the previously validated model with halved '
'learning rate')
trainer.restore_trainer(self.conf['savedir']+ '/training/')
trainer.halve_learning_rate()
halve_learning_rate = 1
print "================ begining to halve learning rate ================"
continue
else:
if current_loss < (validation_loss - end_halving_impr):
trainer.halve_learning_rate()
pre_loss = loss
validation_loss = current_loss
trainer.save_trainer(self.conf['savedir']
+ '/training/', 'iter_' + str(epoch) + '_tr'+str(loss)+'_cv'+str(validation_loss))
else:
trainer.restore_trainer(self.conf['savedir'] + '/training/')
print ('the validation loss is worse, '
'terminating training')
break
#save the final model
trainer.save_model(self.conf['savedir'] + '/final')
#compute the state prior and write it to the savedir
prior = dispenser.compute_target_count().astype(np.float32)
prior = prior + 1
prior = prior/prior.sum()
np.save(self.conf['savedir'] + '/prior.npy', prior)
def decode(self, reader, writer):
'''
        compute pseudo likelihoods for the testing set
Args:
reader: a feature reader object to read features to decode
writer: a writer object to write likelihoods
'''
#create a decoder
decoder = Decoder(self.dnn, self.input_dim, reader.max_input_length)
#read the prior
prior = np.load(self.conf['savedir'] + '/prior.npy')
#start tensorflow session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True #pylint: disable=E1101
with tf.Session(graph=decoder.graph, config=config):
#load the model
decoder.restore(self.conf['savedir'] + '/final')
#feed the utterances one by one to the neural net
while True:
utt_id, utt_mat, looped = reader.get_utt()
if looped:
break
#compute predictions
output = decoder(utt_mat)
#get state likelihoods by dividing by the prior
output = output/prior
#floor the values to avoid problems with log
output = np.where(output == 0, np.finfo(float).eps, output)
#write the pseudo-likelihoods in kaldi feature format
writer.write_next_utt(utt_id, np.log(output))
#close the writer
writer.close()
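# Illustrative sketch (not part of the original module): how an Nnet object is
# typically driven. The config path, dimensions, and the dispenser/reader/writer
# objects below are assumptions made up for the example; the section names match
# the ones read in __init__(), train() and decode() above.
#
#   from ConfigParser import ConfigParser
#   conf = ConfigParser()
#   conf.read('config/nnet.cfg')
#   nnet = Nnet(conf, input_dim=40, num_labels=1948)
#   nnet.train(train_dispenser, dev_dispenser)
#   nnet.decode(feature_reader, likelihood_writer)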
|
waterxt/tensorflowkaldi
|
neuralNetworks/nnet.py
|
Python
|
mit
| 9,220 | 0.005748 |
#
# This file is part of pyasn1-modules software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
# LDAP message syntax
#
# ASN.1 source from:
# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/ldap.asn
#
# Sample captures from:
# http://wiki.wireshark.org/SampleCaptures/
#
from pyasn1.type import tag, namedtype, namedval, univ, constraint
maxInt = univ.Integer(2147483647)
class LDAPString(univ.OctetString):
pass
class LDAPOID(univ.OctetString):
pass
class LDAPDN(LDAPString):
pass
class RelativeLDAPDN(LDAPString):
pass
class AttributeType(LDAPString):
pass
class AttributeDescription(LDAPString):
pass
class AttributeDescriptionList(univ.SequenceOf):
componentType = AttributeDescription()
class AttributeValue(univ.OctetString):
pass
class AssertionValue(univ.OctetString):
pass
class AttributeValueAssertion(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('attributeDesc', AttributeDescription()),
namedtype.NamedType('assertionValue', AssertionValue())
)
class Attribute(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', AttributeDescription()),
namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
)
class MatchingRuleId(LDAPString):
pass
class Control(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('controlType', LDAPOID()),
namedtype.DefaultedNamedType('criticality', univ.Boolean('False')),
namedtype.OptionalNamedType('controlValue', univ.OctetString())
)
class Controls(univ.SequenceOf):
componentType = Control()
class LDAPURL(LDAPString):
pass
class Referral(univ.SequenceOf):
componentType = LDAPURL()
class SaslCredentials(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('mechanism', LDAPString()),
namedtype.OptionalNamedType('credentials', univ.OctetString())
)
class AuthenticationChoice(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('simple', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.NamedType('reserved-1', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.NamedType('reserved-2', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.NamedType('sasl',
SaslCredentials().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
)
class BindRequest(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 0)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('version', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, 127))),
namedtype.NamedType('name', LDAPDN()),
namedtype.NamedType('authentication', AuthenticationChoice())
)
class PartialAttributeList(univ.SequenceOf):
componentType = univ.Sequence(
componentType=namedtype.NamedTypes(
namedtype.NamedType('type', AttributeDescription()),
namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
)
)
class SearchResultEntry(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 4)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('objectName', LDAPDN()),
namedtype.NamedType('attributes', PartialAttributeList())
)
class MatchingRuleAssertion(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('matchingRule', MatchingRuleId().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.OptionalNamedType('type', AttributeDescription().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.NamedType('matchValue',
AssertionValue().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
namedtype.DefaultedNamedType('dnAttributes', univ.Boolean('False').subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
)
class SubstringFilter(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', AttributeDescription()),
namedtype.NamedType('substrings',
univ.SequenceOf(
componentType=univ.Choice(
componentType=namedtype.NamedTypes(
namedtype.NamedType(
'initial', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
),
namedtype.NamedType(
'any', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))
),
namedtype.NamedType(
'final', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))
)
)
)
)
)
)
# Ugly hack to handle recursive Filter reference (up to 3-levels deep).
class Filter3(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
namedtype.NamedType('substrings', SubstringFilter().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
namedtype.NamedType('present', AttributeDescription().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))),
namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
)
class Filter2(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('and', univ.SetOf(componentType=Filter3()).subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
namedtype.NamedType('or', univ.SetOf(componentType=Filter3()).subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
namedtype.NamedType('not',
Filter3().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
namedtype.NamedType('substrings', SubstringFilter().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
namedtype.NamedType('present', AttributeDescription().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))),
namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
)
class Filter(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('and', univ.SetOf(componentType=Filter2()).subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
namedtype.NamedType('or', univ.SetOf(componentType=Filter2()).subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
namedtype.NamedType('not',
Filter2().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
namedtype.NamedType('substrings', SubstringFilter().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
namedtype.NamedType('present', AttributeDescription().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))),
namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
)
# End of Filter hack
class SearchRequest(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 3)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('baseObject', LDAPDN()),
namedtype.NamedType('scope', univ.Enumerated(
namedValues=namedval.NamedValues(('baseObject', 0), ('singleLevel', 1), ('wholeSubtree', 2)))),
namedtype.NamedType('derefAliases', univ.Enumerated(
namedValues=namedval.NamedValues(('neverDerefAliases', 0), ('derefInSearching', 1),
('derefFindingBaseObj', 2), ('derefAlways', 3)))),
namedtype.NamedType('sizeLimit',
univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, maxInt))),
namedtype.NamedType('timeLimit',
univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, maxInt))),
namedtype.NamedType('typesOnly', univ.Boolean()),
namedtype.NamedType('filter', Filter()),
namedtype.NamedType('attributes', AttributeDescriptionList())
)
class UnbindRequest(univ.Null):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2)
)
class BindResponse(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('resultCode', univ.Enumerated(
namedValues=namedval.NamedValues(('success', 0), ('operationsError', 1), ('protocolError', 2),
('timeLimitExceeded', 3), ('sizeLimitExceeded', 4), ('compareFalse', 5),
('compareTrue', 6), ('authMethodNotSupported', 7),
('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10),
('adminLimitExceeded', 11), ('unavailableCriticalExtension', 12),
('confidentialityRequired', 13), ('saslBindInProgress', 14),
('noSuchAttribute', 16), ('undefinedAttributeType', 17),
('inappropriateMatching', 18), ('constraintViolation', 19),
('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21),
('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34),
('reserved-35', 35), ('aliasDereferencingProblem', 36),
('inappropriateAuthentication', 48), ('invalidCredentials', 49),
('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52),
('unwillingToPerform', 53), ('loopDetect', 54), ('namingViolation', 64),
('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66),
('notAllowedOnRDN', 67), ('entryAlreadyExists', 68),
('objectClassModsProhibited', 69), ('reserved-70', 70),
('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81),
('reserved-82', 82), ('reserved-83', 83), ('reserved-84', 84),
('reserved-85', 85), ('reserved-86', 86), ('reserved-87', 87),
('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)))),
namedtype.NamedType('matchedDN', LDAPDN()),
namedtype.NamedType('errorMessage', LDAPString()),
namedtype.OptionalNamedType('referral', Referral().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
namedtype.OptionalNamedType('serverSaslCreds', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7)))
)
class LDAPResult(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('resultCode', univ.Enumerated(
namedValues=namedval.NamedValues(('success', 0), ('operationsError', 1), ('protocolError', 2),
('timeLimitExceeded', 3), ('sizeLimitExceeded', 4), ('compareFalse', 5),
('compareTrue', 6), ('authMethodNotSupported', 7),
('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10),
('adminLimitExceeded', 11), ('unavailableCriticalExtension', 12),
('confidentialityRequired', 13), ('saslBindInProgress', 14),
('noSuchAttribute', 16), ('undefinedAttributeType', 17),
('inappropriateMatching', 18), ('constraintViolation', 19),
('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21),
('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34),
('reserved-35', 35), ('aliasDereferencingProblem', 36),
('inappropriateAuthentication', 48), ('invalidCredentials', 49),
('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52),
('unwillingToPerform', 53), ('loopDetect', 54), ('namingViolation', 64),
('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66),
('notAllowedOnRDN', 67), ('entryAlreadyExists', 68),
('objectClassModsProhibited', 69), ('reserved-70', 70),
('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81),
('reserved-82', 82), ('reserved-83', 83), ('reserved-84', 84),
('reserved-85', 85), ('reserved-86', 86), ('reserved-87', 87),
('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)))),
namedtype.NamedType('matchedDN', LDAPDN()),
namedtype.NamedType('errorMessage', LDAPString()),
namedtype.OptionalNamedType('referral', Referral().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)))
)
class SearchResultReference(univ.SequenceOf):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 19)
)
componentType = LDAPURL()
class SearchResultDone(LDAPResult):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 5)
)
class AttributeTypeAndValues(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', AttributeDescription()),
namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
)
class ModifyRequest(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 6)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('object', LDAPDN()),
namedtype.NamedType('modification',
univ.SequenceOf(
componentType=univ.Sequence(
componentType=namedtype.NamedTypes(
namedtype.NamedType(
'operation', univ.Enumerated(namedValues=namedval.NamedValues(('add', 0), ('delete', 1), ('replace', 2)))
),
namedtype.NamedType('modification', AttributeTypeAndValues())))
)
)
)
class ModifyResponse(LDAPResult):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 7)
)
class AttributeList(univ.SequenceOf):
componentType = univ.Sequence(
componentType=namedtype.NamedTypes(
namedtype.NamedType('type', AttributeDescription()),
namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
)
)
class AddRequest(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 8)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('entry', LDAPDN()),
namedtype.NamedType('attributes', AttributeList())
)
class AddResponse(LDAPResult):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 9)
)
class DelRequest(LDAPResult):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 10)
)
class DelResponse(LDAPResult):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 11)
)
class ModifyDNRequest(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 12)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('entry', LDAPDN()),
namedtype.NamedType('newrdn', RelativeLDAPDN()),
namedtype.NamedType('deleteoldrdn', univ.Boolean()),
namedtype.OptionalNamedType('newSuperior',
LDAPDN().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
)
class ModifyDNResponse(LDAPResult):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 13)
)
class CompareRequest(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 14)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('entry', LDAPDN()),
namedtype.NamedType('ava', AttributeValueAssertion())
)
class CompareResponse(LDAPResult):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 15)
)
class AbandonRequest(LDAPResult):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 16)
)
class ExtendedRequest(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 23)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('requestName',
LDAPOID().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.OptionalNamedType('requestValue', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
)
class ExtendedResponse(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 24)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('resultCode', univ.Enumerated(
namedValues=namedval.NamedValues(('success', 0), ('operationsError', 1), ('protocolError', 2),
('timeLimitExceeded', 3), ('sizeLimitExceeded', 4), ('compareFalse', 5),
('compareTrue', 6), ('authMethodNotSupported', 7),
('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10),
('adminLimitExceeded', 11), ('unavailableCriticalExtension', 12),
('confidentialityRequired', 13), ('saslBindInProgress', 14),
('noSuchAttribute', 16), ('undefinedAttributeType', 17),
('inappropriateMatching', 18), ('constraintViolation', 19),
('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21),
('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34),
('reserved-35', 35), ('aliasDereferencingProblem', 36),
('inappropriateAuthentication', 48), ('invalidCredentials', 49),
('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52),
('unwillingToPerform', 53), ('loopDetect', 54), ('namingViolation', 64),
('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66),
('notAllowedOnRDN', 67), ('entryAlreadyExists', 68),
('objectClassModsProhibited', 69), ('reserved-70', 70),
('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81),
('reserved-82', 82), ('reserved-83', 83), ('reserved-84', 84),
('reserved-85', 85), ('reserved-86', 86), ('reserved-87', 87),
('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)))),
namedtype.NamedType('matchedDN', LDAPDN()),
namedtype.NamedType('errorMessage', LDAPString()),
namedtype.OptionalNamedType('referral', Referral().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
namedtype.OptionalNamedType('responseName', LDAPOID().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10))),
namedtype.OptionalNamedType('response', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 11)))
)
class MessageID(univ.Integer):
subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
0, maxInt
)
class LDAPMessage(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('messageID', MessageID()),
namedtype.NamedType(
'protocolOp', univ.Choice(
componentType=namedtype.NamedTypes(
namedtype.NamedType('bindRequest', BindRequest()),
namedtype.NamedType('bindResponse', BindResponse()),
namedtype.NamedType('unbindRequest', UnbindRequest()),
namedtype.NamedType('searchRequest', SearchRequest()),
namedtype.NamedType('searchResEntry', SearchResultEntry()),
namedtype.NamedType('searchResDone', SearchResultDone()),
namedtype.NamedType('searchResRef', SearchResultReference()),
namedtype.NamedType('modifyRequest', ModifyRequest()),
namedtype.NamedType('modifyResponse', ModifyResponse()),
namedtype.NamedType('addRequest', AddRequest()),
namedtype.NamedType('addResponse', AddResponse()),
namedtype.NamedType('delRequest', DelRequest()),
namedtype.NamedType('delResponse', DelResponse()),
namedtype.NamedType('modDNRequest', ModifyDNRequest()),
namedtype.NamedType('modDNResponse', ModifyDNResponse()),
namedtype.NamedType('compareRequest', CompareRequest()),
namedtype.NamedType('compareResponse', CompareResponse()),
namedtype.NamedType('abandonRequest', AbandonRequest()),
namedtype.NamedType('extendedReq', ExtendedRequest()),
namedtype.NamedType('extendedResp', ExtendedResponse())
)
)
),
namedtype.OptionalNamedType('controls', Controls().subtype(
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
)
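# Example usage (a sketch, not part of the original module): decoding one raw
# BER-encoded LDAP PDU (e.g. bytes captured off the wire) with the pyasn1 codec.
def _example_decode_ldap_message(substrate):
    """Decode a single LDAPMessage from raw bytes; returns (message, remainder)."""
    from pyasn1.codec.ber import decoder
    return decoder.decode(substrate, asn1Spec=LDAPMessage())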
|
saurabhbajaj207/CarpeDiem
|
venv/Lib/site-packages/pyasn1_modules/rfc2251.py
|
Python
|
mit
| 26,833 | 0.004398 |
# -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import
import warnings
# Import third party libs
import yaml
from yaml.nodes import MappingNode
from yaml.constructor import ConstructorError
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except Exception:
pass
# This function is safe and needs to stay as yaml.load. The load function
# accepts a custom loader, and every time this function is used in Salt
# the custom loader defined below is used. This should be altered though to
# not require the custom loader to be explicitly added.
load = yaml.load # pylint: disable=C0103
class DuplicateKeyWarning(RuntimeWarning):
'''
Warned when duplicate keys exist
'''
warnings.simplefilter('always', category=DuplicateKeyWarning)
# with code integrated from https://gist.github.com/844388
class SaltYamlSafeLoader(yaml.SafeLoader, object):
'''
Create a custom YAML loader that uses the custom constructor. This allows
for the YAML loading defaults to be manipulated based on needs within salt
    to make things like sls files more intuitive.
'''
def __init__(self, stream, dictclass=dict):
yaml.SafeLoader.__init__(self, stream)
if dictclass is not dict:
# then assume ordered dict and use it for both !map and !omap
self.add_constructor(
u'tag:yaml.org,2002:map',
type(self).construct_yaml_map)
self.add_constructor(
u'tag:yaml.org,2002:omap',
type(self).construct_yaml_map)
self.dictclass = dictclass
def construct_yaml_map(self, node):
data = self.dictclass()
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_mapping(self, node, deep=False):
'''
Build the mapping for YAML
'''
if not isinstance(node, MappingNode):
raise ConstructorError(
None,
None,
'expected a mapping node, but found {0}'.format(node.id),
node.start_mark)
self.flatten_mapping(node)
mapping = self.dictclass()
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError:
err = ('While constructing a mapping {0} found unacceptable '
'key {1}').format(node.start_mark, key_node.start_mark)
raise ConstructorError(err)
value = self.construct_object(value_node, deep=deep)
if key in mapping:
raise ConstructorError('Conflicting ID {0!r}'.format(key))
mapping[key] = value
return mapping
def construct_scalar(self, node):
'''
        Verify integers and pass them in correctly if they are declared
        as octal
'''
if node.tag == 'tag:yaml.org,2002:int':
if node.value == '0':
pass
elif node.value.startswith('0') and not node.value.startswith(('0b', '0x')):
node.value = node.value.lstrip('0')
# If value was all zeros, node.value would have been reduced to
# an empty string. Change it to '0'.
if node.value == '':
node.value = '0'
return super(SaltYamlSafeLoader, self).construct_scalar(node)
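# Example usage (a sketch, not part of the original module): parsing a YAML
# document with the custom loader defined above.
def _example_load(stream):
    '''Parse `stream` with SaltYamlSafeLoader; duplicate keys raise ConstructorError.'''
    return load(stream, Loader=SaltYamlSafeLoader)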
|
smallyear/linuxLearn
|
salt/salt/utils/yamlloader.py
|
Python
|
apache-2.0
| 3,478 | 0.000575 |
from django.apps import AppConfig
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
class AppConfig(AppConfig):
name = '.'.join(__name__.split('.')[:-1])
label = 'icekit_plugins_iiif'
verbose_name = "IIIF Basics"
def ready(self):
# Create custom permission pointing to User, because we have no other
# model to hang it off for now...
# TODO This is a hack, find a better way
User = get_user_model()
try:
# this doesn't work if migrations haven't been updated, resulting
# in "RuntimeError: Error creating new content types. Please make
# sure contenttypes is migrated before trying to migrate apps
# individually."
content_type = ContentType.objects.get_for_model(User)
Permission.objects.get_or_create(
codename='can_use_iiif_image_api',
name='Can Use IIIF Image API',
content_type=content_type,
)
except RuntimeError:
pass
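# Illustrative note (not part of the original module): because the permission
# created above is attached to the user model's content type, a view-level
# check looks like the following (the 'auth' app label is an assumption based
# on the default user model):
#
#   request.user.has_perm('auth.can_use_iiif_image_api')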
|
ic-labs/django-icekit
|
icekit/plugins/iiif/apps.py
|
Python
|
mit
| 1,144 | 0 |
from __future__ import print_function
import pandas as pd
from sklearn.base import TransformerMixin
class FamilyCounter(TransformerMixin):
def __init__(self, use=True):
self.use = use
def transform(self, features_raw, **transform_params):
if self.use:
features = features_raw.copy(deep=True)
family = features_raw[['SibSp', 'Parch']]\
.apply(lambda x: x[0] + x[1], axis=1)
features.drop('SibSp', axis=1, inplace=True)
features.drop('Parch', axis=1, inplace=True)
return pd.concat([features,
pd.DataFrame({'Family': family})], axis=1)
return features_raw
def fit(self, X, y=None, **fit_params):
return self
def get_params(self, *args, **kwargs):
return { 'use': self.use }
def set_params(self, **params):
if 'use' in params:
self.use = params.get('use')
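# Example usage (a sketch, not part of the original module): collapsing the
# SibSp/Parch columns into a single Family count; the toy frame below is made up.
if __name__ == '__main__':
    frame = pd.DataFrame({'SibSp': [1, 0], 'Parch': [2, 0], 'Fare': [7.25, 71.3]})
    print(FamilyCounter(use=True).fit_transform(frame))
    # 'Fare' is kept as-is; 'SibSp' and 'Parch' are replaced by Family = SibSp + Parch.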
|
wojtekwalczak/kaggle_titanic
|
titanic/transformers/FamilyCounter.py
|
Python
|
apache-2.0
| 946 | 0.005285 |
# -*- coding: utf-8 -*-
"""
Project name: Open Methodology for Security Tool Developers
Project URL: https://github.com/cr0hn/OMSTD
Copyright (c) 2014, cr0hn<-AT->cr0hn.com
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
__author__ = 'cr0hn - cr0hn<-at->cr0hn.com (@ggdaniel)'
from multiprocessing.pool import Pool
# ----------------------------------------------------------------------
def hello(i):
print(i)
# ----------------------------------------------------------------------
def main():
p = Pool(10)
p.map(hello, range(50))
if __name__ == '__main__':
main()
|
cr0hn/OMSTD
|
examples/develop/lp/002/lp-002-s1.py
|
Python
|
bsd-2-clause
| 2,001 | 0.002999 |
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{
'name': 'XMLRPC Operation Product',
'version': '0.1',
'category': 'ETL',
'description': '''
XMLRPC Import product
''',
'author': 'Micronaet S.r.l. - Nicola Riolini',
'website': 'http://www.micronaet.it',
'license': 'AGPL-3',
'depends': [
'base',
'xmlrpc_base',
'product',
'sql_product', # for statistic category
'base_accounting_program', # q x pack
#'sql_partner', # for fields to update
#'l10n_it_private', # private info
#'mx_partner_zone', # zone
# 'l10n_it_iban_cin'
],
'init_xml': [],
'demo': [],
'data': [
#'operation.xml',
'product_view.xml',
'data/operation.xml',
],
'active': False,
'installable': True,
'auto_install': False,
}
|
Micronaet/micronaet-xmlrpc
|
xmlrpc_operation_product/__openerp__.py
|
Python
|
agpl-3.0
| 1,769 | 0.004522 |
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('variegated_womprat')
mobileTemplate.setLevel(12)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setMeatType("Wild Meat")
mobileTemplate.setMeatAmount(3)
mobileTemplate.setHideType("Leathery Hide")
mobileTemplate.setBoneAmount(3)
mobileTemplate.setBoneType("Animal Bone")
mobileTemplate.setHideAmount(2)
mobileTemplate.setSocialGroup("variegated womprat")
mobileTemplate.setAssistRange(6)
mobileTemplate.setStalker(False)
mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_variegated_womp_rat.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
attacks.add('bm_bite_1')
attacks.add('bm_bolster_armor_1')
attacks.add('bm_enfeeble_1')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('variegated_womprat', mobileTemplate)
return
|
agry/NGECore2
|
scripts/mobiles/tatooine/variegated_womprat.py
|
Python
|
lgpl-3.0
| 1,684 | 0.026128 |
#!/usr/bin/env python
"""
Twython is a library for Python that wraps the Twitter API.
It aims to abstract away all the API endpoints, so that additions to the library
and/or the Twitter API won't cause any overall problems.
Questions, comments? ryan@venodesigns.net
"""
__author__ = "Ryan McGrath <ryan@venodesigns.net>"
__version__ = "2.3.4"
import urllib
import re
import warnings
import requests
from requests.auth import OAuth1
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
# Twython maps keyword based arguments to Twitter API endpoints. The endpoints
# table is a file with a dictionary of every API endpoint that Twython supports.
from twitter_endpoints import base_url, api_table, twitter_http_status_codes
try:
import simplejson
except ImportError:
try:
# Python 2.6 and up
import json as simplejson
except ImportError:
try:
from django.utils import simplejson
except:
# Seriously wtf is wrong with you if you get this Exception.
raise Exception("Twython requires the simplejson library (or Python 2.6) to work. http://www.undefined.org/python/")
class TwythonError(Exception):
"""
Generic error class, catch-all for most Twython issues.
Special cases are handled by TwythonAPILimit and TwythonAuthError.
Note: To use these, the syntax has changed as of Twython 1.3. To catch these,
you need to explicitly import them into your code, e.g:
from twython import TwythonError, TwythonAPILimit, TwythonAuthError
"""
def __init__(self, msg, error_code=None, retry_after=None):
self.msg = msg
self.error_code = error_code
if error_code is not None and error_code in twitter_http_status_codes:
self.msg = '%s: %s -- %s' % \
(twitter_http_status_codes[error_code][0],
twitter_http_status_codes[error_code][1],
self.msg)
def __str__(self):
return repr(self.msg)
class TwythonAuthError(TwythonError):
""" Raised when you try to access a protected resource and it fails due to
some issue with your authentication.
"""
pass
class TwythonRateLimitError(TwythonError):
""" Raised when you've hit a rate limit.
    retry_after is the number of seconds to wait before trying again.
"""
def __init__(self, msg, error_code, retry_after=None):
TwythonError.__init__(self, msg, error_code=error_code)
if isinstance(retry_after, int):
self.msg = '%s (Retry after %d seconds)' % (msg, retry_after)
class Twython(object):
def __init__(self, app_key=None, app_secret=None, oauth_token=None, oauth_token_secret=None, \
headers=None, callback_url=None, twitter_token=None, twitter_secret=None, proxies=None):
"""Instantiates an instance of Twython. Takes optional parameters for authentication and such (see below).
:param app_key: (optional) Your applications key
:param app_secret: (optional) Your applications secret key
:param oauth_token: (optional) Used with oauth_token_secret to make authenticated calls
:param oauth_token_secret: (optional) Used with oauth_token to make authenticated calls
:param headers: (optional) Custom headers to send along with the request
:param callback_url: (optional) If set, will overwrite the callback url set in your application
:param proxies: (optional) A dictionary of proxies, for example {"http":"proxy.example.org:8080", "https":"proxy.example.org:8081"}.
"""
# Needed for hitting that there API.
self.api_url = 'https://api.twitter.com/%s'
self.request_token_url = self.api_url % 'oauth/request_token'
self.access_token_url = self.api_url % 'oauth/access_token'
self.authorize_url = self.api_url % 'oauth/authorize'
self.authenticate_url = self.api_url % 'oauth/authenticate'
# Enforce unicode on keys and secrets
self.app_key = app_key and unicode(app_key) or twitter_token and unicode(twitter_token)
self.app_secret = app_key and unicode(app_secret) or twitter_secret and unicode(twitter_secret)
self.oauth_token = oauth_token and u'%s' % oauth_token
self.oauth_token_secret = oauth_token_secret and u'%s' % oauth_token_secret
self.callback_url = callback_url
        # If there are headers, set them; otherwise be an embarrassing parent for their own good.
self.headers = headers or {'User-Agent': 'Twython v' + __version__}
# Allow for unauthenticated requests
self.client = requests.session(proxies=proxies)
self.auth = None
if self.app_key is not None and self.app_secret is not None and \
self.oauth_token is None and self.oauth_token_secret is None:
self.auth = OAuth1(self.app_key, self.app_secret,
signature_type='auth_header')
if self.app_key is not None and self.app_secret is not None and \
self.oauth_token is not None and self.oauth_token_secret is not None:
self.auth = OAuth1(self.app_key, self.app_secret,
self.oauth_token, self.oauth_token_secret,
signature_type='auth_header')
if self.auth is not None:
self.client = requests.session(headers=self.headers, auth=self.auth, proxies=proxies)
# register available funcs to allow listing name when debugging.
def setFunc(key):
return lambda **kwargs: self._constructFunc(key, **kwargs)
for key in api_table.keys():
self.__dict__[key] = setFunc(key)
# create stash for last call intel
self._last_call = None
def _constructFunc(self, api_call, **kwargs):
# Go through and replace any mustaches that are in our API url.
fn = api_table[api_call]
url = re.sub(
'\{\{(?P<m>[a-zA-Z_]+)\}\}',
# The '1' here catches the API version. Slightly hilarious.
lambda m: "%s" % kwargs.get(m.group(1), '1'),
base_url + fn['url']
)
content = self._request(url, method=fn['method'], params=kwargs)
return content
def _request(self, url, method='GET', params=None, files=None, api_call=None):
'''Internal response generator, no sense in repeating the same
code twice, right? ;)
'''
method = method.lower()
if not method in ('get', 'post'):
raise TwythonError('Method must be of GET or POST')
params = params or {}
func = getattr(self.client, method)
if method == 'get':
response = func(url, params=params)
else:
response = func(url, data=params, files=files)
content = response.content.decode('utf-8')
# create stash for last function intel
self._last_call = {
'api_call': api_call,
'api_error': None,
'cookies': response.cookies,
'error': response.error,
'headers': response.headers,
'status_code': response.status_code,
'url': response.url,
'content': content,
}
try:
content = simplejson.loads(content)
except ValueError:
raise TwythonError('Response was not valid JSON, unable to decode.')
if response.status_code > 304:
# If there is no error message, use a default.
error_msg = content.get(
'error', 'An error occurred processing your request.')
self._last_call['api_error'] = error_msg
if response.status_code == 420:
exceptionType = TwythonRateLimitError
else:
exceptionType = TwythonError
raise exceptionType(error_msg,
error_code=response.status_code,
retry_after=response.headers.get('retry-after'))
return content
'''
# Dynamic Request Methods
Just in case Twitter releases something in their API
and a developer wants to implement it on their app, but
we haven't gotten around to putting it in Twython yet. :)
'''
def request(self, endpoint, method='GET', params=None, files=None, version=1):
# In case they want to pass a full Twitter URL
# i.e. https://search.twitter.com/
if endpoint.startswith('http://') or endpoint.startswith('https://'):
url = endpoint
else:
url = '%s/%s.json' % (self.api_url % version, endpoint)
content = self._request(url, method=method, params=params, files=files, api_call=url)
return content
def get(self, endpoint, params=None, version=1):
return self.request(endpoint, params=params, version=version)
def post(self, endpoint, params=None, files=None, version=1):
return self.request(endpoint, 'POST', params=params, files=files, version=version)
# End Dynamic Request Methods
def get_lastfunction_header(self, header):
"""Returns the header in the last function
This must be called after an API call, as it returns header based
information.
This will return None if the header is not present
Most useful for the following header information:
x-ratelimit-limit
x-ratelimit-remaining
x-ratelimit-class
x-ratelimit-reset
"""
if self._last_call is None:
raise TwythonError('This function must be called after an API call. It delivers header information.')
if header in self._last_call['headers']:
return self._last_call['headers'][header]
return None
def get_authentication_tokens(self):
"""Returns an authorization URL for a user to hit.
"""
request_args = {}
if self.callback_url:
request_args['oauth_callback'] = self.callback_url
response = self.client.get(self.request_token_url, params=request_args)
if response.status_code != 200:
raise TwythonAuthError("Seems something couldn't be verified with your OAuth junk. Error: %s, Message: %s" % (response.status_code, response.content))
request_tokens = dict(parse_qsl(response.content))
if not request_tokens:
raise TwythonError('Unable to decode request tokens.')
oauth_callback_confirmed = request_tokens.get('oauth_callback_confirmed') == 'true'
auth_url_params = {
'oauth_token': request_tokens['oauth_token'],
}
# Use old-style callback argument if server didn't accept new-style
if self.callback_url and not oauth_callback_confirmed:
auth_url_params['oauth_callback'] = self.callback_url
request_tokens['auth_url'] = self.authenticate_url + '?' + urllib.urlencode(auth_url_params)
return request_tokens
def get_authorized_tokens(self):
"""Returns authorized tokens after they go through the auth_url phase.
"""
response = self.client.get(self.access_token_url)
authorized_tokens = dict(parse_qsl(response.content))
if not authorized_tokens:
raise TwythonError('Unable to decode authorized tokens.')
return authorized_tokens
# ------------------------------------------------------------------------------------------------------------------------
# The following methods are all different in some manner or require special attention with regards to the Twitter API.
# Because of this, we keep them separate from all the other endpoint definitions - ideally this should be change-able,
# but it's not high on the priority list at the moment.
# ------------------------------------------------------------------------------------------------------------------------
@staticmethod
def shortenURL(url_to_shorten, shortener='http://is.gd/api.php'):
"""Shortens url specified by url_to_shorten.
Note: Twitter automatically shortens all URLs behind their own custom t.co shortener now,
but we keep this here for anyone who was previously using it for alternative purposes. ;)
:param url_to_shorten: (required) The URL to shorten
:param shortener: (optional) In case you want to use a different
URL shortening service
"""
if shortener == '':
raise TwythonError('Please provide a URL shortening service.')
request = requests.get(shortener, params={
'query': url_to_shorten
})
if request.status_code in [301, 201, 200]:
return request.text
else:
raise TwythonError('shortenURL() failed with a %s error code.' % request.status_code)
@staticmethod
def constructApiURL(base_url, params):
return base_url + "?" + "&".join(["%s=%s" % (Twython.unicode2utf8(key), urllib.quote_plus(Twython.unicode2utf8(value))) for (key, value) in params.iteritems()])
def search(self, **kwargs):
""" Returns tweets that match a specified query.
Documentation: https://dev.twitter.com/doc/get/search
:param q: (required) The query you want to search Twitter for
:param geocode: (optional) Returns tweets by users located within
a given radius of the given latitude/longitude.
The parameter value is specified by
"latitude,longitude,radius", where radius units
must be specified as either "mi" (miles) or
"km" (kilometers).
Example Values: 37.781157,-122.398720,1mi
:param lang: (optional) Restricts tweets to the given language,
given by an ISO 639-1 code.
:param locale: (optional) Specify the language of the query you
are sending. Only ``ja`` is currently effective.
:param page: (optional) The page number (starting at 1) to return
Max ~1500 results
:param result_type: (optional) Default ``mixed``
mixed: Include both popular and real time
results in the response.
recent: return only the most recent results in
the response
popular: return only the most popular results
in the response.
e.g x.search(q='jjndf', page='2')
"""
return self.get('https://search.twitter.com/search.json', params=kwargs)
def searchGen(self, search_query, **kwargs):
""" Returns a generator of tweets that match a specified query.
Documentation: https://dev.twitter.com/doc/get/search
See Twython.search() for acceptable parameters
e.g search = x.searchGen('python')
for result in search:
print result
"""
kwargs['q'] = search_query
content = self.get('https://search.twitter.com/search.json', params=kwargs)
if not content['results']:
            return  # ends the generator; raising StopIteration here is disallowed by PEP 479
for tweet in content['results']:
yield tweet
if 'page' not in kwargs:
kwargs['page'] = '2'
else:
try:
kwargs['page'] = int(kwargs['page'])
kwargs['page'] += 1
kwargs['page'] = str(kwargs['page'])
except TypeError:
raise TwythonError("searchGen() exited because page takes type str")
for tweet in self.searchGen(search_query, **kwargs):
yield tweet
# The following methods are apart from the other Account methods,
# because they rely on a whole multipart-data posting function set.
def updateProfileBackgroundImage(self, file_, tile=True, version=1):
"""Updates the authenticating user's profile background image.
:param file_: (required) A string to the location of the file
(less than 800KB in size, larger than 2048px width will scale down)
:param tile: (optional) Default ``True`` If set to true the background image
will be displayed tiled. The image will not be tiled otherwise.
:param version: (optional) A number, default 1 because that's the
only API version Twitter has now
"""
url = 'https://api.twitter.com/%d/account/update_profile_background_image.json' % version
return self._media_update(url,
{'image': (file_, open(file_, 'rb'))},
**{'tile': tile})
def bulkUserLookup(self, **kwargs):
"""Stub for a method that has been deprecated, kept for now to raise errors
properly if people are relying on this (which they are...).
"""
warnings.warn(
"This function has been deprecated. Please migrate to .lookupUser() - params should be the same.",
DeprecationWarning,
stacklevel=2
)
def updateProfileImage(self, file_, version=1):
"""Updates the authenticating user's profile image (avatar).
:param file_: (required) A string to the location of the file
:param version: (optional) A number, default 1 because that's the
only API version Twitter has now
"""
url = 'https://api.twitter.com/%d/account/update_profile_image.json' % version
return self._media_update(url,
{'image': (file_, open(file_, 'rb'))})
def updateStatusWithMedia(self, file_, version=1, **params):
"""Updates the users status with media
:param file_: (required) A string to the location of the file
:param version: (optional) A number, default 1 because that's the
only API version Twitter has now
**params - You may pass items that are taken in this doc
(https://dev.twitter.com/docs/api/1/post/statuses/update_with_media)
"""
url = 'https://upload.twitter.com/%d/statuses/update_with_media.json' % version
return self._media_update(url,
{'media': (file_, open(file_, 'rb'))},
**params)
def _media_update(self, url, file_, **params):
return self.post(url, params=params, files=file_)
def getProfileImageUrl(self, username, size='normal', version=1):
"""Gets the URL for the user's profile image.
:param username: (required) Username, self explanatory.
:param size: (optional) Default 'normal' (48px by 48px)
bigger - 73px by 73px
mini - 24px by 24px
original - undefined, be careful -- images may be
large in bytes and/or size.
:param version: A number, default 1 because that's the only API
version Twitter has now
"""
endpoint = 'users/profile_image/%s' % username
url = self.api_url % version + '/' + endpoint
response = self.client.get(url, params={'size': size}, allow_redirects=False)
image_url = response.headers.get('location')
if response.status_code in (301, 302, 303, 307) and image_url is not None:
return image_url
else:
raise TwythonError('getProfileImageUrl() threw an error.',
error_code=response.status_code)
@staticmethod
def stream(data, callback):
"""A Streaming API endpoint, because requests (by Kenneth Reitz)
makes this not stupidly annoying to implement.
In reality, Twython does absolutely *nothing special* here,
but people new to programming expect this type of function to
exist for this library, so we provide it for convenience.
Seriously, this is nothing special. :)
For the basic stream you're probably accessing, you'll want to
pass the following as data dictionary keys. If you need to use
OAuth (newer streams), passing secrets/etc
as keys SHOULD work...
This is all done over SSL (https://), so you're not left
totally vulnerable by passing your password.
:param username: (required) Username, self explanatory.
:param password: (required) The Streaming API doesn't use OAuth,
so we do this the old school way.
:param callback: (required) Callback function to be fired when
tweets come in (this is an event-based-ish API).
:param endpoint: (optional) Override the endpoint you're using
with the Twitter Streaming API. This is defaulted
to the one that everyone has access to, but if
Twitter <3's you feel free to set this to your
wildest desires.
"""
endpoint = 'https://stream.twitter.com/1/statuses/filter.json'
if 'endpoint' in data:
endpoint = data.pop('endpoint')
needs_basic_auth = False
if 'username' in data and 'password' in data:
needs_basic_auth = True
username = data.pop('username')
password = data.pop('password')
if needs_basic_auth:
stream = requests.post(endpoint,
data=data,
auth=(username, password))
else:
stream = requests.post(endpoint, data=data)
for line in stream.iter_lines():
if line:
try:
callback(simplejson.loads(line))
except ValueError:
raise TwythonError('Response was not valid JSON, unable to decode.')
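    # A hedged example of calling the stream helper above (credential values
    # and the tracked keyword are placeholders; the callback just prints the
    # decoded payload):
    #   def on_tweet(tweet):
    #       print(tweet.get('text'))
    #   Twython.stream({'username': 'user', 'password': 'pass',
    #                   'track': 'python'}, on_tweet)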
@staticmethod
def unicode2utf8(text):
try:
if isinstance(text, unicode):
text = text.encode('utf-8')
except:
pass
return text
@staticmethod
def encode(text):
if isinstance(text, (str, unicode)):
return Twython.unicode2utf8(text)
return str(text)
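# The following is a usage sketch, not part of the original module. It assumes
# the Twython constructor accepts app_key/app_secret/callback_url keyword
# arguments (as suggested by the attributes used above); the credential values
# a caller would pass in are placeholders.
def _example_oauth_dance(app_key, app_secret, callback_url):
    """Illustrative three-legged OAuth flow using only methods defined above."""
    client = Twython(app_key, app_secret, callback_url=callback_url)
    tokens = client.get_authentication_tokens()
    # Redirect the user to tokens['auth_url']; once they authorize, rebuild the
    # client with the returned oauth_token/oauth_token_secret and call
    # client.get_authorized_tokens() to obtain the final credentials.
    return tokens['auth_url']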
|
davish/Twitter-World-Mood
|
twython/twython.py
|
Python
|
mit
| 22,902 | 0.00262 |
# ----------------------------------------------------------------------------
# Copyright 2015 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
'''
Test of the optimizers
'''
import numpy as np
import copy
from neon import NervanaObject
from neon.backends import gen_backend
from neon.optimizers import GradientDescentMomentum, RMSProp, Adadelta, Adam, Adagrad
from neon.optimizers import MultiOptimizer
from neon.layers import Conv, Affine, LSTM, GRU
from neon.initializers import Gaussian, Constant
from neon.transforms import Rectlin, Logistic, Tanh
class DummyLayer(object):
def __init__(self, p):
self.p = p[0]
def get_params(self):
return self.p
def compare_tensors(func, param_list, param2, tol=0., epoch=1):
func.optimize([DummyLayer(param_list)], epoch=epoch)
(param, grad), states = param_list[0]
cond = np.sum(np.abs(param.get() - param2) <= tol)
assert cond == np.prod(param2.shape)
def wrap(x):
be = NervanaObject.be
dtypeu = np.float32
return be.array(dtypeu(x))
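# Note on the structure shared by the tests below (inferred from compare_tensors):
# each entry of param_list is ((param_tensor, grad_tensor), [state_tensors...]),
# i.e. the same (param, grad) / states layout that the optimizers' optimize()
# method consumes, while param2 holds the NumPy reference result computed by hand.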
def test_gdm(backend_default):
lrate, mom, wdecay = 0.1, 0.9, 0.005
gdm = GradientDescentMomentum(
learning_rate=lrate, momentum_coef=mom, wdecay=wdecay)
param = np.random.rand(200, 128)
param2 = copy.deepcopy(param)
grad = 0.01 * np.random.rand(200, 128)
grad2 = grad / 128.
states = [0.01 * np.random.rand(200, 128)]
velocity = states[0]
param2[:] = param2 + velocity * mom - grad2 * lrate - wdecay * lrate * param
param_list = [((wrap(param), wrap(grad)), [wrap(states[0])])]
compare_tensors(gdm, param_list, param2, tol=1e-7)
def test_rmsprop(backend_default):
rms = RMSProp()
param = np.random.rand(200, 128)
param2 = copy.deepcopy(param)
grad = 0.01 * np.random.rand(200, 128)
grad2 = grad / 128.
states = [0.01 * np.random.rand(200, 128)]
state = states[0]
decay = rms.decay_rate
denom = np.sqrt(decay * state + np.square(grad2) * (1.0 - decay) + rms.epsilon) + rms.epsilon
param2[:] -= grad2 * rms.learning_rate / denom
param_list = [((wrap(param), wrap(grad)), [wrap(states[0])])]
compare_tensors(rms, param_list, param2, tol=1e-7)
def test_adadelta(backend_default):
ada = Adadelta()
param = np.random.rand(200, 128)
param2 = copy.deepcopy(param)
grad = 0.01 * np.random.rand(200, 128)
grad2 = grad / 128.
states = [0.01 * np.random.rand(200, 128),
0.01 * np.random.rand(200, 128),
0.01 * np.random.rand(200, 128)]
states2 = [copy.deepcopy(states[0]),
copy.deepcopy(states[1]),
copy.deepcopy(states[2])]
decay = ada.decay
states2[0][:] = states2[0] * decay + (1. - decay) * grad2 * grad2
states2[2][:] = np.sqrt(
(states2[1] + ada.epsilon) / (states2[0] + ada.epsilon)) * grad2
states2[1][:] = states2[1] * decay + (1. - decay) * states2[2] * states2[2]
param2[:] -= states2[2]
param_list = [
((wrap(param), wrap(grad)), [wrap(states[0]), wrap(states[1]), wrap(states[2])])]
compare_tensors(ada, param_list, param2, tol=1e-7)
def test_adagrad(backend_default):
ada = Adagrad()
param = np.random.rand(200, 128)
param2 = copy.deepcopy(param)
grad = 0.01 * np.random.rand(200, 128)
grad2 = grad / 128.
states = [0.01 * np.random.rand(200, 128)]
states2 = [copy.deepcopy(states[0])]
states2[0][:] = states2[0] + np.square(grad2)
denom = np.sqrt(states2[0] + ada.epsilon)
param2[:] -= grad2 * ada.learning_rate / denom
param_list = [
((wrap(param), wrap(grad)), [wrap(states[0])])]
compare_tensors(ada, param_list, param2, tol=1e-7)
def test_adam(backend_default):
adam = Adam()
param = np.random.rand(200, 128)
param2 = copy.deepcopy(param)
grad = 0.01 * np.random.rand(200, 128)
grad2 = grad / 128.
states = [0.01 * np.random.rand(200, 128),
0.01 * np.random.rand(200, 128)]
states2 = [copy.deepcopy(states[0]),
copy.deepcopy(states[1])]
epoch = 1
t = epoch + 1
l = adam.learning_rate * np.sqrt(1 - adam.beta_2 ** t) / (1 - adam.beta_1 ** t)
m, v = states2
m[:] = m * adam.beta_1 + (1. - adam.beta_1) * grad2
v[:] = v * adam.beta_2 + (1. - adam.beta_2) * grad2 * grad2
param2[:] -= l * m / (np.sqrt(v) + adam.epsilon)
param_list = [
((wrap(param), wrap(grad)), [wrap(states[0]), wrap(states[1])])]
compare_tensors(adam, param_list, param2, tol=1e-7, epoch=epoch)
def test_multi_optimizer(backend_default):
opt_gdm = GradientDescentMomentum(
learning_rate=0.001, momentum_coef=0.9, wdecay=0.005)
opt_ada = Adadelta()
opt_adam = Adam()
opt_rms = RMSProp()
opt_rms_1 = RMSProp(gradient_clip_value=5)
init_one = Gaussian(scale=0.01)
l1 = Conv((11, 11, 64), strides=4, padding=3,
init=init_one, bias=Constant(0), activation=Rectlin())
l2 = Affine(nout=4096, init=init_one,
bias=Constant(1), activation=Rectlin())
l3 = LSTM(output_size=1000, init=init_one, activation=Logistic(), gate_activation=Tanh())
l4 = GRU(output_size=100, init=init_one, activation=Logistic(), gate_activation=Tanh())
layers = [l1, l2, l3, l4]
layer_list = []
for layer in layers:
if isinstance(layer, list):
layer_list.extend(layer)
else:
layer_list.append(layer)
opt = MultiOptimizer({'default': opt_gdm,
'Bias': opt_ada,
'Convolution': opt_adam,
'Linear': opt_rms,
'LSTM': opt_rms_1,
'GRU': opt_rms_1})
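    # The mapping above is keyed by layer class name; layers whose class name is
    # not listed (e.g. the Activation layers created inside Conv/Affine) fall
    # back to the 'default' optimizer, which the asserts below rely on.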
map_list = opt.map_optimizers(layer_list)
assert map_list[opt_adam][0].__class__.__name__ == 'Convolution'
assert map_list[opt_ada][0].__class__.__name__ == 'Bias'
assert map_list[opt_rms][0].__class__.__name__ == 'Linear'
assert map_list[opt_gdm][0].__class__.__name__ == 'Activation'
assert map_list[opt_rms_1][0].__class__.__name__ == 'LSTM'
assert map_list[opt_rms_1][1].__class__.__name__ == 'GRU'
if __name__ == '__main__':
be = gen_backend(backend='gpu', batch_size=50)
test_multi_optimizer(be)
|
nhynes/neon
|
tests/test_optimizer.py
|
Python
|
apache-2.0
| 6,823 | 0.001319 |
import os
import os.path as op
import pytest
import numpy as np
from numpy.testing import (assert_array_equal, assert_equal, assert_allclose,
assert_array_less, assert_almost_equal)
import itertools
import mne
from mne.datasets import testing
from mne.fixes import _get_img_fdata
from mne import read_trans, write_trans
from mne.io import read_info
from mne.transforms import (invert_transform, _get_trans,
rotation, rotation3d, rotation_angles, _find_trans,
combine_transforms, apply_trans, translation,
get_ras_to_neuromag_trans, _pol_to_cart,
quat_to_rot, rot_to_quat, _angle_between_quats,
_find_vector_rotation, _sph_to_cart, _cart_to_sph,
_topo_to_sph, _average_quats,
_SphericalSurfaceWarp as SphericalSurfaceWarp,
rotation3d_align_z_axis, _read_fs_xfm,
_write_fs_xfm, _quat_real, _fit_matched_points,
_quat_to_euler, _euler_to_quat,
_quat_to_affine, _compute_r2, _validate_pipeline)
from mne.utils import requires_nibabel, requires_dipy
data_path = testing.data_path(download=False)
fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-trans.fif')
fname_eve = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc_raw-eve.fif')
subjects_dir = op.join(data_path, 'subjects')
fname_t1 = op.join(subjects_dir, 'fsaverage', 'mri', 'T1.mgz')
base_dir = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data')
fname_trans = op.join(base_dir, 'sample-audvis-raw-trans.txt')
test_fif_fname = op.join(base_dir, 'test_raw.fif')
ctf_fname = op.join(base_dir, 'test_ctf_raw.fif')
hp_fif_fname = op.join(base_dir, 'test_chpi_raw_sss.fif')
def test_tps():
"""Test TPS warping."""
az = np.linspace(0., 2 * np.pi, 20, endpoint=False)
pol = np.linspace(0, np.pi, 12)[1:-1]
sph = np.array(np.meshgrid(1, az, pol, indexing='ij'))
sph.shape = (3, -1)
assert_equal(sph.shape[1], 200)
source = _sph_to_cart(sph.T)
destination = source.copy()
destination *= 2
destination[:, 0] += 1
# fit with 100 points
warp = SphericalSurfaceWarp()
assert 'no ' in repr(warp)
warp.fit(source[::3], destination[::2])
assert 'oct5' in repr(warp)
destination_est = warp.transform(source)
assert_allclose(destination_est, destination, atol=1e-3)
@testing.requires_testing_data
def test_get_trans():
"""Test converting '-trans.txt' to '-trans.fif'."""
trans = read_trans(fname)
trans = invert_transform(trans) # starts out as head->MRI, so invert
trans_2 = _get_trans(fname_trans)[0]
assert trans.__eq__(trans_2, atol=1e-5)
@testing.requires_testing_data
def test_io_trans(tmpdir):
"""Test reading and writing of trans files."""
tempdir = str(tmpdir)
os.mkdir(op.join(tempdir, 'sample'))
pytest.raises(RuntimeError, _find_trans, 'sample', subjects_dir=tempdir)
trans0 = read_trans(fname)
fname1 = op.join(tempdir, 'sample', 'test-trans.fif')
trans0.save(fname1)
assert fname1 == _find_trans('sample', subjects_dir=tempdir)
trans1 = read_trans(fname1)
# check all properties
assert trans0 == trans1
# check reading non -trans.fif files
pytest.raises(IOError, read_trans, fname_eve)
# check warning on bad filenames
fname2 = op.join(tempdir, 'trans-test-bad-name.fif')
with pytest.warns(RuntimeWarning, match='-trans.fif'):
write_trans(fname2, trans0)
def test_get_ras_to_neuromag_trans():
"""Test the coordinate transformation from ras to neuromag."""
# create model points in neuromag-like space
rng = np.random.RandomState(0)
anterior = [0, 1, 0]
left = [-1, 0, 0]
right = [.8, 0, 0]
up = [0, 0, 1]
rand_pts = rng.uniform(-1, 1, (3, 3))
pts = np.vstack((anterior, left, right, up, rand_pts))
# change coord system
rx, ry, rz, tx, ty, tz = rng.uniform(-2 * np.pi, 2 * np.pi, 6)
trans = np.dot(translation(tx, ty, tz), rotation(rx, ry, rz))
pts_changed = apply_trans(trans, pts)
# transform back into original space
nas, lpa, rpa = pts_changed[:3]
hsp_trans = get_ras_to_neuromag_trans(nas, lpa, rpa)
pts_restored = apply_trans(hsp_trans, pts_changed)
err = "Neuromag transformation failed"
assert_allclose(pts_restored, pts, atol=1e-6, err_msg=err)
def _cartesian_to_sphere(x, y, z):
"""Convert using old function."""
hypotxy = np.hypot(x, y)
r = np.hypot(hypotxy, z)
elev = np.arctan2(z, hypotxy)
az = np.arctan2(y, x)
return az, elev, r
def _sphere_to_cartesian(theta, phi, r):
"""Convert using old function."""
z = r * np.sin(phi)
rcos_phi = r * np.cos(phi)
x = rcos_phi * np.cos(theta)
y = rcos_phi * np.sin(theta)
return x, y, z
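# Conversion-convention note (inferred from the equivalence checks below): the
# old helpers use elevation measured from the xy-plane, while _cart_to_sph
# returns (r, azimuth, polar angle from +z), hence the `pi/2 - elevation`
# adjustment applied before comparing the two representations.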
def test_sph_to_cart():
"""Test conversion between sphere and cartesian."""
# Simple test, expected value (11, 0, 0)
r, theta, phi = 11., 0., np.pi / 2.
z = r * np.cos(phi)
rsin_phi = r * np.sin(phi)
x = rsin_phi * np.cos(theta)
y = rsin_phi * np.sin(theta)
coord = _sph_to_cart(np.array([[r, theta, phi]]))[0]
assert_allclose(coord, (x, y, z), atol=1e-7)
assert_allclose(coord, (r, 0, 0), atol=1e-7)
rng = np.random.RandomState(0)
# round-trip test
coords = rng.randn(10, 3)
assert_allclose(_sph_to_cart(_cart_to_sph(coords)), coords, atol=1e-5)
# equivalence tests to old versions
for coord in coords:
sph = _cart_to_sph(coord[np.newaxis])
cart = _sph_to_cart(sph)
sph_old = np.array(_cartesian_to_sphere(*coord))
cart_old = _sphere_to_cartesian(*sph_old)
sph_old[1] = np.pi / 2. - sph_old[1] # new convention
assert_allclose(sph[0], sph_old[[2, 0, 1]], atol=1e-7)
assert_allclose(cart[0], cart_old, atol=1e-7)
assert_allclose(cart[0], coord, atol=1e-7)
def _polar_to_cartesian(theta, r):
"""Transform polar coordinates to cartesian."""
x = r * np.cos(theta)
y = r * np.sin(theta)
return x, y
def test_polar_to_cartesian():
"""Test helper transform function from polar to cartesian."""
r = 1
theta = np.pi
# expected values are (-1, 0)
x = r * np.cos(theta)
y = r * np.sin(theta)
coord = _pol_to_cart(np.array([[r, theta]]))[0]
# np.pi is an approx since pi is irrational
assert_allclose(coord, (x, y), atol=1e-7)
assert_allclose(coord, (-1, 0), atol=1e-7)
assert_allclose(coord, _polar_to_cartesian(theta, r), atol=1e-7)
rng = np.random.RandomState(0)
r = rng.randn(10)
theta = rng.rand(10) * (2 * np.pi)
polar = np.array((r, theta)).T
assert_allclose([_polar_to_cartesian(p[1], p[0]) for p in polar],
_pol_to_cart(polar), atol=1e-7)
def _topo_to_phi_theta(theta, radius):
"""Convert using old function."""
sph_phi = (0.5 - radius) * 180
sph_theta = -theta
return sph_phi, sph_theta
def test_topo_to_sph():
"""Test topo to sphere conversion."""
rng = np.random.RandomState(0)
angles = rng.rand(10) * 360
radii = rng.rand(10)
angles[0] = 30
radii[0] = 0.25
# new way
sph = _topo_to_sph(np.array([angles, radii]).T)
new = _sph_to_cart(sph)
new[:, [0, 1]] = new[:, [1, 0]] * [-1, 1]
# old way
for ii, (angle, radius) in enumerate(zip(angles, radii)):
sph_phi, sph_theta = _topo_to_phi_theta(angle, radius)
if ii == 0:
assert_allclose(_topo_to_phi_theta(angle, radius), [45, -30])
azimuth = sph_theta / 180.0 * np.pi
elevation = sph_phi / 180.0 * np.pi
assert_allclose(sph[ii], [1., azimuth, np.pi / 2. - elevation],
atol=1e-7)
r = np.ones_like(radius)
x, y, z = _sphere_to_cartesian(azimuth, elevation, r)
pos = [-y, x, z]
if ii == 0:
expected = np.array([1. / 2., np.sqrt(3) / 2., 1.])
expected /= np.sqrt(2)
assert_allclose(pos, expected, atol=1e-7)
assert_allclose(pos, new[ii], atol=1e-7)
def test_rotation():
"""Test conversion between rotation angles and transformation matrix."""
tests = [(0, 0, 1), (.5, .5, .5), (np.pi, 0, -1.5)]
for rot in tests:
x, y, z = rot
m = rotation3d(x, y, z)
m4 = rotation(x, y, z)
assert_array_equal(m, m4[:3, :3])
back = rotation_angles(m)
assert_almost_equal(actual=back, desired=rot, decimal=12)
back4 = rotation_angles(m4)
assert_almost_equal(actual=back4, desired=rot, decimal=12)
def test_rotation3d_align_z_axis():
"""Test rotation3d_align_z_axis."""
# The more complex z axis fails the assert presumably due to tolerance
#
inp_zs = [[0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 0, -1],
[-0.75071668, -0.62183808, 0.22302888]]
exp_res = [[[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]],
[[1., 0., 0.], [0., 0., 1.], [0., -1., 0.]],
[[0., 0., 1.], [0., 1., 0.], [-1., 0., 0.]],
[[1., 0., 0.], [0., -1., 0.], [0., 0., -1.]],
[[0.53919688, -0.38169517, -0.75071668],
[-0.38169517, 0.683832, -0.62183808],
[0.75071668, 0.62183808, 0.22302888]]]
for res, z in zip(exp_res, inp_zs):
assert_allclose(res, rotation3d_align_z_axis(z), atol=1e-7)
@testing.requires_testing_data
def test_combine():
"""Test combining transforms."""
trans = read_trans(fname)
inv = invert_transform(trans)
combine_transforms(trans, inv, trans['from'], trans['from'])
pytest.raises(RuntimeError, combine_transforms, trans, inv,
trans['to'], trans['from'])
pytest.raises(RuntimeError, combine_transforms, trans, inv,
trans['from'], trans['to'])
pytest.raises(RuntimeError, combine_transforms, trans, trans,
trans['from'], trans['to'])
def test_quaternions():
"""Test quaternion calculations."""
rots = [np.eye(3)]
for fname in [test_fif_fname, ctf_fname, hp_fif_fname]:
rots += [read_info(fname)['dev_head_t']['trans'][:3, :3]]
# nasty numerical cases
rots += [np.array([
[-0.99978541, -0.01873462, -0.00898756],
[-0.01873462, 0.62565561, 0.77987608],
[-0.00898756, 0.77987608, -0.62587152],
])]
rots += [np.array([
[0.62565561, -0.01873462, 0.77987608],
[-0.01873462, -0.99978541, -0.00898756],
[0.77987608, -0.00898756, -0.62587152],
])]
rots += [np.array([
[-0.99978541, -0.00898756, -0.01873462],
[-0.00898756, -0.62587152, 0.77987608],
[-0.01873462, 0.77987608, 0.62565561],
])]
for rot in rots:
assert_allclose(rot, quat_to_rot(rot_to_quat(rot)),
rtol=1e-5, atol=1e-5)
rot = rot[np.newaxis, np.newaxis, :, :]
assert_allclose(rot, quat_to_rot(rot_to_quat(rot)),
rtol=1e-5, atol=1e-5)
# let's make sure our angle function works in some reasonable way
for ii in range(3):
for jj in range(3):
a = np.zeros(3)
b = np.zeros(3)
a[ii] = 1.
b[jj] = 1.
expected = np.pi if ii != jj else 0.
assert_allclose(_angle_between_quats(a, b), expected, atol=1e-5)
y_180 = np.array([[-1, 0, 0], [0, 1, 0], [0, 0, -1.]])
assert_allclose(_angle_between_quats(rot_to_quat(y_180),
np.zeros(3)), np.pi)
h_180_attitude_90 = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1.]])
assert_allclose(_angle_between_quats(rot_to_quat(h_180_attitude_90),
np.zeros(3)), np.pi)
def test_vector_rotation():
"""Test basic rotation matrix math."""
x = np.array([1., 0., 0.])
y = np.array([0., 1., 0.])
rot = _find_vector_rotation(x, y)
assert_array_equal(rot,
[[0, -1, 0], [1, 0, 0], [0, 0, 1]])
quat_1 = rot_to_quat(rot)
quat_2 = rot_to_quat(np.eye(3))
assert_allclose(_angle_between_quats(quat_1, quat_2), np.pi / 2.)
def test_average_quats():
"""Test averaging of quaternions."""
sq2 = 1. / np.sqrt(2.)
quats = np.array([[0, sq2, sq2],
[0, sq2, sq2],
[0, sq2, 0],
[0, 0, sq2],
[sq2, 0, 0]], float)
# In MATLAB:
# quats = [[0, sq2, sq2, 0]; [0, sq2, sq2, 0];
# [0, sq2, 0, sq2]; [0, 0, sq2, sq2]; [sq2, 0, 0, sq2]];
expected = [quats[0],
quats[0],
[0, 0.788675134594813, 0.577350269189626],
[0, 0.657192299694123, 0.657192299694123],
[0.100406058540540, 0.616329446922803, 0.616329446922803]]
# Averaging the first two should give the same thing:
for lim, ex in enumerate(expected):
assert_allclose(_average_quats(quats[:lim + 1]), ex, atol=1e-7)
quats[1] *= -1 # same quaternion (hidden value is zero here)!
rot_0, rot_1 = quat_to_rot(quats[:2])
assert_allclose(rot_0, rot_1, atol=1e-7)
for lim, ex in enumerate(expected):
assert_allclose(_average_quats(quats[:lim + 1]), ex, atol=1e-7)
# Assert some symmetry
count = 0
extras = [[sq2, sq2, 0]] + list(np.eye(3))
for quat in np.concatenate((quats, expected, extras)):
if np.isclose(_quat_real(quat), 0., atol=1e-7): # can flip sign
count += 1
angle = _angle_between_quats(quat, -quat)
assert_allclose(angle, 0., atol=1e-7)
rot_0, rot_1 = quat_to_rot(np.array((quat, -quat)))
assert_allclose(rot_0, rot_1, atol=1e-7)
assert count == 4 + len(extras)
@testing.requires_testing_data
@pytest.mark.parametrize('subject', ('fsaverage', 'sample'))
def test_fs_xfm(subject, tmpdir):
"""Test reading and writing of Freesurfer transforms."""
fname = op.join(data_path, 'subjects', subject, 'mri', 'transforms',
'talairach.xfm')
xfm, kind = _read_fs_xfm(fname)
if subject == 'fsaverage':
assert_allclose(xfm, np.eye(4), atol=1e-5) # fsaverage is in MNI
assert kind == 'MNI Transform File'
tempdir = str(tmpdir)
fname_out = op.join(tempdir, 'out.xfm')
_write_fs_xfm(fname_out, xfm, kind)
xfm_read, kind_read = _read_fs_xfm(fname_out)
assert kind_read == kind
assert_allclose(xfm, xfm_read, rtol=1e-5, atol=1e-5)
# Some wacky one
xfm[:3] = np.random.RandomState(0).randn(3, 4)
_write_fs_xfm(fname_out, xfm, 'foo')
xfm_read, kind_read = _read_fs_xfm(fname_out)
assert kind_read == 'foo'
assert_allclose(xfm, xfm_read, rtol=1e-5, atol=1e-5)
# degenerate conditions
with open(fname_out, 'w') as fid:
fid.write('foo')
with pytest.raises(ValueError, match='Failed to find'):
_read_fs_xfm(fname_out)
_write_fs_xfm(fname_out, xfm[:2], 'foo')
with pytest.raises(ValueError, match='Could not find'):
_read_fs_xfm(fname_out)
@pytest.fixture()
def quats():
"""Make some unit quats."""
quats = np.random.RandomState(0).randn(5, 3)
quats[:, 0] = 0 # identity
quats /= 2 * np.linalg.norm(quats, axis=1, keepdims=True) # some real part
return quats
def _check_fit_matched_points(
p, x, weights, do_scale, angtol=1e-5, dtol=1e-5, stol=1e-7):
__tracebackhide__ = True
mne.coreg._ALLOW_ANALITICAL = False
try:
params = mne.coreg.fit_matched_points(
p, x, weights=weights, scale=do_scale, out='params')
finally:
mne.coreg._ALLOW_ANALITICAL = True
quat_an, scale_an = _fit_matched_points(p, x, weights, scale=do_scale)
assert len(params) == 6 + int(do_scale)
q_co = _euler_to_quat(params[:3])
translate_co = params[3:6]
angle = np.rad2deg(_angle_between_quats(quat_an[:3], q_co))
dist = np.linalg.norm(quat_an[3:] - translate_co)
assert 0 <= angle < angtol, 'angle'
assert 0 <= dist < dtol, 'dist'
if do_scale:
scale_co = params[6]
assert_allclose(scale_an, scale_co, rtol=stol, err_msg='scale')
# errs
trans = _quat_to_affine(quat_an)
trans[:3, :3] *= scale_an
weights = np.ones(1) if weights is None else weights
err_an = np.linalg.norm(
weights[:, np.newaxis] * apply_trans(trans, p) - x)
trans = mne.coreg._trans_from_params((True, True, do_scale), params)
err_co = np.linalg.norm(
weights[:, np.newaxis] * apply_trans(trans, p) - x)
if err_an > 1e-14:
assert err_an < err_co * 1.5
return quat_an, scale_an
@pytest.mark.parametrize('scaling', [0.25, 1])
@pytest.mark.parametrize('do_scale', (True, False))
def test_fit_matched_points(quats, scaling, do_scale):
"""Test analytical least-squares matched point fitting."""
if scaling != 1 and not do_scale:
return # no need to test this, it will not be good
rng = np.random.RandomState(0)
fro = rng.randn(10, 3)
translation = rng.randn(3)
for qi, quat in enumerate(quats):
to = scaling * np.dot(quat_to_rot(quat), fro.T).T + translation
for corrupted in (False, True):
# mess up a point
if corrupted:
to[0, 2] += 100
weights = np.ones(len(to))
weights[0] = 0
else:
weights = None
est, scale_est = _check_fit_matched_points(
fro, to, weights=weights, do_scale=do_scale)
assert_allclose(scale_est, scaling, rtol=1e-5)
assert_allclose(est[:3], quat, atol=1e-14)
assert_allclose(est[3:], translation, atol=1e-14)
# if we don't adjust for the corruption above, it should get worse
angle = dist = None
for weighted in (False, True):
if not weighted:
weights = None
dist_bounds = (5, 20)
if scaling == 1:
angle_bounds = (5, 95)
angtol, dtol, stol = 1, 15, 3
else:
angle_bounds = (5, 105)
angtol, dtol, stol = 20, 15, 3
else:
weights = np.ones(len(to))
weights[0] = 10 # weighted=True here means "make it worse"
angle_bounds = (angle, 180) # unweighted values as new min
dist_bounds = (dist, 100)
if scaling == 1:
# XXX this angtol is not great but there is a hard to
# identify linalg/angle calculation bug on Travis...
angtol, dtol, stol = 180, 70, 3
else:
angtol, dtol, stol = 50, 70, 3
est, scale_est = _check_fit_matched_points(
fro, to, weights=weights, do_scale=do_scale,
angtol=angtol, dtol=dtol, stol=stol)
assert not np.allclose(est[:3], quat, atol=1e-5)
assert not np.allclose(est[3:], translation, atol=1e-5)
angle = np.rad2deg(_angle_between_quats(est[:3], quat))
assert_array_less(angle_bounds[0], angle)
assert_array_less(angle, angle_bounds[1])
dist = np.linalg.norm(est[3:] - translation)
assert_array_less(dist_bounds[0], dist)
assert_array_less(dist, dist_bounds[1])
def test_euler(quats):
"""Test euler transformations."""
euler = _quat_to_euler(quats)
quats_2 = _euler_to_quat(euler)
assert_allclose(quats, quats_2, atol=1e-14)
quat_rot = quat_to_rot(quats)
euler_rot = np.array([rotation(*e)[:3, :3] for e in euler])
assert_allclose(quat_rot, euler_rot, atol=1e-14)
@requires_nibabel()
@requires_dipy()
@pytest.mark.slowtest
@testing.requires_testing_data
def test_volume_registration():
"""Test volume registration."""
import nibabel as nib
from dipy.align import resample
T1 = nib.load(fname_t1)
affine = np.eye(4)
affine[0, 3] = 10
T1_resampled = resample(moving=T1.get_fdata(),
static=T1.get_fdata(),
moving_affine=T1.affine,
static_affine=T1.affine,
between_affine=np.linalg.inv(affine))
for pipeline in ('rigids', ('translation', 'sdr')):
reg_affine, sdr_morph = mne.transforms.compute_volume_registration(
T1_resampled, T1, pipeline=pipeline, zooms=10, niter=[5])
assert_allclose(affine, reg_affine, atol=0.25)
T1_aligned = mne.transforms.apply_volume_registration(
T1_resampled, T1, reg_affine, sdr_morph)
r2 = _compute_r2(_get_img_fdata(T1_aligned), _get_img_fdata(T1))
assert 99.9 < r2
# check that all orders of the pipeline work
for pipeline_len in range(1, 5):
for pipeline in itertools.combinations(
('translation', 'rigid', 'affine', 'sdr'), pipeline_len):
_validate_pipeline(pipeline)
_validate_pipeline(list(pipeline))
with pytest.raises(ValueError, match='Steps in pipeline are out of order'):
_validate_pipeline(('sdr', 'affine'))
with pytest.raises(ValueError,
match='Steps in pipeline should not be repeated'):
_validate_pipeline(('affine', 'affine'))
|
bloyl/mne-python
|
mne/tests/test_transforms.py
|
Python
|
bsd-3-clause
| 21,423 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-08 11:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0004_myuser_is_approved'),
]
operations = [
migrations.AddField(
model_name='myuser',
name='email_me',
field=models.BooleanField(default=True),
),
]
|
BD2KGenomics/brca-website
|
django/users/migrations/0005_myuser_email_me.py
|
Python
|
apache-2.0
| 450 | 0 |
"""Tests for tools for solving inequalities and systems of inequalities. """
from sympy import (And, Eq, FiniteSet, Ge, Gt, Interval, Le, Lt, Ne, oo,
Or, S, sin, sqrt, Symbol, Union, Integral, Sum,
Function, Poly, PurePoly, pi, root)
from sympy.solvers.inequalities import (reduce_inequalities,
solve_poly_inequality as psolve,
reduce_rational_inequalities,
solve_univariate_inequality as isolve,
reduce_abs_inequality)
from sympy.polys.rootoftools import RootOf
from sympy.solvers.solvers import solve
from sympy.abc import x, y
from sympy.utilities.pytest import raises, slow
inf = oo.evalf()
def test_solve_poly_inequality():
assert psolve(Poly(0, x), '==') == [S.Reals]
assert psolve(Poly(1, x), '==') == [S.EmptySet]
assert psolve(PurePoly(x + 1, x), ">") == [Interval(-1, oo, True, False)]
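# solve_poly_inequality returns a list of real sets (one per solution region)
# whose union is the full solution set; an illustrative check consistent with
# the asserts above (a sketch, not an assertion made by this test module):
#   psolve(Poly(x**2 - 1, x), '>') should give two open rays,
#   [Interval(-oo, -1, True, True), Interval(1, oo, True, True)].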
def test_reduce_poly_inequalities_real_interval():
assert reduce_rational_inequalities(
[[Eq(x**2, 0)]], x, relational=False) == FiniteSet(0)
assert reduce_rational_inequalities(
[[Le(x**2, 0)]], x, relational=False) == FiniteSet(0)
assert reduce_rational_inequalities(
[[Lt(x**2, 0)]], x, relational=False) == S.EmptySet
assert reduce_rational_inequalities(
[[Ge(x**2, 0)]], x, relational=False) == \
S.Reals if x.is_real else Interval(-oo, oo)
assert reduce_rational_inequalities(
[[Gt(x**2, 0)]], x, relational=False) == \
FiniteSet(0).complement(S.Reals)
assert reduce_rational_inequalities(
[[Ne(x**2, 0)]], x, relational=False) == \
FiniteSet(0).complement(S.Reals)
assert reduce_rational_inequalities(
[[Eq(x**2, 1)]], x, relational=False) == FiniteSet(-1, 1)
assert reduce_rational_inequalities(
[[Le(x**2, 1)]], x, relational=False) == Interval(-1, 1)
assert reduce_rational_inequalities(
[[Lt(x**2, 1)]], x, relational=False) == Interval(-1, 1, True, True)
assert reduce_rational_inequalities(
[[Ge(x**2, 1)]], x, relational=False) == \
Union(Interval(-oo, -1), Interval(1, oo))
assert reduce_rational_inequalities(
[[Gt(x**2, 1)]], x, relational=False) == \
Interval(-1, 1).complement(S.Reals)
assert reduce_rational_inequalities(
[[Ne(x**2, 1)]], x, relational=False) == \
FiniteSet(-1, 1).complement(S.Reals)
assert reduce_rational_inequalities([[Eq(
x**2, 1.0)]], x, relational=False) == FiniteSet(-1.0, 1.0).evalf()
assert reduce_rational_inequalities(
[[Le(x**2, 1.0)]], x, relational=False) == Interval(-1.0, 1.0)
assert reduce_rational_inequalities([[Lt(
x**2, 1.0)]], x, relational=False) == Interval(-1.0, 1.0, True, True)
assert reduce_rational_inequalities(
[[Ge(x**2, 1.0)]], x, relational=False) == \
Union(Interval(-inf, -1.0), Interval(1.0, inf))
assert reduce_rational_inequalities(
[[Gt(x**2, 1.0)]], x, relational=False) == \
Union(Interval(-inf, -1.0, right_open=True),
Interval(1.0, inf, left_open=True))
assert reduce_rational_inequalities([[Ne(
x**2, 1.0)]], x, relational=False) == \
FiniteSet(-1.0, 1.0).complement(S.Reals)
s = sqrt(2)
assert reduce_rational_inequalities([[Lt(
x**2 - 1, 0), Gt(x**2 - 1, 0)]], x, relational=False) == S.EmptySet
assert reduce_rational_inequalities([[Le(x**2 - 1, 0), Ge(
x**2 - 1, 0)]], x, relational=False) == FiniteSet(-1, 1)
assert reduce_rational_inequalities(
[[Le(x**2 - 2, 0), Ge(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, False, False), Interval(1, s, False, False))
assert reduce_rational_inequalities(
[[Le(x**2 - 2, 0), Gt(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, False, True), Interval(1, s, True, False))
assert reduce_rational_inequalities(
[[Lt(x**2 - 2, 0), Ge(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, True, False), Interval(1, s, False, True))
assert reduce_rational_inequalities(
[[Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, True, True), Interval(1, s, True, True))
assert reduce_rational_inequalities(
[[Lt(x**2 - 2, 0), Ne(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, True, True), Interval(-1, 1, True, True),
Interval(1, s, True, True))
def test_reduce_poly_inequalities_complex_relational():
assert reduce_rational_inequalities(
[[Eq(x**2, 0)]], x, relational=True) == Eq(x, 0)
assert reduce_rational_inequalities(
[[Le(x**2, 0)]], x, relational=True) == Eq(x, 0)
assert reduce_rational_inequalities(
[[Lt(x**2, 0)]], x, relational=True) == False
assert reduce_rational_inequalities(
[[Ge(x**2, 0)]], x, relational=True) == And(Lt(-oo, x), Lt(x, oo))
assert reduce_rational_inequalities(
[[Gt(x**2, 0)]], x, relational=True) == \
And(Or(And(Lt(-oo, x), Lt(x, 0)), And(Lt(0, x), Lt(x, oo))))
assert reduce_rational_inequalities(
[[Ne(x**2, 0)]], x, relational=True) == \
And(Or(And(Lt(-oo, x), Lt(x, 0)), And(Lt(0, x), Lt(x, oo))))
for one in (S(1), S(1.0)):
inf = one*oo
assert reduce_rational_inequalities(
[[Eq(x**2, one)]], x, relational=True) == \
Or(Eq(x, -one), Eq(x, one))
assert reduce_rational_inequalities(
[[Le(x**2, one)]], x, relational=True) == \
And(And(Le(-one, x), Le(x, one)))
assert reduce_rational_inequalities(
[[Lt(x**2, one)]], x, relational=True) == \
And(And(Lt(-one, x), Lt(x, one)))
assert reduce_rational_inequalities(
[[Ge(x**2, one)]], x, relational=True) == \
And(Or(And(Le(one, x), Lt(x, inf)), And(Le(x, -one), Lt(-inf, x))))
assert reduce_rational_inequalities(
[[Gt(x**2, one)]], x, relational=True) == \
And(Or(And(Lt(-inf, x), Lt(x, -one)), And(Lt(one, x), Lt(x, inf))))
assert reduce_rational_inequalities(
[[Ne(x**2, one)]], x, relational=True) == \
Or(And(Lt(-inf, x), Lt(x, -one)),
And(Lt(-one, x), Lt(x, one)),
And(Lt(one, x), Lt(x, inf)))
def test_reduce_rational_inequalities_real_relational():
assert reduce_rational_inequalities([], x) == False
assert reduce_rational_inequalities(
[[(x**2 + 3*x + 2)/(x**2 - 16) >= 0]], x, relational=False) == \
Union(Interval.open(-oo, -4), Interval(-2, -1), Interval.open(4, oo))
assert reduce_rational_inequalities(
[[((-2*x - 10)*(3 - x))/((x**2 + 5)*(x - 2)**2) < 0]], x,
relational=False) == \
Union(Interval.open(-5, 2), Interval.open(2, 3))
assert reduce_rational_inequalities([[(x + 1)/(x - 5) <= 0]], x,
relational=False) == \
Interval.Ropen(-1, 5)
assert reduce_rational_inequalities([[(x**2 + 4*x + 3)/(x - 1) > 0]], x,
relational=False) == \
Union(Interval.open(-3, -1), Interval.open(1, oo))
assert reduce_rational_inequalities([[(x**2 - 16)/(x - 1)**2 < 0]], x,
relational=False) == \
Union(Interval.open(-4, 1), Interval.open(1, 4))
assert reduce_rational_inequalities([[(3*x + 1)/(x + 4) >= 1]], x,
relational=False) == \
Union(Interval.open(-oo, -4), Interval.Ropen(S(3)/2, oo))
assert reduce_rational_inequalities([[(x - 8)/x <= 3 - x]], x,
relational=False) == \
Union(Interval.Lopen(-oo, -2), Interval.Lopen(0, 4))
def test_reduce_abs_inequalities():
e = abs(x - 5) < 3
ans = And(Lt(2, x), Lt(x, 8))
assert reduce_inequalities(e) == ans
assert reduce_inequalities(e, x) == ans
assert reduce_inequalities(abs(x - 5)) == Eq(x, 5)
assert reduce_inequalities(
abs(2*x + 3) >= 8) == Or(And(Le(S(5)/2, x), Lt(x, oo)),
And(Le(x, -S(11)/2), Lt(-oo, x)))
assert reduce_inequalities(abs(x - 4) + abs(
3*x - 5) < 7) == And(Lt(S(1)/2, x), Lt(x, 4))
assert reduce_inequalities(abs(x - 4) + abs(3*abs(x) - 5) < 7) == \
Or(And(S(-2) < x, x < -1), And(S(1)/2 < x, x < 4))
nr = Symbol('nr', real=False)
raises(TypeError, lambda: reduce_inequalities(abs(nr - 5) < 3))
assert reduce_inequalities(x < 3, symbols=[x, nr]) == And(-oo < x, x < 3)
def test_reduce_inequalities_general():
assert reduce_inequalities(Ge(sqrt(2)*x, 1)) == And(sqrt(2)/2 <= x, x < oo)
assert reduce_inequalities(PurePoly(x + 1, x) > 0) == And(S(-1) < x, x < oo)
def test_reduce_inequalities_boolean():
assert reduce_inequalities(
[Eq(x**2, 0), True]) == Eq(x, 0)
assert reduce_inequalities([Eq(x**2, 0), False]) == False
assert reduce_inequalities(x**2 >= 0) is S.true # issue 10196
def test_reduce_inequalities_multivariate():
assert reduce_inequalities([Ge(x**2, 1), Ge(y**2, 1)]) == And(
Or(And(Le(1, x), Lt(x, oo)), And(Le(x, -1), Lt(-oo, x))),
Or(And(Le(1, y), Lt(y, oo)), And(Le(y, -1), Lt(-oo, y))))
def test_reduce_inequalities_errors():
raises(NotImplementedError, lambda: reduce_inequalities(Ge(sin(x) + x, 1)))
raises(NotImplementedError, lambda: reduce_inequalities(Ge(x**2*y + y, 1)))
def test_hacky_inequalities():
assert reduce_inequalities(x + y < 1, symbols=[x]) == (x < 1 - y)
assert reduce_inequalities(x + y >= 1, symbols=[x]) == (x >= 1 - y)
assert reduce_inequalities(Eq(0, x - y), symbols=[x]) == Eq(x, y)
assert reduce_inequalities(Ne(0, x - y), symbols=[x]) == Ne(x, y)
def test_issue_6343():
eq = -3*x**2/2 - 45*x/4 + S(33)/2 > 0
assert reduce_inequalities(eq) == \
And(x < -S(15)/4 + sqrt(401)/4, -sqrt(401)/4 - S(15)/4 < x)
def test_issue_8235():
assert reduce_inequalities(x**2 - 1 < 0) == \
And(S(-1) < x, x < S(1))
assert reduce_inequalities(x**2 - 1 <= 0) == \
And(S(-1) <= x, x <= 1)
assert reduce_inequalities(x**2 - 1 > 0) == \
Or(And(-oo < x, x < -1), And(x < oo, S(1) < x))
assert reduce_inequalities(x**2 - 1 >= 0) == \
Or(And(-oo < x, x <= S(-1)), And(S(1) <= x, x < oo))
eq = x**8 + x - 9 # we want RootOf solns here
sol = solve(eq >= 0)
tru = Or(And(RootOf(eq, 1) <= x, x < oo), And(-oo < x, x <= RootOf(eq, 0)))
assert sol == tru
# recast vanilla as real
assert solve(sqrt((-x + 1)**2) < 1) == And(S(0) < x, x < 2)
def test_issue_5526():
assert reduce_inequalities(S(0) <=
x + Integral(y**2, (y, 1, 3)) - 1, [x]) == \
(x >= -Integral(y**2, (y, 1, 3)) + 1)
f = Function('f')
e = Sum(f(x), (x, 1, 3))
assert reduce_inequalities(S(0) <= x + e + y**2, [x]) == \
(x >= -y**2 - Sum(f(x), (x, 1, 3)))
def test_solve_univariate_inequality():
assert isolve(x**2 >= 4, x, relational=False) == Union(Interval(-oo, -2),
Interval(2, oo))
assert isolve(x**2 >= 4, x) == Or(And(Le(2, x), Lt(x, oo)), And(Le(x, -2),
Lt(-oo, x)))
assert isolve((x - 1)*(x - 2)*(x - 3) >= 0, x, relational=False) == \
Union(Interval(1, 2), Interval(3, oo))
assert isolve((x - 1)*(x - 2)*(x - 3) >= 0, x) == \
Or(And(Le(1, x), Le(x, 2)), And(Le(3, x), Lt(x, oo)))
# issue 2785:
assert isolve(x**3 - 2*x - 1 > 0, x, relational=False) == \
Union(Interval(-1, -sqrt(5)/2 + S(1)/2, True, True),
Interval(S(1)/2 + sqrt(5)/2, oo, True, True))
# issue 2794:
assert isolve(x**3 - x**2 + x - 1 > 0, x, relational=False) == \
Interval(1, oo, True)
# XXX should be limited in domain, e.g. between 0 and 2*pi
assert isolve(sin(x) < S.Half, x) == \
Or(And(-oo < x, x < pi/6), And(5*pi/6 < x, x < oo))
assert isolve(sin(x) > S.Half, x) == And(pi/6 < x, x < 5*pi/6)
# numerical testing in valid() is needed
assert isolve(x**7 - x - 2 > 0, x) == \
And(RootOf(x**7 - x - 2, 0) < x, x < oo)
# handle numerator and denominator; although these would be handled as
    # rational inequalities, these tests confirm that the right thing is done
# when the domain is EX (e.g. when 2 is replaced with sqrt(2))
assert isolve(1/(x - 2) > 0, x) == And(S(2) < x, x < oo)
den = ((x - 1)*(x - 2)).expand()
assert isolve((x - 1)/den <= 0, x) == \
Or(And(-oo < x, x < 1), And(S(1) < x, x < 2))
def test_issue_9954():
assert isolve(x**2 >= 0, x, relational=False) == S.Reals
assert isolve(x**2 >= 0, x, relational=True) == S.Reals.as_relational(x)
assert isolve(x**2 < 0, x, relational=False) == S.EmptySet
assert isolve(x**2 < 0, x, relational=True) == S.EmptySet.as_relational(x)
def test_slow_general_univariate():
r = RootOf(x**5 - x**2 + 1, 0)
assert solve(sqrt(x) + 1/root(x, 3) > 1) == \
Or(And(S(0) < x, x < r**6), And(r**6 < x, x < oo))
def test_issue_8545():
eq = 1 - x - abs(1 - x)
ans = And(Lt(1, x), Lt(x, oo))
assert reduce_abs_inequality(eq, '<', x) == ans
eq = 1 - x - sqrt((1 - x)**2)
assert reduce_inequalities(eq < 0) == ans
def test_issue_8974():
assert isolve(-oo < x, x) == And(-oo < x, x < oo)
assert isolve(oo > x, x) == And(-oo < x, x < oo)
def test_issue_10047():
assert solve(sin(x) < 2) == And(-oo < x, x < oo)
|
mcdaniel67/sympy
|
sympy/solvers/tests/test_inequalities.py
|
Python
|
bsd-3-clause
| 13,455 | 0.001189 |
#!/usr/bin/python
def characterPictureGrid(grid):
for dim1 in range(0, len(grid)):
for dim2 in range(0, len(grid[dim1])):
print grid[dim1][dim2],
print "\n"
grid = [['.', '.', '.', '.', '.', '.'],
['.', 'O', 'O', '.', '.', '.'],
['O', 'O', 'O', 'O', '.', '.'],
['O', 'O', 'O', 'O', 'O', '.'],
['.', 'O', 'O', 'O', 'O', 'O'],
['O', 'O', 'O', 'O', 'O', '.'],
['O', 'O', 'O', 'O', '.', '.'],
['.', 'O', 'O', '.', '.', '.'],
['.', '.', '.', '.', '.', '.']]
characterPictureGrid(grid)
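# A brief note on the output (a sketch, not part of the original script): each
# row's cells are printed space-separated on one line, and `print "\n"` then
# emits an extra blank line, so the nine grid rows appear double-spaced with
# the 'O' cells tracing out a simple picture.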
|
ajitabhpandey/learn-programming
|
python/characterPictureGrid.py
|
Python
|
gpl-2.0
| 597 | 0.0067 |
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.tests.functional.api_sample_tests import api_sample_base
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.legacy_v2.extensions')
class QuotaSetsSampleJsonTests(api_sample_base.ApiSampleTestBaseV3):
ADMIN_API = True
extension_name = "os-quota-sets"
def _get_flags(self):
f = super(QuotaSetsSampleJsonTests, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.server_group_quotas.'
'Server_group_quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.quotas.Quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.extended_quotas.Extended_quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.user_quotas.User_quotas')
return f
def test_show_quotas(self):
# Get api sample to show quotas.
response = self._do_get('os-quota-sets/fake_tenant')
self._verify_response('quotas-show-get-resp', {}, response, 200)
def test_show_quotas_defaults(self):
# Get api sample to show quotas defaults.
response = self._do_get('os-quota-sets/fake_tenant/defaults')
self._verify_response('quotas-show-defaults-get-resp',
{}, response, 200)
def test_update_quotas(self):
# Get api sample to update quotas.
response = self._do_put('os-quota-sets/fake_tenant',
'quotas-update-post-req',
{})
self._verify_response('quotas-update-post-resp', {}, response, 200)
def test_delete_quotas(self):
# Get api sample to delete quota.
response = self._do_delete('os-quota-sets/fake_tenant')
self.assertEqual(response.status_code, 202)
self.assertEqual(response.content, '')
def test_update_quotas_force(self):
# Get api sample to update quotas.
response = self._do_put('os-quota-sets/fake_tenant',
'quotas-update-force-post-req',
{})
return self._verify_response('quotas-update-force-post-resp', {},
response, 200)
def test_show_quotas_for_user(self):
# Get api sample to show quotas for user.
response = self._do_get('os-quota-sets/fake_tenant?user_id=1')
self._verify_response('user-quotas-show-get-resp', {}, response, 200)
def test_delete_quotas_for_user(self):
response = self._do_delete('os-quota-sets/fake_tenant?user_id=1')
self.assertEqual(response.status_code, 202)
self.assertEqual(response.content, '')
def test_update_quotas_for_user(self):
# Get api sample to update quotas for user.
response = self._do_put('os-quota-sets/fake_tenant?user_id=1',
'user-quotas-update-post-req',
{})
return self._verify_response('user-quotas-update-post-resp', {},
response, 200)
|
whitepages/nova
|
nova/tests/functional/api_sample_tests/test_quota_sets.py
|
Python
|
apache-2.0
| 4,046 | 0.001236 |
# -*- coding: utf-8 -*-
"""
Test parsing of strings that are phrases with the
ptc.StartTimeFromSourceTime flag set to True
"""
import sys
import time
import datetime
import unittest
import parsedatetime as pdt
from parsedatetime.context import pdtContext
from . import utils
class test(unittest.TestCase):
@utils.assertEqualWithComparator
def assertExpectedResult(self, result, check, **kwargs):
return utils.compareResultByTimeTuplesAndFlags(result, check, **kwargs)
def setUp(self):
self.cal = pdt.Calendar()
self.cal.ptc.StartTimeFromSourceTime = True
(self.yr, self.mth, self.dy, self.hr,
self.mn, self.sec, self.wd, self.yd, self.isdst) = time.localtime()
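    # With StartTimeFromSourceTime enabled, relative phrases such as 'eom' and
    # 'eoy' keep the time-of-day of the supplied source time (13:14:15 below)
    # rather than the wall-clock time at parse time, which is what the targets
    # built in testEndOfPhrases assume.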
def testEndOfPhrases(self):
s = datetime.datetime.now()
# find out what month we are currently on
# set the day to 1 and then go back a day
# to get the end of the current month
(yr, mth, dy, hr, mn, sec, _, _, _) = s.timetuple()
s = datetime.datetime(yr, mth, dy, 13, 14, 15)
mth += 1
if mth > 12:
mth = 1
yr += 1
t = datetime.datetime(
yr, mth, 1, 13, 14, 15) + datetime.timedelta(days=-1)
start = s.timetuple()
target = t.timetuple()
self.assertExpectedResult(
self.cal.parse('eom', start),
(target, pdtContext(pdtContext.ACU_DAY)))
self.assertExpectedResult(
self.cal.parse('meeting eom', start),
(target, pdtContext(pdtContext.ACU_DAY)))
s = datetime.datetime.now()
(yr, mth, dy, hr, mn, sec, wd, yd, isdst) = s.timetuple()
s = datetime.datetime(yr, mth, 1, 13, 14, 15)
t = datetime.datetime(yr, 12, 31, 13, 14, 15)
start = s.timetuple()
target = t.timetuple()
self.assertExpectedResult(
self.cal.parse('eoy', start),
(target, pdtContext(pdtContext.ACU_MONTH)))
self.assertExpectedResult(
self.cal.parse('meeting eoy', start),
(target, pdtContext(pdtContext.ACU_MONTH)))
|
bear/parsedatetime
|
tests/TestStartTimeFromSourceTime.py
|
Python
|
apache-2.0
| 2,109 | 0 |
import os.path as op
import logging
import shutil
from subprocess import check_output
from tempfile import mkdtemp
import click
from ob_pipelines.s3 import (
s3, download_file_or_folder, remove_file_or_folder, SCRATCH_DIR, path_to_bucket_and_key
)
logger = logging.getLogger('ob-pipelines')
@click.command()
@click.argument('fq1')
@click.argument('fq2')
@click.argument('out_dir')
@click.argument('name')
def fastqc(fq1, fq2, out_dir, name):
"""Run FastQC"""
out_dir = out_dir if out_dir.endswith('/') else out_dir + '/'
temp_dir = mkdtemp(dir=SCRATCH_DIR)
fq1_local = op.join(temp_dir, name + '_1.fastq.gz')
fq2_local = op.join(temp_dir, name + '_2.fastq.gz')
if fq1.startswith('s3://'):
# Assume that if fq1 is in S3, so is fq2
download_file_or_folder(fq1, fq1_local)
download_file_or_folder(fq2, fq2_local)
else:
shutil.copy(fq1, fq1_local)
shutil.copy(fq2, fq2_local)
cmd = ['fastqc', '-o', temp_dir, fq1_local, fq2_local]
# Run command and save output
logging.info('Running:\n{}'.format(' '.join(cmd)))
out = check_output(cmd)
logging.info(out.decode())
out_files = [
name + '_1_fastqc.html',
name + '_2_fastqc.html',
name + '_1_fastqc.zip',
name + '_2_fastqc.zip'
]
for fname in out_files:
# Upload temp out directory to S3 with prefix
if out_dir.startswith('s3://'):
bucket, key = path_to_bucket_and_key(out_dir)
local_fpath = op.join(temp_dir, fname)
print('uploading {} to s3://{}/{}{}'.format(local_fpath, bucket, key, fname))
s3.upload_file(local_fpath, bucket, key + fname)
remove_file_or_folder(local_fpath)
else:
shutil.move(temp_dir, out_dir)
if __name__ == '__main__':
fastqc()
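# Hypothetical invocation sketch (bucket, paths and sample name are placeholders):
#   python fastqc.py s3://my-bucket/reads/sample_1.fastq.gz \
#       s3://my-bucket/reads/sample_2.fastq.gz s3://my-bucket/qc/ sample
# Local paths work too; outputs are either uploaded under the S3 prefix or
# moved into the local out_dir, mirroring the two branches above.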
|
outlierbio/ob-pipelines
|
ob_pipelines/apps/fastqc/fastqc.py
|
Python
|
apache-2.0
| 1,841 | 0.002173 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron and Valeureux
# Copyright 2013 Yannick Buron and Valeureux
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
class ProjectProject(orm.Model):
_name = 'project.project'
_inherit = ['project.project', 'crowdfunding.campaign']
|
Valeureux/wezer-exchange
|
__TODO__/project_crowdfunding/project_crowdfunding.py
|
Python
|
agpl-3.0
| 1,117 | 0 |
# -*- encoding: utf-8 -*-
try:
from httplib import HTTPSConnection
from urlparse import urlparse
except ImportError:
from http.client import HTTPSConnection
from urllib.parse import urlparse
from json import dumps, loads
from django.conf import settings
class GCMError(Exception):
pass
def send(user, message, **kwargs):
"""
Site: https://developers.google.com
API: https://developers.google.com/cloud-messaging/
Desc: Android notifications
"""
headers = {
"Content-type": "application/json",
"Authorization": "key=" + kwargs.pop("gcm_key", settings.GCM_KEY)
}
hook_url = 'https://android.googleapis.com/gcm/send'
data = {
"registration_ids": [user],
"data": {
"title": kwargs.pop("event"),
'message': message,
}
}
data['data'].update(kwargs)
up = urlparse(hook_url)
http = HTTPSConnection(up.netloc)
http.request(
"POST", up.path,
headers=headers,
body=dumps(data))
response = http.getresponse()
if response.status != 200:
raise GCMError(response.reason)
body = response.read()
if loads(body).get("failure") > 0:
raise GCMError(repr(body))
return True
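# A hedged usage sketch (the registration id, message and key below are
# placeholders; assumes settings.GCM_KEY is configured or a gcm_key kwarg is
# passed explicitly):
#   send('device-registration-id', 'Deployment finished',
#        event='CI', gcm_key='server-api-key')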
|
LPgenerator/django-db-mailer
|
dbmail/providers/google/android.py
|
Python
|
gpl-2.0
| 1,265 | 0 |
#!/usr/bin/env python3
#
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys
from oslo_config import cfg
from oslo_log import log
import yaml
try:
import openstack
SDK_LOADED = True
except ImportError:
SDK_LOADED = False
DOCUMENTATION = '''
Bifrost Inventory Module
========================
This is a dynamic inventory module intended to provide a platform for
consistent inventory information for Bifrost.
The inventory supplies two distinct groups by default:
- localhost
- baremetal
The localhost group is required for Bifrost to perform local actions, such as
installing Ironic.
The baremetal group contains the hosts defined by the data source along with
variables extracted from the data source. The variables are defined on a
per-host level which allows explicit actions to be taken based upon the
variables.
It is also possible for users to specify additional per-host groups by
simply setting the host_groups variable in the inventory file. See below for
an example JSON file.
The default group can also be changed by setting the DEFAULT_HOST_GROUPS
variable to contain the desired groups separated by whitespace as follows:
DEFAULT_HOST_GROUPS="foo bar zoo"
In case of provisioning virtual machines, additional per-VM groups can
be set by simply setting the test_vm_groups[$host] variable to a list
of desired groups. Moreover, users can override the default 'baremetal'
group by assigning a list of default groups to the test_vm_default_group
variable.
Presently, the base mode of operation reads a JSON/YAML file in the format
originally utilized by bifrost and returns structured JSON that is
interpreted by Ansible.
Conceivably, this inventory module can be extended to allow for direct
processing of inventory data from other data sources such as a configuration
management database or other inventory data source to provide a consistent
user experience.
How to use?
-----------
export BIFROST_INVENTORY_SOURCE=/tmp/baremetal.[json|yaml]
ansible-playbook playbook.yaml -i inventory/bifrost_inventory.py
One can also just directly invoke bifrost_inventory.py in order to see the
resulting JSON output. This module also has a feature to support the
pass-through of a pre-existing JSON document, which receives updates and
formatting to be supplied to Ansible. Ultimately the use of JSON will be
far more flexible and should be the preferred path forward.
Example JSON Element:
{
"node1": {
"uuid": "a8cb6624-0d9f-c882-affc-046ebb96ec01",
"host_groups": [
"nova",
"neutron"
],
"driver_info": {
"ipmi_target_channel": "0",
"ipmi_username": "ADMIN",
"ipmi_address": "192.168.122.1",
"ipmi_target_address": "0",
"ipmi_password": "undefined",
"ipmi_bridging": "single"
},
"nics": [
{
"mac": "00:01:02:03:04:05"
            },
{
"mac": "00:01:02:03:04:06"
}
],
"driver": "ipmi",
"ipv4_address": "192.168.122.2",
"properties": {
"cpu_arch": "x86_64",
"ram": "3072",
"disk_size": "10",
"cpus": "1"
},
"name": "node1"
}
}
Utilizing ironic as the data source
-----------------------------------
The functionality exists to allow a user to query an existing ironic
installation for the inventory data. This is an advanced feature,
as the node may not have sufficient information to allow for node
deployment or automated testing, unless DHCP reservations are used.
This setting can be invoked by setting the source to "ironic"::
export BIFROST_INVENTORY_SOURCE=ironic
Known Issues
------------
At present, this module only supports inventory list mode and is not
intended to support specific host queries.
'''
LOG = log.getLogger(__name__)
opts = [
cfg.BoolOpt('list',
default=True,
help='List active hosts'),
]
def _parse_config():
config = cfg.ConfigOpts()
log.register_options(config)
config.register_cli_opts(opts)
config(prog='bifrost_inventory.py')
config.set_override('use_stderr', True)
log.set_defaults()
log.setup(config, "bifrost_inventory.py")
return config
def _prepare_inventory():
hostvars = {"127.0.0.1": {"ansible_connection": "local"}}
groups = {}
groups.update({'baremetal': {'hosts': []}})
groups.update({'localhost': {'hosts': ["127.0.0.1"]}})
return (groups, hostvars)
def _process_baremetal_data(data_source, groups, hostvars):
"""Process data through as pre-formatted data"""
with open(data_source, 'rb') as file_object:
try:
file_data = yaml.safe_load(file_object)
except Exception as e:
LOG.error("Failed to parse JSON or YAML: %s", e)
raise Exception("Failed to parse JSON or YAML")
node_names = os.environ.get('BIFROST_NODE_NAMES', None)
if node_names:
node_names = node_names.split(',')
for name in file_data:
if node_names and name not in node_names:
continue
host = file_data[name]
# Perform basic validation
node_net_data = host.get('node_network_data')
ipv4_addr = host.get('ipv4_address')
default_groups = os.environ.get('DEFAULT_HOST_GROUPS',
'baremetal').split()
host['host_groups'] = sorted(list(set(host.get('host_groups', []) +
default_groups)))
if not node_net_data and not ipv4_addr:
host['addressing_mode'] = "dhcp"
else:
host['ansible_ssh_host'] = host['ipv4_address']
if ('provisioning_ipv4_address' not in host and
'addressing_mode' not in host):
host['provisioning_ipv4_address'] = host['ipv4_address']
# Add each host to the values to be returned.
for group in host['host_groups']:
if group not in groups:
groups.update({group: {'hosts': []}})
groups[group]['hosts'].append(host['name'])
hostvars.update({host['name']: host})
return (groups, hostvars)
def _process_sdk(groups, hostvars):
"""Retrieve inventory utilizing OpenStackSDK."""
# NOTE(dtantsur): backward compatibility
if os.environ.get('IRONIC_URL'):
print("WARNING: IRONIC_URL is deprecated, use OS_ENDPOINT")
os.environ['OS_ENDPOINT'] = os.environ['IRONIC_URL']
if os.environ.get('OS_ENDPOINT') and not os.environ.get('OS_AUTH_URL'):
        # Environment values must be strings; 'none' selects the no-auth plugin.
        os.environ['OS_AUTH_TYPE'] = 'none'
cloud = openstack.connect()
machines = cloud.list_machines()
node_names = os.environ.get('BIFROST_NODE_NAMES', None)
if node_names:
node_names = node_names.split(',')
for machine in machines:
machine = cloud.get_machine(machine['uuid'])
if machine['name'] is None:
name = machine['uuid']
else:
name = machine['name']
if node_names and name not in node_names:
continue
new_machine = {}
for key, value in machine.items():
            # NOTE(TheJulia): We don't want to pass informational links
# nor do we want to pass links about the ports since they
# are API endpoint URLs.
if key not in ['links', 'ports']:
new_machine[key] = value
# NOTE(TheJulia): Collect network information, enumerate through
# and extract important values, presently MAC address. Once done,
# return the network information to the inventory.
nics = cloud.list_nics_for_machine(machine['uuid'])
new_nics = []
for nic in nics:
new_nic = {}
if 'address' in nic:
new_nic['mac'] = nic['address']
new_nics.append(new_nic)
new_machine['nics'] = new_nics
new_machine['addressing_mode'] = "dhcp"
groups['baremetal']['hosts'].append(name)
hostvars.update({name: new_machine})
return (groups, hostvars)
def main():
"""Generate a list of hosts."""
config = _parse_config()
if not config.list:
LOG.error("This program must be executed in list mode.")
sys.exit(1)
(groups, hostvars) = _prepare_inventory()
if 'BIFROST_INVENTORY_SOURCE' not in os.environ:
LOG.error('Please define a BIFROST_INVENTORY_SOURCE environment '
'variable with a comma separated list of data sources')
sys.exit(1)
try:
data_source = os.environ['BIFROST_INVENTORY_SOURCE']
if os.path.isfile(data_source):
try:
(groups, hostvars) = _process_baremetal_data(
data_source,
groups,
hostvars)
except Exception as e:
LOG.error("BIFROST_INVENTORY_SOURCE does not define "
"a file that could be processed: %s."
"Tried JSON and YAML formats", e)
sys.exit(1)
elif "ironic" in data_source:
if SDK_LOADED:
(groups, hostvars) = _process_sdk(groups, hostvars)
else:
LOG.error("BIFROST_INVENTORY_SOURCE is set to ironic "
"however the openstacksdk library failed to load, "
"and may not be present.")
sys.exit(1)
else:
LOG.error('BIFROST_INVENTORY_SOURCE does not define a file')
sys.exit(1)
except Exception as error:
LOG.error('Failed processing: %s' % error)
sys.exit(1)
# Drop empty groups. This is usually necessary when
# the default ["baremetal"] group has been overridden
# by the user.
for group in list(groups):
# Empty groups
if len(groups[group]['hosts']) == 0:
del groups[group]
# FIXME(dtantsur): there is a conflict between the Bifrost's and the node's
# network_interface. Drop the node's one for now.
for host, items in hostvars.items():
items.pop('network_interface', None)
# General Data Conversion
inventory = {'_meta': {'hostvars': hostvars}}
inventory.update(groups)
print(json.dumps(inventory, indent=2))
if __name__ == '__main__':
main()
|
openstack/bifrost
|
bifrost/inventory.py
|
Python
|
apache-2.0
| 10,872 | 0 |
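A small sketch of driving the Bifrost inventory helpers directly from Python rather than through Ansible, assuming the module is importable as bifrost.inventory (adjust to your checkout) and that oslo.config, oslo.log, and PyYAML are installed. The node definition mirrors the JSON example in the docstring:
import json
import os
import tempfile
import yaml
nodes = {
    "node1": {
        "name": "node1",
        "uuid": "a8cb6624-0d9f-c882-affc-046ebb96ec01",
        "driver": "ipmi",
        "ipv4_address": "192.168.122.2",
        "nics": [{"mac": "00:01:02:03:04:05"}],
    }
}
with tempfile.NamedTemporaryFile("w", suffix=".yaml", delete=False) as fd:
    yaml.safe_dump(nodes, fd)
    source = fd.name
os.environ["BIFROST_INVENTORY_SOURCE"] = source
from bifrost import inventory  # assumed import path
groups, hostvars = inventory._prepare_inventory()
groups, hostvars = inventory._process_baremetal_data(source, groups, hostvars)
inventory_out = {"_meta": {"hostvars": hostvars}}
inventory_out.update(groups)
print(json.dumps(inventory_out, indent=2))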
"""
cfbrank -- A college football ranking algorithm
dataparse.py: A module for parsing datafiles containing the relevant
statistics for the cfbrank algorithm. See the readme for full details
on the data formats and sources supported.
Written by Michael V. DePalatis <depalatis@gmail.com>
cfbrank is distributed under the terms of the GNU GPL.
"""
import csv
from team import Team
from conference import Conference
ncaa_names = [x.strip() for x in open('data/NCAANames2012.txt', 'r').readlines()]
sun_names = [x.strip() for x in open('data/SunNames2013.txt', 'r').readlines()]
def parseNCAACSV(filename, teamd={}):
"""Parse CSV schedule data file downloadable from the NCAA web
site. Unfortunately, as of week 4 of the 2013 season, the NCAA
schedules do not include scores, so this won't work."""
if not isinstance(teamd, dict):
raise RuntimeError("teamd must be a dictionary!")
datafile = csv.reader(open(filename, 'r'))
for i, row in enumerate(datafile):
if i == 0 or row[5] == '':
continue
school = row[1]
if not teamd.has_key(school):
teamd[school] = Team(school, "", True)
won = int(row[5]) > int(row[6])
opp_name = row[4]
if not teamd.has_key(opp_name):
FBS = opp_name in ncaa_names
teamd[opp_name] = Team(opp_name, "", FBS)
opponent = teamd[opp_name]
#print opp_name
teamd[school].addOpponent(opponent, won)
return teamd
def parseSunCSV(filename, teamd={}):
"""Prase Sunshine Forecast data file."""
if not isinstance(teamd, dict):
raise RuntimeError("teamd must be a dictionary!")
datafile = csv.reader(open(filename, 'r'))
for i, row in enumerate(datafile):
if i == 0 or len(row[2].split()) == 0:
continue
home, away = row[3], row[1]
home_score, away_score = int(row[4]), int(row[2])
## if home == 'Texas' or away == 'Texas':
## print home_score, home, "--", away, away_score
## if home == 'Texas':
## print home_score > away_score
## else:
## print away_score > home_score
for school in [home, away]:
if not teamd.has_key(school):
FBS = school in sun_names
teamd[school] = Team(school, "", FBS)
home_won = home_score > away_score
teamd[home].addOpponent(teamd[away], home_won)
teamd[home].points_for += home_score
teamd[home].points_against += away_score
teamd[away].addOpponent(teamd[home], not home_won)
teamd[away].points_for += away_score
teamd[away].points_against += home_score
return teamd
if __name__ == "__main__":
teamd = {}
parseSunCSV('data/sun4cast_FBS_2013.csv', teamd)
Texas = teamd['Texas']
Bama = teamd['Alabama']
print 'Alabama: %i-%i' % (Bama.wins, Bama.losses)
print 'Texas: %i-%i' % (Texas.wins, Texas.losses)
if True:
print "opponents:"
for opp in Texas.opponents:
print opp.school
rankings = []
for school in teamd.keys():
team = teamd[school]
if team.FBS:
rankings.append([team.getScore(), team.school])
rankings = sorted(rankings)[::-1]
for i in range(25):
print i+1, rankings[i][1], rankings[i][0]
|
mivade/cfbrank
|
dataparse.py
|
Python
|
gpl-3.0
| 3,352 | 0.004177 |
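Both parsers above share a mutable default argument (teamd={}), so state leaks between calls unless an explicit dict is passed, as the __main__ block does. A short sketch of re-ranking from the parsed dictionary, assuming the Team API exercised above (getScore(), school, FBS) and that the data files referenced at module import are present:
teamd = parseSunCSV('data/sun4cast_FBS_2013.csv', {})   # explicit dict avoids shared state
top = sorted(((team.getScore(), name) for name, team in teamd.items() if team.FBS),
             reverse=True)
for rank, (score, school) in enumerate(top[:10], start=1):
    print('%2d. %-20s %s' % (rank, school, score))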
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import minestrone.soup.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'minestrone.views.home', name='home'),
# url(r'^minestrone/', include('minestrone.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^$', minestrone.soup.views.JobsView.as_view()),
url(r'^jobs/$', minestrone.soup.views.JobsView.as_view()),
url(r'^editor/$', minestrone.soup.views.EditorView.as_view()),
)
|
cloudControl/django-celery-migration-app
|
minestrone/urls.py
|
Python
|
mit
| 781 | 0.010243 |
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
from yaml.parser import ParserError
import six
from oslo_config import cfg
from st2common import log as logging
from st2common.constants.meta import ALLOWED_EXTS
from st2common.constants.meta import PARSER_FUNCS
from st2common.constants.pack import MANIFEST_FILE_NAME
if six.PY2:
from io import open
__all__ = ["ContentPackLoader", "MetaLoader", "OverrideLoader"]
LOG = logging.getLogger(__name__)
class ContentPackLoader(object):
"""
Class for loading pack and pack content information from directories on disk.
"""
# TODO: Rename "get_content" methods since they don't actually return
# content - they just return a path
ALLOWED_CONTENT_TYPES = [
"triggers",
"sensors",
"actions",
"rules",
"aliases",
"policies",
]
def get_packs(self, base_dirs):
"""
Retrieve a list of packs in the provided directories.
:return: Dictionary where the key is pack name and the value is full path to the pack
directory.
:rtype: ``dict``
"""
if not isinstance(base_dirs, list):
raise TypeError(
"The base dirs has a value that is not a list"
f" (was {type(base_dirs)})."
)
result = {}
for base_dir in base_dirs:
if not os.path.isdir(base_dir):
raise ValueError('Directory "%s" doesn\'t exist' % (base_dir))
packs_in_dir = self._get_packs_from_dir(base_dir=base_dir)
result.update(packs_in_dir)
return result
def get_content(self, base_dirs, content_type):
"""
Retrieve content from the provided directories.
Provided directories are searched from left to right. If a pack with the same name exists
in multiple directories, first pack which is found wins.
:param base_dirs: Directories to look into.
:type base_dirs: ``list``
:param content_type: Content type to look for (sensors, actions, rules).
:type content_type: ``str``
:rtype: ``dict``
"""
if not isinstance(base_dirs, list):
raise TypeError(
"The base dirs has a value that is not a list"
f" (was {type(base_dirs)})."
)
if content_type not in self.ALLOWED_CONTENT_TYPES:
raise ValueError("Unsupported content_type: %s" % (content_type))
content = {}
pack_to_dir_map = {}
for base_dir in base_dirs:
if not os.path.isdir(base_dir):
raise ValueError('Directory "%s" doesn\'t exist' % (base_dir))
dir_content = self._get_content_from_dir(
base_dir=base_dir, content_type=content_type
)
# Check for duplicate packs
for pack_name, pack_content in six.iteritems(dir_content):
if pack_name in content:
pack_dir = pack_to_dir_map[pack_name]
LOG.warning(
'Pack "%s" already found in "%s", ignoring content from "%s"'
% (pack_name, pack_dir, base_dir)
)
else:
content[pack_name] = pack_content
pack_to_dir_map[pack_name] = base_dir
return content
def get_content_from_pack(self, pack_dir, content_type):
"""
Retrieve content from the provided pack directory.
:param pack_dir: Path to the pack directory.
:type pack_dir: ``str``
:param content_type: Content type to look for (sensors, actions, rules).
:type content_type: ``str``
:rtype: ``str``
"""
if content_type not in self.ALLOWED_CONTENT_TYPES:
raise ValueError("Unsupported content_type: %s" % (content_type))
if not os.path.isdir(pack_dir):
raise ValueError('Directory "%s" doesn\'t exist' % (pack_dir))
content = self._get_content_from_pack_dir(
pack_dir=pack_dir, content_type=content_type
)
return content
def _get_packs_from_dir(self, base_dir):
result = {}
for pack_name in os.listdir(base_dir):
pack_dir = os.path.join(base_dir, pack_name)
pack_manifest_file = os.path.join(pack_dir, MANIFEST_FILE_NAME)
if os.path.isdir(pack_dir) and os.path.isfile(pack_manifest_file):
result[pack_name] = pack_dir
return result
def _get_content_from_dir(self, base_dir, content_type):
content = {}
for pack in os.listdir(base_dir):
# TODO: Use function from util which escapes the name
pack_dir = os.path.join(base_dir, pack)
# Ignore missing or non directories
try:
pack_content = self._get_content_from_pack_dir(
pack_dir=pack_dir, content_type=content_type
)
except ValueError:
continue
else:
content[pack] = pack_content
return content
def _get_content_from_pack_dir(self, pack_dir, content_type):
content_types = dict(
triggers=self._get_triggers,
sensors=self._get_sensors,
actions=self._get_actions,
rules=self._get_rules,
aliases=self._get_aliases,
policies=self._get_policies,
)
get_func = content_types.get(content_type)
if get_func is None:
raise ValueError("Invalid content_type: %s" % (content_type))
if not os.path.isdir(pack_dir):
raise ValueError('Directory "%s" doesn\'t exist' % (pack_dir))
pack_content = get_func(pack_dir=pack_dir)
return pack_content
def _get_triggers(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="triggers")
def _get_sensors(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="sensors")
def _get_actions(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="actions")
def _get_rules(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="rules")
def _get_aliases(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="aliases")
def _get_policies(self, pack_dir):
return self._get_folder(pack_dir=pack_dir, content_type="policies")
def _get_folder(self, pack_dir, content_type):
path = os.path.join(pack_dir, content_type)
if not os.path.isdir(path):
return None
return path
class MetaLoader(object):
"""
Class for loading and parsing pack and resource metadata files.
"""
def load(self, file_path, expected_type=None):
"""
Loads content from file_path if file_path's extension
is one of allowed ones (See ALLOWED_EXTS).
Throws UnsupportedMetaException on disallowed filetypes.
Throws ValueError on malformed meta.
:param file_path: Absolute path to the file to load content from.
:type file_path: ``str``
:param expected_type: Expected type for the loaded and parsed content (optional).
:type expected_type: ``object``
:rtype: ``dict``
"""
file_name, file_ext = os.path.splitext(file_path)
if file_ext not in ALLOWED_EXTS:
raise Exception(
"Unsupported meta type %s, file %s. Allowed: %s"
% (file_ext, file_path, ALLOWED_EXTS)
)
result = self._load(PARSER_FUNCS[file_ext], file_path)
if expected_type and not isinstance(result, expected_type):
actual_type = type(result).__name__
error = 'Expected "%s", got "%s"' % (expected_type.__name__, actual_type)
raise ValueError(error)
return result
def _load(self, parser_func, file_path):
with open(file_path, "r", encoding="utf-8") as fd:
try:
return parser_func(fd)
except ValueError:
LOG.exception("Failed loading content from %s.", file_path)
raise
except ParserError:
LOG.exception("Failed loading content from %s.", file_path)
raise
class OverrideLoader(object):
"""
Class for loading pack override data
"""
# Mapping of permitted override types to resource name
ALLOWED_OVERRIDE_TYPES = {
"sensors": "class_name",
"actions": "name",
"rules": "name",
"aliases": "name",
}
ALLOWED_OVERRIDE_NAMES = [
"enabled",
]
DEFAULT_OVERRIDE_VALUES = {"enabled": True}
def override(self, pack_name, resource_type, content):
"""
Loads override content for pack, and updates content
:param pack_name: Name of pack
:type pack_name: ``str``
:param resource_type: Type of resource loading
:type type: ``str``
:param content: Content as loaded from meta information
:type content: ``object``
:return: Whether data was overridden
:rtype: ``bool``
"""
orig_content = content.copy()
if resource_type not in self.ALLOWED_OVERRIDE_TYPES.keys():
raise ValueError(
f"Invalid override type of {resource_type} attempted for pack {pack_name}"
)
override_dir = os.path.join(cfg.CONF.system.base_path, "overrides")
# Apply global overrides
global_file = os.path.join(override_dir, "_global.yaml")
self._apply_override_file(global_file, pack_name, resource_type, content, True)
# Apply pack overrides
override_file = os.path.join(override_dir, f"{pack_name}.yaml")
self._apply_override_file(
override_file, pack_name, resource_type, content, False
)
if content == orig_content:
overridden = False
else:
# Need to account for defaults that might not have been set
for key in self.ALLOWED_OVERRIDE_NAMES:
if key not in orig_content.keys() and key in content.keys():
orig_content[key] = self.DEFAULT_OVERRIDE_VALUES[key]
if content == orig_content:
overridden = False
else:
overridden = True
return overridden
def _apply_override_file(
self, override_file, pack_name, resource_type, content, global_file
):
"""
Loads override content from override file
:param override_file: Override filename
:type override_file: ``str``
:param pack_name: Name of pack
:type pack_name: ``str``
:param resource_type: Type of resource loading
:type type: ``str``
:param content: Content as loaded from meta information
:type content: ``object``
"""
if not os.path.exists(override_file):
# No override file for pack
LOG.debug(f"No override file {override_file} found")
return
# Read override file
file_name, file_ext = os.path.splitext(override_file)
overrides = self._load(PARSER_FUNCS[file_ext], override_file)
# Apply overrides
if resource_type in overrides:
type_override = overrides[resource_type]
name = content[self.ALLOWED_OVERRIDE_TYPES[resource_type]]
if "defaults" in type_override:
for key in type_override["defaults"]:
if key in self.ALLOWED_OVERRIDE_NAMES:
content[key] = type_override["defaults"][key]
LOG.debug(
f"Overridden {resource_type} {pack_name}.{name} {key} to default value of {content[key]} from {override_file}"
)
else:
raise ValueError(
f"Override attempted with invalid default key {key} in pack {pack_name}"
)
if global_file:
# No exceptions required in global content file
return
if "exceptions" in type_override:
if name in type_override["exceptions"]:
for key in type_override["exceptions"][name]:
if key in self.ALLOWED_OVERRIDE_NAMES:
content[key] = type_override["exceptions"][name][key]
LOG.debug(
f"Overridden {resource_type} {pack_name}.{name} {key} to exception value of {content[key]} from {override_file}"
)
else:
raise ValueError(
f"Override attempted with invalid exceptions key {key} in pack {pack_name}"
)
def _load(self, parser_func, file_path):
with open(file_path, "r", encoding="utf-8") as fd:
try:
return parser_func(fd)
except ValueError:
LOG.exception("Failed loading content from %s.", file_path)
raise
except ParserError:
LOG.exception("Failed loading content from %s.", file_path)
raise
|
StackStorm/st2
|
st2common/st2common/content/loader.py
|
Python
|
apache-2.0
| 14,094 | 0.001064 |
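A hedged sketch of the loaders above in use: discovering packs on disk with ContentPackLoader and parsing a pack manifest with MetaLoader. The paths are placeholders, and OverrideLoader is omitted because it additionally needs cfg.CONF.system.base_path to be configured:
from st2common.content.loader import ContentPackLoader, MetaLoader
packs = ContentPackLoader().get_packs(base_dirs=["/opt/stackstorm/packs"])  # placeholder dir
for name, path in packs.items():
    print(name, "->", path)
meta = MetaLoader().load("/opt/stackstorm/packs/core/pack.yaml", expected_type=dict)
print(meta.get("name"), meta.get("version"))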
#!/usr/bin/env python
"""PIQS: Permutational Invariant Quantum Solver
PIQS is an open-source Python solver to study the exact Lindbladian
dynamics of open quantum systems consisting of identical qubits.
"""
DOCLINES = __doc__.split('\n')
CLASSIFIERS = """\
Development Status :: 3 - Alpha
Intended Audience :: Science/Research
License :: OSI Approved :: BSD License
Programming Language :: Python
Programming Language :: Python :: 3
Topic :: Scientific/Engineering
Operating System :: MacOS
Operating System :: POSIX
Operating System :: Unix
Operating System :: Microsoft :: Windows
"""
import os
import sys
# The following is required to get unit tests up and running.
# If the user doesn't have setuptools, that's OK; we'll just skip unit tests.
try:
from setuptools import setup, Extension
TEST_SUITE = 'nose.collector'
TESTS_REQUIRE = ['nose']
EXTRA_KWARGS = {
'test_suite': TEST_SUITE,
'tests_require': TESTS_REQUIRE
}
except:
from distutils.core import setup
from distutils.extension import Extension
EXTRA_KWARGS = {}
try:
import numpy as np
except:
np = None
from Cython.Build import cythonize
from Cython.Distutils import build_ext
MAJOR = 1
MINOR = 2
MICRO = 0
ISRELEASED = True
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
REQUIRES = ['numpy (>=1.8)', 'scipy (>=0.15)', 'cython (>=0.21)', 'qutip (>=4.2)']
INSTALL_REQUIRES = ['numpy>=1.8', 'scipy>=0.15', 'cython>=0.21', 'qutip>=4.2']
PACKAGES = ['piqs', 'piqs/cy', 'piqs/tests']
PACKAGE_DATA = {
'piqs': ['configspec.ini'],
'piqs/tests': ['*.ini'],
'piqs/cy': ['*.pxi', '*.pxd', '*.pyx'],
}
INCLUDE_DIRS = [np.get_include()] if np is not None else []
NAME = "piqs"
AUTHOR = ("Nathan Shammah, Shahnawaz Ahmed")
AUTHOR_EMAIL = ("nathan.shammah@gmail.com, shahnawaz.ahmed95@gmail.com")
LICENSE = "BSD"
DESCRIPTION = DOCLINES[0]
LONG_DESCRIPTION = "\n".join(DOCLINES[2:])
KEYWORDS = "quantum physics dynamics permutational symmetry invariance"
URL = ""
CLASSIFIERS = [_f for _f in CLASSIFIERS.split('\n') if _f]
PLATFORMS = ["Linux", "Mac OSX", "Unix", "Windows"]
# Add Cython extensions here
cy_exts = ['dicke']
# If on Win and Python version >= 3.5 and not in MSYS2 (i.e. Visual studio compile)
if sys.platform == 'win32' and int(str(sys.version_info[0])+str(sys.version_info[1])) >= 35 and os.environ.get('MSYSTEM') is None:
_compiler_flags = ['/w', '/Ox']
# Everything else
else:
_compiler_flags = ['-w', '-O3', '-march=native', '-funroll-loops']
EXT_MODULES =[]
# Add Cython files from piqs/cy
for ext in cy_exts:
_mod = Extension('piqs.cy.'+ext,
sources = ['piqs/cy/'+ext+'.pyx'],
include_dirs = [np.get_include()],
extra_compile_args=_compiler_flags,
extra_link_args=[],
language='c++')
EXT_MODULES.append(_mod)
# Remove -Wstrict-prototypes from cflags
import distutils.sysconfig
cfg_vars = distutils.sysconfig.get_config_vars()
if "CFLAGS" in cfg_vars:
cfg_vars["CFLAGS"] = cfg_vars["CFLAGS"].replace("-Wstrict-prototypes", "")
# Setup commands go here
setup(
name = NAME,
version = VERSION,
packages = PACKAGES,
include_package_data=True,
include_dirs = INCLUDE_DIRS,
ext_modules = cythonize(EXT_MODULES),
cmdclass = {'build_ext': build_ext},
author = AUTHOR,
author_email = AUTHOR_EMAIL,
license = LICENSE,
description = DESCRIPTION,
long_description = LONG_DESCRIPTION,
keywords = KEYWORDS,
url = URL,
classifiers = CLASSIFIERS,
platforms = PLATFORMS,
requires = REQUIRES,
package_data = PACKAGE_DATA,
zip_safe = False,
install_requires=INSTALL_REQUIRES,
**EXTRA_KWARGS
)
|
nathanshammah/pim
|
setup.py
|
Python
|
mit
| 3,679 | 0.015222 |
#!/usr/bin/env python
#
# "TC BOUT BOARD" a wrestling Match Bout Board application for youth matches or tournaments
# Copyright (C) 2016 Anthony Cetera
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from os import system
from platform import system as platform
from Tkinter import *
import tkMessageBox
# Global Variables
meet = set([])
# Images
team_image = "bb_team_image.gif"
# Initialize bout board row text variables. Global so they can be updated from both classes.
nw_text = []
od_text = []
dd_text = []
ith_text = []
class WrMat:
"""
The WrMat class maintains the bout board data set.
Each object is a mat number and a list of match numbers on that mat
mat_bouts should always be added to the class as a list.
"""
def __init__(self, mat_num, mat_bouts):
self.mat_num = mat_num
self.mat_bouts = mat_bouts
def __str__(self):
return str(self.mat_num) + "\n" + str(self.mat_bouts)
def add_bout(self, bout_num):
self.mat_bouts.append(bout_num)
def finish_bout(self, bout_pos):
self.mat_bouts.pop(bout_pos)
def insert_bout(self, bout_pos, bout_num):
self.mat_bouts.insert(bout_pos, bout_num)
def get_mat_num(self):
return self.mat_num
def get_mat_bouts(self):
return self.mat_bouts
def help_about():
"""
Posts version and license information.
"""
tkMessageBox.showinfo("About TC BOUT BOARD", "TC BOUT BOARD v1.0 Copyright (C) 2016 Anthony Cetera\n"
"This program comes with ABSOLUTELY NO WARRANTY;"
" for details click Help --> About\n\n"
"This is free software, and you are welcome to redistribute it"
"under certain conditions; "
"please check the beginning of the source code for license details.")
def get_mat(matnum):
"""
Send in a mat number and get back the WrMat object containing that mat number.
"""
global meet
for eachmat in meet:
if eachmat.get_mat_num() == matnum:
return eachmat
def validate_match_spinbox(value):
"""
Function checks that spinboxes contain integers between 1 and 99.
First I tried making this a method in the Adminwin class but Pycharm complained.
Made it static to avoid Pycharm error - I'm neurotic like that.
"""
try:
intval = int(value)
if 0 < intval < 100:
return True
else:
return False
except ValueError:
return False
def validate_insert_match(value):
"""
    Function limits the insert to 5 characters.
"""
try:
if len(value) < 6:
return True
else:
return False
except ValueError:
return False
def update_grid(matnum):
"""
StringVars for board grid labels are defined in class Boardwin.
Function sets each of these stringvars based on the contents of the current WrMat match list.
Function must be passed a mat number to update from.
"""
curmat = get_mat(matnum)
matboutlist = curmat.get_mat_bouts()
try:
nw_text[matnum].set(matboutlist[0])
except IndexError:
nw_text[matnum].set("*")
try:
od_text[matnum].set(matboutlist[1])
except IndexError:
od_text[matnum].set("*")
try:
dd_text[matnum].set(matboutlist[2])
except IndexError:
dd_text[matnum].set("*")
try:
ith_text[matnum].set(matboutlist[3])
except IndexError:
ith_text[matnum].set("*")
class Adminwin:
"""
All administrative window functions are defined here.
"""
def __init__(self, master):
# Define the maximum number of mats the application will support
# Update this if you want to try running more than 6 mats. Not tested with integers > 6.
self.maxmats = 6
# Define lists needed to hold each listbox object
# One needed for the mats, mat labels, and mat scrollbars
self.mat = []
self.matlabel = []
self.sbmat = []
# Define variables needed to start the additional board window
self.board_window = None
self.start_board_window = None
# Establish that the bout board isn't running
self.board_running = False
# Define meet setup variables before initializing
self.init_mat_num = None
self.init_mat_optmenu = None
self.init_mat_label = None
self.init_button = None
# Define list to hold spinboxes for match numbers
self.match_num_spin = []
self.match_spinner_label = []
# Init cleanup flag
# This is used to decide if we should cleanup the information frame after initializing the meet set.
self.wipe_mat_optmenu = False
# Set starting rows for mat grid
mat_start_row = 0
mat_button_row = 0
# Deal with initial focus problem on OSX
if platform() == 'Darwin': # How Mac OS X is identified by Python
system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "Python" to true' ''')
# Set up root of parent window as master
self.master = master
self.master.title("BOUT BOARD ADMIN")
# Draw a frame in the grid for a border
self.adminframe = Frame(self.master, bd=6, bg='gray90', relief=SUNKEN)
self.adminframe.grid(column=0, row=0)
self.adminframe.grid_rowconfigure(0, weight=1)
self.adminframe.grid_columnconfigure(0, weight=1)
# Menu block
# Build menu for one time options
self.menubar = Menu(self.master)
self.init_menu = Menu(self.menubar, tearoff=0)
self.init_menu.add_command(label="Setup Meet", command=self.draw_init_dialogs)
self.init_menu.add_command(label="Show Board", command=self.start_board)
self.menubar.add_cascade(label="Get Started", menu=self.init_menu)
# Quit Menu
self.quit_menu = Menu(self.menubar, tearoff=0)
self.quit_menu.add_command(label="Close Board", command=self.stop_board)
self.quit_menu.add_command(label="Quit!", command=self.adminframe.quit)
self.menubar.add_cascade(label="Quit Menu", menu=self.quit_menu)
# Help Menu
self.help_menu = Menu(self.menubar, tearoff=0)
self.help_menu.add_command(label="About", command=help_about)
self.menubar.add_cascade(label="Help", menu=self.help_menu)
# Populate the menu bar with options above
self.master.config(menu=self.menubar)
# Build grid of up to 6 potential mats
for i in range(self.maxmats):
matnum = i + 1
matcol = (i % 3) * 2
matlabelrow = ((i // 3) * 2) + mat_start_row
matrow = matlabelrow + 1
scrollcol = matcol + 1
self.matlabel.append(Label(self.adminframe, text="MAT " + str(matnum)))
self.sbmat.append(Scrollbar(self.adminframe, orient=VERTICAL))
self.mat.append(Listbox(self.adminframe, selectmode=SINGLE, yscrollcommand=self.sbmat[i].set))
self.sbmat[i].config(command=self.mat[i].yview)
self.matlabel[i].grid(sticky=(N, W), column=matcol, row=matlabelrow)
self.mat[i].grid(sticky=(N, W), column=matcol, row=matrow)
self.sbmat[i].grid(sticky=(N, S), column=scrollcol, row=matrow)
# Draw interactive area
self.interactframe = Frame(self.master, bd=6, bg='gray69', relief=SUNKEN)
self.interactframe.grid(sticky=(E, W), column=0, columnspan=2, row=1)
# Remove a match
self.rm_button = Button(self.interactframe, text="Remove Match", command=self.rm_bout)
self.rm_button.grid(sticky=W, column=0, row=mat_button_row)
        # Assign validation function to master for the add-match entry box
        # Each time the entry contents change, they are evaluated against this.
validate_insert_cmd = self.master.register(validate_insert_match)
# Insert a match
self.add_button = Button(self.interactframe, text="Add Match at Selection", command=self.add_bout)
self.add_button.grid(column=0, row=mat_button_row + 1)
self.add_dialog = Entry(self.interactframe, validate='all', validatecommand=(validate_insert_cmd, '%P'), bd=3)
self.add_dialog.grid(column=2, row=mat_button_row + 1)
# Draw information message area
self.infoframe = Frame(self.master, bd=6, bg='gray69', relief=SUNKEN)
self.infoframe.grid(sticky=(N, S), column=1, row=0)
self.infoframe.grid_columnconfigure(0, minsize=200)
self.infolabel = Label(self.infoframe, text="Information Area", fg='blue', relief=GROOVE)
self.infospacer = Label(self.infoframe)
self.infospacer.grid(column=1, row=0, padx=36)
self.infolabel.grid(sticky=(E, W), column=0, row=0, columnspan=2)
def cleanup_init(self):
"""
This method destroys all the widgets that were created during the meet setup phase.
However, we can also call this every time we need to redraw the bouts per mat spinners if the user
selects a new number of mats.
"""
if self.wipe_mat_optmenu:
self.init_mat_optmenu.destroy()
self.init_mat_label.destroy()
self.wipe_mat_optmenu = False
for count in range(len(self.match_num_spin)):
self.match_spinner_label[count].destroy()
self.match_num_spin[count].destroy()
# wipe button because it doesn't seem to destroy after it's pushed down the grid
self.init_button.destroy()
# wipe lists that hold spinner widgets
self.match_num_spin = []
self.match_spinner_label = []
def meet_init(self):
"""
This function generates the meet set by iterating through each mat that the user chose a number of matches for.
A WrMat object is added to the set one by one until the entire meet is stored.
"""
global meet
# wipe current meet. meet should contain current set of WrMat objects.
meet = set([])
# Time to get the user input for the number of mats running today.
mat_qty = self.init_mat_num.get()
# Create each list of matches and add them to the meet.
for count in range(mat_qty):
mat_num = count + 1
mat_bouts = self.match_num_spin[count].get()
temp_bouts = list(range(mat_num * 100 + 1, mat_num * 100 + int(mat_bouts) + 1))
temp_bouts_str = []
for bout_num in temp_bouts:
temp_bouts_str.append(str(bout_num))
temp_mat = WrMat(mat_num, temp_bouts_str)
meet.add(temp_mat)
# Destroy all widgets associated with setting up a new board by calling the cleanup function.
self.wipe_mat_optmenu = True
self.cleanup_init()
# Draw everything now that the meet set is created
self.draw_lists()
def draw_matchnum_spin(self, matsval):
"""
Method generates spinboxes to input number of matches per mat. Then, generates
the list of match numbers from x01-x99.
"""
# Assign validation function to master for spinbox input validation
# Each time a change is made inside the entry box of the spinner, the contents are evaluated against this.
validate_spin_cmd = self.master.register(validate_match_spinbox)
# If number of mats changes we need to clear the existing spinboxes in the info frame
# Also cleanup function should wipe the list containing the existing SB widgets
# So check for widgets erase and redraw
if self.match_num_spin:
self.cleanup_init()
# Create and draw the spinboxes based on the number of mats chosen.
# These spinboxes allow the user to select the number of bouts per mat.
for mat_num in range(1, matsval + 1):
self.match_spinner_label.append(Label(self.infoframe, text="Number of bouts on MAT " + str(mat_num)))
self.match_num_spin.append(Spinbox(self.infoframe,
from_=1,
to=99,
width=6,
validate='all',
validatecommand=(validate_spin_cmd, '%P')))
self.match_spinner_label[mat_num - 1].grid(column=0, row=mat_num + 1)
self.match_num_spin[mat_num - 1].grid(column=1, row=mat_num + 1)
# Button to init all values selected
# Calls function to set up meet set and destroy init widgets.
self.init_button = Button(self.infoframe, text="Initialize Meet", bg='red', command=self.meet_init)
self.init_button.grid(column=0, row=self.maxmats + 2)
def clear_listboxes(self):
"""
Wipe the contents of the admin listboxes
"""
for i in range(self.maxmats):
self.mat[i].delete(0, END)
def draw_init_dialogs(self):
"""
Create a list from 1 to the maximum number of mats on the floor.
This establishes the options for the Optionmenu of mats running this day
"""
if tkMessageBox.askyesno("Initialize", "Board will reset.\nAre you sure?"):
# Create a list to hold the Optionmenu choices for the number of mats running
mat_opts_list = list(range(1, self.maxmats + 1))
            # Track a variable for when the number of mats changes, as an integer.
self.init_mat_num = IntVar()
# Set the default to the 3rd value in the list. Should always be the integer 3.
self.init_mat_num.set(mat_opts_list[2])
# Create the drop down menu.
self.init_mat_label = Label(self.infoframe, text="Number of mats running today?")
self.init_mat_optmenu = OptionMenu(self.infoframe,
self.init_mat_num,
*mat_opts_list,
command=self.draw_matchnum_spin)
self.init_mat_label.grid(column=0, row=1)
self.init_mat_optmenu.grid(sticky=EW, column=1, row=1)
# Check to see if list of spinners has values
# If not, generate some spinners with the default value of the Optionmenu
if not self.match_num_spin:
self.draw_matchnum_spin(self.init_mat_num.get())
# Clean up any running boards
self.stop_board()
# Clear listboxes as to not confuse the user after the selected a new setup
self.clear_listboxes()
def draw_lists(self):
"""
Time to draw the listboxes in the admin window with the contents of the entire meet set
We can also call this function to clear the boxes for a new init.
Note: in that case it will not wipe the meet set but we will get a clean window.
"""
global meet
# Make sure boxes are cleared on subsequent runs
self.clear_listboxes()
# Iterate through each WrMat then iterate through the list of bouts in that mat object.
# add each mat number to the list boxes.
for temp_mat in meet:
mn = temp_mat.get_mat_num() - 1
for temp_bout in temp_mat.get_mat_bouts():
self.mat[mn].insert(END, temp_bout)
self.mat[mn].insert(END, "END of BOUTS")
def rm_bout(self):
"""
Iterate through all the list boxes and check for a selection.
When a selection is found, delete the bout number from the listbox and remove the bout from the class.
"""
for i in range(len(meet)):
sel_bout = self.mat[i].curselection()
            # Check for a selection and that the selected line isn't the last one ("END of BOUTS").
if sel_bout and (self.mat[i].size() - 1) != sel_bout[0]:
sel_bout_int = sel_bout[0]
cur_mat = get_mat(i+1)
cur_mat.finish_bout(sel_bout_int)
self.mat[i].delete(sel_bout_int)
# Make sure whatever the last selected position in listbox stays selected
self.mat[i].selection_set(sel_bout_int)
if self.board_running:
update_grid(i + 1)
def add_bout(self):
"""
Free form entry box to add bouts to the list.
Currently only checks for duplicates on the same mat.
"""
# Iterate through each admin listbox and check for a selection
for i in range(len(meet)):
matnum = i + 1
duplicate = False
sel_bout = self.mat[i].curselection()
# Get value in entry box
box_val = self.add_dialog.get()
# Check for a selection and no blank values
if sel_bout and box_val:
sel_bout_int = sel_bout[0]
cur_mat = get_mat(matnum)
# Check for duplicates
for check_dup in cur_mat.get_mat_bouts():
if check_dup == box_val:
duplicate = True
if not duplicate:
# First update the mat object from the class WrMat with the new bout number
cur_mat.insert_bout(sel_bout_int, self.add_dialog.get())
# Keep the corresponding list box in sync
# by inserting the new bout into the box in the same position
self.mat[i].insert(sel_bout_int, self.add_dialog.get())
# Check to see if the board is being displayed by checking the flag
# If so, call out to redraw based on the mat queue that changed
if self.board_running:
update_grid(matnum)
def start_board(self):
"""
Leverage the TopLevel function to start a new window as a child of the root
Because this is intended to run on an XGA projector, set the dimensions to 1024
The board_running flag needs to be maintained if we want to make sure multiple windows don't get spawned.
"""
if not self.board_running and meet:
self.board_running = True
self.board_window = Toplevel(self.master)
self.board_window.geometry('1024x768')
            # Capture destroy events. This keeps the board_running flag consistent no matter how the window is destroyed.
self.board_window.protocol("WM_DELETE_WINDOW", self.stop_board)
# Make root resizable
self.board_window.rowconfigure(0, weight=1)
self.board_window.columnconfigure(0, weight=1)
# Boardwin class deals with all functions of the actual bout board display
self.start_board_window = Boardwin(self.board_window)
def stop_board(self):
"""
If the board needs to close for some reason, this function destroys the top level window.
"""
if self.board_running:
self.board_window.destroy()
self.board_running = False
class Boardwin:
"""
This class is defined specifically to start a second window to draw the board.
    It is called as a child window in the Adminwin class with the Toplevel widget.
"""
def __init__(self, master):
"""
The entire window is managed in __init__
The board makes use of a corner image that can be customized to whatever is placed in bb_team_image.gif
        Otherwise, there are a few tunable parameters like fonts, colors and border type.
        ToDo - organize the tunables such that the variables containing them are all in one place.
"""
# "text" globals hold the actual values that will be in each row of the grid
global meet, nw_text, od_text, dd_text, ith_text, team_image
# Set up the child window
self.master = master
self.master.title("BOUT BOARD")
# Make the root resizable by giving the columns and rows a weight
self.master.rowconfigure(0, weight=1)
self.master.columnconfigure(0, weight=1)
# Put a frame in the grid hold the bout board matrix
self.boardframe = Frame(self.master, bd=6, relief=SUNKEN)
self.boardframe.grid(sticky=N+S+E+W, column=0, row=0)
# Determine number of mats for iterations of mats.
self.active_mats = len(meet)
# Number of columns is number of mats plus 1. Again, for iterations of columns.
max_x = len(meet) + 1
# Board will always have 5 rows (Header, NW, OD, DD, ITH)
max_y = 5
# Initialize row Label variables
self.header_row = []
self.nw_label = []
self.od_label = []
self.dd_label = []
self.ith_label = []
# Formatting variables. In this case only one just to set up the border around each grid box.
bb_relief = RIDGE
# Attempt to define a variable to render the corner image.
# If the attempt fails, skip this altogether.
try:
corner_image = PhotoImage(file=team_image)
except TclError:
corner_image = None
# Make all the columns resizable
# The 1st column will maintain 2/3 the width of the other ones.
for x in range(max_x):
if x == 0:
self.boardframe.columnconfigure(x, weight=2)
else:
self.boardframe.columnconfigure(x, weight=3)
# Make all the rows resizable
# The 1st row will be 2/3 the height of the other rows.
for y in range(max_y):
if y == 0:
self.boardframe.rowconfigure(y, weight=2)
else:
self.boardframe.rowconfigure(y, weight=3)
# Draw the first row of the grid
# Create the column headers based on how many mats there are
for mat_column in range(max_x):
# Special handling of (0,0)
# make it an image not text like the others
if mat_column == 0:
self.header_row.append(Label(self.boardframe, image=corner_image,
bg='goldenrod', relief=bb_relief))
self.header_row[0].image = corner_image
# The rest of the columns will display the mat number
else:
self.header_row.append(Label(self.boardframe, text="MAT " + str(mat_column),
font='Helvetica 42 bold', bg='goldenrod', relief=bb_relief))
# Time to place the contents of the list instance into the first row of the grid
self.header_row[mat_column].grid(sticky=N+S+E+W, column=mat_column, row=0)
# Initialize the rest of the board with the following for loop
for pos in range(max_x):
# By setting up each list entry as a StringVar, changing them redraws the new value
nw_text.append(StringVar())
od_text.append(StringVar())
dd_text.append(StringVar())
ith_text.append(StringVar())
if pos == 0:
# Static row headers for column 0
nw_text[pos].set("Now \nWrestling")
od_text[pos].set("On \nDeck")
dd_text[pos].set("Double \nDeck")
ith_text[pos].set("In the \nHole")
else:
# update_grid will assign values to the newly assigned StringVar
# Notice the function must be passed the mat number here indicated by a "pos"ition on the grid
update_grid(pos)
# Define formatting variables based on the column number being worked on
if pos == 0:
grid_color = 'goldenrod'
grid_font = 'Helvetica 40 bold'
                # By setting the grid width, in characters, we ensure the auto resize
# won't shrink or grow the grid squares based on the contents
# 9 is the number of characters in the word "Wrestling"
grid_width = 9
else:
grid_color = 'light goldenrod'
grid_font = 'Helvetica 60 bold'
                # By setting the grid width, in characters, we ensure the auto resize
# won't shrink or grow the grid squares based on the contents
# 5 characters gives us room for a 5 character bout number
grid_width = 5
# As each column position is updated establish a label widget for the value
# Now Wrestling
self.nw_label.append(Label(self.boardframe, textvariable=nw_text[pos], height=2,
font=grid_font, bg=grid_color, relief=bb_relief, width=grid_width))
# On Deck
self.od_label.append(Label(self.boardframe, textvariable=od_text[pos], height=2,
font=grid_font, bg=grid_color, relief=bb_relief, width=grid_width))
# Double Deck
self.dd_label.append(Label(self.boardframe, textvariable=dd_text[pos], height=2,
font=grid_font, bg=grid_color, relief=bb_relief, width=grid_width))
# In the Hole
self.ith_label.append(Label(self.boardframe, textvariable=ith_text[pos], height=2,
font=grid_font, bg=grid_color, relief=bb_relief, width=grid_width))
# Place each bout label on the grid
self.nw_label[pos].grid(sticky=N + S + E + W, column=pos, row=1)
self.od_label[pos].grid(sticky=N + S + E + W, column=pos, row=2)
self.dd_label[pos].grid(sticky=N + S + E + W, column=pos, row=3)
self.ith_label[pos].grid(sticky=N + S + E + W, column=pos, row=4)
def main():
root = Tk()
app1 = Adminwin(root)
root.mainloop()
try:
root.destroy()
except TclError:
pass
if __name__ == '__main__':
main()
|
nitrotc/tc-bout-board
|
app/tc_bout_board.py
|
Python
|
gpl-3.0
| 27,003 | 0.003444 |
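The Tkinter widgets above drive everything, but the WrMat bout queue can be exercised on its own; a short sketch assuming only the class as defined in this module:
mat1 = WrMat(1, ['101', '102', '103'])
mat1.add_bout('104')         # append to the end of the queue
mat1.insert_bout(1, '101A')  # squeeze an extra bout in at position 1
mat1.finish_bout(0)          # the bout at the head of the queue is done
assert mat1.get_mat_bouts() == ['101A', '102', '103', '104']
print(mat1)                  # mat number on one line, remaining bouts on the next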
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkWatchersOperations(object):
"""NetworkWatchersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def create_or_update(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NetworkWatcher"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Creates or updates a network watcher in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the network watcher resource.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.NetworkWatcher
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_07_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkWatcher')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
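    # Usage sketch (assumptions: azure-identity is installed and the versioned
    # client exposes this operation group as `network_watchers`; not part of
    # this generated module):
    #
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.network import NetworkManagementClient
    #   client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #   watcher = client.network_watchers.create_or_update(
    #       "my-resource-group", "my-watcher", {"location": "westus"})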
def get(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Gets the specified network watcher by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_07_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified network watcher resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
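# Example (illustrative sketch): begin_delete returns an LROPoller, so callers
# usually block on .result(), which yields None for this operation:
#
#     poller = network_client.network_watchers.begin_delete("my-rg", "my-watcher")
#     poller.result()  # waits for the long-running delete to complete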
def update_tags(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Updates a network watcher tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters supplied to update network watcher tags.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_07_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
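# Example (illustrative sketch): the request body is a TagsObject; names and tag
# values below are placeholders:
#
#     from azure.mgmt.network.v2018_07_01.models import TagsObject
#
#     updated = network_client.network_watchers.update_tags(
#         "my-rg", "my-watcher", TagsObject(tags={"env": "test"}))
#     print(updated.tags)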
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkWatcherListResult"]
"""Gets all network watchers by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either NetworkWatcherListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_07_01.models.NetworkWatcherListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcherListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkWatcherListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers'} # type: ignore
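# Example (illustrative sketch): list() returns an ItemPaged iterator that follows
# server-side next links transparently, so it can be consumed with a plain loop:
#
#     for watcher in network_client.network_watchers.list("my-rg"):
#         print(watcher.name)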
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkWatcherListResult"]
"""Gets all network watchers by subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either NetworkWatcherListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_07_01.models.NetworkWatcherListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcherListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkWatcherListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkWatchers'} # type: ignore
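# Example (illustrative sketch): list_all() pages across every resource group in
# the subscription configured on the client:
#
#     names = [w.name for w in network_client.network_watchers.list_all()]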
def get_topology(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TopologyParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.Topology"
"""Gets the current network topology by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the representation of topology.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.TopologyParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Topology, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_07_01.models.Topology
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Topology"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.get_topology.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TopologyParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Topology', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_topology.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/topology'} # type: ignore
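# Example (illustrative sketch): TopologyParameters selects the scope whose
# topology is returned; the resource group name below is a placeholder:
#
#     from azure.mgmt.network.v2018_07_01.models import TopologyParameters
#
#     topology = network_client.network_watchers.get_topology(
#         "my-rg", "my-watcher",
#         TopologyParameters(target_resource_group_name="my-rg"))
#     print(len(topology.resources or []))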
def _verify_ip_flow_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.VerificationIPFlowParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.VerificationIPFlowResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VerificationIPFlowResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._verify_ip_flow_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VerificationIPFlowParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_verify_ip_flow_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/ipFlowVerify'} # type: ignore
def begin_verify_ip_flow(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.VerificationIPFlowParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VerificationIPFlowResult"]
"""Verify IP flow from the specified VM to a location given the currently configured NSG rules.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the IP flow to be verified.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.VerificationIPFlowParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VerificationIPFlowResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.VerificationIPFlowResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VerificationIPFlowResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._verify_ip_flow_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_verify_ip_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/ipFlowVerify'} # type: ignore
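# Example (illustrative sketch): the poller resolves to a VerificationIPFlowResult;
# the target VM id, ports and addresses below are placeholders:
#
#     from azure.mgmt.network.v2018_07_01.models import VerificationIPFlowParameters
#
#     result = network_client.network_watchers.begin_verify_ip_flow(
#         "my-rg", "my-watcher",
#         VerificationIPFlowParameters(
#             target_resource_id="<vm-resource-id>",
#             direction="Outbound", protocol="TCP",
#             local_port="80", remote_port="443",
#             local_ip_address="10.0.0.4",
#             remote_ip_address="203.0.113.10")).result()
#     print(result.access)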
def _get_next_hop_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NextHopParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.NextHopResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.NextHopResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_next_hop_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NextHopParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NextHopResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('NextHopResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_next_hop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/nextHop'} # type: ignore
def begin_get_next_hop(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NextHopParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.NextHopResult"]
"""Gets the next hop from the specified VM.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the source and destination endpoint.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.NextHopParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either NextHopResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.NextHopResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NextHopResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_next_hop_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NextHopResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_next_hop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/nextHop'} # type: ignore
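# Example (illustrative sketch): NextHopParameters names the source VM plus the
# source and destination addresses; all values below are placeholders:
#
#     from azure.mgmt.network.v2018_07_01.models import NextHopParameters
#
#     hop = network_client.network_watchers.begin_get_next_hop(
#         "my-rg", "my-watcher",
#         NextHopParameters(
#             target_resource_id="<vm-resource-id>",
#             source_ip_address="10.0.0.4",
#             destination_ip_address="10.0.1.5")).result()
#     print(hop.next_hop_type)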
def _get_vm_security_rules_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.SecurityGroupViewParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.SecurityGroupViewResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityGroupViewResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_vm_security_rules_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'SecurityGroupViewParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vm_security_rules_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/securityGroupView'} # type: ignore
def begin_get_vm_security_rules(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.SecurityGroupViewParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.SecurityGroupViewResult"]
"""Gets the configured and effective security group rules on the specified VM.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the VM to check security groups for.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.SecurityGroupViewParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either SecurityGroupViewResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.SecurityGroupViewResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityGroupViewResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_vm_security_rules_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vm_security_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/securityGroupView'} # type: ignore
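# Example (illustrative sketch): SecurityGroupViewParameters only needs the id of
# the VM whose configured and effective rules should be returned:
#
#     from azure.mgmt.network.v2018_07_01.models import SecurityGroupViewParameters
#
#     view = network_client.network_watchers.begin_get_vm_security_rules(
#         "my-rg", "my-watcher",
#         SecurityGroupViewParameters(target_resource_id="<vm-resource-id>")).result()
#     for nic in view.network_interfaces or []:
#         print(nic.id)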
def _get_troubleshooting_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.TroubleshootingResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_troubleshooting_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TroubleshootingParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_troubleshooting_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/troubleshoot'} # type: ignore
def begin_get_troubleshooting(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.TroubleshootingResult"]
"""Initiate troubleshooting on a specified resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the resource to troubleshoot.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.TroubleshootingParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either TroubleshootingResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.TroubleshootingResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_troubleshooting_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_troubleshooting.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/troubleshoot'} # type: ignore
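# Example (illustrative sketch): troubleshooting targets a supported resource
# (for example a VPN gateway or connection) and writes its report to a storage
# account; all ids and the storage path below are placeholders:
#
#     from azure.mgmt.network.v2018_07_01.models import TroubleshootingParameters
#
#     result = network_client.network_watchers.begin_get_troubleshooting(
#         "my-rg", "my-watcher",
#         TroubleshootingParameters(
#             target_resource_id="<gateway-resource-id>",
#             storage_id="<storage-account-resource-id>",
#             storage_path="<container-or-blob-url>")).result()
#     print(result.code)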
def _get_troubleshooting_result_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.QueryTroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.TroubleshootingResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_troubleshooting_result_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'QueryTroubleshootingParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_troubleshooting_result_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryTroubleshootResult'} # type: ignore
def begin_get_troubleshooting_result(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.QueryTroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.TroubleshootingResult"]
"""Get the last completed troubleshooting result on a specified resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the resource to query the troubleshooting result.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.QueryTroubleshootingParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either TroubleshootingResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.TroubleshootingResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_troubleshooting_result_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_troubleshooting_result.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryTroubleshootResult'} # type: ignore
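# Example (illustrative sketch): QueryTroubleshootingParameters only names the
# resource whose most recent troubleshooting run should be returned:
#
#     from azure.mgmt.network.v2018_07_01.models import QueryTroubleshootingParameters
#
#     last = network_client.network_watchers.begin_get_troubleshooting_result(
#         "my-rg", "my-watcher",
#         QueryTroubleshootingParameters(
#             target_resource_id="<gateway-resource-id>")).result()
#     print(last.code, last.end_time)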
def _set_flow_log_configuration_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogInformation"
**kwargs # type: Any
):
# type: (...) -> "_models.FlowLogInformation"
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._set_flow_log_configuration_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'FlowLogInformation')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_set_flow_log_configuration_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/configureFlowLog'} # type: ignore
def begin_set_flow_log_configuration(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogInformation"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.FlowLogInformation"]
"""Configures flow log and traffic analytics (optional) on a specified resource.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the configuration of flow log.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.FlowLogInformation
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either FlowLogInformation or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.FlowLogInformation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._set_flow_log_configuration_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_set_flow_log_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/configureFlowLog'} # type: ignore
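# Example (illustrative sketch): FlowLogInformation ties a network security group
# to a storage account and toggles flow logging; all ids below are placeholders:
#
#     from azure.mgmt.network.v2018_07_01.models import FlowLogInformation
#
#     info = network_client.network_watchers.begin_set_flow_log_configuration(
#         "my-rg", "my-watcher",
#         FlowLogInformation(
#             target_resource_id="<nsg-resource-id>",
#             storage_id="<storage-account-resource-id>",
#             enabled=True)).result()
#     print(info.enabled)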
def _get_flow_log_status_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogStatusParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.FlowLogInformation"
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_flow_log_status_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'FlowLogStatusParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_flow_log_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryFlowLogStatus'} # type: ignore
def begin_get_flow_log_status(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogStatusParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.FlowLogInformation"]
"""Queries status of flow log and traffic analytics (optional) on a specified resource.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define a resource to query flow log and traffic analytics
(optional) status.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.FlowLogStatusParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either FlowLogInformation or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.FlowLogInformation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_flow_log_status_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_flow_log_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryFlowLogStatus'} # type: ignore
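# Example (illustrative sketch): FlowLogStatusParameters names the resource whose
# flow log (and optional traffic analytics) status should be queried:
#
#     from azure.mgmt.network.v2018_07_01.models import FlowLogStatusParameters
#
#     status = network_client.network_watchers.begin_get_flow_log_status(
#         "my-rg", "my-watcher",
#         FlowLogStatusParameters(target_resource_id="<nsg-resource-id>")).result()
#     print(status.enabled)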
def _check_connectivity_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.ConnectivityParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.ConnectivityInformation"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectivityInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._check_connectivity_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ConnectivityParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ConnectivityInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('ConnectivityInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_check_connectivity_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectivityCheck'} # type: ignore
def begin_check_connectivity(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.ConnectivityParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ConnectivityInformation"]
"""Verifies the possibility of establishing a direct TCP connection from a virtual machine to a
given endpoint including another VM or an arbitrary remote server.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that determine how the connectivity check will be performed.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.ConnectivityParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ConnectivityInformation or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.ConnectivityInformation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectivityInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._check_connectivity_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectivityInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_check_connectivity.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectivityCheck'} # type: ignore
def _get_azure_reachability_report_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AzureReachabilityReportParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.AzureReachabilityReport"
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureReachabilityReport"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_azure_reachability_report_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'AzureReachabilityReportParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AzureReachabilityReport', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('AzureReachabilityReport', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_azure_reachability_report_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/azureReachabilityReport'} # type: ignore
def begin_get_azure_reachability_report(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AzureReachabilityReportParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AzureReachabilityReport"]
"""Gets the relative latency score for internet service providers from a specified location to
Azure regions.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that determine Azure reachability report configuration.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.AzureReachabilityReportParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AzureReachabilityReport or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.AzureReachabilityReport]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureReachabilityReport"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_azure_reachability_report_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AzureReachabilityReport', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_azure_reachability_report.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/azureReachabilityReport'} # type: ignore
def _list_available_providers_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AvailableProvidersListParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.AvailableProvidersList"
cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableProvidersList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._list_available_providers_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'AvailableProvidersListParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AvailableProvidersList', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('AvailableProvidersList', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_available_providers_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/availableProvidersList'} # type: ignore
def begin_list_available_providers(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AvailableProvidersListParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AvailableProvidersList"]
"""Lists all available internet service providers for a specified Azure region.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that scope the list of available providers.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.AvailableProvidersListParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AvailableProvidersList or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.AvailableProvidersList]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableProvidersList"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_available_providers_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AvailableProvidersList', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_available_providers.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/availableProvidersList'} # type: ignore
def _get_network_configuration_diagnostic_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NetworkConfigurationDiagnosticParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkConfigurationDiagnosticResponse"
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkConfigurationDiagnosticResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_network_configuration_diagnostic_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkConfigurationDiagnosticParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkConfigurationDiagnosticResponse', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('NetworkConfigurationDiagnosticResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_network_configuration_diagnostic_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/networkConfigurationDiagnostic'} # type: ignore
def begin_get_network_configuration_diagnostic(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NetworkConfigurationDiagnosticParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.NetworkConfigurationDiagnosticResponse"]
"""Get network configuration diagnostic.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters to get network configuration diagnostic.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.NetworkConfigurationDiagnosticParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either NetworkConfigurationDiagnosticResponse or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.NetworkConfigurationDiagnosticResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkConfigurationDiagnosticResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_network_configuration_diagnostic_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkConfigurationDiagnosticResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_network_configuration_diagnostic.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/networkConfigurationDiagnostic'} # type: ignore
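# ---------------------------------------------------------------------------
# Illustrative usage sketch -- not part of the generated operations class
# above. It assumes the multi-api NetworkManagementClient pinned to
# api-version 2018-07-01 and an azure-identity credential; every resource
# name below is a placeholder, not a real value.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.network import NetworkManagementClient
    from azure.mgmt.network.v2018_07_01 import models as nw_models

    client = NetworkManagementClient(
        DefaultAzureCredential(), "<subscription-id>", api_version="2018-07-01"
    )
    # begin_* methods return an azure.core.polling.LROPoller; .result() blocks
    # until the long-running operation reaches a terminal state.
    poller = client.network_watchers.begin_check_connectivity(
        "<resource-group>",
        "<network-watcher-name>",
        nw_models.ConnectivityParameters(
            source=nw_models.ConnectivitySource(resource_id="<source-vm-resource-id>"),
            destination=nw_models.ConnectivityDestination(address="www.example.com", port=443),
        ),
    )
    connectivity_info = poller.result()
    print(connectivity_info.connection_status)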
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_07_01/operations/_network_watchers_operations.py | Python | mit | 105,992 | 0.00551 |
# -*- coding: utf-8 -*-
import anydbm
import random
arquivo = anydbm.open('dados' , 'c')
arquivo['P1N1'] = 'Longbottom era o sobrenome de quem, nas séries de livros de Harry Potter?'
arquivo['P1C1'] = 'Neville'
arquivo['P1R2'] = 'Hermione'
arquivo['P1R3'] = 'Snape'
arquivo['P1R4'] = 'Dumbledore'
arquivo['P2N1'] = 'Qual Local que hoje se supõe onde foi o Nascimente de Jesus Cristo?'
arquivo['P2R1'] = 'Igreja da Penha'
arquivo['P2C2'] = 'Basílica da Natividade'
arquivo['P2R3'] = 'Natal'
arquivo['P2R4'] = 'Congo'
arquivo['P3N1'] = 'Göpfritz an der Wild é localizado aonde?'
arquivo['P3R1'] = 'Inglaterra'
arquivo['P3R2'] = 'Emirados Árabes'
arquivo['P3C3'] = 'Áustria'
arquivo['P3R4'] = 'Brasil'
arquivo['P4N1'] = 'Complete: Eu me remexo muito, Eu me remexo muito, Eu me remexo...'
arquivo['P4C1'] = 'Muito!'
arquivo['P4R2'] = 'Pouco!'
arquivo['P4R3'] = 'Nem sempre!'
arquivo['P4R4'] = 'Constantemente!'
arquivo['P5N2'] = 'Nofollow É:'
arquivo['P5R1'] = 'Ato de Não seguir no Twitter'
arquivo['P5R2'] = 'Programa usado para não ter seguidores no Twitter'
arquivo['P5R3'] = 'Uma expressão para não ser seguido no Twitter'
arquivo['P5C4'] = 'Um atributo HTML'
arquivo['P6N2'] = 'No Campeonato Sul-Americano de futebol Sub-19 de 1964, foi consagrado campeão:'
arquivo['P6R1'] = 'Paraguai'
arquivo['P6C2'] = 'Uruguai'
arquivo['P6R3'] = 'Argélia'
arquivo['P6R4'] = 'Argentina'
arquivo['P7N2'] = 'No Filme “Indiana Jones No templo da perdição”, as Pedras de Sankara são:'
arquivo['P7R1'] = 'Artefatos Para Abrir um vórtice temporal'
arquivo['P7R2'] = '500KG de cocaína pasteurizada'
arquivo['P7C3'] = 'Pedras místicas dadas pelo deus hindu Shiva'
arquivo['P7R4'] = 'O nome da pistola usada pelo Han Solo'
arquivo['P8N2'] = 'Em Lajes do Pico, nos Açores, encontra-se o povoado de:'
arquivo['P8R1'] = 'Ilha do Manuel'
arquivo['P8R2'] = 'Ilha do Medo'
arquivo['P8C3'] = 'Ribeira do meio'
arquivo['P8R4'] = 'Lajes de Embaixo'
arquivo['P9N2'] = 'No Concurso Miss Mundo 1975, a ganhadora foi:'
arquivo['P9R1'] = 'Um Travesti Maquiado'
arquivo['P9C2'] = 'Wilnelia Merced Cruz'
arquivo['P9R3'] = 'Kaiane Aldorino'
arquivo['P9R4'] = 'Todas ficavam feias em preto-e-branco'
arquivo['P10N3'] = 'Na ciência da computação, o caractere nulo é um caractere da tabela ASCII que:'
arquivo['P10R1'] = 'Representa o forever alone'
arquivo['P10R2'] = 'Foi o primeiro a ser escrito por Charles Baggage'
arquivo['P10C3'] = 'Representa um espaço vazio'
arquivo['P10R4'] = 'Faz o programa ficar corrompido'
arquivo['P11N3'] = 'Kingdom City:'
arquivo['P11C1'] = 'Uma vila no estado americano de missouri'
arquivo['P11R2'] = 'Uma fase do Sonic'
arquivo['P11R3'] = 'Uma fase do Mário'
arquivo['P11R4'] = 'Um local bonito de se ver'
arquivo['P12N3'] = 'Uma tecnologia de proteção digital para CDs e DVDs É:'
arquivo['P12R1'] = 'K.O.N.F.I.A.N.Ç.A'
arquivo['P12C2'] = 'SecuROM'
arquivo['P12R3'] = 'Fita Crepe'
arquivo['P12R4'] = 'SecuTroll'
arquivo['P13N3'] = 'Um Site que é um MEME:'
arquivo['P13R1'] = 'http://www.zosima.com/'
arquivo['P13R2'] = 'http://www.ufrj.com.org'
arquivo['P13R3'] = 'http://www.trolface.com'
arquivo['P13C4'] = 'http://nyan.cat/'
arquivo['P14N3'] = 'Qual desses animais é vertebrado?'
arquivo['P14R1'] = 'Borboleta'
arquivo['P14R2'] = 'Barata'
arquivo['P14C3'] = 'Jacaré'
arquivo['P14R4'] = 'Minhoca'
arquivo['P15N4'] = 'linha 11 do metro de Moscovo também é referida como:'
arquivo['P15R1'] = 'Трусость и образования'
arquivo['P15R2'] = 'Не инвестировать в возобновляемые'
arquivo['P15R3'] = 'В один прекрасный день мы будем вторглись китайские'
arquivo['P15C4'] = 'Linha Kakhovskaia'
arquivo['P16N4'] = 'O Qutb Minar é o minarete de tijolo mais alto do mundo, exemplo de arquitetura:'
arquivo['P16C1'] = 'Indo-islâmica'
arquivo['P16R2'] = 'De alguém que gostava de empilhar tijolos'
arquivo['P16R3'] = 'Dos primos da áfrica'
arquivo['P16R4'] = 'Cimento Mauá, Melhor não há'
arquivo['P17N4'] = 'Jugular é algo pertecente...'
arquivo['P17C1'] = 'À garganta'
arquivo['P17R2'] = 'Aos pés'
arquivo['P17R3'] = 'Ao peito'
arquivo['P17R4'] = 'Ao vampiro'
arquivo['P18N4'] = 'Que outro nome também pode ser chamado uma farmácia:'
arquivo['P18R1'] = 'Farmacomania'
arquivo['P18R2'] = 'Perfumaria'
arquivo['P18R3'] = 'Remedista'
arquivo['P18C4'] = 'Drogaria'
arquivo['P19N4'] = 'Nos quadrinhos, Rorschach é:'
arquivo['P19R1'] = 'Quem vigia os watchman?'
arquivo['P19R2'] = 'Shang Tsung'
arquivo['P19C3'] = 'Walter Kovacs'
arquivo['P19R4'] = 'Doutor Manhattan'
arquivo['P20N5'] = 'Qual o nome da esposa de kaká, que é pastora da igreja renascer?'
arquivo['P20R1'] = 'Bruxa do 71'
arquivo['P20C2'] = 'Caroline Celico'
arquivo['P20R3'] = 'Gata Boralheira'
arquivo['P20R4'] = 'Gaviã Arqueira'
arquivo['P21N5'] = 'O que significa a expresão “Fogo de palha”?'
arquivo['P21R1'] = 'Fogo Forte'
arquivo['P21C2'] = 'Entusiasmo Passageiro'
arquivo['P21R3'] = 'Fúria repentina'
arquivo['P21R4'] = 'Tristeza Profunda'
arquivo['P22N5'] = ''
arquivo['P22'] = ''
arquivo['P22'] = ''
arquivo['P22'] = ''
arquivo['P22'] = ''
arquivo.close()
# Entry reader (prints the stored entries to the shell)
##entrada = anydbm.open('dados', 'r')
##for q in range(1 , 21):
## Q = 'P%i' %q
## for j in range (1, 5):
## J = Q + 'N%i' %j
## if entrada.has_key(J):
## print entrada[J]
## S = Q +'R'+'%i' %j
## L = Q +'C'+'%i' %j
## if entrada.has_key(L):
## print entrada[L]
##
## if entrada.has_key(S):
## print entrada[S]
##entrada.close()
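# ---------------------------------------------------------------------------
# Illustrative sketch -- not part of the original script. It assumes the key
# scheme used above: 'P<n>N<level>' holds the question text, 'P<n>C<i>' the
# correct answer and 'P<n>R<i>' the wrong answers, for questions 1..21.
# Written for the same Python 2 / anydbm environment as the rest of the file.
# ---------------------------------------------------------------------------
def sortear_pergunta(caminho='dados'):
    """Pick a random question; return (question, correct answer, shuffled options)."""
    base = anydbm.open(caminho, 'r')
    numero = random.randint(1, 21)
    pergunta = correta = None
    opcoes = []
    for nivel in range(1, 6):                       # question text key: P<n>N<level>
        chave = 'P%iN%i' % (numero, nivel)
        if base.has_key(chave):
            pergunta = base[chave]
    for posicao in range(1, 5):                     # answers: P<n>C<pos> / P<n>R<pos>
        if base.has_key('P%iC%i' % (numero, posicao)):
            correta = base['P%iC%i' % (numero, posicao)]
            opcoes.append(correta)
        elif base.has_key('P%iR%i' % (numero, posicao)):
            opcoes.append(base['P%iR%i' % (numero, posicao)])
    base.close()
    random.shuffle(opcoes)
    return pergunta, correta, opcoes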
| fllamber/show_do_milhao | Show_do_milhao/Teste.perguntas.py | Python | unlicense | 5,657 | 0.004908 |
#!/usr/bin/env python
#-*- coding: utf8 -*-
# Copyright 2009-2012 Kamil Winczek <kwinczek@gmail.com>
#
# This file is part of series.py.
#
# series.py is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# series.py is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with series.py. If not, see http://www.gnu.org/licenses/.
import contextlib
import sys
import lxml.etree as etree
import shelve
import subprocess
try:
import urllib2
except ImportError:
    import urllib.request as urllib2  # Python 3 fallback: urlopen lives in urllib.request
import time
import tvs.show
# Spinner implementation.
@contextlib.contextmanager
def spinning_distraction(spin):
if spin:
global p
p = subprocess.Popen(['tvs_spin.py'])
yield
p.terminate()
sys.stdout.write("\r")
sys.stdout.flush()
else:
yield
# --------------------------------------------------------------------- #
# #
# Class Cache #
# #
# --------------------------------------------------------------------- #
class Cache(object):
"""
Cache implementation.
Cache is a wraper class for Show class.
It is capable of retrieving and storing data from tvrage.com.
ttl contains date upto when object is valid.
"""
def __init__(self, keyword, options):
self.keyword = keyword
self.show = None
self.options = options
self.now = time.time()
if self.options.cache:
self.c = shelve.open(self.options.cachefile)
self.i = shelve.open(self.options.cacheids)
self.url_search = "http://services.tvrage.com/feeds/search.php?show=%s" % self.keyword
self.showid = self.__get_show_id()
self.url_full_show = "http://services.tvrage.com/feeds/full_show_info.php?sid=%s" % self.showid
self.show = self.__get_show()
if self.options.debug:
print("Search URL: %s" % self.url_search)
print("Shows full URL: %s" % self.url_full_show)
def __del__(self):
"""If cache was used all files need to be closed."""
if self.options.cache:
self.c.close()
self.i.close()
def __save_id_to_cache(self, showid):
"""Saves retrieved show's id to cache"""
self.i[self.keyword] = showid
def __save_show_to_cache(self, show):
if not show:
return False
# Set TTL, add 12h (43200secs) to current time (12h TTL)
self.c[str(self.showid)] = (self.now+43200, show)
return True
def __get_id_from_cache(self):
try:
return self.i[self.keyword]
except:
return None
def __get_id_from_tvrage(self):
try:
with spinning_distraction(spin=self.options.spinner):
return etree.fromstring(urllib2.urlopen(self.url_search).read()).xpath('//Results/show/showid')[0].text
except KeyboardInterrupt:
raise
except:
return None
def __get_show_from_cache(self):
try:
return self.c[str(self.showid)]
except:
return (None, None)
def __get_show_from_tvrage(self):
try:
with spinning_distraction(spin=self.options.spinner):
return tvs.show.Show(etree.fromstring(urllib2.urlopen(self.url_full_show).read()), self.options)
except KeyboardInterrupt:
raise
except:
return None
def __get_show_id(self):
"""Returns first found id from search list. """
# Try to get id from ids cache file
if self.options.cache and not self.options.refresh:
showid = self.__get_id_from_cache()
if not showid:
showid = self.__get_id_from_tvrage()
if showid:
self.__save_id_to_cache(showid)
return showid
return showid
else:
return showid
elif self.options.refresh:
showid = self.__get_id_from_tvrage()
if showid:
self.__save_id_to_cache(showid)
return showid
elif not self.options.cache:
return self.__get_id_from_tvrage()
else:
showid = self.__get_id_from_tvrage()
if showid:
self.__save_id_to_cache(showid)
return showid
return None
def __get_show(self):
"""Returns show instance with data from tvrage."""
if self.showid == None: # Previously not found show id
return None
if self.options.cache and not self.options.refresh:
ttl, show = self.__get_show_from_cache()
if not ttl and not self.show or ttl < self.now:
show = self.__get_show_from_tvrage()
self.__save_show_to_cache(show)
elif self.options.refresh:
show = self.__get_show_from_tvrage()
self.__save_show_to_cache(show)
# If no cache to be used.
else:
show = self.__get_show_from_tvrage()
return show
def get_show(self):
return self.show
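# ---------------------------------------------------------------------------
# Illustrative usage sketch -- not part of the original module. The option
# attributes (cache, cachefile, cacheids, refresh, debug, spinner) are the
# ones this class reads; the values chosen below are assumptions, and the
# lookup only succeeds while the tvrage.com feeds are reachable.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import argparse

    opts = argparse.Namespace(
        cache=False,            # skip the shelve-based cache entirely
        cachefile='shows.cache',
        cacheids='ids.cache',
        refresh=False,
        debug=True,
        spinner=False,          # do not spawn the external tvs_spin.py helper
    )
    show = Cache('house', opts).get_show()
    if show is None:
        print('Show not found (or tvrage.com unreachable).')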
| kwinczek/tvseries | tvs/cache.py | Python | gpl-2.0 | 5,742 | 0.003657 |
#
# Copyright 2013 ZHAW SoE
# Copyright 2014 Intel Corp.
#
# Authors: Lucas Graf <graflu0@students.zhaw.ch>
# Toni Zehnder <zehndton@students.zhaw.ch>
# Lianhao Lu <lianhao.lu@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometer.hardware import plugin
from ceilometer.hardware.pollsters import util
from ceilometer import sample
class _Base(plugin.HardwarePollster):
CACHE_KEY = 'cpu'
def generate_one_sample(self, host, c_data):
value, metadata, extra = c_data
return util.make_sample_from_host(host,
name=self.IDENTIFIER,
sample_type=sample.TYPE_GAUGE,
unit='process',
volume=value,
res_metadata=metadata,
extra=extra)
class CPULoad1MinPollster(_Base):
IDENTIFIER = 'cpu.load.1min'
class CPULoad5MinPollster(_Base):
IDENTIFIER = 'cpu.load.5min'
class CPULoad15MinPollster(_Base):
IDENTIFIER = 'cpu.load.15min'
| ChinaMassClouds/copenstack-server | openstack/src/ceilometer-2014.2.2/ceilometer/hardware/pollsters/cpu.py | Python | gpl-2.0 | 1,650 | 0 |
'''
Created on Jun 8, 2015
@author: cliff
'''
CAR_SYMBOLS = ['Q', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L']
TRUCK_SYMBOLS = ['T', 'R', 'W', 'Z']
CAR_COLORS_NO_HASH = ['7FFF00', '7FFFD4', 'D2691E', '8B008B', 'BDB76B',\
'8B0000', 'FF1493', '1E90FF', 'FFD700', 'ADFF2F', \
'CD5C5C', 'F0E68C']
CAR_COLORS_WITH_HASH = ['#'+x for x in CAR_COLORS_NO_HASH]
TRUCK_COLORS_NO_HASH = ['F08080', 'FFA07A', 'FF00FF', '00FA9A']
TRUCK_COLORS_WITH_HASH = ['#'+x for x in TRUCK_COLORS_NO_HASH]
RED_COLOR_WITH_HASH = '#FF0000'
RED_COLOR_NO_HASH = 'FF0000'
RED_SYMBOL = 'X'
BLANK_COLOR_WITH_HASH = "#E6E6E6"
BLANK_COLOR_NO_HASH = "E6E6E6"
# Topology Values
EMPTY = '000'
ONE_CAR = '001'
TWO_CAR = '010'
THREE_CAR = '011'
ONE_TRUCK = '100'
TWO_TRUCK = '110'
ONE_CAR_ONE_TRUCK = '101'
ONE_TRUCK_ONE_CAR = '111'
# relabeling: 2018-08-01
# for numpy implementation, want to use matrix math. Need to contrive values such that
# for z in values, x + y = z if and only if x or y = 0.
BLANK_SPACE = '000'
HORIZONTAL_CAR = '010'
HORIZONTAL_TRUCK = '100'
VERTICAL_CAR = '011'
VERTICAL_TRUCK = '101'
blank = 0
vcar = 4
vtruck = 5
hcar = 6
htruck = 7
# Relabeling These 2017-08-28
# Coding Scheme:
# 3-bits: x y z
# x - orientation (0 = horizontal, 1 = vertical)
# y - Truck Bit (0 = Not Truck, 1 = Truck )
# z - Car Bit (0 = Not Car, 1 = car)
# 000 - Horizontal, Not Car, Not Truck (i.e. Empty Space)
# BLANK_SPACE = '000'
# HORIZONTAL_CAR = '001'
# HORIZONTAL_TRUCK = '010'
# VERTICAL_CAR = '101'
# VERTICAL_TRUCK = '110'
# Given dependencies throughout the code base. Keeping a copy of pre-2018-08-28 values
#BLANK_SPACE = '000'
#VERTICAL_CAR = '001'
#VERTICAL_TRUCK = '010'
#HORIZONTAL_CAR = '011'
#HORIZONTAL_TRUCK = '100'
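# ---------------------------------------------------------------------------
# Illustrative sketch of the matrix-math property described above: with the
# integer codes blank=0, vcar=4, vtruck=5, hcar=6, htruck=7, the sum of any
# two non-blank codes is >= 8 and therefore never a valid code, so adding two
# board matrices exposes overlapping pieces. Board size and piece placement
# below are assumptions for the demonstration only.
# ---------------------------------------------------------------------------
def _demo_overlap_check():
    import numpy as np

    valid_codes = [blank, vcar, vtruck, hcar, htruck]
    board_a = np.zeros((6, 6), dtype=int)
    board_b = np.zeros((6, 6), dtype=int)
    board_a[2, 0:2] = hcar      # horizontal car on row 2, columns 0-1
    board_b[0:3, 1] = vtruck    # vertical truck in column 1, rows 0-2

    summed = board_a + board_b
    overlap = ~np.isin(summed, valid_codes)
    return overlap.any()        # True here: the two pieces collide at (2, 1)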
| crhaithcock/RushHour | Analytics/shared_code/RHconstants.py | Python | cc0-1.0 | 1,791 | 0.007259 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensor utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
__all__ = [
'assert_same_float_dtype',
'assert_scalar_int',
'convert_to_tensor_or_sparse_tensor',
'is_tensor',
'reduce_sum_n',
'with_shape',
'with_same_shape']
def _assert_same_base_type(items, expected_type=None):
r"""Asserts all items are of the same base type.
Args:
items: List of graph items (e.g., `Variable`, `Tensor`, `SparseTensor`,
`Operation`, or `IndexedSlices`). Can include `None` elements, which
will be ignored.
expected_type: Expected type. If not specified, assert all items are
of the same base type.
Returns:
Validated type, or none if neither expected_type nor items provided.
Raises:
ValueError: If any types do not match.
"""
original_item_str = None
for item in items:
if item is not None:
item_type = item.dtype.base_dtype
if not expected_type:
expected_type = item_type
original_item_str = item.name if hasattr(item, 'name') else str(item)
elif expected_type != item_type:
raise ValueError('%s, type=%s, must be of the same type (%s)%s.' % (
item.name if hasattr(item, 'name') else str(item),
item_type, expected_type,
(' as %s' % original_item_str) if original_item_str else ''))
return expected_type
def assert_same_float_dtype(tensors=None, dtype=None):
"""Validate and return float type based on `tensors` and `dtype`.
For ops such as matrix multiplication, inputs and weights must be of the
same float type. This function validates that all `tensors` are the same type,
validates that type is `dtype` (if supplied), and returns the type. Type must
be `dtypes.float32` or `dtypes.float64`. If neither `tensors` nor
`dtype` is supplied, default to `dtypes.float32`.
Args:
tensors: Tensors of input values. Can include `None` elements, which will be
ignored.
dtype: Expected type.
Returns:
Validated type.
Raises:
ValueError: if neither `tensors` nor `dtype` is supplied, or result is not
float.
"""
if tensors:
dtype = _assert_same_base_type(tensors, dtype)
if not dtype:
dtype = dtypes.float32
elif not dtype.is_floating:
raise ValueError('Expected float, got %s.' % dtype)
return dtype
def assert_scalar_int(tensor):
"""Assert `tensor` is 0-D, of type `tf.int32` or `tf.int64`.
Args:
tensor: Tensor to test.
Returns:
`tensor`, for chaining.
Raises:
ValueError: if `tensor` is not 0-D, of type `tf.int32` or `tf.int64`.
"""
data_type = tensor.dtype
if data_type.base_dtype not in [dtypes.int32, dtypes.int64]:
raise ValueError('Unexpected type %s for %s.' % (data_type, tensor.name))
shape = tensor.get_shape()
if shape.ndims != 0:
raise ValueError('Unexpected shape %s for %s.' % (shape, tensor.name))
return tensor
def reduce_sum_n(tensors, name=None):
"""Reduce tensors to a scalar sum.
This reduces each tensor in `tensors` to a scalar via `tf.reduce_sum`, then
adds them via `tf.add_n`.
Args:
tensors: List of tensors, all of the same numeric type.
name: Tensor name, and scope for all other ops.
Returns:
Total loss tensor, or None if no losses have been configured.
Raises:
    ValueError: if `tensors` is missing or empty.
"""
if not tensors:
raise ValueError('No tensors provided.')
tensors = [math_ops.reduce_sum(t, name='%s/sum' % t.op.name) for t in tensors]
if len(tensors) == 1:
return tensors[0]
with ops.name_scope(name, 'reduce_sum_n', tensors) as scope:
return math_ops.add_n(tensors, name=scope)
def _all_equal(tensor0, tensor1):
with ops.name_scope('all_equal', values=[tensor0, tensor1]) as scope:
return math_ops.reduce_all(
math_ops.equal(tensor0, tensor1, name='equal'), name=scope)
def _is_rank(expected_rank, actual_tensor):
"""Returns whether actual_tensor's rank is expected_rank.
Args:
expected_rank: Integer defining the expected rank, or tensor of same.
actual_tensor: Tensor to test.
Returns:
New tensor.
"""
with ops.name_scope('is_rank', values=[actual_tensor]) as scope:
expected = ops.convert_to_tensor(expected_rank, name='expected')
actual = array_ops.rank(actual_tensor, name='actual')
return math_ops.equal(expected, actual, name=scope)
def _is_shape(expected_shape, actual_tensor, actual_shape=None):
"""Returns whether actual_tensor's shape is expected_shape.
Args:
expected_shape: Integer list defining the expected shape, or tensor of same.
actual_tensor: Tensor to test.
actual_shape: Shape of actual_tensor, if we already have it.
Returns:
New tensor.
"""
with ops.name_scope('is_shape', values=[actual_tensor]) as scope:
is_rank = _is_rank(array_ops.size(expected_shape), actual_tensor)
if actual_shape is None:
actual_shape = array_ops.shape(actual_tensor, name='actual')
shape_equal = _all_equal(
ops.convert_to_tensor(expected_shape, name='expected'),
actual_shape)
return math_ops.logical_and(is_rank, shape_equal, name=scope)
def _assert_shape_op(expected_shape, actual_tensor):
"""Asserts actual_tensor's shape is expected_shape.
Args:
expected_shape: List of integers defining the expected shape, or tensor of
same.
actual_tensor: Tensor to test.
Returns:
New assert tensor.
"""
with ops.name_scope('assert_shape', values=[actual_tensor]) as scope:
actual_shape = array_ops.shape(actual_tensor, name='actual')
is_shape = _is_shape(expected_shape, actual_tensor, actual_shape)
return control_flow_ops.Assert(
is_shape, [
'Wrong shape for %s [expected] [actual].' % actual_tensor.name,
expected_shape,
actual_shape
], name=scope)
def with_same_shape(expected_tensor, tensor):
"""Assert tensors are the same shape, from the same graph.
Args:
expected_tensor: Tensor with expected shape.
tensor: Tensor of actual values.
Returns:
Tuple of (actual_tensor, label_tensor), possibly with assert ops added.
"""
with ops.name_scope('%s/' % tensor.op.name, values=[expected_tensor, tensor]):
tensor_shape = expected_tensor.get_shape()
expected_shape = (
tensor_shape.as_list() if tensor_shape.is_fully_defined()
else array_ops.shape(expected_tensor, name='expected_shape'))
return with_shape(expected_shape, tensor)
def is_tensor(x):
"""Check for tensor types.
Check whether an object is a tensor. Equivalent to
`isinstance(x, [tf.Tensor, tf.SparseTensor, tf.Variable])`.
Args:
x: An python object to check.
Returns:
`True` if `x` is a tensor, `False` if not.
"""
tensor_types = (ops.Tensor, ops.SparseTensor, variables.Variable)
return isinstance(x, tensor_types)
def with_shape(expected_shape, tensor):
"""Asserts tensor has expected shape.
If tensor shape and expected_shape, are fully defined, assert they match.
Otherwise, add assert op that will validate the shape when tensor is
evaluated, and set shape on tensor.
Args:
expected_shape: Expected shape to assert, as a 1D array of ints, or tensor
of same.
tensor: Tensor whose shape we're validating.
Returns:
tensor, perhaps with a dependent assert operation.
Raises:
ValueError: if tensor has an invalid shape.
"""
if isinstance(tensor, ops.SparseTensor):
raise ValueError('SparseTensor not supported.')
# Shape type must be 1D int32.
if is_tensor(expected_shape):
if expected_shape.dtype.base_dtype != dtypes.int32:
raise ValueError(
'Invalid dtype %s for shape %s expected of tensor %s.' % (
expected_shape.dtype, expected_shape, tensor.name))
if isinstance(expected_shape, (list, tuple)):
if not expected_shape:
expected_shape = np.asarray([], dtype=np.int32)
else:
np_expected_shape = np.asarray(expected_shape)
expected_shape = (
np.asarray(expected_shape, dtype=np.int32)
if np_expected_shape.dtype == np.int64 else np_expected_shape)
if isinstance(expected_shape, np.ndarray):
if expected_shape.ndim > 1:
raise ValueError(
'Invalid rank %s for shape %s expected of tensor %s.' % (
expected_shape.ndim, expected_shape, tensor.name))
if expected_shape.dtype != np.int32:
raise ValueError(
'Invalid dtype %s for shape %s expected of tensor %s.' % (
expected_shape.dtype, expected_shape, tensor.name))
actual_shape = tensor.get_shape()
if not actual_shape.is_fully_defined() or is_tensor(expected_shape):
with ops.name_scope('%s/' % tensor.op.name, values=[tensor]):
if not is_tensor(expected_shape) and (len(expected_shape) < 1):
# TODO(irving): Remove scalar special case
return array_ops.reshape(tensor, [])
with ops.control_dependencies([_assert_shape_op(expected_shape, tensor)]):
result = array_ops.identity(tensor)
if not is_tensor(expected_shape):
result.set_shape(expected_shape)
return result
if (not is_tensor(expected_shape) and
not actual_shape.is_compatible_with(expected_shape)):
if (len(expected_shape) < 1) and actual_shape.is_compatible_with([1]):
# TODO(irving): Remove scalar special case.
with ops.name_scope('%s/' % tensor.op.name, values=[tensor]):
return array_ops.reshape(tensor, [])
raise ValueError('Invalid shape for tensor %s, expected %s, got %s.' % (
tensor.name, expected_shape, actual_shape))
return tensor
def convert_to_tensor_or_sparse_tensor(
value, dtype=None, name=None, as_ref=False):
"""Converts value to a `SparseTensor` or `Tensor`.
Args:
value: A `SparseTensor`, `SparseTensorValue`, or an object whose type has a
registered `Tensor` conversion function.
dtype: Optional element type for the returned tensor. If missing, the
type is inferred from the type of `value`.
name: Optional name to use if a new `Tensor` is created.
as_ref: True if we want the result as a ref tensor. Only used if a new
`Tensor` is created.
Returns:
A `SparseTensor` or `Tensor` based on `value`.
Raises:
RuntimeError: If result type is incompatible with `dtype`.
"""
if dtype is not None:
dtype = dtypes.as_dtype(dtype)
if isinstance(value, ops.SparseTensorValue):
value = ops.SparseTensor.from_value(value)
if isinstance(value, ops.SparseTensor):
if dtype and not dtype.is_compatible_with(value.dtype):
raise RuntimeError(
'Sparse dtype: requested = %s, actual = %s' % (
dtype.name, value.dtype.name))
return value
return ops.convert_to_tensor(value, dtype=dtype, name=name, as_ref=as_ref)
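# ---------------------------------------------------------------------------
# Illustrative sketch -- not part of the original module. It exercises a few
# of the helpers above with TF1-era graph construction and assumes a
# TensorFlow build in which this contrib module still imports.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
  from tensorflow.python.framework import constant_op

  with ops.Graph().as_default():
    a = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
    b = constant_op.constant([0.5, 0.5])
    # Both tensors are float32, so this validates and returns dtypes.float32.
    common_dtype = assert_same_float_dtype([a, b])
    # Adds a reduce_sum node per tensor, then a single add_n node.
    total = reduce_sum_n([a, b], name='total')
    # The static shape is fully defined and matches, so `a` is returned as-is.
    checked = with_shape([2, 2], a)
    print(common_dtype, total.op.name, checked.get_shape())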
| naturali/tensorflow | tensorflow/contrib/framework/python/framework/tensor_util.py | Python | apache-2.0 | 11,844 | 0.006079 |
# coding: utf-8
# Copyright 2014 Globo.com Player authors. All rights reserved.
# Use of this source code is governed by a MIT License
# license that can be found in the LICENSE file.
import sys
PYTHON_MAJOR_VERSION = sys.version_info
import os
import posixpath
try:
import urlparse as url_parser
import urllib2
urlopen = urllib2.urlopen
except ImportError:
import urllib.parse as url_parser
from urllib.request import urlopen as url_opener
urlopen = url_opener
from m3u8.model import M3U8, Playlist, IFramePlaylist, Media, Segment
from m3u8.parser import parse, is_url, ParseError
__all__ = ('M3U8', 'Playlist', 'IFramePlaylist', 'Media',
'Segment', 'loads', 'load', 'parse', 'ParseError')
def loads(content):
'''
Given a string with a m3u8 content, returns a M3U8 object.
Raises ValueError if invalid content
'''
return M3U8(content)
def load(uri):
'''
Retrieves the content from a given URI and returns a M3U8 object.
Raises ValueError if invalid content or IOError if request fails.
'''
if is_url(uri):
return _load_from_uri(uri)
else:
return _load_from_file(uri)
# Support for python3 inspired by https://github.com/szemtiv/m3u8/
def _load_from_uri(uri):
resource = urlopen(uri)
base_uri = _parsed_url(_url_for(uri))
if PYTHON_MAJOR_VERSION < (3,):
content = _read_python2x(resource)
else:
content = _read_python3x(resource)
return M3U8(content, base_uri=base_uri)
def _url_for(uri):
return urlopen(uri).geturl()
def _parsed_url(url):
parsed_url = url_parser.urlparse(url)
prefix = parsed_url.scheme + '://' + parsed_url.netloc
base_path = posixpath.normpath(parsed_url.path + '/..')
return url_parser.urljoin(prefix, base_path)
def _read_python2x(resource):
return resource.read().strip()
def _read_python3x(resource):
return resource.read().decode(resource.headers.get_content_charset(failobj="utf-8"))
def _load_from_file(uri):
with open(uri) as fileobj:
raw_content = fileobj.read().strip()
base_uri = os.path.dirname(uri)
return M3U8(raw_content, base_uri=base_uri)
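# ---------------------------------------------------------------------------
# Illustrative usage sketch -- not part of the original module. The playlist
# text is a minimal hand-written example, not taken from a real stream.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    SAMPLE_PLAYLIST = (
        "#EXTM3U\n"
        "#EXT-X-TARGETDURATION:10\n"
        "#EXTINF:9.0,\n"
        "segment-0.ts\n"
        "#EXTINF:9.0,\n"
        "segment-1.ts\n"
        "#EXT-X-ENDLIST\n"
    )
    playlist = loads(SAMPLE_PLAYLIST)          # parse from an in-memory string
    for segment in playlist.segments:          # Segment objects from m3u8.model
        print("%s (%.1f s)" % (segment.uri, segment.duration))
    # load('http://example.com/stream.m3u8') would fetch and parse a remote URI.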
| cristina0botez/m3u8 | m3u8/__init__.py | Python | mit | 2,171 | 0.006909 |
import unittest
from apel.db.records import Record, InvalidRecordException
class RecordTest(unittest.TestCase):
'''
Test case for Record
'''
# test for public interface
def test_set_field(self):
record = Record()
self.assertRaises(InvalidRecordException,
record.set_field, 'Test', 'value')
record._db_fields = ['Test']
record.set_field('Test', 'value')
self.assertEqual(record._record_content['Test'], 'value')
def test_set_all(self):
record = Record()
self.assertRaises(InvalidRecordException,
record.set_all, {'Test':'value'})
record._db_fields = ['Test']
record.set_all({'Test':'value'})
self.assertEqual(record._record_content['Test'], 'value')
def test_get_field(self):
record = Record()
record._db_fields = ['Test']
record._record_content['Test'] = 'value'
self.assertEqual(record.get_field('Test'), 'value')
if __name__ == '__main__':
unittest.main()
| tofu-rocketry/apel | test/test_record.py | Python | apache-2.0 | 1,067 | 0.004686 |
# -*- coding: utf-8 -*-
#
# This file is distributed under MIT License or default open-tamil license.
# (C) 2013-2015 Muthiah Annamalai
#
# This file is part of 'open-tamil' examples
# It can be used to identify patterns in a Tamil text files;
# e.g. it has been used to identify patterns in Tamil Wikipedia
# articles.
#
from __future__ import print_function
import tamil
import sys
import codecs
from transliterate import *
import re
from functools import cmp_to_key
import operator
PYTHON3 = sys.version[0] > '2'
if not PYTHON3:
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# use generators for better memory footprint -- 04/04/15
class WordFrequency(object):
# get words
@staticmethod
def get_tamil_words_iterable( letters ):
""" given a list of UTF-8 letters section them into words, grouping them at spaces """
#punctuations = u'-,+,/,*,>,<,_,],[,{,},(,)'.split(',')+[',']
#isspace_or_tamil = lambda x: not x in punctuations and tamil.utf8.istamil(x)
# correct algorithm for get-tamil-words
buf = []
for idx,let in enumerate(letters):
if tamil.utf8.istamil( let ):
buf.append( let )
else:
if len(buf) > 0:
yield u"".join( buf )
buf = []
if len(buf) > 0:
yield u"".join(buf)
# sentinel
def __init__(self,tatext=u''):
object.__init__(self)
self.frequency = {}
# process data
def process(self,new_text):
for taline in new_text.split(u"\n"):
self.tamil_words_process( taline )
return
# finalize
def display(self):
self.print_tamil_words( )
return
# processor / core
def tamil_words_process( self, taline ):
taletters = tamil.utf8.get_letters_iterable(taline)
# raw words
#for word in re.split(u"\s+",tatext):
# print(u"-> ",word)
# tamil words only
for pos,word in enumerate(WordFrequency.get_tamil_words_iterable(taletters)):
if len(word) < 1:
continue
self.frequency[word] = 1 + self.frequency.get(word,0)
return
# closer/results
def print_tamil_words(self):
# sort words by descending order of occurence
print(u"# unique words = %d"%(len(self.frequency)))
for l in sorted(self.frequency.items(), key=operator.itemgetter(1)):
print( l[0],':',l[1])
print(u"#"*80)
print(u"# sorted in Tamil order")
for l in sorted(self.frequency.keys(), key=cmp_to_key(tamil.utf8.compare_words_lexicographic)):
print( l,':',self.frequency[l])
return
# driver
def demo_tamil_text_filter( file_urls ):
#url = u"../tawiki-20150316-all-titles"
if not type(file_urls) is list:
file_urls = [file_urls]
obj = WordFrequency( )
for filepath in file_urls:
try:
tatext = codecs.open(filepath,'r','utf-8').read()
obj.process(tatext)
except Exception as e:
sys.stderr.write("Skipping the file :"+filepath+" due to exception\n\t\t " + str(e)+"\n")
obj.display()
return obj
if __name__ == u"__main__":
if len(sys.argv) < 2:
print("usage: python solpattiyal.py <filename>")
print(" this command shows list of unique words in Tamil and their frequencies in document(s);")
print(" it also relists the words in the sorted order")
sys.exit(-1)
demo_tamil_text_filter(sys.argv[1:])
| tshrinivasan/open-tamil | examples/solpattiyal.py | Python | mit | 3,639 | 0.016213 |
## dea_spatialtools.py
'''
Description: This file contains a set of python functions for conducting
spatial analyses on Digital Earth Australia data.
License: The code in this notebook is licensed under the Apache License,
Version 2.0 (https://www.apache.org/licenses/LICENSE-2.0). Digital Earth
Australia data is licensed under the Creative Commons by Attribution 4.0
license (https://creativecommons.org/licenses/by/4.0/).
Contact: If you need assistance, please post a question on the Open Data
Cube Slack channel (http://slack.opendatacube.org/) or on the GIS Stack
Exchange (https://gis.stackexchange.com/questions/ask?tags=open-data-cube)
using the `open-data-cube` tag (you can view previously asked questions
here: https://gis.stackexchange.com/questions/tagged/open-data-cube).
If you would like to report an issue with this script, file one on
Github: https://github.com/GeoscienceAustralia/dea-notebooks/issues/new
Functions included:
xr_vectorize
xr_rasterize
subpixel_contours
interpolate_2d
contours_to_array
largest_region
transform_geojson_wgs_to_epsg
zonal_stats_parallel
Last modified: November 2020
'''
# Import required packages
import collections
import numpy as np
import xarray as xr
import geopandas as gpd
import rasterio.features
import scipy.interpolate
from scipy import ndimage as nd
from skimage.measure import label
from rasterstats import zonal_stats
from skimage.measure import find_contours
from datacube.utils.cog import write_cog
from datacube.helpers import write_geotiff
from datacube.utils.geometry import assign_crs
from datacube.utils.geometry import CRS, Geometry
from shapely.geometry import LineString, MultiLineString, shape
def xr_vectorize(da,
attribute_col='attribute',
transform=None,
crs=None,
dtype='float32',
export_shp=False,
verbose=False,
**rasterio_kwargs):
"""
Vectorises a xarray.DataArray into a geopandas.GeoDataFrame.
Parameters
----------
da : xarray dataarray or a numpy ndarray
attribute_col : str, optional
Name of the attribute column in the resulting geodataframe.
Values of the raster object converted to polygons will be
assigned to this column. Defaults to 'attribute'.
transform : affine.Affine object, optional
An affine.Affine object (e.g. `from affine import Affine;
        Affine(30.0, 0.0, 548040.0, 0.0, -30.0, 6886890.0)`) giving the
affine transformation used to convert raster coordinates
(e.g. [0, 0]) to geographic coordinates. If none is provided,
the function will attempt to obtain an affine transformation
from the xarray object (e.g. either at `da.transform` or
`da.geobox.transform`).
crs : str or CRS object, optional
An EPSG string giving the coordinate system of the array
(e.g. 'EPSG:3577'). If none is provided, the function will
attempt to extract a CRS from the xarray object's `crs`
attribute.
dtype : str, optional
Data type must be one of int16, int32, uint8, uint16,
or float32
export_shp : Boolean or string path, optional
To export the output vectorised features to a shapefile, supply
        an output path (e.g. 'output_dir/output.shp'). The default is
False, which will not write out a shapefile.
verbose : bool, optional
Print debugging messages. Default False.
**rasterio_kwargs :
A set of keyword arguments to rasterio.features.shapes
Can include `mask` and `connectivity`.
Returns
-------
gdf : Geopandas GeoDataFrame
"""
# Check for a crs object
try:
crs = da.crs
except:
if crs is None:
raise Exception("Please add a `crs` attribute to the "
"xarray.DataArray, or provide a CRS using the "
"function's `crs` parameter (e.g. 'EPSG:3577')")
# Check if transform is provided as a xarray.DataArray method.
# If not, require supplied Affine
if transform is None:
try:
# First, try to take transform info from geobox
transform = da.geobox.transform
# If no geobox
except:
try:
# Try getting transform from 'transform' attribute
transform = da.transform
except:
# If neither of those options work, raise an exception telling the
# user to provide a transform
raise TypeError("Please provide an Affine transform object using the "
"`transform` parameter (e.g. `from affine import "
"Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "
"6886890.0)`")
# Check to see if the input is a numpy array
if type(da) is np.ndarray:
vectors = rasterio.features.shapes(source=da.astype(dtype),
transform=transform,
**rasterio_kwargs)
else:
# Run the vectorizing function
vectors = rasterio.features.shapes(source=da.data.astype(dtype),
transform=transform,
**rasterio_kwargs)
# Convert the generator into a list
vectors = list(vectors)
# Extract the polygon coordinates and values from the list
polygons = [polygon for polygon, value in vectors]
values = [value for polygon, value in vectors]
# Convert polygon coordinates into polygon shapes
polygons = [shape(polygon) for polygon in polygons]
# Create a geopandas dataframe populated with the polygon shapes
gdf = gpd.GeoDataFrame(data={attribute_col: values},
geometry=polygons,
crs={'init': str(crs)})
# If a file path is supplied, export a shapefile
if export_shp:
gdf.to_file(export_shp)
return gdf
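# Minimal usage sketch for `xr_vectorize` on a plain numpy mask. The array
# contents, pixel size, origin and EPSG code below are arbitrary placeholder
# assumptions chosen only to illustrate the call signature; this helper is not
# invoked anywhere in this module.
def _example_xr_vectorize():
    from affine import Affine
    # 4 x 4 binary mask with a 2 x 2 block of ones in the upper-left corner
    mask = np.zeros((4, 4), dtype='int16')
    mask[:2, :2] = 1
    # 30 m pixels at an assumed Australian Albers origin
    transform = Affine(30.0, 0.0, 548040.0, 0.0, -30.0, 6886890.0)
    # Returns one polygon per contiguous region, with the raster value stored
    # in the 'value' column
    return xr_vectorize(mask,
                        attribute_col='value',
                        transform=transform,
                        crs='EPSG:3577',
                        dtype='int16')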
def xr_rasterize(gdf,
da,
attribute_col=False,
crs=None,
transform=None,
name=None,
x_dim='x',
y_dim='y',
export_tiff=None,
verbose=False,
**rasterio_kwargs):
"""
Rasterizes a geopandas.GeoDataFrame into an xarray.DataArray.
Parameters
----------
gdf : geopandas.GeoDataFrame
A geopandas.GeoDataFrame object containing the vector/shapefile
data you want to rasterise.
da : xarray.DataArray or xarray.Dataset
The shape, coordinates, dimensions, and transform of this object
are used to build the rasterized shapefile. It effectively
provides a template. The attributes of this object are also
appended to the output xarray.DataArray.
attribute_col : string, optional
Name of the attribute column in the geodataframe that the pixels
in the raster will contain. If set to False, output will be a
boolean array of 1's and 0's.
crs : str, optional
CRS metadata to add to the output xarray. e.g. 'epsg:3577'.
        The function will attempt to get this info from the input
GeoDataFrame first.
transform : affine.Affine object, optional
An affine.Affine object (e.g. `from affine import Affine;
        Affine(30.0, 0.0, 548040.0, 0.0, -30.0, 6886890.0)` giving the
affine transformation used to convert raster coordinates
(e.g. [0, 0]) to geographic coordinates. If none is provided,
the function will attempt to obtain an affine transformation
from the xarray object (e.g. either at `da.transform` or
`da.geobox.transform`).
x_dim : str, optional
An optional string allowing you to override the xarray dimension
used for x coordinates. Defaults to 'x'. Useful, for example,
        if x and y dims are instead called 'lat' and 'lon'.
y_dim : str, optional
An optional string allowing you to override the xarray dimension
used for y coordinates. Defaults to 'y'. Useful, for example,
        if x and y dims are instead called 'lat' and 'lon'.
export_tiff: str, optional
        If a filepath is provided (e.g. 'output/output.tif'), will export a
        geotiff file. A named array is required for this operation; if one
        is not supplied by the user, a default name, 'data', is used.
verbose : bool, optional
Print debugging messages. Default False.
**rasterio_kwargs :
A set of keyword arguments to rasterio.features.rasterize
Can include: 'all_touched', 'merge_alg', 'dtype'.
Returns
-------
xarr : xarray.DataArray
"""
# Check for a crs object
try:
crs = da.geobox.crs
except:
try:
crs = da.crs
except:
if crs is None:
raise ValueError("Please add a `crs` attribute to the "
"xarray.DataArray, or provide a CRS using the "
"function's `crs` parameter (e.g. crs='EPSG:3577')")
# Check if transform is provided as a xarray.DataArray method.
# If not, require supplied Affine
if transform is None:
try:
# First, try to take transform info from geobox
transform = da.geobox.transform
# If no geobox
except:
try:
# Try getting transform from 'transform' attribute
transform = da.transform
except:
# If neither of those options work, raise an exception telling the
# user to provide a transform
raise TypeError("Please provide an Affine transform object using the "
"`transform` parameter (e.g. `from affine import "
"Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "
"6886890.0)`")
# Grab the 2D dims (not time)
try:
dims = da.geobox.dims
except:
dims = y_dim, x_dim
# Coords
xy_coords = [da[dims[0]], da[dims[1]]]
# Shape
try:
y, x = da.geobox.shape
except:
y, x = len(xy_coords[0]), len(xy_coords[1])
# Reproject shapefile to match CRS of raster
if verbose:
print(f'Rasterizing to match xarray.DataArray dimensions ({y}, {x})')
try:
gdf_reproj = gdf.to_crs(crs=crs)
except:
# Sometimes the crs can be a datacube utils CRS object
# so convert to string before reprojecting
gdf_reproj = gdf.to_crs(crs={'init': str(crs)})
# If an attribute column is specified, rasterise using vector
# attribute values. Otherwise, rasterise into a boolean array
if attribute_col:
# Use the geometry and attributes from `gdf` to create an iterable
shapes = zip(gdf_reproj.geometry, gdf_reproj[attribute_col])
else:
# Use geometry directly (will produce a boolean numpy array)
shapes = gdf_reproj.geometry
# Rasterise shapes into an array
arr = rasterio.features.rasterize(shapes=shapes,
out_shape=(y, x),
transform=transform,
**rasterio_kwargs)
# Convert result to a xarray.DataArray
xarr = xr.DataArray(arr,
coords=xy_coords,
dims=dims,
attrs=da.attrs,
name=name if name else None)
# Add back crs if xarr.attrs doesn't have it
if xarr.geobox is None:
xarr = assign_crs(xarr, str(crs))
if export_tiff:
if verbose:
print(f"Exporting GeoTIFF to {export_tiff}")
write_cog(xarr,
export_tiff,
overwrite=True)
return xarr
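# Minimal usage sketch for `xr_rasterize`, assuming the datacube environment
# implied by this module's imports. The template grid, polygon extent and
# EPSG code are arbitrary placeholder assumptions; this helper is not invoked
# anywhere in this module.
def _example_xr_rasterize():
    from affine import Affine
    from shapely.geometry import box
    # Template array: 10 x 10 grid of 30 m pixels at an assumed origin,
    # with coordinates on the pixel centres
    transform = Affine(30.0, 0.0, 0.0, 0.0, -30.0, 300.0)
    template = xr.DataArray(np.zeros((10, 10)),
                            coords={'y': 300.0 - 30.0 * (np.arange(10) + 0.5),
                                    'x': 30.0 * (np.arange(10) + 0.5)},
                            dims=['y', 'x'])
    # A single polygon covering roughly the lower-left quarter of the grid
    gdf = gpd.GeoDataFrame({'geometry': [box(0.0, 0.0, 150.0, 150.0)]},
                           crs='EPSG:3577')
    # Produces a 0/1 raster mask on the template grid
    return xr_rasterize(gdf, template, transform=transform, crs='EPSG:3577')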
def subpixel_contours(da,
z_values=[0.0],
crs=None,
affine=None,
attribute_df=None,
output_path=None,
min_vertices=2,
dim='time',
errors='ignore',
verbose=False):
"""
Uses `skimage.measure.find_contours` to extract multiple z-value
contour lines from a two-dimensional array (e.g. multiple elevations
from a single DEM), or one z-value for each array along a specified
dimension of a multi-dimensional array (e.g. to map waterlines
across time by extracting a 0 NDWI contour from each individual
timestep in an xarray timeseries).
Contours are returned as a geopandas.GeoDataFrame with one row per
z-value or one row per array along a specified dimension. The
`attribute_df` parameter can be used to pass custom attributes
to the output contour features.
Last modified: November 2020
Parameters
----------
da : xarray DataArray
A two-dimensional or multi-dimensional array from which
contours are extracted. If a two-dimensional array is provided,
the analysis will run in 'single array, multiple z-values' mode
which allows you to specify multiple `z_values` to be extracted.
If a multi-dimensional array is provided, the analysis will run
in 'single z-value, multiple arrays' mode allowing you to
extract contours for each array along the dimension specified
by the `dim` parameter.
z_values : int, float or list of ints, floats
An individual z-value or list of multiple z-values to extract
from the array. If operating in 'single z-value, multiple
arrays' mode specify only a single z-value.
crs : string or CRS object, optional
An EPSG string giving the coordinate system of the array
(e.g. 'EPSG:3577'). If none is provided, the function will
attempt to extract a CRS from the xarray object's `crs`
attribute.
affine : affine.Affine object, optional
An affine.Affine object (e.g. `from affine import Affine;
        Affine(30.0, 0.0, 548040.0, 0.0, -30.0, 6886890.0)` giving the
affine transformation used to convert raster coordinates
(e.g. [0, 0]) to geographic coordinates. If none is provided,
the function will attempt to obtain an affine transformation
from the xarray object (e.g. either at `da.transform` or
`da.geobox.transform`).
output_path : string, optional
The path and filename for the output shapefile.
attribute_df : pandas.Dataframe, optional
A pandas.Dataframe containing attributes to pass to the output
contour features. The dataframe must contain either the same
number of rows as supplied `z_values` (in 'multiple z-value,
single array' mode), or the same number of rows as the number
        of arrays along the `dim` dimension ('single z-value, multiple
        arrays' mode).
min_vertices : int, optional
The minimum number of vertices required for a contour to be
extracted. The default (and minimum) value is 2, which is the
smallest number required to produce a contour line (i.e. a start
and end point). Higher values remove smaller contours,
potentially removing noise from the output dataset.
dim : string, optional
The name of the dimension along which to extract contours when
operating in 'single z-value, multiple arrays' mode. The default
is 'time', which extracts contours for each array along the time
dimension.
errors : string, optional
If 'raise', then any failed contours will raise an exception.
If 'ignore' (the default), a list of failed contours will be
printed. If no contours are returned, an exception will always
be raised.
verbose : bool, optional
Print debugging messages. Default False.
Returns
-------
output_gdf : geopandas geodataframe
A geopandas geodataframe object with one feature per z-value
('single array, multiple z-values' mode), or one row per array
along the dimension specified by the `dim` parameter ('single
z-value, multiple arrays' mode). If `attribute_df` was
provided, these values will be included in the shapefile's
attribute table.
"""
def contours_to_multiline(da_i, z_value, min_vertices=2):
'''
Helper function to apply marching squares contour extraction
        to an array and return the data as a shapely MultiLineString.
The `min_vertices` parameter allows you to drop small contours
with less than X vertices.
'''
# Extracts contours from array, and converts each discrete
# contour into a Shapely LineString feature. If the function
# returns a KeyError, this may be due to an unresolved issue in
# scikit-image: https://github.com/scikit-image/scikit-image/issues/4830
line_features = [LineString(i[:,[1, 0]])
for i in find_contours(da_i.data, z_value)
if i.shape[0] > min_vertices]
# Output resulting lines into a single combined MultiLineString
return MultiLineString(line_features)
# Check if CRS is provided as a xarray.DataArray attribute.
# If not, require supplied CRS
try:
crs = da.crs
except:
if crs is None:
raise ValueError("Please add a `crs` attribute to the "
"xarray.DataArray, or provide a CRS using the "
"function's `crs` parameter (e.g. 'EPSG:3577')")
# Check if Affine transform is provided as a xarray.DataArray method.
# If not, require supplied Affine
try:
affine = da.geobox.transform
except KeyError:
affine = da.transform
except:
if affine is None:
raise TypeError("Please provide an Affine object using the "
"`affine` parameter (e.g. `from affine import "
"Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "
"6886890.0)`")
# If z_values is supplied is not a list, convert to list:
z_values = z_values if (isinstance(z_values, list) or
isinstance(z_values, np.ndarray)) else [z_values]
# Test number of dimensions in supplied data array
if len(da.shape) == 2:
if verbose:
print(f'Operating in multiple z-value, single array mode')
dim = 'z_value'
contour_arrays = {str(i)[0:10]:
contours_to_multiline(da, i, min_vertices)
for i in z_values}
else:
# Test if only a single z-value is given when operating in
# single z-value, multiple arrays mode
if verbose:
print(f'Operating in single z-value, multiple arrays mode')
if len(z_values) > 1:
raise ValueError('Please provide a single z-value when operating '
'in single z-value, multiple arrays mode')
contour_arrays = {str(i)[0:10]:
contours_to_multiline(da_i, z_values[0], min_vertices)
for i, da_i in da.groupby(dim)}
# If attributes are provided, add the contour keys to that dataframe
if attribute_df is not None:
try:
attribute_df.insert(0, dim, contour_arrays.keys())
except ValueError:
raise ValueError("One of the following issues occured:\n\n"
"1) `attribute_df` contains a different number of "
"rows than the number of supplied `z_values` ("
"'multiple z-value, single array mode')\n"
"2) `attribute_df` contains a different number of "
"rows than the number of arrays along the `dim` "
"dimension ('single z-value, multiple arrays mode')")
# Otherwise, use the contour keys as the only main attributes
else:
attribute_df = list(contour_arrays.keys())
# Convert output contours to a geopandas.GeoDataFrame
contours_gdf = gpd.GeoDataFrame(data=attribute_df,
geometry=list(contour_arrays.values()),
crs=crs)
# Define affine and use to convert array coords to geographic coords.
# We need to add 0.5 x pixel size to the x and y to obtain the centre
# point of our pixels, rather than the top-left corner
shapely_affine = [affine.a, affine.b, affine.d, affine.e,
affine.xoff + affine.a / 2.0,
affine.yoff + affine.e / 2.0]
contours_gdf['geometry'] = contours_gdf.affine_transform(shapely_affine)
# Rename the data column to match the dimension
contours_gdf = contours_gdf.rename({0: dim}, axis=1)
# Drop empty timesteps
empty_contours = contours_gdf.geometry.is_empty
failed = ', '.join(map(str, contours_gdf[empty_contours][dim].to_list()))
contours_gdf = contours_gdf[~empty_contours]
# Raise exception if no data is returned, or if any contours fail
# when `errors='raise'. Otherwise, print failed contours
if empty_contours.all() and errors == 'raise':
raise RuntimeError("Failed to generate any valid contours; verify that "
"values passed to `z_values` are valid and present "
"in `da`")
elif empty_contours.all() and errors == 'ignore':
if verbose:
print ("Failed to generate any valid contours; verify that "
"values passed to `z_values` are valid and present "
"in `da`")
elif empty_contours.any() and errors == 'raise':
raise Exception(f'Failed to generate contours: {failed}')
elif empty_contours.any() and errors == 'ignore':
if verbose:
print(f'Failed to generate contours: {failed}')
# If asked to write out file, test if geojson or shapefile
if output_path and output_path.endswith('.geojson'):
if verbose:
print(f'Writing contours to {output_path}')
contours_gdf.to_crs('EPSG:4326').to_file(filename=output_path,
driver='GeoJSON')
if output_path and output_path.endswith('.shp'):
if verbose:
print(f'Writing contours to {output_path}')
contours_gdf.to_file(filename=output_path)
return contours_gdf
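# Minimal usage sketch for `subpixel_contours` in 'single array, multiple
# z-values' mode. The synthetic surface, Affine transform and EPSG code are
# arbitrary placeholder assumptions; this helper is not invoked anywhere in
# this module.
def _example_subpixel_contours():
    from affine import Affine
    # Radially symmetric surface: pixel distance from the grid centre
    yy, xx = np.mgrid[0:50, 0:50]
    surface = np.hypot(xx - 25, yy - 25).astype('float64')
    da = xr.DataArray(surface,
                      coords={'y': np.arange(50.0), 'x': np.arange(50.0)},
                      dims=['y', 'x'])
    # Extract two contour levels as vector features
    return subpixel_contours(da,
                             z_values=[5.0, 15.0],
                             crs='EPSG:3577',
                             affine=Affine(30.0, 0.0, 0.0, 0.0, -30.0, 0.0))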
def interpolate_2d(ds,
x_coords,
y_coords,
z_coords,
method='linear',
factor=1,
verbose=False,
**kwargs):
"""
This function takes points with X, Y and Z coordinates, and
interpolates Z-values across the extent of an existing xarray
dataset. This can be useful for producing smooth surfaces from point
data that can be compared directly against satellite data derived
from an OpenDataCube query.
Supported interpolation methods include 'linear', 'nearest' and
    'cubic' (using `scipy.interpolate.griddata`), and 'rbf' (using
`scipy.interpolate.Rbf`).
Last modified: February 2020
Parameters
----------
ds : xarray DataArray or Dataset
A two-dimensional or multi-dimensional array from which x and y
dimensions will be copied and used for the area in which to
interpolate point data.
x_coords, y_coords : numpy array
Arrays containing X and Y coordinates for all points (e.g.
longitudes and latitudes).
z_coords : numpy array
An array containing Z coordinates for all points (e.g.
elevations). These are the values you wish to interpolate
between.
method : string, optional
The method used to interpolate between point values. This string
is either passed to `scipy.interpolate.griddata` (for 'linear',
'nearest' and 'cubic' methods), or used to specify Radial Basis
Function interpolation using `scipy.interpolate.Rbf` ('rbf').
Defaults to 'linear'.
factor : int, optional
An optional integer that can be used to subsample the spatial
interpolation extent to obtain faster interpolation times, then
up-sample this array back to the original dimensions of the
data as a final step. For example, setting `factor=10` will
interpolate data into a grid that has one tenth of the
resolution of `ds`. This approach will be significantly faster
than interpolating at full resolution, but will potentially
produce less accurate or reliable results.
verbose : bool, optional
Print debugging messages. Default False.
**kwargs :
Optional keyword arguments to pass to either
`scipy.interpolate.griddata` (if `method` is 'linear', 'nearest'
        or 'cubic'), or `scipy.interpolate.Rbf` (if `method` is 'rbf').
Returns
-------
interp_2d_array : xarray DataArray
        An xarray DataArray with x and y coordinates copied
        from `ds`, and Z-values interpolated from the points data.
"""
# Extract xy and elev points
points_xy = np.vstack([x_coords, y_coords]).T
# Extract x and y coordinates to interpolate into.
# If `factor` is greater than 1, the coordinates will be subsampled
# for faster run-times. If the last x or y value in the subsampled
# grid aren't the same as the last x or y values in the original
# full resolution grid, add the final full resolution grid value to
# ensure data is interpolated up to the very edge of the array
if ds.x[::factor][-1].item() == ds.x[-1].item():
x_grid_coords = ds.x[::factor].values
else:
x_grid_coords = ds.x[::factor].values.tolist() + [ds.x[-1].item()]
if ds.y[::factor][-1].item() == ds.y[-1].item():
y_grid_coords = ds.y[::factor].values
else:
y_grid_coords = ds.y[::factor].values.tolist() + [ds.y[-1].item()]
# Create grid to interpolate into
grid_y, grid_x = np.meshgrid(x_grid_coords, y_grid_coords)
# Apply scipy.interpolate.griddata interpolation methods
if method in ('linear', 'nearest', 'cubic'):
# Interpolate x, y and z values
interp_2d = scipy.interpolate.griddata(points=points_xy,
values=z_coords,
xi=(grid_y, grid_x),
method=method,
**kwargs)
# Apply Radial Basis Function interpolation
elif method == 'rbf':
# Interpolate x, y and z values
rbf = scipy.interpolate.Rbf(x_coords, y_coords, z_coords, **kwargs)
interp_2d = rbf(grid_y, grid_x)
# Create xarray dataarray from the data and resample to ds coords
interp_2d_da = xr.DataArray(interp_2d,
coords=[y_grid_coords, x_grid_coords],
dims=['y', 'x'])
# If factor is greater than 1, resample the interpolated array to
# match the input `ds` array
if factor > 1:
interp_2d_da = interp_2d_da.interp_like(ds)
return interp_2d_da
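# Minimal usage sketch for `interpolate_2d`. The template grid and the
# scattered "observations" are arbitrary synthetic values used only to
# illustrate the call; this helper is not invoked anywhere in this module.
def _example_interpolate_2d():
    # Template grid defining the interpolation extent (cell values are unused)
    grid = xr.DataArray(np.zeros((40, 40)),
                        coords={'y': np.linspace(0, 100, 40),
                                'x': np.linspace(0, 100, 40)},
                        dims=['y', 'x'])
    # Scattered point observations with a smooth underlying signal
    rng = np.random.default_rng(0)
    x_obs = rng.uniform(0, 100, 50)
    y_obs = rng.uniform(0, 100, 50)
    z_obs = np.sin(x_obs / 20.0) + np.cos(y_obs / 20.0)
    # Points outside the convex hull of the observations come back as NaN
    return interpolate_2d(grid, x_obs, y_obs, z_obs, method='linear')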
def contours_to_arrays(gdf, col):
"""
This function converts a polyline shapefile into an array with three
columns giving the X, Y and Z coordinates of each vertex. This data
can then be used as an input to interpolation procedures (e.g. using
    a function like `interpolate_2d`).
Last modified: October 2019
Parameters
----------
gdf : Geopandas GeoDataFrame
A GeoPandas GeoDataFrame of lines to convert into point
coordinates.
col : str
A string giving the name of the GeoDataFrame field to use as
Z-values.
Returns
-------
A numpy array with three columns giving the X, Y and Z coordinates
of each vertex in the input GeoDataFrame.
"""
coords_zvals = []
for i in range(0, len(gdf)):
val = gdf.iloc[i][col]
try:
coords = np.concatenate([np.vstack(x.coords.xy).T
for x in gdf.iloc[i].geometry])
except:
coords = np.vstack(gdf.iloc[i].geometry.coords.xy).T
coords_zvals.append(np.column_stack((coords,
np.full(np.shape(coords)[0],
fill_value=val))))
return np.concatenate(coords_zvals)
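# Minimal usage sketch for `contours_to_arrays`. The two contour lines and
# their 'elevation' attribute are arbitrary assumed values; this helper is not
# invoked anywhere in this module.
def _example_contours_to_arrays():
    # Two contour lines with an elevation attribute
    gdf = gpd.GeoDataFrame({'elevation': [10.0, 20.0]},
                           geometry=[LineString([(0, 0), (1, 1), (2, 1)]),
                                     LineString([(0, 2), (1, 3)])])
    # Returns an (n_vertices, 3) array of X, Y, Z rows
    return contours_to_arrays(gdf, col='elevation')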
def largest_region(bool_array, **kwargs):
'''
Takes a boolean array and identifies the largest contiguous region of
connected True values. This is returned as a new array with cells in
the largest region marked as True, and all other cells marked as False.
Parameters
----------
bool_array : boolean array
A boolean array (numpy or xarray.DataArray) with True values for
the areas that will be inspected to find the largest group of
connected cells
**kwargs :
Optional keyword arguments to pass to `measure.label`
Returns
-------
largest_region : boolean array
A boolean array with cells in the largest region marked as True,
and all other cells marked as False.
'''
# First, break boolean array into unique, discrete regions/blobs
blobs_labels = label(bool_array, background=0, **kwargs)
# Count the size of each blob, excluding the background class (0)
ids, counts = np.unique(blobs_labels[blobs_labels > 0],
return_counts=True)
# Identify the region ID of the largest blob
largest_region_id = ids[np.argmax(counts)]
# Produce a boolean array where 1 == the largest region
largest_region = blobs_labels == largest_region_id
return largest_region
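# Minimal usage sketch for `largest_region` on a small synthetic mask; this
# helper is not invoked anywhere in this module.
def _example_largest_region():
    # Boolean mask with two blobs; only the larger 3 x 3 block is retained
    mask = np.zeros((6, 6), dtype=bool)
    mask[0:3, 0:3] = True   # 9 connected cells
    mask[4:6, 4:6] = True   # 4 connected cells
    return largest_region(mask)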
def transform_geojson_wgs_to_epsg(geojson, EPSG):
"""
Takes a geojson dictionary and converts it from WGS84 (EPSG:4326) to desired EPSG
Parameters
----------
geojson: dict
a geojson dictionary containing a 'geometry' key, in WGS84 coordinates
EPSG: int
        numeric code for the EPSG coordinate reference system to transform into
Returns
-------
transformed_geojson: dict
a geojson dictionary containing a 'coordinates' key, in the desired CRS
"""
gg = Geometry(geojson['geometry'], CRS('epsg:4326'))
gg = gg.to_crs(CRS(f'epsg:{EPSG}'))
return gg.__geo_interface__
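# Minimal usage sketch for `transform_geojson_wgs_to_epsg`. The point
# coordinates and the target EPSG code (GDA94 / Australian Albers) are
# arbitrary assumed values; this helper is not invoked anywhere in this module.
def _example_transform_geojson():
    feature = {'geometry': {'type': 'Point',
                            'coordinates': [149.128, -35.282]}}
    return transform_geojson_wgs_to_epsg(feature, 3577)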
def zonal_stats_parallel(shp,
raster,
statistics,
out_shp,
ncpus,
**kwargs):
"""
Summarizing raster datasets based on vector geometries in parallel.
    Each CPU receives an equal chunk of the dataset.
Utilizes the perrygeo/rasterstats package.
Parameters
----------
shp : str
Path to shapefile that contains polygons over
which zonal statistics are calculated
raster: str
Path to the raster from which the statistics are calculated.
This can be a virtual raster (.vrt).
statistics: list
list of statistics to calculate. e.g.
['min', 'max', 'median', 'majority', 'sum']
out_shp: str
Path to export shapefile containing zonal statistics.
ncpus: int
number of cores to parallelize the operations over.
kwargs:
Any other keyword arguments to rasterstats.zonal_stats()
See https://github.com/perrygeo/python-rasterstats for
all options
Returns
-------
Exports a shapefile to disk containing the zonal statistics requested
"""
#yields n sized chunks from list l (used for splitting task to multiple processes)
def chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i + n]
#calculates zonal stats and adds results to a dictionary
def worker(z,raster,d):
z_stats = zonal_stats(z,raster,stats=statistics,**kwargs)
for i in range(0,len(z_stats)):
d[z[i]['id']]=z_stats[i]
#write output polygon
def write_output(zones, out_shp,d):
#copy schema and crs from input and add new fields for each statistic
schema = zones.schema.copy()
crs = zones.crs
for stat in statistics:
schema['properties'][stat] = 'float'
with fiona.open(out_shp, 'w', 'ESRI Shapefile', schema, crs) as output:
for elem in zones:
for stat in statistics:
elem['properties'][stat]=d[elem['id']][stat]
output.write({'properties':elem['properties'],'geometry': mapping(shape(elem['geometry']))})
with fiona.open(shp) as zones:
jobs = []
# create manager dictionary (polygon ids=keys, stats=entries)
# where multiple processes can write without conflicts
man = mp.Manager()
d = man.dict()
#split zone polygons into 'ncpus' chunks for parallel processing
# and call worker() for each
split = chunks(zones, len(zones)//ncpus)
for z in split:
p = mp.Process(target=worker,args=(z, raster,d))
p.start()
jobs.append(p)
        #wait until all chunks have finished
[j.join() for j in jobs]
write_output(zones,out_shp,d)
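# Minimal usage sketch for `zonal_stats_parallel`. The shapefile, raster and
# output paths below are hypothetical placeholders, and because the function
# spawns worker processes it should be called from code guarded by
# ``if __name__ == '__main__':``. This helper is not invoked anywhere in this
# module.
def _example_zonal_stats_parallel(shp_path='catchments.shp',
                                  raster_path='rainfall.vrt',
                                  out_path='catchment_stats.shp'):
    zonal_stats_parallel(shp=shp_path,
                         raster=raster_path,
                         statistics=['min', 'max', 'mean'],
                         out_shp=out_path,
                         ncpus=4)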
|
ceos-seo/data_cube_utilities
|
dea_tools/dea_tools/spatial.py
|
Python
|
apache-2.0
| 34,337 | 0.009145 |
#
# Copyright © 2012–2022 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import subprocess
from distutils.version import LooseVersion
from unittest import SkipTest
from django.core.cache import cache
from django.test import TestCase
from django.test.utils import override_settings
import weblate.vcs.gpg
from weblate.utils.checks import check_data_writable
from weblate.utils.unittest import tempdir_setting
from weblate.vcs.gpg import (
generate_gpg_key,
get_gpg_key,
get_gpg_public_key,
get_gpg_sign_key,
)
class GPGTest(TestCase):
gpg_error = None
@classmethod
def setUpClass(cls):
"""Check whether we can use gpg."""
super().setUpClass()
try:
result = subprocess.run(
["gpg", "--version"],
check=True,
text=True,
capture_output=True,
)
version = result.stdout.splitlines()[0].strip().rsplit(None, 1)[-1]
if LooseVersion(version) < LooseVersion("2.1"):
cls.gpg_error = "gpg too old"
except (subprocess.CalledProcessError, OSError):
cls.gpg_error = "gpg not found"
def setUp(self):
if self.gpg_error:
raise SkipTest(self.gpg_error)
def check_errors(self):
self.assertEqual(weblate.vcs.gpg.GPG_ERRORS, {})
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <weblate@example.com>", WEBLATE_GPG_ALGO="rsa512"
)
def test_generate(self):
self.assertEqual(check_data_writable(), [])
self.assertIsNone(get_gpg_key(silent=True))
key = generate_gpg_key()
self.check_errors()
self.assertIsNotNone(key)
self.assertEqual(key, get_gpg_key())
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <weblate@example.com>", WEBLATE_GPG_ALGO="rsa512"
)
def test_get(self):
self.assertEqual(check_data_writable(), [])
# This will generate new key
key = get_gpg_sign_key()
self.check_errors()
self.assertIsNotNone(key)
# Check cache access
self.assertEqual(key, get_gpg_sign_key())
# Check empty cache
cache.delete("gpg-key-id")
self.assertEqual(key, get_gpg_sign_key())
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <weblate@example.com>", WEBLATE_GPG_ALGO="rsa512"
)
def test_public(self):
self.assertEqual(check_data_writable(), [])
# This will generate new key
key = get_gpg_public_key()
self.check_errors()
self.assertIsNotNone(key)
# Check cache access
self.assertEqual(key, get_gpg_public_key())
|
nijel/weblate
|
weblate/vcs/tests/test_gpg.py
|
Python
|
gpl-3.0
| 3,459 | 0.000869 |
#!/usr/bin/env python
""" Examples of using qhull via scipy to generate 3D plots in visvis.
Requires numpy ver 1.5, scipy ver 0.9 and qhull from
http://www.qhull.org/ (on Windows this comes with Scipy).
plot3D meshes and plots random convex transformable data in both cartesian
and spherical coordinates
Play around with the many input parameters to plot3D to make interesting plots.
Keith Smith, 4 March 2011
"""
import numpy as np
import scipy as sp
from scipy.spatial import Delaunay
import visvis as vv
def plot3D( vuvi,
coordSys='Cartesian',
raised = True,
depRange=[-40,0],
ambient = 0.9,
diffuse = 0.4,
colormap = vv.CM_JET,
faceShading='smooth',
edgeColor = (0.5,0.5,0.5,1),
edgeShading = 'smooth',
faceColor = (1,1,1,1),
shininess = 50,
specular = 0.35,
emission = 0.45 ):
""" plot3D(vxyz,
coordSys=['Cartesian', 'Spherical'],
raised = True,
depRange=[-40,0], #Note: second range limit not currently used
rangeR=[-40,0],
ambient = 0.9,
diffuse = 0.4,
colormap = vv.CM_JET,
faceShading='smooth',
edgeColor = (0.5,0.5,0.5,1),
edgeShading = 'smooth',
faceColor = (1,1,1,1),
shininess = 50,
specular = 0.35,
emission = 0.45 ))
"""
if coordSys == 'Spherical':
thetaPhiR = vuvi # data cols are theta, phi, radius
vxyz = np.zeros(vuvi.shape)
# Now find xyz data points on unit sphere (for meshing)
vxyz[:,0] = np.sin(thetaPhiR[:,0])*np.cos(thetaPhiR[:,1])
vxyz[:,1] = np.sin(thetaPhiR[:,0])*np.sin(thetaPhiR[:,1])
vxyz[:,2] = np.cos(thetaPhiR[:,0])
#normalize and scale dependent values
thetaPhiR[thetaPhiR[:,2] < depRange[0], 2] = depRange[0]
depVal = thetaPhiR[:,2] - np.min(thetaPhiR[:,2])
else:
vxyz = vuvi
vxyz[vxyz[:,2] < depRange[0], 2] = depRange[0]
numOfPts = np.shape(vxyz)[0]
depVal = vxyz[:,2]
# set to convex surface for meshing
# find center of data
center = np.average(vxyz, 0)
#center data
vxyz = vxyz - center
# find x-y plane distance to each point
radials = np.sqrt(vxyz[:,0]**2 + vxyz[:,1]**2)
# get max and adjust so that arctan ranges between +-45 deg
maxRadial = np.max(radials)/0.7
#get angle on sphere
xi = np.arctan2(radials / maxRadial, 1)
#force z axis data to sphere
vxyz[:,2] = maxRadial * np.cos(xi)
vxyz = np.append(vxyz, [[0.7, 0.7, -0.7],[-0.7, 0.7, -0.7],[0.7, -0.7, -0.7],[-0.7, -0.7, -0.7]], axis=0)
# Send data to convex_hull program qhull
dly = Delaunay(vxyz)
meshIndx = dly.convex_hull
# Check each triangle facet and flip if
# vertex order puts back side out
for index, (I1, I2, I3) in enumerate(meshIndx):
a = vxyz[I1,:] - vxyz[I2,:]
b = vxyz[I2,:] - vxyz[I3,:]
c = np.cross(a, b)
if np.dot(c, vxyz[I2,:]) > 0:
meshIndx[index] = (I1, I3, I2)
# if 3D surface adjust dependent coordinates
if raised:
if coordSys == 'Spherical':
vxyz[:,0] = depVal*np.sin(thetaPhiR[:,0])*np.cos(thetaPhiR[:,1])
vxyz[:,1] = depVal*np.sin(thetaPhiR[:,0])*np.sin(thetaPhiR[:,1])
vxyz[:,2] = depVal*np.cos(thetaPhiR[:,0])
else:
vxyz = vxyz + center
vxyz[:numOfPts,2] = depVal
else:
if coordSys == 'Spherical':
depRange[0] = 1.0
else:
# Since qhull encloses the data with Delaunay triangles there will be
# a set of facets which cover the bottom of the data. For flat
# contours, the bottom facets need to be separated a fraction from
# the top facets else you don't see colormap colors
depValRange = np.max(vxyz[:numOfPts,2]) - np.min(vxyz[:numOfPts,2])
vxyz[:numOfPts,2] = vxyz[:numOfPts,2] / (10 * depValRange )
#normalize depVal for color mapping
dataRange = np.max(depVal) - np.min(depVal)
depVal = (depVal- np.min(depVal)) / dataRange
# Get axes
ax = vv.gca()
ms = vv.Mesh(ax, vxyz, faces=meshIndx, normals=vxyz)
ms.SetValues(np.reshape(depVal,np.size(depVal)))
ms.ambient = ambient
ms.diffuse = diffuse
ms.colormap = colormap
ms.faceShading = faceShading
ms.edgeColor = edgeColor
ms.edgeShading = edgeShading
ms.faceColor = faceColor
ms.shininess = shininess
ms.specular = specular
ms.emission = emission
ax.SetLimits(rangeX=[-depRange[0],depRange[0]],
rangeY=[-depRange[0],depRange[0]],
rangeZ=[-depRange[0], depRange[0]])
# Start of test code.
if __name__ == '__main__':
# Create figure
fig = vv.figure()
fig.position.w = 600
# Cartesian plot
numOfPts = 2000
scale = 1
# Create random points
xyz = 2 * scale * (np.random.rand(numOfPts,3) - 0.5)
# 2D sync function
xyz[:,2] = np.sinc(5*(np.sqrt(xyz[:,0]**2 + xyz[:,1]**2)))
#xyz[:,2] = scale - ( xyz[:,0]**2 + xyz[:,1]**2)
# Plot
vv.subplot(121)
vv.title('Cartesian coordinates')
plot3D(xyz, depRange=[-1,0])
#plot3D(xyz, depRange=[-1,0], raised=False)
    # Spherical plot
numOfPts = 1000
# Create random points
ThetaPhiR = np.zeros((numOfPts,3))
ThetaPhiR[:,0] = np.pi * np.random.rand(numOfPts) # theta is 0 to 180 deg
ThetaPhiR[:,1] = 2 * np.pi * np.random.rand(numOfPts) # phi is 0 to 360 deg
ThetaPhiR[:,2] = 10 * np.log10((np.sin(ThetaPhiR[:,0])**4) * (np.cos(ThetaPhiR[:,1])**2))
# Plot
vv.subplot(122)
    vv.title('Spherical coordinates')
plot3D(ThetaPhiR, coordSys='Spherical')
#plot3D(ThetaPhiR, coordSys='Spherical', raised=False)
# Run main loop
app = vv.use()
app.Run()
|
pbfy0/visvis
|
examples/surfaceFromRandomPoints.py
|
Python
|
bsd-3-clause
| 6,186 | 0.023278 |
# jsb/plugs/socket/dns.py
#
#
""" do a fqdn loopup. """
## jsb imports
from jsb.lib.commands import cmnds
from jsb.lib.examples import examples
## basic imports
from socket import gethostbyname
from socket import getfqdn
import re
## dns command
def handle_dns(bot, event):
""" arguments: <ip>|<hostname> - do a dns lookup. """
if not event.rest: event.missing("<ip>|<hostname>") ; return
query = event.rest.strip()
ippattern = re.match(r"^([0-9]{1,3}\.){3}[0-9]{1,3}$", query)
hostpattern = re.match(r"(\w+://)?(?P<hostname>\S+\.\w+)", query)
if ippattern:
try:
answer = getfqdn(ippattern.group(0))
event.reply("%(hostname)s is %(answer)s" % {"hostname": query, "answer": answer})
except: event.reply("Couldn't lookup ip")
elif hostpattern:
try:
answer = gethostbyname(hostpattern.group('hostname'))
event.reply("%(ip)s is %(answer)s" % {"ip": query, "answer": answer})
except: event.reply("Couldn't look up the hostname")
else: return
cmnds.add("dns", handle_dns, ["OPER", "USER", "GUEST"])
examples.add("dns", "resolve the ip or the hostname", "dns google.com")
|
Petraea/jsonbot
|
jsb/plugs/socket/dns.py
|
Python
|
mit
| 1,189 | 0.015139 |
from Components.Renderer.Renderer import Renderer
from enigma import eDVBCI_UI, eLabel, iPlayableService
from skin import parameters
from Components.SystemInfo import SystemInfo
from Components.VariableText import VariableText
from Tools.Hex2strColor import Hex2strColor
from os import popen
class CiModuleControl(Renderer, VariableText):
def __init__(self):
Renderer.__init__(self)
VariableText.__init__(self)
self.eDVBCIUIInstance = eDVBCI_UI.getInstance()
self.eDVBCIUIInstance and self.eDVBCIUIInstance.ciStateChanged.get().append(self.ciModuleStateChanged)
self.text = ""
self.allVisible = False
self.no_visible_state1 = "ciplushelper" in popen("top -n 1").read()
self.colors = parameters.get("CiModuleControlColors", (0x007F7F7F, 0x00FFFF00, 0x00FFFF00, 0x00FF2525)) # "state 0 (no module) gray", "state 1 (init module) yellow", "state 2 (module ready) green", "state -1 (error) red"
GUI_WIDGET = eLabel
def applySkin(self, desktop, parent):
attribs = self.skinAttributes[:]
for (attrib, value) in self.skinAttributes:
if attrib == "allVisible":
self.allVisible = value == "1"
attribs.remove((attrib, value))
break
self.skinAttributes = attribs
return Renderer.applySkin(self, desktop, parent)
def ciModuleStateChanged(self, slot):
self.changed(True)
def changed(self, what):
if what == True or what[0] == self.CHANGED_SPECIFIC and what[1] == iPlayableService.evStart:
string = ""
NUM_CI = SystemInfo["CommonInterface"]
if NUM_CI and NUM_CI > 0:
if self.eDVBCIUIInstance:
for slot in range(NUM_CI):
state = self.eDVBCIUIInstance.getState(slot)
if state == 1 and self.no_visible_state1:
continue
add_num = True
if string:
string += " "
if state != -1:
if state == 0:
if not self.allVisible:
string += ""
add_num = False
else:
string += Hex2strColor(self.colors[0]) # no module
elif state == 1:
string += Hex2strColor(self.colors[1]) # init module
elif state == 2:
string += Hex2strColor(self.colors[2]) # module ready
else:
if not self.allVisible:
string += ""
add_num = False
else:
string += Hex2strColor(self.colors[3]) # error
if add_num:
string += "%d" % (slot + 1)
if string:
string = _("CI slot: ") + string
self.text = string
|
TwolDE2/enigma2
|
lib/python/Components/Renderer/CiModuleControl.py
|
Python
|
gpl-2.0
| 2,404 | 0.027454 |
import unittest
import random
import pickle
from ..wrapper import NTracer,CUBE,SPHERE
from ..render import Material,Color
def pydot(a,b):
return sum(ia*ib for ia,ib in zip(a,b))
def and_generic(f):
def inner(self):
with self.subTest(generic=False):
f(self,False)
with self.subTest(generic=True):
f(self,True)
return inner
def object_equal_method(*attrs):
def inner(self,a,b,msg=None):
if a is not b:
for attr in attrs:
self.assertEqual(getattr(a,attr),getattr(b,attr),msg)
return inner
def rand_vector(nt,lo=-1000,hi=1000):
return nt.Vector([random.uniform(lo,hi) for x in range(nt.dimension)])
def rand_triangle_verts(nt):
points = []
d = nt.dimension
for i in range(d):
points.append(nt.Vector(
[random.uniform(-10,10) for j in range(0,i)] +
[random.uniform(1,10)] +
[0 for j in range(i+1,d)]))
return points
def walk_bounds(n,aabb,nt,f):
f(aabb,n)
if isinstance(n,nt.KDBranch):
walk_bounds(n.left,aabb.left(n.axis,n.split),nt,f)
walk_bounds(n.right,aabb.right(n.axis,n.split),nt,f)
def aabb_intersects(a,b):
return all(a_min <= b_max and a_max >= b_min
for a_min,a_max,b_min,b_max in zip(a.start,a.end,b.start,b.end))
def to_prototype(nt,x):
if isinstance(x,nt.Triangle): return nt.TrianglePrototype(x)
if isinstance(x,nt.TriangleBatch): return nt.TriangleBatchPrototype(x)
# this constructor isn't implemented yet
if isinstance(x,nt.Solid): return nt.SolidPrototype(x)
raise TypeError('x is not a primitive')
class Tests(unittest.TestCase):
def __init__(self,*args,**kwds):
super().__init__(*args,**kwds)
self._nt_cache = set()
self.addTypeEqualityFunc(Material,'_material_equal')
def get_ntracer(self,dimension,generic=False):
r = NTracer(dimension,generic)
if r not in self._nt_cache:
self._nt_cache.add(r)
#self.addTypeEqualityFunc(r.Vector,'_vector_equal')
self.addTypeEqualityFunc(r.base.AABB,'_aabb_equal')
self.addTypeEqualityFunc(r.base.KDBranch,'_kdbranch_equal')
self.addTypeEqualityFunc(r.base.KDLeaf,'listlike_equal')
self.addTypeEqualityFunc(r.base.Triangle,'_triangle_equal')
self.addTypeEqualityFunc(r.base.TriangleBatch,'listlike_equal')
return r
_aabb_equal = object_equal_method('start','end')
_material_equal = object_equal_method('color','opacity','reflectivity','specular_intensity','specular_exp','specular')
_kdbranch_equal = object_equal_method('axis','split','left','right')
def listlike_equal(self,a,b,msg=None):
self.assertEqual(list(a),list(b),msg)
def _triangle_equal(self,a,b,msg=None):
self.assertEqual(a.p1,b.p1,msg)
self.assertEqual(a.face_normal,b.face_normal,msg)
self.assertEqual(list(a.edge_normals),list(b.edge_normals),msg)
self.assertEqual(a.material,b.material,msg)
def vector_almost_equal(self,va,vb):
self.assertEqual(len(va),len(vb))
for a,b in zip(va,vb):
self.assertAlmostEqual(a,b,4)
#def check_kdtree(self,nt,scene):
# prims = set()
# leaf_boundaries = []
# def handler(aabb,node):
# if node is None:
# leaf_boundaries.append((aabb,frozenset()))
# elif isinstance(node,nt.KDLeaf):
# prims.update(to_prototype(nt,p) for p in node)
# leaf_boundaries.append((aabb,frozenset(node)))
# walk_bounds(scene.root,scene.boundary,nt,handler)
# for p in prims:
# for bound,contained in leaf_boundaries:
# self.assertEqual(bound.intersects(p),p.primitive in contained)
def test_simd(self):
d = 64
while d > 4:
nt = self.get_ntracer(d)
a = nt.Vector(range(d))
b = nt.Vector(x+12 for x in range(d-1,-1,-1))
self.assertAlmostEqual(nt.dot(a,b),pydot(a,b),4)
d = d >> 1
@and_generic
def test_math(self,generic):
nt = self.get_ntracer(4,generic)
ma = nt.Matrix([[10,2,3,4],[5,6,7,8],[9,10,11,12],[13,14,15,16]])
mb = nt.Matrix([13,6,9,6,7,3,3,13,1,11,12,7,12,15,17,15])
mx = ma * mb
my = nt.Matrix([195,159,200,167,210,245,283,277,342,385,447,441,474,525,611,605])
self.listlike_equal(mx.values,my.values)
self.vector_almost_equal((mb * mb.inverse()).values,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1])
self.vector_almost_equal(nt.Vector(13,2,16,14).unit(),[0.52,0.08,0.64,0.56])
@and_generic
def test_aabb(self,generic):
nt = self.get_ntracer(5,generic)
a = nt.AABB((1,7,-5,5,4),(5,13,-1,6,12))
self.assertEqual(a.dimension,5)
self.listlike_equal(a.end,[5,13,-1,6,12])
self.listlike_equal(a.start,[1,7,-5,5,4])
self.listlike_equal(a.right(2,-3).start,[1,7,-3,5,4])
self.listlike_equal(a.left(0,2).end,[2,13,-1,6,12])
@and_generic
def test_triangle(self,generic):
nt = self.get_ntracer(3,generic)
mat = Material((1,1,1))
box = nt.AABB((-1,-1,-1),(1,1,1))
self.assertFalse(box.intersects(nt.TrianglePrototype([
(-2.092357,0.1627209,0.9231308),
(0.274588,0.8528936,2.309217),
(-1.212236,1.855952,0.3137006)],mat)))
self.assertFalse(box.intersects(nt.TrianglePrototype([
(2.048058,-3.022543,1.447644),
(1.961913,-0.5438575,-0.1552723),
(0.3618142,-1.684767,0.2162201)],mat)))
self.assertFalse(box.intersects(nt.TrianglePrototype([
(-4.335572,-1.690142,-1.302721),
(0.8976227,0.5090631,4.6815),
(-0.8176082,4.334341,-1.763081)],mat)))
self.assertTrue(box.intersects(nt.TrianglePrototype([
(0,0,0),
(5,5,5),
(1,2,3)],mat)))
self.assertTrue(nt.AABB(
(-0.894424974918,-1.0,-0.850639998913),
(0.0,-0.447214990854,0.850639998913)).intersects(
nt.TrianglePrototype([
(0.0,-1.0,0.0),
(0.723599970341,-0.447214990854,0.525720000267),
(-0.276385009289,-0.447214990854,0.850639998913)],mat)))
points = [rand_triangle_verts(nt) for i in range(nt.BATCH_SIZE)]
max_v = min_v = points[0][0]
for tri in points:
for p in tri:
max_v = [max(a,b) for a,b in zip(max_v,p)]
min_v = [min(a,b) for a,b in zip(min_v,p)]
tbp = nt.TriangleBatchPrototype(nt.TrianglePrototype(tri,mat) for tri in points)
self.vector_almost_equal(tbp.boundary.start,min_v)
self.vector_almost_equal(tbp.boundary.end,max_v)
if nt.BATCH_SIZE == 4:
self.assertTrue(box.intersects(nt.TriangleBatchPrototype([
nt.TrianglePrototype([
(5.8737568855285645,0.0,0.0),
(2.362654209136963,1.4457907676696777,0.0),
(-7.4159417152404785,-2.368093252182007,5.305923938751221)],mat),
nt.TrianglePrototype([
(6.069871425628662,0.0,0.0),
(8.298105239868164,1.4387503862380981,0.0),
(-7.501928806304932,4.3413987159729,5.4995622634887695)],mat),
nt.TrianglePrototype([
(5.153589248657227,0.0,0.0),
(-0.8880055546760559,3.595335006713867,0.0),
(-0.14510761201381683,6.0621466636657715,1.7603594064712524)],mat),
nt.TrianglePrototype([
(1.9743329286575317,0.0,0.0),
(-0.6579152345657349,8.780682563781738,0.0),
(1.0433781147003174,0.5538825988769531,4.187061309814453)],mat)])))
@and_generic
def test_cube(self,generic):
nt = self.get_ntracer(3,generic)
mat = Material((1,1,1))
box = nt.AABB((-1,-1,-1),(1,1,1))
self.assertFalse(box.intersects(nt.SolidPrototype(
CUBE,
nt.Vector(1.356136,1.717844,1.577731),
nt.Matrix(-0.01922399,-0.3460019,0.8615935,
-0.03032121,-0.6326356,-0.5065715,
0.03728577,-0.6928598,0.03227519),
mat)))
self.assertFalse(box.intersects(nt.SolidPrototype(
CUBE,
nt.Vector(1.444041,1.433598,1.975453),
nt.Matrix(0.3780299,-0.3535482,0.8556266,
-0.7643852,-0.6406123,0.07301452,
0.5223108,-0.6816301,-0.5124177),
mat)))
self.assertFalse(box.intersects(nt.SolidPrototype(
CUBE,
nt.Vector(-0.31218,-3.436678,1.473133),
nt.Matrix(0.8241131,-0.2224413,1.540015,
-1.461101,-0.7099018,0.6793453,
0.5350775,-1.595884,-0.516849),
mat)))
self.assertFalse(box.intersects(nt.SolidPrototype(
CUBE,
nt.Vector(0.7697315,-3.758033,1.847144),
nt.Matrix(0.6002195,-1.608681,-0.3900863,
-1.461104,-0.7098908,0.6793506,
-0.7779449,0.0921175,-1.576897),
mat)))
self.assertTrue(box.intersects(nt.SolidPrototype(
CUBE,
nt.Vector(0.4581598,-1.56134,0.5541568),
nt.Matrix(0.3780299,-0.3535482,0.8556266,
-0.7643852,-0.6406123,0.07301452,
0.5223108,-0.6816301,-0.5124177),
mat)))
@and_generic
def test_sphere(self,generic):
nt = self.get_ntracer(3,generic)
mat = Material((1,1,1))
box = nt.AABB((-1,-1,-1),(1,1,1))
self.assertFalse(box.intersects(nt.SolidPrototype(
SPHERE,
nt.Vector(-1.32138,1.6959,1.729396),
nt.Matrix.identity(),
mat)))
self.assertTrue(box.intersects(nt.SolidPrototype(
SPHERE,
nt.Vector(1.623511,-1.521197,-1.243952),
nt.Matrix.identity(),
mat)))
@and_generic
def test_batch_interface(self,generic):
nt = self.get_ntracer(4,generic)
mat = Material((1,1,1))
lo = lambda: random.uniform(-1,1)
hi = lambda: random.uniform(9,11)
protos = []
for i in range(nt.BATCH_SIZE):
protos.append(nt.TrianglePrototype([
(lo(),lo(),lo(),lo()),
(lo(),hi(),lo(),lo()),
(hi(),lo(),lo(),lo()),
(lo(),lo(),hi(),lo())],Material((1,1,1.0/(i+1)))))
bproto = nt.TriangleBatchPrototype(protos)
for i in range(nt.BATCH_SIZE):
self.assertEqual(protos[i].face_normal,bproto.face_normal[i])
for j in range(nt.dimension):
self.assertEqual(protos[i].point_data[j].point,bproto.point_data[j].point[i])
self.assertEqual(protos[i].point_data[j].edge_normal,bproto.point_data[j].edge_normal[i])
self.assertEqual(protos[i].material,bproto.material[i])
@and_generic
def test_buffer_interface(self,generic):
nt = self.get_ntracer(7,generic)
v = nt.Vector(1,2,3,4,5,6,7)
self.assertEqual(list(v),list(memoryview(v)))
c = Color(0.5,0.1,0)
self.assertEqual(list(c),list(memoryview(c)))
@and_generic
def test_kdtree(self,generic):
nt = self.get_ntracer(3,generic)
mat = Material((1,1,1))
primitives = [
nt.Triangle(
(-1.1755770444869995,0.3819499611854553,-1.6180520057678223),
(1.7082732915878296,-2.3512351512908936,1.4531432390213013),
[(-0.615524172782898,-0.3236003816127777,0.19999605417251587),
(0.49796950817108154,0.0381958931684494,-0.5235964059829712)],mat),
nt.Triangle(
(-1.1755770444869995,0.3819499611854553,-1.6180520057678223),
(1.0557708740234375,-1.4531433582305908,0.8980922102928162),
[(-0.8057316541671753,-0.06180214881896973,0.8471965789794922),
(0.19020742177963257,-0.2617982029914856,-0.6472004652023315)],mat),
nt.Triangle(
(0.7265498042106628,0.9999955296516418,1.6180428266525269),
(0,1.7961481809616089,0.8980742692947388),
[(-1.1135050058364868,-0.1618017703294754,0.32360348105430603),
(0.6881839036941528,-0.09999901801347733,0.19999800622463226)],mat),
nt.Triangle(
(0.7265498042106628,0.9999955296516418,1.6180428266525269),
(0,2.90622878074646,1.4531147480010986),
[(-0.4253210127353668,-0.26180076599121094,0.5236014127731323),
(0.6881839036941528,0.09999898821115494,-0.1999979317188263)],mat),
nt.Triangle(
(1.9021340608596802,0.618022620677948,-0.3819592595100403),
(-1.055770754814148,-1.4531432390213013,0.8980920910835266),
[(-0.30776214599609375,-0.42359834909439087,-1.0471925735473633),
(0.4979696571826935,-0.038195837289094925,0.5235962867736816)],mat),
nt.Triangle(
(1.9021340608596802,0.618022620677948,-0.3819592595100403),
(-1.7082730531692505,-2.3512353897094727,1.4531434774398804),
[(0.19020749628543854,-0.4617941677570343,-0.5235962271690369),
(0.19020745158195496,0.2617981433868408,0.6472005844116211)],mat)]
scene = nt.CompositeScene(
nt.AABB(
(-1.710653305053711e-05,0.618022620677948,-0.3819774389266968),
(0.7265291213989258,2.000016689300537,0.3819882869720459)),
nt.KDBranch(1,2.0000057220458984,
nt.KDBranch(1,0.9999955296516418,
None,
nt.KDLeaf([
primitives[4],
primitives[5],
primitives[2],
primitives[3],
primitives[1],
primitives[0]])),
nt.KDLeaf([
primitives[4],
primitives[5],
primitives[1],
primitives[0]])))
scene.set_fov(0.8)
hits = scene.root.intersects(
(4.917067527770996,2.508934497833252,-4.304379940032959),
(-0.7135500907897949,-0.1356230527162552,0.6873518228530884),
)
self.assertEqual(len(hits),1)
self.assertEqual(primitives.index(hits[0].primitive),4)
self.assertEqual(hits[0].batch_index,-1)
def check_pickle_roundtrip(self,x):
self.assertEqual(pickle.loads(pickle.dumps(x)),x)
def test_pickle(self):
mat = Material((1,1,1))
self.check_pickle_roundtrip(mat)
self.check_pickle_roundtrip(Color(0.2,0.1,1))
for d in [3,5,12]:
with self.subTest(dimension=d):
nt = self.get_ntracer(d)
self.check_pickle_roundtrip(rand_vector(nt))
self.check_pickle_roundtrip(nt.AABB(rand_vector(nt,-100,50),rand_vector(nt,51,200)))
self.check_pickle_roundtrip(nt.Triangle(
rand_vector(nt),
rand_vector(nt),
[rand_vector(nt) for x in range(nt.dimension-1)],mat))
def check_triangle_points_roundtrip(self,nt,points):
newpoints = nt.Triangle.from_points(points,Material((1,1,1))).to_points()
try:
for old,new in zip(points,newpoints):
for c1,c2 in zip(old,new):
self.assertAlmostEqual(c1,c2,4)
except AssertionError:
self.fail('{} != {}'.format(list(points),list(newpoints)))
def check_triangle_batch_points_roundtrip(self,nt,points):
mat = Material((1,1,1))
tbproto = nt.TriangleBatchPrototype(
nt.TriangleBatch([nt.Triangle.from_points(p,mat) for p in points]))
newpoints = []
for i in range(nt.BATCH_SIZE):
newpoints.append([tp.point[i] for tp in tbproto.point_data])
@and_generic
def test_to_from_points(self,generic):
nt = self.get_ntracer(5,generic)
self.check_triangle_points_roundtrip(nt,rand_triangle_verts(nt))
self.check_triangle_batch_points_roundtrip(
nt,
[rand_triangle_verts(nt) for i in range(nt.BATCH_SIZE)])
#@and_generic
#def test_kd_tree_gen(self,generic):
# mat = Material((1,1,1))
# nt = self.get_ntracer(4,generic)
# for j in range(10):
# protos = []
# for i in range(nt.BATCH_SIZE * 4):
# protos.append(nt.TrianglePrototype(rand_triangle_verts(nt),mat))
# scene = nt.build_composite_scene(protos,max_depth=1,split_threshold=1)
# self.check_kdtree(nt,scene)
if __name__ == '__main__':
unittest.main()
|
Rouslan/NTracer
|
lib/ntracer/tests/test.py
|
Python
|
mit
| 16,831 | 0.03149 |
from academic import settings
def default_picture_url(context):
return {
'ACADEMIC_PEOPLE_DEFAULT_PICTURE':
settings.PEOPLE_DEFAULT_PICTURE, }
|
phretor/django-academic
|
academic/apps/people/context_processors.py
|
Python
|
bsd-3-clause
| 168 | 0.005952 |
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.contrib.auth.models import User
from django.template.loader import render_to_string
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.core.mail import send_mail
from django.core.exceptions import ImproperlyConfigured
from django.utils.http import urlquote
from userena.utils import get_gravatar, generate_sha1, get_protocol
from userena.managers import UserenaManager, UserenaBaseProfileManager
from userena import settings as userena_settings
from guardian.shortcuts import get_perms
from guardian.shortcuts import assign
from easy_thumbnails.fields import ThumbnailerImageField
import datetime
import random
import hashlib
PROFILE_PERMISSIONS = (
('view_profile', 'Can view profile'),
)
def upload_to_mugshot(instance, filename):
"""
    Uploads a mugshot for a user to the ``USERENA_MUGSHOT_PATH``, saving it
    under a unique hash for the image. This is for privacy reasons so others
can't just browse through the mugshot directory.
"""
extension = filename.split('.')[-1].lower()
salt, hash = generate_sha1(instance.id)
return '%(path)s%(hash)s.%(extension)s' % {'path': userena_settings.USERENA_MUGSHOT_PATH,
'hash': hash[:10],
'extension': extension}
class UserenaSignup(models.Model):
"""
Userena model which stores all the necessary information to have a full
functional user implementation on your Django website.
"""
user = models.OneToOneField(User,
verbose_name=_('user'),
related_name='userena_signup')
last_active = models.DateTimeField(_('last active'),
blank=True,
null=True,
help_text=_('The last date that the user was active.'))
activation_key = models.CharField(_('activation key'),
max_length=40,
blank=True)
activation_notification_send = models.BooleanField(_('notification send'),
default=False,
help_text=_('Designates whether this user has already got a notification about activating their account.'))
email_unconfirmed = models.EmailField(_('unconfirmed email address'),
blank=True,
help_text=_('Temporary email address when the user requests an email change.'))
email_confirmation_key = models.CharField(_('unconfirmed email verification key'),
max_length=40,
blank=True)
email_confirmation_key_created = models.DateTimeField(_('creation date of email confirmation key'),
blank=True,
null=True)
objects = UserenaManager()
class Meta:
verbose_name = _('userena registration')
verbose_name_plural = _('userena registrations')
def __unicode__(self):
return '%s' % self.user.username
def change_email(self, email):
"""
Changes the email address for a user.
A user needs to verify this new email address before it becomes
active. By storing the new email address in a temporary field --
        ``email_unconfirmed`` -- we are able to set this email address after the
user has verified it by clicking on the verification URI in the email.
This email gets send out by ``send_verification_email``.
:param email:
The new email address that the user wants to use.
"""
self.email_unconfirmed = email
salt, hash = generate_sha1(self.user.username)
self.email_confirmation_key = hash
self.email_confirmation_key_created = timezone.now()
self.save()
# Send email for activation
self.send_confirmation_email()
def send_confirmation_email(self):
"""
Sends an email to confirm the new email address.
This method sends out two emails. One to the new email address that
        contains the ``email_confirmation_key`` which is used to verify
        this email address with :func:`UserenaUser.objects.confirm_email`.
The other email is to the old email address to let the user know that
a request is made to change this email address.
"""
context= {'user': self.user,
'new_email': self.email_unconfirmed,
'protocol': get_protocol(),
'confirmation_key': self.email_confirmation_key,
'site': Site.objects.get_current()}
# Email to the old address
subject_old = render_to_string('accounts/emails/confirmation_email_subject_old.txt',
context)
subject_old = ''.join(subject_old.splitlines())
message_old = render_to_string('accounts/emails/confirmation_email_message_old.txt',
context)
send_mail(subject_old,
message_old,
settings.DEFAULT_FROM_EMAIL,
[self.user.email])
# Email to the new address
subject_new = render_to_string('accounts/emails/confirmation_email_subject_new.txt',
context)
subject_new = ''.join(subject_new.splitlines())
message_new = render_to_string('accounts/emails/confirmation_email_message_new.txt',
context)
send_mail(subject_new,
message_new,
settings.DEFAULT_FROM_EMAIL,
[self.email_unconfirmed,])
def activation_key_expired(self):
"""
Checks if activation key is expired.
Returns ``True`` when the ``activation_key`` of the user is expired and
``False`` if the key is still valid.
The key is expired when it's set to the value defined in
``USERENA_ACTIVATED`` or ``activation_key_created`` is beyond the
amount of days defined in ``USERENA_ACTIVATION_DAYS``.
"""
expiration_days = datetime.timedelta(days=userena_settings.USERENA_ACTIVATION_DAYS)
expiration_date = self.user.date_joined + expiration_days
if self.activation_key == userena_settings.USERENA_ACTIVATED:
return True
if timezone.now() >= expiration_date:
return True
return False
def send_activation_email(self, auto_join_secret = False):
"""
        Sends an activation email to the user.
        This email is sent when the user wants to activate their newly
        created account.
"""
if not auto_join_secret:
activation_url = reverse('userena_activate', args=(self.user.username, self.activation_key))
else:
if isinstance(auto_join_secret, basestring):
auto_join_key = auto_join_secret
else:
auto_join_key = hashlib.md5(self.activation_key +
settings.AGORA_API_AUTO_ACTIVATION_SECRET).hexdigest()
activation_url = reverse('auto_join_activate', args=(self.user.username, auto_join_key))
context= {'user': self.user,
'protocol': get_protocol(),
'activation_days': userena_settings.USERENA_ACTIVATION_DAYS,
'activation_url': activation_url,
'site': Site.objects.get_current()}
subject = render_to_string('accounts/emails/activation_email_subject.txt',
context)
subject = ''.join(subject.splitlines())
message = render_to_string('accounts/emails/activation_email_message.txt',
context)
send_mail(subject,
message,
settings.DEFAULT_FROM_EMAIL,
[self.user.email,])
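# Minimal usage sketch of the email-change flow documented above. The username
# and address are hypothetical, and a working Django setup (email backend and
# sites framework) is assumed; this helper is not called anywhere in this app.
def _example_email_change_flow(username='alice', new_email='alice@example.com'):
    signup = UserenaSignup.objects.get(user__username=username)
    # Stores the address in ``email_unconfirmed`` and mails the confirmation key
    signup.change_email(new_email)
    # True if the account is already activated or the activation window passed
    return signup.activation_key_expired()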
class UserenaBaseProfile(models.Model):
""" Base model needed for extra profile functionality """
PRIVACY_CHOICES = (
('open', _('Open')),
('registered', _('Registered')),
('closed', _('Closed')),
)
MUGSHOT_SETTINGS = {'size': (userena_settings.USERENA_MUGSHOT_SIZE,
userena_settings.USERENA_MUGSHOT_SIZE),
'crop': 'smart'}
mugshot = ThumbnailerImageField(_('mugshot'),
blank=True,
upload_to=upload_to_mugshot,
resize_source=MUGSHOT_SETTINGS,
help_text=_('A personal image displayed in your profile.'))
privacy = models.CharField(_('privacy'),
max_length=15,
choices=PRIVACY_CHOICES,
default=userena_settings.USERENA_DEFAULT_PRIVACY,
help_text = _('Designates who can view your profile.'))
objects = UserenaBaseProfileManager()
class Meta:
"""
Meta options making the model abstract and defining permissions.
        The model is ``abstract`` because it only supplies basic functionality
        to a more customized model that extends it. This way no extra join is
        needed.
        We also define custom permissions because we don't know what the model
        that extends this one is going to be called, so we don't know which
        permissions to check. For example, if the user defines a profile model
        called ``MyProfile``, the permissions would be ``add_myprofile`` etc.
        We want to be able to always check ``add_profile``, ``change_profile``
        etc.
"""
abstract = True
permissions = PROFILE_PERMISSIONS
def __unicode__(self):
return 'Profile of %(username)s' % {'username': self.user.username}
def get_mugshot_url(self, custom_size = userena_settings.USERENA_MUGSHOT_SIZE):
"""
        Returns the URL of the mugshot for the user.
        The mugshot can be an uploaded image or a Gravatar.
Gravatar functionality will only be used when
``USERENA_MUGSHOT_GRAVATAR`` is set to ``True``.
:return:
``None`` when Gravatar is not used and no default image is supplied
by ``USERENA_MUGSHOT_DEFAULT``.
"""
# First check for a mugshot and if any return that.
if self.mugshot:
return settings.MEDIA_URL +\
settings.MUGSHOTS_DIR +\
self.mugshot.name.split("/")[-1]
# Use Gravatar if the user wants to.
if userena_settings.USERENA_MUGSHOT_GRAVATAR:
if userena_settings.USERENA_MUGSHOT_DEFAULT == 'blank-unitials-ssl':
d = 'https://unitials.com/mugshot/%s/%s.png' % (
custom_size, self.get_initials()
)
            elif userena_settings.USERENA_MUGSHOT_DEFAULT == 'blank-unitials':
                d = 'http://unitials.com/mugshot/%s/%s.png' % (
                    custom_size, self.get_initials()
                )
            else:
                # Fall back to the configured default so that ``d`` is always
                # defined before it is handed to get_gravatar() below.
                d = userena_settings.USERENA_MUGSHOT_DEFAULT
            return get_gravatar(self.user.email, custom_size, d)
# Gravatar not used, check for a default image.
else:
if userena_settings.USERENA_MUGSHOT_DEFAULT not in ['404', 'mm',
'identicon',
'monsterid',
'wavatar',
'blank']:
return userena_settings.USERENA_MUGSHOT_DEFAULT
else: return None
def get_full_name_or_username(self):
"""
        Returns the full name of the user, or the username if no full name is
        supplied.
        Also looks at the ``USERENA_WITHOUT_USERNAMES`` setting to decide
        whether it should return the username or the email address when the
        full name is not supplied.
:return:
``String`` containing the full name of the user. If no name is
supplied it will return the username or email address depending on
the ``USERENA_WITHOUT_USERNAMES`` setting.
"""
user = self.user
if user.first_name or user.last_name:
# We will return this as translated string. Maybe there are some
# countries that first display the last name.
name = _("%(first_name)s %(last_name)s") % \
{'first_name': user.first_name,
'last_name': user.last_name}
else:
# Fallback to the username if usernames are used
if not userena_settings.USERENA_WITHOUT_USERNAMES:
name = "%(username)s" % {'username': user.username}
else:
name = "%(email)s" % {'email': user.email}
return name.strip()
def can_view_profile(self, user):
"""
Can the :class:`User` view this profile?
Returns a boolean if a user has the rights to view the profile of this
user.
Users are divided into four groups:
``Open``
Everyone can view your profile
``Closed``
Nobody can view your profile.
``Registered``
Users that are registered on the website and signed
in only.
``Admin``
Special cases like superadmin and the owner of the profile.
        Through the ``privacy`` field an owner of a profile can define what
        they want to show to whom.
:param user:
A Django :class:`User` instance.
"""
# Simple cases first, we don't want to waste CPU and DB hits.
# Everyone.
if self.privacy == 'open': return True
# Registered users.
elif self.privacy == 'registered' and isinstance(user, User):
return True
# Checks done by guardian for owner and admins.
elif 'view_profile' in get_perms(user, self):
return True
# Fallback to closed profile.
return False
class UserenaLanguageBaseProfile(UserenaBaseProfile):
"""
    Extends the :class:`UserenaBaseProfile` with a language choice.
    Use this model in combination with ``UserenaLocaleMiddleware`` to
    automatically set the language of users when they are signed in.
"""
language = models.CharField(_('language'),
max_length=5,
choices=settings.LANGUAGES,
default=settings.LANGUAGE_CODE[:2])
class Meta:
abstract = True
permissions = PROFILE_PERMISSIONS
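# Usage sketch (not part of the original module): a concrete profile model is
# expected to subclass one of the abstract bases above and add the link to the
# user model itself. The field name and ``related_name`` below are assumptions
# for illustration only.
#
#   class MyProfile(UserenaLanguageBaseProfile):
#       user = models.OneToOneField(User,
#                                   unique=True,
#                                   verbose_name=_('user'),
#                                   related_name='my_profile')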
|
pirata-cat/agora-ciudadana
|
userena/models.py
|
Python
|
agpl-3.0
| 15,318 | 0.002742 |
from .Node import error
SYNTAX_NODE_SERIALIZATION_CODES = {
# 0 is 'Token'. Needs to be defined manually
# 1 is 'Unknown'. Needs to be defined manually
'UnknownDecl': 2,
'TypealiasDecl': 3,
'AssociatedtypeDecl': 4,
'IfConfigDecl': 5,
'PoundErrorDecl': 6,
'PoundWarningDecl': 7,
'PoundSourceLocation': 8,
'ClassDecl': 9,
'StructDecl': 10,
'ProtocolDecl': 11,
'ExtensionDecl': 12,
'FunctionDecl': 13,
'InitializerDecl': 14,
'DeinitializerDecl': 15,
'SubscriptDecl': 16,
'ImportDecl': 17,
'AccessorDecl': 18,
'VariableDecl': 19,
'EnumCaseDecl': 20,
'EnumDecl': 21,
'OperatorDecl': 22,
'PrecedenceGroupDecl': 23,
'UnknownExpr': 24,
'InOutExpr': 25,
'PoundColumnExpr': 26,
'TryExpr': 27,
'AwaitExpr': 249,
'IdentifierExpr': 28,
'SuperRefExpr': 29,
'NilLiteralExpr': 30,
'DiscardAssignmentExpr': 31,
'AssignmentExpr': 32,
'SequenceExpr': 33,
'PoundLineExpr': 34,
'PoundFileExpr': 35,
'PoundFunctionExpr': 36,
'PoundDsohandleExpr': 37,
'SymbolicReferenceExpr': 38,
'PrefixOperatorExpr': 39,
'BinaryOperatorExpr': 40,
'ArrowExpr': 41,
'FloatLiteralExpr': 42,
'TupleExpr': 43,
'ArrayExpr': 44,
'DictionaryExpr': 45,
'ImplicitMemberExpr': 46,
'IntegerLiteralExpr': 47,
'StringLiteralExpr': 48,
'BooleanLiteralExpr': 49,
'TernaryExpr': 50,
'MemberAccessExpr': 51,
'DotSelfExpr': 52,
'IsExpr': 53,
'AsExpr': 54,
'TypeExpr': 55,
'ClosureExpr': 56,
'UnresolvedPatternExpr': 57,
'FunctionCallExpr': 58,
'SubscriptExpr': 59,
'OptionalChainingExpr': 60,
'ForcedValueExpr': 61,
'PostfixUnaryExpr': 62,
'SpecializeExpr': 63,
'KeyPathExpr': 65,
'KeyPathBaseExpr': 66,
'ObjcKeyPathExpr': 67,
'ObjcSelectorExpr': 68,
'EditorPlaceholderExpr': 69,
'ObjectLiteralExpr': 70,
'UnknownStmt': 71,
'ContinueStmt': 72,
'WhileStmt': 73,
'DeferStmt': 74,
'ExpressionStmt': 75,
'RepeatWhileStmt': 76,
'GuardStmt': 77,
'ForInStmt': 78,
'SwitchStmt': 79,
'DoStmt': 80,
'ReturnStmt': 81,
'FallthroughStmt': 82,
'BreakStmt': 83,
'DeclarationStmt': 84,
'ThrowStmt': 85,
'IfStmt': 86,
'Decl': 87,
'Expr': 88,
'Stmt': 89,
'Type': 90,
'Pattern': 91,
'CodeBlockItem': 92,
'CodeBlock': 93,
'DeclNameArgument': 94,
'DeclNameArguments': 95,
# removed: 'FunctionCallArgument': 96,
'TupleExprElement': 97,
'ArrayElement': 98,
'DictionaryElement': 99,
'ClosureCaptureItem': 100,
'ClosureCaptureSignature': 101,
'ClosureParam': 102,
'ClosureSignature': 103,
'StringSegment': 104,
'ExpressionSegment': 105,
'ObjcNamePiece': 106,
'TypeInitializerClause': 107,
'ParameterClause': 108,
'ReturnClause': 109,
'FunctionSignature': 110,
'IfConfigClause': 111,
'PoundSourceLocationArgs': 112,
'DeclModifier': 113,
'InheritedType': 114,
'TypeInheritanceClause': 115,
'MemberDeclBlock': 116,
'MemberDeclListItem': 117,
'SourceFile': 118,
'InitializerClause': 119,
'FunctionParameter': 120,
'AccessLevelModifier': 121,
'AccessPathComponent': 122,
'AccessorParameter': 123,
'AccessorBlock': 124,
'PatternBinding': 125,
'EnumCaseElement': 126,
'OperatorPrecedenceAndTypes': 127,
'PrecedenceGroupRelation': 128,
'PrecedenceGroupNameElement': 129,
'PrecedenceGroupAssignment': 130,
'PrecedenceGroupAssociativity': 131,
'Attribute': 132,
'LabeledSpecializeEntry': 133,
'ImplementsAttributeArguments': 134,
'ObjCSelectorPiece': 135,
'WhereClause': 136,
'ConditionElement': 137,
'AvailabilityCondition': 138,
'MatchingPatternCondition': 139,
'OptionalBindingCondition': 140,
'ElseIfContinuation': 141,
'ElseBlock': 142,
'SwitchCase': 143,
'SwitchDefaultLabel': 144,
'CaseItem': 145,
'SwitchCaseLabel': 146,
'CatchClause': 147,
'GenericWhereClause': 148,
'SameTypeRequirement': 149,
'GenericParameter': 150,
'GenericParameterClause': 151,
'ConformanceRequirement': 152,
'CompositionTypeElement': 153,
'TupleTypeElement': 154,
'GenericArgument': 155,
'GenericArgumentClause': 156,
'TypeAnnotation': 157,
'TuplePatternElement': 158,
'AvailabilityArgument': 159,
'AvailabilityLabeledArgument': 160,
'AvailabilityVersionRestriction': 161,
'VersionTuple': 162,
'CodeBlockItemList': 163,
# removed: 'FunctionCallArgumentList': 164,
'TupleExprElementList': 165,
'ArrayElementList': 166,
'DictionaryElementList': 167,
'StringLiteralSegments': 168,
'DeclNameArgumentList': 169,
'ExprList': 170,
'ClosureCaptureItemList': 171,
'ClosureParamList': 172,
'ObjcName': 173,
'FunctionParameterList': 174,
'IfConfigClauseList': 175,
'InheritedTypeList': 176,
'MemberDeclList': 177,
'ModifierList': 178,
'AccessPath': 179,
'AccessorList': 180,
'PatternBindingList': 181,
'EnumCaseElementList': 182,
'PrecedenceGroupAttributeList': 183,
'PrecedenceGroupNameList': 184,
'TokenList': 185,
'NonEmptyTokenList': 186,
'AttributeList': 187,
'SpecializeAttributeSpecList': 188,
'ObjCSelector': 189,
'SwitchCaseList': 190,
'CatchClauseList': 191,
'CaseItemList': 192,
'ConditionElementList': 193,
'GenericRequirementList': 194,
'GenericParameterList': 195,
'CompositionTypeElementList': 196,
'TupleTypeElementList': 197,
'GenericArgumentList': 198,
'TuplePatternElementList': 199,
'AvailabilitySpecList': 200,
'UnknownPattern': 201,
'EnumCasePattern': 202,
'IsTypePattern': 203,
'OptionalPattern': 204,
'IdentifierPattern': 205,
'AsTypePattern': 206,
'TuplePattern': 207,
'WildcardPattern': 208,
'ExpressionPattern': 209,
'ValueBindingPattern': 210,
'UnknownType': 211,
'SimpleTypeIdentifier': 212,
'MemberTypeIdentifier': 213,
'ClassRestrictionType': 214,
'ArrayType': 215,
'DictionaryType': 216,
'MetatypeType': 217,
'OptionalType': 218,
'ImplicitlyUnwrappedOptionalType': 219,
'CompositionType': 220,
'TupleType': 221,
'FunctionType': 222,
'AttributedType': 223,
'YieldStmt': 224,
'YieldList': 225,
'IdentifierList': 226,
'NamedAttributeStringArgument': 227,
'DeclName': 228,
'PoundAssertStmt': 229,
'SomeType': 230,
'CustomAttribute': 231,
'GenericRequirement': 232,
'DifferentiableAttributeArguments': 233,
'DifferentiabilityParamsClause': 234,
'DifferentiabilityParams': 235,
'DifferentiabilityParamList': 236,
'DifferentiabilityParam': 237,
# removed: 'DifferentiableAttributeFuncSpecifier': 238,
'FunctionDeclName': 239,
'PoundFilePathExpr': 240,
'DerivativeRegistrationAttributeArguments': 241,
'QualifiedDeclName': 242,
'CatchItem': 243,
'CatchItemList': 244,
'MultipleTrailingClosureElementList': 245,
'MultipleTrailingClosureElement': 246,
'PoundFileIDExpr': 247,
'TargetFunctionEntry': 248,
}
def verify_syntax_node_serialization_codes(nodes, serialization_codes):
# Verify that all nodes have serialization codes
for node in nodes:
if not node.is_base() and node.syntax_kind not in serialization_codes:
error('Node %s has no serialization code' % node.syntax_kind)
# Verify that no serialization code is used twice
used_codes = set()
for serialization_code in serialization_codes.values():
if serialization_code in used_codes:
error("Serialization code %d used twice" % serialization_code)
used_codes.add(serialization_code)
def get_serialization_code(syntax_kind):
return SYNTAX_NODE_SERIALIZATION_CODES[syntax_kind]
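if __name__ == '__main__':
    # Minimal self-check sketch (not part of the original gyb support code).
    # The real callers pass Node objects from gyb_syntax_support; _FakeNode is
    # a stand-in defined here purely for illustration, and the relative import
    # at the top of this module means the file is normally used as a package
    # module rather than run directly.
    class _FakeNode(object):
        def __init__(self, syntax_kind, is_base=False):
            self.syntax_kind = syntax_kind
            self._is_base = is_base

        def is_base(self):
            return self._is_base

    nodes = [_FakeNode('ClassDecl'), _FakeNode('Decl', is_base=True)]
    # error() is only called if a non-base node is missing a code or if a code
    # appears twice in the table above.
    verify_syntax_node_serialization_codes(nodes, SYNTAX_NODE_SERIALIZATION_CODES)
    print(get_serialization_code('ClassDecl'))  # -> 9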
|
atrick/swift
|
utils/gyb_syntax_support/NodeSerializationCodes.py
|
Python
|
apache-2.0
| 7,910 | 0 |
# Copyright 2021 Tecnativa - Sergio Teruel
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Account Invoice Margin Sale Delivered Sync",
"summary": "Sync invoice margin between invoices and sale orders",
"version": "12.0.1.0.1",
"development_status": "Beta",
"maintainers": ["sergio-teruel"],
"category": "Account",
"website": "https://github.com/OCA/margin-analysis",
"author": "Tecnativa, "
"Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"sale_margin_delivered",
"account_invoice_margin_sale",
],
}
|
OCA/margin-analysis
|
account_invoice_margin_sale_delivered_sync/__manifest__.py
|
Python
|
agpl-3.0
| 676 | 0 |
'''
Contains Vmstat() class
Typical contents of vmstat file::
nr_free_pages 1757414
nr_inactive_anon 2604
nr_active_anon 528697
nr_inactive_file 841209
nr_active_file 382447
nr_unevictable 7836
nr_mlock 7837
nr_anon_pages 534070
nr_mapped 76013
nr_file_pages 1228693
nr_dirty 21
nr_writeback 0
nr_slab_reclaimable 511040
nr_slab_unreclaimable 13487
nr_page_table_pages 13920
nr_kernel_stack 809
nr_unstable 0
nr_bounce 0
nr_vmscan_write 0
nr_vmscan_immediate_reclaim 0
nr_writeback_temp 0
nr_isolated_anon 0
nr_isolated_file 0
nr_shmem 3583
nr_dirtied 1034714
nr_written 972154
numa_hit 29109076
numa_miss 0
numa_foreign 0
numa_interleave 11066
numa_local 29109076
numa_other 0
nr_anon_transparent_hugepages 0
nr_dirty_threshold 347004
nr_dirty_background_threshold 173502
pgpgin 6038832
pgpgout 6412006
pswpin 0
pswpout 0
pgalloc_dma 0
pgalloc_dma32 51
pgalloc_normal 30639735
pgalloc_movable 0
pgfree 32398292
pgactivate 2344853
pgdeactivate 1
pgfault 37440670
pgmajfault 3319
pgrefill_dma 0
pgrefill_dma32 0
pgrefill_normal 0
pgrefill_movable 0
pgsteal_kswapd_dma 0
pgsteal_kswapd_dma32 0
pgsteal_kswapd_normal 0
pgsteal_kswapd_movable 0
pgsteal_direct_dma 0
pgsteal_direct_dma32 0
pgsteal_direct_normal 0
pgsteal_direct_movable 0
pgscan_kswapd_dma 0
pgscan_kswapd_dma32 0
pgscan_kswapd_normal 0
pgscan_kswapd_movable 0
pgscan_direct_dma 0
pgscan_direct_dma32 0
pgscan_direct_normal 0
pgscan_direct_movable 0
zone_reclaim_failed 0
pginodesteal 0
slabs_scanned 0
kswapd_inodesteal 0
kswapd_low_wmark_hit_quickly 0
kswapd_high_wmark_hit_quickly 0
kswapd_skip_congestion_wait 0
pageoutrun 1
allocstall 0
pgrotated 23
compact_blocks_moved 0
compact_pages_moved 0
compact_pagemigrate_failed 0
compact_stall 0
compact_fail 0
compact_success 0
htlb_buddy_alloc_success 0
htlb_buddy_alloc_fail 0
unevictable_pgs_culled 8305
unevictable_pgs_scanned 0
unevictable_pgs_rescued 6377
unevictable_pgs_mlocked 15565
unevictable_pgs_munlocked 7197
unevictable_pgs_cleared 0
unevictable_pgs_stranded 0
unevictable_pgs_mlockfreed 0
thp_fault_alloc 0
thp_fault_fallback 0
thp_collapse_alloc 0
thp_collapse_alloc_failed 0
thp_split 0
'''
from logging import getLogger
from os import path as ospath
from .readfile import ReadFile
LOGGER = getLogger(__name__)
class VMstat(ReadFile):
'''
VMstat handling
'''
FILENAME = ospath.join('proc', 'vmstat')
KEY = 'vmstat'
def normalize(self):
'''
        Translates the file data into a dictionary.
        The vmstat file is a series of ``key value`` records separated by a
        single space.
'''
LOGGER.debug("Normalize")
lines = self.lines
ret = {}
for line in lines:
top, tail = line.split()
ret[top.strip()] = int(tail.strip())
return ret
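if __name__ == '__main__':
    # Minimal illustration (not part of the original module): normalize()
    # splits each "key value" line on whitespace and builds an int-valued
    # dictionary. Constructing a real VMstat object depends on the ReadFile
    # base class and on /proc/vmstat, so this just mirrors the parsing step
    # on two hypothetical sample lines.
    sample_lines = ['nr_free_pages 1757414', 'pgpgin 6038832']
    parsed = {}
    for sample_line in sample_lines:
        key, value = sample_line.split()
        parsed[key.strip()] = int(value.strip())
    print(parsed)  # {'nr_free_pages': 1757414, 'pgpgin': 6038832}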
|
eccles/lnxproc
|
lnxproc/vmstat.py
|
Python
|
mit
| 3,041 | 0 |
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from .pladform import PladformIE
from ..utils import (
unescapeHTML,
int_or_none,
ExtractorError,
)
class METAIE(InfoExtractor):
_VALID_URL = r'https?://video\.meta\.ua/(?:iframe/)?(?P<id>[0-9]+)'
_TESTS = [{
'url': 'http://video.meta.ua/5502115.video',
'md5': '71b6f3ee274bef16f1ab410f7f56b476',
'info_dict': {
'id': '5502115',
'ext': 'mp4',
'title': 'Sony Xperia Z camera test [HQ]',
'description': 'Xperia Z shoots video in FullHD HDR.',
'uploader_id': 'nomobile',
'uploader': 'CHЁZA.TV',
'upload_date': '20130211',
},
'add_ie': ['Youtube'],
}, {
'url': 'http://video.meta.ua/iframe/5502115',
'only_matching': True,
}, {
# pladform embed
'url': 'http://video.meta.ua/7121015.video',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
st_html5 = self._search_regex(
r"st_html5\s*=\s*'#([^']+)'", webpage, 'uppod html5 st', default=None)
if st_html5:
# uppod st decryption algorithm is reverse engineered from function un(s) at uppod.js
json_str = ''
for i in range(0, len(st_html5), 3):
                json_str += '&#%s;' % st_html5[i:i + 3]
uppod_data = self._parse_json(unescapeHTML(json_str), video_id)
error = uppod_data.get('customnotfound')
if error:
raise ExtractorError('%s said: %s' % (self.IE_NAME, error), expected=True)
video_url = uppod_data['file']
info = {
'id': video_id,
'url': video_url,
'title': uppod_data.get('comment') or self._og_search_title(webpage),
'description': self._og_search_description(webpage, default=None),
'thumbnail': uppod_data.get('poster') or self._og_search_thumbnail(webpage),
'duration': int_or_none(self._og_search_property(
'video:duration', webpage, default=None)),
}
if 'youtube.com/' in video_url:
info.update({
'_type': 'url_transparent',
'ie_key': 'Youtube',
})
return info
pladform_url = PladformIE._extract_url(webpage)
if pladform_url:
return self.url_result(pladform_url)
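# Decoding sketch (illustrative comment, not part of the original extractor):
# st_html5 is a long run of 3-digit decimal character codes. The loop in
# _real_extract() turns every 3-character chunk into an HTML numeric entity,
# unescapeHTML() then resolves the entities, and the result is parsed as JSON.
# For a hypothetical value '123034102034058' the steps would be:
#     chunks   -> 123, 034, 102, 034, 058
#     entities -> '&#123;&#034;&#102;&#034;&#058;'
#     unescape -> '{"f":'
#     ...which is then fed to _parse_json() once the full object is built.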
|
valmynd/MediaFetcher
|
src/plugins/youtube_dl/youtube_dl/extractor/meta.py
|
Python
|
gpl-3.0
| 2,155 | 0.027855 |
# Fork: https://github.com/fabioz/u-msgpack-python
#
'''
This module provides a way to do full-duplex communication over a socket with umsgpack_s.
Basic usage is:
# Create our server handler (must handle decoded messages)
class ServerHandler(ConnectionHandler, UMsgPacker):
def _handle_decoded(self, decoded):
# Some message was received from the client in the server.
if decoded == 'echo':
# Actual implementations may want to put that in a queue and have an additional
# thread to check the queue and handle what was received and send the results back.
self.send('echo back')
def send(self, obj):
# Send a message to the client
self.connection.sendall(self.pack_obj(obj))
# Start the server
server = umsgpack_s_conn.Server(ServerHandler)
server.serve_forever('127.0.0.1', 0, block=True)
port = server.get_port() # Port only available after socket is created
...
On the client side:
class ClientHandler(ConnectionHandler, UMsgPacker):
def _handle_decoded(self, decoded):
print('Client received: %s' % (decoded,))
client = umsgpack_s_conn.Client('127.0.0.1', port, ClientHandler)
# Note, as above, actual implementations may want to put that in a queue and have an additional
# thread do the actual send.
client.send('echo')
@license: MIT
@author: Fabio Zadrozny
'''
from mu_repo import umsgpack_s
import binascii
import select
import socket
import struct
import sys
import threading
import weakref
try:
basestring
except:
basestring = str
_as_bytes = umsgpack_s._as_bytes
DEBUG = 0 # > 3 to see actual messages
BUFFER_SIZE = 1024 * 8
MAX_INT32 = 2147483647 # ((2** 32) -1)
def get_free_port():
'''
    Helper to get a free port (usually not needed as the server can receive '0'
    to bind to a free port chosen by the OS).
'''
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('127.0.0.1', 0))
_, port = s.getsockname()
s.close()
return port
def wait_for_condition(condition, timeout=2.):
'''
Helper to wait for a condition with a timeout.
    :param float timeout:
        Timeout to reach the condition (in seconds).
    :return bool:
        False if the condition wasn't satisfied before the timeout and True if
        it was.
'''
import time
initial = time.time()
while not condition():
if time.time() - initial > timeout:
return False
time.sleep(.01)
return True
def assert_waited_condition(condition, timeout=2.):
'''
Helper to wait for a condition with a timeout.
:param callable condition:
A callable that returns either a True/False boolean (where True indicates the condition was
reached) or a string (where an empty string means the condition was reached or a non-empty
string to show some message to the user regarding the failure).
    :param float timeout:
        Timeout to reach the condition (in seconds).
'''
import time
initial = time.time()
while True:
c = condition()
if isinstance(c, bool):
if c:
return
elif isinstance(c, basestring):
if not c:
return
else:
raise AssertionError('Expecting bool or string as the return.')
if time.time() - initial > timeout:
raise AssertionError(
u'Could not reach condition before timeout: %s (condition return: %s)' %
(timeout, c))
time.sleep(.01)
class Server(object):
def __init__(self, connection_handler_class=None, params=(), thread_name='', thread_class=None):
if thread_class is None:
thread_class = threading.Thread
self._thread_class = thread_class
if connection_handler_class is None:
connection_handler_class = EchoHandler
self.connection_handler_class = connection_handler_class
self._params = params
self._block = None
self._shutdown_event = threading.Event()
self._thread_name = thread_name
def serve_forever(self, host, port, block=False):
if self._block is not None:
raise AssertionError(
'Server already started. Please create new one instead of trying to reuse.')
if not block:
self.thread = self._thread_class(target=self._serve_forever, args=(host, port))
self.thread.setDaemon(True)
if self._thread_name:
self.thread.setName(self._thread_name)
self.thread.start()
else:
self._serve_forever(host, port)
self._block = block
def is_alive(self):
if self._block is None:
return False
sock = getattr(self, '_sock', None)
return sock is not None
def get_port(self):
'''
Note: only available after socket is already connected. Raises AssertionError if it's not
connected at this point.
'''
wait_for_condition(lambda: hasattr(self, '_sock'), timeout=5.0)
return self._sock.getsockname()[1]
def shutdown(self):
if DEBUG:
sys.stderr.write('Shutting down server.\n')
self._shutdown_event.set()
sock = getattr(self, '_sock', None)
if sock is not None:
self._sock = None
try:
sock.shutdown(socket.SHUT_RDWR)
except:
pass
try:
sock.close()
except:
pass
def after_bind_socket(self, host, port):
'''
Clients may override to do something after the host/port is bound.
'''
def _serve_forever(self, host, port):
if DEBUG:
sys.stderr.write('Listening at: %s (%s)\n' % (host, port))
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# We should cleanly call shutdown, but just in case let's set to reuse the address.
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port))
sock.listen(5) # Request queue size
self._sock = sock
self.after_bind_socket(host, self.get_port())
connections = []
try:
while not self._shutdown_event.is_set():
sock = self._sock
if sock is None:
break
# Will block until available (no timeout). If closed returns properly.
try:
fd_sets = select.select([sock], [], [])
except:
break # error: (9, 'Bad file descriptor')
if DEBUG:
sys.stderr.write('Select returned: %s\n' % fd_sets[0])
if self._shutdown_event.is_set():
break
sock = self._sock
if sock is None:
break
if fd_sets[0]:
connection, _client_address = sock.accept()
if DEBUG:
sys.stderr.write('Accepted socket.\n')
try:
connection_handler = self.connection_handler_class(
connection,
*self._params)
connections.append(weakref.ref(connection))
connection_handler.start()
except:
import traceback
traceback.print_exc()
finally:
if DEBUG:
sys.stderr.write('Exited _serve_forever.\n')
for c in connections:
c = c()
if c is not None:
try:
c.shutdown(socket.SHUT_RDWR)
except:
pass
try:
c.close()
except:
pass
self.shutdown()
class UMsgPacker(object):
'''
Helper to pack some object as bytes to the socket.
'''
def pack_obj(self, obj):
'''
        Packs the object with umsgpack_s, prepends the payload size in bytes
        (as a 4-byte little-endian integer) and returns the framed message to
        be sent on the socket.
:param object obj:
The object to be packed.
'''
msg = umsgpack_s.packb(obj)
assert msg.__len__() < MAX_INT32, 'Message from object received is too big: %s bytes' % (
msg.__len__(),)
msg_len_in_bytes = struct.pack("<I", msg.__len__())
        return msg_len_in_bytes + msg
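# Wire-format note (illustrative, not part of the original module): pack_obj()
# frames every message as a 4-byte little-endian length prefix followed by the
# msgpack payload. For a 5-byte payload the frame therefore looks like
#     struct.pack("<I", 5) + payload  ->  b'\x05\x00\x00\x00' + payload
# and ConnectionHandler.run() reads the same prefix back with
# struct.unpack("<I", ...) to know how many payload bytes to expect.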
class Client(UMsgPacker):
def __init__(self, host, port, connection_handler_class=None):
'''
:param connection_handler_class: if passed, this is a full-duplex communication (so, handle
incoming requests from server).
'''
if DEBUG:
sys.stderr.write('Connecting to server at: %s (%s)\n' % (host, port))
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._sock.connect((host, port))
if connection_handler_class:
connection_handler = self.connection_handler = connection_handler_class(self._sock)
connection_handler.start()
def get_host_port(self):
try:
return self._sock.getsockname()
except:
return None, None
def is_alive(self):
try:
self._sock.getsockname()
return True
except:
return False
def send(self, obj):
s = self._sock
if s is None:
raise RuntimeError('Connection already closed')
self._sock.sendall(self.pack_obj(obj))
def shutdown(self):
s = self._sock
if self._sock is None:
return
self._sock = None
try:
s.shutdown(socket.SHUT_RDWR)
except:
pass
try:
s.close()
except:
pass
class ConnectionHandler(threading.Thread, UMsgPacker):
def __init__(self, connection, **kwargs):
threading.Thread.__init__(self, **kwargs)
self.setDaemon(True)
self.connection = connection
try:
connection.settimeout(None) # No timeout
except:
pass
def run(self):
data = _as_bytes('')
number_of_bytes = 0
try:
while True:
# I.e.: check if the remaining bytes from our last recv already contained
# a new message.
if number_of_bytes == 0 and data.__len__() >= 4:
number_of_bytes = data[
:4] # first 4 bytes say the number_of_bytes of the message
number_of_bytes = struct.unpack("<I", number_of_bytes)[0]
assert number_of_bytes >= 0, 'Error: wrong message received. Shutting down connection!'
data = data[4:] # The remaining is the actual data
while not data or number_of_bytes == 0 or data.__len__() < number_of_bytes:
if DEBUG > 3:
sys.stderr.write('%s waiting to receive.\n' % (self,))
try:
# It's usually waiting here: when the remote side disconnects, that's
# where we get an exception.
rec = self.connection.recv(BUFFER_SIZE)
if len(rec) == 0:
if DEBUG:
sys.stderr.write('Disconnected (socket closed).\n')
return
except:
if DEBUG:
sys.stderr.write('Disconnected.\n')
return
if DEBUG > 3:
sys.stderr.write('%s received: %s\n' % (self, binascii.b2a_hex(rec)))
data += rec
if not number_of_bytes and data.__len__() >= 4:
number_of_bytes = data[
:4] # first 4 bytes say the number_of_bytes of the message
number_of_bytes = struct.unpack("<I", number_of_bytes)[0]
assert number_of_bytes >= 0, 'Error: wrong message received. Shutting down connection!'
data = data[4:] # The remaining is the actual data
if DEBUG:
sys.stderr.write('Number of bytes expected: %s\n' % number_of_bytes)
sys.stderr.write('Current data len: %s\n' % data.__len__())
msg = data[:number_of_bytes]
data = data[number_of_bytes:] # Keep the remaining for the next message
number_of_bytes = 0
self._handle_msg(msg)
finally:
try:
self.connection.shutdown(socket.SHUT_RDWR)
except:
pass
try:
self.connection.close()
except:
pass
def _handle_msg(self, msg_as_bytes):
if DEBUG > 3:
sys.stderr.write('%s handling message: %s\n' % (self, binascii.b2a_hex(msg_as_bytes)))
decoded = umsgpack_s.unpackb(msg_as_bytes)
self._handle_decoded(decoded)
def _handle_decoded(self, decoded):
pass
class EchoHandler(ConnectionHandler):
def _handle_decoded(self, decoded):
sys.stdout.write('%s\n' % (decoded,))
if __name__ == '__main__':
# Simple example of client-server.
import time
class ServerHandler(ConnectionHandler, UMsgPacker):
def _handle_decoded(self, decoded):
# Some message was received from the client in the server.
if decoded == 'echo':
# Actual implementations may want to put that in a queue and have an additional
# thread to check the queue and handle what was received and send the results back.
self.send('echo back')
def send(self, obj):
# Send a message to the client
self.connection.sendall(self.pack_obj(obj))
# Start the server
server = Server(ServerHandler)
# Note: not blocking means it'll start in another thread
server.serve_forever('127.0.0.1', 0, block=False)
time.sleep(2) # Wait for the other thread to actually start the server.
port = server.get_port() # Port only available after socket is created
received = [False]
class ClientHandler(ConnectionHandler, UMsgPacker):
def _handle_decoded(self, decoded):
print('Client received: %s' % (decoded,))
received[0] = True
client = Client('127.0.0.1', port, ClientHandler)
# Note, as above, actual implementations may want to put that in a queue and have an additional
# thread do the actual send.
client.send('echo')
assert_waited_condition(lambda: received[0])
|
fabioz/mu-repo
|
mu_repo/umsgpack_s_conn.py
|
Python
|
gpl-3.0
| 15,598 | 0.003526 |
# -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os,xbmc,xbmcaddon,xbmcplugin,xbmcgui,xbmcvfs
import base64, jsunpack
import random, time
tmdb_key = jsunpack.jsunpack_keys()
tvdb_key = base64.urlsafe_b64decode('MUQ2MkYyRjkwMDMwQzQ0NA==')
fanarttv_key = base64.urlsafe_b64decode('YTc4YzhmZWRjN2U3NTE1MjRkMzkyNmNhMmQyOTU3OTg=')
trakt_key = base64.urlsafe_b64decode('NDFjYzI1NjY5Y2Y2OTc0NTg4ZjA0MTMxYjcyZjc4MjEwMzdjY2I1ZTdlMjMzNDVjN2MxZTk3NGI4MGI5ZjI1NQ==')
trakt_secret = base64.urlsafe_b64decode('Y2I4OWExYTViN2ZlYmJiMDM2NmQ3Y2EyNzJjZDc4YTU5MWQ1ODI2Y2UyMTQ1NWVmYzE1ZDliYzQ1ZWNjY2QyZQ==')
scriptID = 'plugin.video.pancas'
##scriptIDMedia = 'script.pancas.media'
ptv = xbmcaddon.Addon(scriptID)
lang = xbmcaddon.Addon().getLocalizedString
setting = xbmcaddon.Addon().getSetting
addon = xbmcaddon.Addon
addItem = xbmcplugin.addDirectoryItem
item = xbmcgui.ListItem
directory = xbmcplugin.endOfDirectory
content = xbmcplugin.setContent
property = xbmcplugin.setProperty
addonInfo = xbmcaddon.Addon().getAddonInfo
##addonInfoMedia = xbmcaddon.Addon(scriptIDMedia).getAddonInfo
infoLabel = xbmc.getInfoLabel
condVisibility = xbmc.getCondVisibility
jsonrpc = xbmc.executeJSONRPC
window = xbmcgui.Window(10000)
dialog = xbmcgui.Dialog()
progressDialog = xbmcgui.DialogProgress()
windowDialog = xbmcgui.WindowDialog()
button = xbmcgui.ControlButton
image = xbmcgui.ControlImage
keyboard = xbmc.Keyboard
sleep = xbmc.sleep
execute = xbmc.executebuiltin
skin = xbmc.getSkinDir()
player = xbmc.Player()
playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
resolve = xbmcplugin.setResolvedUrl
openFile = xbmcvfs.File
makeFile = xbmcvfs.mkdir
deleteFile = xbmcvfs.delete
listDir = xbmcvfs.listdir
transPath = xbmc.translatePath
skinPath = xbmc.translatePath('special://skin/')
addonPath = xbmc.translatePath(addonInfo('path'))
##addonPathMedia = xbmc.translatePath(addonInfoMedia('path'))
dataPath = xbmc.translatePath(addonInfo('profile')).decode('utf-8')
settingsFile = os.path.join(dataPath, 'settings.xml')
databaseFile = os.path.join(dataPath, 'settings.db')
favouritesFile = os.path.join(dataPath, 'favourites.db')
sourcescacheFile = os.path.join(dataPath, 'sources.db')
sourcescachedUrl = os.path.join(dataPath, 'sourcesurl.db')
cachemetaFile = os.path.join(dataPath, 'metacache.db')
libcacheFile = os.path.join(dataPath, 'library.db')
metacacheFile = os.path.join(dataPath, 'meta.db')
cacheFile = os.path.join(dataPath, 'cache.db')
cookieDir = os.path.join(dataPath, 'Cookies')
progressDialogBG = xbmcgui.DialogProgressBG()
info_lang = xbmc.getLanguage(xbmc.ISO_639_1)
try:
makeFile(cookieDir)
except:
pass
def addonIcon():
appearance = setting('appearance').lower()
if appearance in ['-', '']: return addonInfo('icon')
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'icon.png')
def addonPoster():
appearance = setting('appearance').lower()
if appearance in ['-', '']: return 'DefaultVideo.png'
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'poster.png')
def addonBanner():
appearance = setting('appearance').lower()
if appearance in ['-', '']: return 'DefaultVideo.png'
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'banner.png')
def addonThumb():
appearance = setting('appearance').lower()
if appearance == '-': return 'DefaultFolder.png'
elif appearance == '': return addonInfo('icon')
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'icon.png')
def addonFanart():
appearance = setting('appearance').lower()
if appearance == '-': return None
elif appearance == '': return addonInfo('fanart')
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'fanart.jpg')
def addonNext():
appearance = setting('appearance').lower()
if appearance in ['-', '']: return 'DefaultFolderBack.png'
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance, 'next.jpg')
def artPath():
appearance = setting('appearance').lower()
if appearance in ['-', '']: return None
else: return os.path.join(addonPathMedia, 'resources', 'media', appearance)
def infoDialog(message, heading=addonInfo('name'), icon=addonIcon(), time=3000):
try: dialog.notification(heading, message, icon, time, sound=False)
except: execute("Notification(%s,%s, %s, %s)" % (heading, message, time, icon))
def yesnoDialog(line1, line2, line3, heading=addonInfo('name'), nolabel='', yeslabel=''):
return dialog.yesno(heading, line1, line2, line3, nolabel, yeslabel)
def selectDialog(list, heading=addonInfo('name')):
return dialog.select(heading, list)
def version():
num = ''
try: version = addon('xbmc.addon').getAddonInfo('version')
except: version = '999'
for i in version:
if i.isdigit(): num += i
else: break
return int(num)
def refresh():
return execute('Container.Refresh')
def idle():
return execute('Dialog.Close(busydialog)')
def queueItem():
return execute('Action(Queue)')
def openPlaylist():
return execute('ActivateWindow(VideoPlaylist)')
def openSettings(query=None, id=addonInfo('id')):
try:
idle()
execute('Addon.OpenSettings(%s)' % id)
if query == None: raise Exception()
c, f = query.split('.')
execute('SetFocus(%i)' % (int(c) + 100))
execute('SetFocus(%i)' % (int(f) + 200))
except:
return
def set_setting(id, value):
if not isinstance(value, basestring): value = str(value)
ptv.setSetting(id=id, value=value)
def log(msg, level=xbmc.LOGNOTICE):
#return
level = xbmc.LOGNOTICE
print('[SPECTO]: %s' % (msg))
try:
if isinstance(msg, unicode):
msg = msg.encode('utf-8')
xbmc.log('[SPECTO]: %s' % (msg), level)
except Exception as e:
try:
#xbmc.log('Logging Failure: %s' % (e), level)
a=1
except: pass # just give up
def randomagent():
BR_VERS = [
['%s.0' % i for i in xrange(18, 43)],
['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.2125.111', '39.0.2171.71', '39.0.2171.95', '39.0.2171.99', '40.0.2214.93', '40.0.2214.111',
'40.0.2214.115', '42.0.2311.90', '42.0.2311.135', '42.0.2311.152', '43.0.2357.81', '43.0.2357.124', '44.0.2403.155', '44.0.2403.157', '45.0.2454.101', '45.0.2454.85', '46.0.2490.71',
'46.0.2490.80', '46.0.2490.86', '47.0.2526.73', '47.0.2526.80'],
['11.0']]
WIN_VERS = ['Windows NT 10.0', 'Windows NT 7.0', 'Windows NT 6.3', 'Windows NT 6.2', 'Windows NT 6.1', 'Windows NT 6.0', 'Windows NT 5.1', 'Windows NT 5.0']
FEATURES = ['; WOW64', '; Win64; IA64', '; Win64; x64', '']
RAND_UAS = ['Mozilla/5.0 ({win_ver}{feature}; rv:{br_ver}) Gecko/20100101 Firefox/{br_ver}',
'Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{br_ver} Safari/537.36',
'Mozilla/5.0 ({win_ver}{feature}; Trident/7.0; rv:{br_ver}) like Gecko']
index = random.randrange(len(RAND_UAS))
return RAND_UAS[index].format(win_ver=random.choice(WIN_VERS), feature=random.choice(FEATURES), br_ver=random.choice(BR_VERS[index]))
DEFAULT_TIMEOUT = 30
BR_VERS = [
['%s.0' % i for i in xrange(18, 43)],
['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.2125.111', '39.0.2171.71', '39.0.2171.95', '39.0.2171.99', '40.0.2214.93', '40.0.2214.111',
'40.0.2214.115', '42.0.2311.90', '42.0.2311.135', '42.0.2311.152', '43.0.2357.81', '43.0.2357.124', '44.0.2403.155', '44.0.2403.157', '45.0.2454.101', '45.0.2454.85', '46.0.2490.71',
'46.0.2490.80', '46.0.2490.86', '47.0.2526.73', '47.0.2526.80'],
['11.0']]
WIN_VERS = ['Windows NT 10.0', 'Windows NT 7.0', 'Windows NT 6.3', 'Windows NT 6.2', 'Windows NT 6.1', 'Windows NT 6.0', 'Windows NT 5.1', 'Windows NT 5.0']
FEATURES = ['; WOW64', '; Win64; IA64', '; Win64; x64', '']
RAND_UAS = ['Mozilla/5.0 ({win_ver}{feature}; rv:{br_ver}) Gecko/20100101 Firefox/{br_ver}',
'Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{br_ver} Safari/537.36',
'Mozilla/5.0 ({win_ver}{feature}; Trident/7.0; rv:{br_ver}) like Gecko']
MAX_RESPONSE = 1024 * 1024 * 2
USER_AGENT = "Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko"
def get_ua():
try: last_gen = int(setting('last_ua_create'))
except: last_gen = 0
if not setting('current_ua') or last_gen < (time.time() - (7 * 24 * 60 * 60)):
index = random.randrange(len(RAND_UAS))
user_agent = RAND_UAS[index].format(win_ver=random.choice(WIN_VERS), feature=random.choice(FEATURES), br_ver=random.choice(BR_VERS[index]))
log('Creating New User Agent: %s' % (user_agent))
set_setting('current_ua', user_agent)
set_setting('last_ua_create', str(int(time.time())))
else:
user_agent = setting('current_ua')
return user_agent
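# Usage sketch (illustrative only): get_ua() caches the generated string in the
# 'current_ua' add-on setting and only regenerates it when 'last_ua_create' is
# older than 7 days (7 * 24 * 60 * 60 seconds), so repeated calls within a week
# return the same user agent, e.g.:
#     headers = {'User-Agent': get_ua()}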
|
repotvsupertuga/repo
|
plugin.video.pancas/resources/lib/libraries/control.py
|
Python
|
gpl-2.0
| 9,795 | 0.00827 |
# -*- coding: utf-8 -*-
#
# python-problem documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 4 12:03:58 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('../problem/.libs')) # _pyabrt
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'abrt-python'
copyright = u'2012, Richard Marko'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'abrt-pythondoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'abrt-python.tex', u'abrt-python Documentation',
u'Richard Marko', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'abrt-python', u'abrt-python Documentation',
[u'Richard Marko'], 5)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'abrt-python', u'abrt-python Documentation',
u'Richard Marko', 'abrt-python', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
def setup(app):
app.connect('autodoc-process-signature', process_signature)
def process_signature(app, what, name, obj, options, signature,
return_annotation):
    if what not in ('function',):
return
new_params = list()
for param in (x.strip() for x in signature[1:-1].split(',')):
if '__' not in param:
new_params.append(param)
return ('(%s)' % ', '.join(new_params), return_annotation)
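if __name__ == '__main__':
    # Quick illustration (not part of the generated Sphinx configuration): the
    # signature filter above drops every parameter whose name contains '__'
    # while keeping ordinary parameters. The signature string below is a
    # hypothetical example.
    print(process_signature(None, 'function', 'demo', None, None,
                            '(analyzer, __problem_dir, reason)', None))
    # -> ('(analyzer, reason)', None)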
|
mhabrnal/abrt
|
src/python-problem/doc/conf.py
|
Python
|
gpl-2.0
| 8,344 | 0.00755 |
from django.contrib.auth.decorators import (
login_required, permission_required
)
from django.core.urlresolvers import reverse
from django.db import IntegrityError
from django.shortcuts import render
from django.utils.translation import ugettext as _, ungettext
import reversion
from modoboa.lib import events
from modoboa.lib.exceptions import PermDeniedException, Conflict
from modoboa.lib.web_utils import render_to_json_response
from ..forms import AliasForm
from ..models import Alias
def _validate_alias(request, form, successmsg, callback=None):
"""Alias validation
Common function shared between creation and modification actions.
"""
if form.is_valid():
form.set_recipients()
try:
alias = form.save()
except IntegrityError:
raise Conflict(_("Alias with this name already exists"))
if callback:
callback(request.user, alias)
return render_to_json_response(successmsg)
return render_to_json_response({'form_errors': form.errors}, status=400)
def _new_alias(request, title, action, successmsg,
tplname="modoboa_admin/aliasform.html"):
events.raiseEvent("CanCreate", request.user, "mailbox_aliases")
if request.method == "POST":
def callback(user, alias):
alias.post_create(user)
form = AliasForm(request.user, request.POST)
return _validate_alias(
request, form, successmsg, callback
)
ctx = {
"title": title,
"action": action,
"formid": "aliasform",
"action_label": _("Create"),
"action_classes": "submit",
"form": AliasForm(request.user)
}
return render(request, tplname, ctx)
@login_required
@permission_required("modoboa_admin.add_alias")
@reversion.create_revision()
def newdlist(request):
return _new_alias(
request, _("New distribution list"), reverse("modoboa_admin:dlist_add"),
_("Distribution list created")
)
@login_required
@permission_required("modoboa_admin.add_alias")
@reversion.create_revision()
def newalias(request):
return _new_alias(
request, _("New alias"), reverse("modoboa_admin:alias_add"),
_("Alias created")
)
@login_required
@permission_required("modoboa_admin.add_alias")
@reversion.create_revision()
def newforward(request):
return _new_alias(
request, _("New forward"), reverse("modoboa_admin:forward_add"),
_("Forward created")
)
@login_required
@permission_required("modoboa_admin.change_alias")
@reversion.create_revision()
def editalias(request, alid, tplname="modoboa_admin/aliasform.html"):
alias = Alias.objects.get(pk=alid)
if not request.user.can_access(alias):
raise PermDeniedException
if request.method == "POST":
if len(alias.get_recipients()) >= 2:
successmsg = _("Distribution list modified")
elif alias.extmboxes != "":
successmsg = _("Forward modified")
else:
successmsg = _("Alias modified")
form = AliasForm(request.user, request.POST, instance=alias)
return _validate_alias(request, form, successmsg)
ctx = {
'action': reverse("modoboa_admin:alias_change", args=[alias.id]),
'formid': 'aliasform',
'title': alias.full_address,
'action_label': _('Update'),
'action_classes': 'submit',
'form': AliasForm(request.user, instance=alias)
}
return render(request, tplname, ctx)
@login_required
@permission_required("modoboa_admin.delete_alias")
def delalias(request):
selection = request.GET["selection"].split(",")
for alid in selection:
alias = Alias.objects.get(pk=alid)
if not request.user.can_access(alias):
raise PermDeniedException
if alias.type == 'dlist':
msg = "Distribution list deleted"
msgs = "Distribution lists deleted"
elif alias.type == 'forward':
msg = "Forward deleted"
msgs = "Forwards deleted"
else:
msg = "Alias deleted"
msgs = "Aliases deleted"
alias.delete()
msg = ungettext(msg, msgs, len(selection))
return render_to_json_response(msg)
|
bearstech/modoboa-admin
|
modoboa_admin/views/alias.py
|
Python
|
mit
| 4,252 | 0.000235 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
try:
from django.contrib.auth import get_user_model
except ImportError:
from django.contrib.auth.models import User
else:
User = get_user_model()
# With the default User model these will be 'auth.User' and 'auth.user'
# so instead of using orm['auth.User'] we can use orm[user_orm_label]
user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name)
user_model_label = '%s.%s' % (User._meta.app_label, User._meta.module_name)
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'PayPalNVP'
db.create_table('paypal_nvp', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('method', self.gf('django.db.models.fields.CharField')(max_length=64, blank=True)),
('ack', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('profilestatus', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('profileid', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('profilereference', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
('correlationid', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('token', self.gf('django.db.models.fields.CharField')(max_length=64, blank=True)),
('payerid', self.gf('django.db.models.fields.CharField')(max_length=64, blank=True)),
('firstname', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('lastname', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('street', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('city', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('state', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('countrycode', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
('zip', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('invnum', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('custom', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[user_orm_label], null=True, blank=True)),
('flag', self.gf('django.db.models.fields.BooleanField')(default=False)),
('flag_code', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
('flag_info', self.gf('django.db.models.fields.TextField')(blank=True)),
('ipaddress', self.gf('django.db.models.fields.IPAddressField')(max_length=15, blank=True)),
('query', self.gf('django.db.models.fields.TextField')(blank=True)),
('response', self.gf('django.db.models.fields.TextField')(blank=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal(u'pro', ['PayPalNVP'])
def backwards(self, orm):
# Deleting model 'PayPalNVP'
db.delete_table('paypal_nvp')
models = {
user_model_label: {
'Meta': {'object_name': User.__name__,
'db_table': "'%s'" % User._meta.db_table
},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
},
u'pro.paypalnvp': {
'Meta': {'object_name': 'PayPalNVP', 'db_table': "'paypal_nvp'"},
'ack': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'correlationid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'countrycode': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'firstname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'flag': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'flag_code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'flag_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invnum': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'ipaddress': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'blank': 'True'}),
'lastname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'method': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'payerid': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'profileid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'profilereference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'profilestatus': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'query': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'response': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm[user_orm_label]", 'null': 'True', 'blank': 'True'}),
'zip': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'})
}
}
complete_apps = ['pro']
|
canhhs91/greenpointtrees
|
src/paypal/pro/south_migrations/0001_initial.py
|
Python
|
mit
| 7,020 | 0.008832 |
import socket
import time
import re
from util import hook
socket.setdefaulttimeout(10)
nick_re = re.compile(":(.+?)!")
# Auto-join on Invite (Configurable, defaults to True)
@hook.event('INVITE')
def invite(paraml, conn=None):
invite_join = conn.conf.get('invite_join', True)
if invite_join:
conn.join(paraml[-1])
# Identify to NickServ (or other service)
@hook.event('004')
def onjoin(paraml, conn=None, bot=None):
nickserv_password = conn.conf.get('nickserv_password', '')
nickserv_name = conn.conf.get('nickserv_name', 'nickserv')
nickserv_account_name = conn.conf.get('nickserv_user', '')
nickserv_command = conn.conf.get('nickserv_command', 'IDENTIFY')
if nickserv_password:
if nickserv_password in bot.config['censored_strings']:
bot.config['censored_strings'].remove(nickserv_password)
if nickserv_account_name:
conn.msg(nickserv_name, "{} {} {}".format(nickserv_command, nickserv_account_name, nickserv_password))
else:
conn.msg(nickserv_name, "{} {}".format(nickserv_command, nickserv_password))
bot.config['censored_strings'].append(nickserv_password)
time.sleep(1)
# Set bot modes
mode = conn.conf.get('mode')
if mode:
conn.cmd('MODE', [conn.nick, mode])
# Join config-defined channels
for channel in conn.channels:
conn.join(channel)
time.sleep(1)
print "Bot ready."
@hook.event("KICK")
def onkick(paraml, conn=None, chan=None):
# if the bot has been kicked, remove from the channel list
if paraml[1] == conn.nick:
conn.channels.remove(chan)
auto_rejoin = conn.conf.get('auto_rejoin', False)
if auto_rejoin:
conn.join(paraml[0])
@hook.event("NICK")
def onnick(paraml, conn=None, raw=None):
old_nick = nick_re.search(raw).group(1)
new_nick = str(paraml[0])
if old_nick == conn.nick:
conn.nick = new_nick
print "Bot nick changed from '{}' to '{}'.".format(old_nick, new_nick)
@hook.singlethread
@hook.event('004')
def keep_alive(paraml, conn=None):
keepalive = conn.conf.get('keep_alive', False)
if keepalive:
while True:
conn.cmd('PING', [conn.nick])
time.sleep(60)
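# Illustrative configuration sketch (not part of the original plugin). Only the
# key names below come from the conn.conf.get() calls above; the surrounding
# structure and the example values are assumptions for demonstration.
#
#     {
#         "invite_join": True,             # join a channel when invited
#         "nickserv_password": "hunter2",  # identify to services on numeric 004
#         "nickserv_name": "nickserv",
#         "nickserv_user": "",             # optional account name
#         "nickserv_command": "IDENTIFY",
#         "mode": "+B",                    # user mode set after identifying
#         "auto_rejoin": False,            # rejoin after being kicked
#         "keep_alive": False,             # send a PING every 60 seconds
#     }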
|
blha303/ytbot
|
plugins/core_misc.py
|
Python
|
gpl-3.0
| 2,257 | 0.000886 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import codecs
import distutils.dir_util
import os
import shutil
import sys
def touch_file(file_path):
"""
Create a new empty file at file_path.
"""
parent_dir = os.path.abspath(os.path.join(file_path, os.pardir))
if not os.path.isdir(parent_dir):
os.makedirs(parent_dir)
with codecs.open(file_path, 'a'):
os.utime(file_path, None)
def copy_file(input_file, output_file, overwrite=False):
"""
Helper function to copy a file that adds an overwrite parameter.
"""
if os.path.isfile(output_file):
if overwrite:
print('File exists, overwriting')
shutil.copyfile(input_file, output_file)
else:
sys.exit('File exists, unable to continue: %s' % output_file)
else:
shutil.copyfile(input_file, output_file)
def copy_tree(input_dir, output_dir, overwrite=False):
"""
Helper function to copy a directory tree that adds an overwrite parameter.
"""
if os.path.isdir(output_dir):
if overwrite:
print('Directory exists, overwriting')
distutils.dir_util.copy_tree(input_dir, output_dir)
else:
sys.exit('Directory exists, unable to continue: %s' % output_dir)
else:
distutils.dir_util.copy_tree(input_dir, output_dir)
def get_file_paths_from_directory(dir_path):
"""
Walk a directory and create a list of all contained file_paths in all sub-directories.
"""
file_paths = []
for root, dirs, files in os.walk(dir_path):
for f in files:
file_paths.append(os.path.join(root, f))
return file_paths
def clean_dsstore(dir_path):
"""
Walk a directory and get rid of all those useless hidden .DS_Store files.
"""
for root, dirs, files in os.walk(dir_path):
for f in files:
if f == '.DS_Store':
                # os.walk() already yields roots anchored at dir_path
                os.remove(os.path.join(root, f))
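if __name__ == '__main__':
    # Illustrative usage sketch (not part of the original module); the paths
    # below are made up for demonstration purposes only.
    touch_file('/tmp/droppy_demo/example.txt')
    copy_file('/tmp/droppy_demo/example.txt',
              '/tmp/droppy_demo/example_copy.txt',
              overwrite=True)
    copy_tree('/tmp/droppy_demo', '/tmp/droppy_demo_copy', overwrite=True)
    print(get_file_paths_from_directory('/tmp/droppy_demo_copy'))
    clean_dsstore('/tmp/droppy_demo_copy')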
|
geberl/droppy-workspace
|
Tasks/DropPy.Common/file_tools.py
|
Python
|
mit
| 1,999 | 0.0005 |
import calendar
from dateutil.parser import parse
import os
from os import listdir
from os.path import isfile, join
from selfspy import config as cfg
from objc import YES, NO
from AppKit import *
from CBGraphView import CBGraphView
TIMELINE_WIDTH = 960
TIMELINE_HEIGHT = 20
WINDOW_PADDING = 18
def unixTimeFromString(self, s=None):
fuzzy_ts = parse(str(s), fuzzy=True)
ts = calendar.timegm(fuzzy_ts.utctimetuple())
return ts
def getScreenshotPath(self, self2=None):
path = os.path.join(cfg.CURRENT_DIR, 'screenshots')
path = os.path.expanduser(path)
return path + '/'
def generateScreenshotList(self, self2=None):
path = getScreenshotPath(self)
list_of_files = [ f for f in listdir(path) if isfile(join(path,f)) ]
return list_of_files
def generateDateQuery(self, s=None):
self.dateQuery = '20' + s[0:2] + '-' + s[2:4] + '-' + s[4:6] + ' ' + s[7:9] + ':' + s[9:11] + ':' + s[11:13] + '.'
def mapFilenameDateToNumber(self, s=None):
return int('20' + s[0:2] + s[2:4] + s[4:6] + s[7:9] + s[9:11] + s[11:13])
def addProcessTimelineSegment(self, process_id, front_bound, back_bound, reviewer):
if front_bound >= reviewer.slider_min and back_bound <= reviewer.slider_max:
# generate unique grayscale color for timeline segment
gray = (30*process_id) % 255
color = NSColor.colorWithCalibratedRed_green_blue_alpha_(gray/255.0, gray/255.0, gray/255.0, 1.0)
# get bounds of segment and draw segment
normalized_front_bound = front_bound - reviewer.slider_min
width_scale_factor = TIMELINE_WIDTH / (reviewer.normalized_max_value*1.0)
segment_x = normalized_front_bound * width_scale_factor
segment_y = 1
segment_height = TIMELINE_HEIGHT-2
segment_width = (back_bound - front_bound) * width_scale_factor
frame = NSRect(NSPoint(segment_x, segment_y),
NSSize(segment_width, segment_height))
this_view = CBGraphView.alloc().initWithFrame_(frame)
reviewer.timeline_view.addSubview_(this_view)
this_view.setBorderColor_(color)
this_view.setAssignedColor_(color)
this_view.setBackgroundColor_(color)
this_view.setWantsLayer_(YES)
# add tooltip to segment
self.processNameQuery = process_id
NSNotificationCenter.defaultCenter().postNotificationName_object_('getProcessNameFromID', self)
this_view.setToolTip_(str(self.processNameResponse[0]))
self.processNameResponse = []
reviewer.nested_timeline_views.append(this_view)
## TIMELINE HELPERS
# def addProcessNameTextLabelToTimeline(self, process_id, reviewer):
# self.processNameQuery = process_id
# NSNotificationCenter.defaultCenter().postNotificationName_object_('getProcessNameFromID', self)
#
# textField_frame = NSRect(NSPoint(0, TIMELINE_HEIGHT / TIMELINE_MAX_ROWS * process_id),
# NSSize(TEXTLABEL_WIDTH, TEXTLABEL_HEIGHT))
# textField = NSTextField.alloc().initWithFrame_(textField_frame)
# textField.setEditable_(NO)
# textField.setDrawsBackground_(NO)
# textField.setSelectable_(NO)
# textField.setBezeled_(NO)
# textField.setStringValue_(str(self.processNameResponse[0]))
#
# self.processNameResponse = []
#
# reviewer.timeline_view.addSubview_(textField)
# reviewer.nested_timeline_labels.append(textField)
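# Illustrative usage sketch (not part of the original module). These helpers are
# written to be called with an explicit first argument, so None stands in for
# `self`; the timestamp and screenshot file name are made-up examples that match
# the 'YYMMDD_HHMMSS' slicing used above.
#
#     unixTimeFromString(None, '2013-08-27 16:49:55')      # -> seconds since the epoch
#     mapFilenameDateToNumber(None, '130827_164955.jpg')   # -> 20130827164955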
|
activityhistory/selfspy
|
selfspy/helpers.py
|
Python
|
gpl-3.0
| 3,410 | 0.006452 |
# Copyright 2008-2014 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.utils import elapsed_time_to_string, html_escape, normalize
from .tags import TagPatterns
class Stat(object):
"""Generic statistic object used for storing all the statistic values."""
def __init__(self, name):
#: Human readable identifier of the object these statistics
#: belong to. Either `All Tests` or `Critical Tests` for
#: :class:`~robot.model.totalstatistics.TotalStatistics`,
#: long name of the suite for
#: :class:`~robot.model.suitestatistics.SuiteStatistics`
#: or name of the tag for
#: :class:`~robot.model.tagstatistics.TagStatistics`
self.name = name
#: Number of passed tests.
self.passed = 0
#: Number of failed tests.
self.failed = 0
#: Number of milliseconds it took to execute.
self.elapsed = 0
self._norm_name = normalize(name, ignore='_')
def get_attributes(self, include_label=False, include_elapsed=False,
exclude_empty=False, values_as_strings=False,
html_escape=False):
attrs = {'pass': self.passed, 'fail': self.failed}
attrs.update(self._get_custom_attrs())
if include_label:
attrs['label'] = self.name
if include_elapsed:
attrs['elapsed'] = elapsed_time_to_string(self.elapsed,
include_millis=False)
if exclude_empty:
attrs = dict((k, v) for k, v in attrs.items() if v != '')
if values_as_strings:
attrs = dict((k, unicode(v)) for k, v in attrs.items())
if html_escape:
attrs = dict((k, self._html_escape(v)) for k, v in attrs.items())
return attrs
def _get_custom_attrs(self):
return {}
def _html_escape(self, item):
return html_escape(item) if isinstance(item, basestring) else item
@property
def total(self):
return self.passed + self.failed
def add_test(self, test):
self._update_stats(test)
self._update_elapsed(test)
def _update_stats(self, test):
if test.passed:
self.passed += 1
else:
self.failed += 1
def _update_elapsed(self, test):
self.elapsed += test.elapsedtime
def __cmp__(self, other):
return cmp(self._norm_name, other._norm_name)
def __nonzero__(self):
return not self.failed
def visit(self, visitor):
visitor.visit_stat(self)
class TotalStat(Stat):
"""Stores statistic values for a test run."""
#: Always string `total`
type = 'total'
class SuiteStat(Stat):
"""Stores statistics values for a single suite."""
#: Always string `suite`
type = 'suite'
def __init__(self, suite):
Stat.__init__(self, suite.longname)
#: Identifier of the suite, e.g. `s1-s2`.
self.id = suite.id
#: Number of milliseconds it took to execute this suite,
#: including sub-suites.
self.elapsed = suite.elapsedtime
self._name = suite.name
def _get_custom_attrs(self):
return {'id': self.id, 'name': self._name}
def _update_elapsed(self, test):
pass
def add_stat(self, other):
self.passed += other.passed
self.failed += other.failed
class TagStat(Stat):
"""Stores statistic values for a single tag."""
#: Always string `tag`.
type = 'tag'
def __init__(self, name, doc='', links=None, critical=False,
non_critical=False, combined=''):
Stat.__init__(self, name)
#: Documentation of tag as a string.
self.doc = doc
#: List of tuples in which the first value is the link URL and
#: the second is the link title. An empty list by default.
self.links = links or []
#: ``True`` if tag is considered critical, ``False`` otherwise.
self.critical = critical
#: ``True`` if tag is considered non-critical, ``False`` otherwise.
self.non_critical = non_critical
#: Pattern as a string if the tag is combined,
#: an empty string otherwise.
self.combined = combined
@property
def info(self):
"""Returns additional information of the tag statistics
are about. Either `critical`, `non-critical`, `combined` or an
empty string.
"""
if self.critical:
return 'critical'
if self.non_critical:
return 'non-critical'
if self.combined:
return 'combined'
return ''
def _get_custom_attrs(self):
return {'doc': self.doc, 'links': self._get_links_as_string(),
'info': self.info, 'combined': self.combined}
def _get_links_as_string(self):
return ':::'.join('%s:%s' % (title, url) for url, title in self.links)
def __cmp__(self, other):
return cmp(other.critical, self.critical) \
or cmp(other.non_critical, self.non_critical) \
or cmp(bool(other.combined), bool(self.combined)) \
or Stat.__cmp__(self, other)
class CombinedTagStat(TagStat):
def __init__(self, pattern, name=None, doc='', links=None):
TagStat.__init__(self, name or pattern, doc, links, combined=pattern)
self._matcher = TagPatterns(pattern)
def match(self, tags):
return self._matcher.match(tags)
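# Illustrative usage sketch (not part of the original module). The fake test
# object below only mimics the two attributes that Stat.add_test() reads
# (`passed` and `elapsedtime`, the latter in milliseconds):
#
#     class FakeTest(object):
#         def __init__(self, passed, elapsedtime):
#             self.passed = passed
#             self.elapsedtime = elapsedtime
#
#     stat = TotalStat('All Tests')
#     stat.add_test(FakeTest(passed=True, elapsedtime=1200))
#     stat.add_test(FakeTest(passed=False, elapsedtime=800))
#     stat.get_attributes(include_label=True, include_elapsed=True)
#     # -> something like {'pass': 1, 'fail': 1, 'label': 'All Tests',
#     #                    'elapsed': '00:00:02'}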
|
eric-stanley/robotframework
|
src/robot/model/stats.py
|
Python
|
apache-2.0
| 6,017 | 0.000166 |
"""
Tools and data structures for working with genomic intervals (or sets of
regions on a line in general) efficiently.
"""
# For compatibility with existing stuff
from bx.intervals.intersection import *
|
dnanexus/rseqc
|
rseqc/lib/bx/intervals/__init__.py
|
Python
|
gpl-3.0
| 204 | 0.009804 |
# -*- coding: utf-8 -*-
# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import random
banner1 = """
__ __ ___ .--. __ __ ___
| |/ `.' `. |__| | |/ `.' `. _.._
| .-. .-. '.--. .| | .-. .-. ' .' .._|
| | | | | || | .' |_ | | | | | | | '
| | | | | || | .' || | | | | | __| |__
| | | | | || |'--. .-'| | | | | ||__ __|
| | | | | || | | | | | | | | | | |
|__| |__| |__||__| | | |__| |__| |__| | |
| '.' | |
| / | |
`'-' |_|
"""
banner2= """
███▄ ▄███▓ ██▓▄▄▄█████▓ ███▄ ▄███▓ █████▒
▓██▒▀█▀ ██▒▓██▒▓ ██▒ ▓▒▓██▒▀█▀ ██▒▓██ ▒
▓██ ▓██░▒██▒▒ ▓██░ ▒░▓██ ▓██░▒████ ░
▒██ ▒██ ░██░░ ▓██▓ ░ ▒██ ▒██ ░▓█▒ ░
▒██▒ ░██▒░██░ ▒██▒ ░ ▒██▒ ░██▒░▒█░
░ ▒░ ░ ░░▓ ▒ ░░ ░ ▒░ ░ ░ ▒ ░
░ ░ ░ ▒ ░ ░ ░ ░ ░ ░
░ ░ ▒ ░ ░ ░ ░ ░ ░
░ ░ ░
"""
banner3 = """
▄▄▄▄███▄▄▄▄ ▄█ ███ ▄▄▄▄███▄▄▄▄ ▄████████
▄██▀▀▀███▀▀▀██▄ ███ ▀█████████▄ ▄██▀▀▀███▀▀▀██▄ ███ ███
███ ███ ███ ███▌ ▀███▀▀██ ███ ███ ███ ███ █▀
███ ███ ███ ███▌ ███ ▀ ███ ███ ███ ▄███▄▄▄
███ ███ ███ ███▌ ███ ███ ███ ███ ▀▀███▀▀▀
███ ███ ███ ███ ███ ███ ███ ███ ███
███ ███ ███ ███ ███ ███ ███ ███ ███
▀█ ███ █▀ █▀ ▄████▀ ▀█ ███ █▀ ███
"""
banner4 = """
███╗ ███╗██╗████████╗███╗ ███╗███████╗
████╗ ████║██║╚══██╔══╝████╗ ████║██╔════╝
██╔████╔██║██║ ██║ ██╔████╔██║█████╗
██║╚██╔╝██║██║ ██║ ██║╚██╔╝██║██╔══╝
██║ ╚═╝ ██║██║ ██║ ██║ ╚═╝ ██║██║
╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝
"""
banner5 = """
@@@@@@@@@@ @@@ @@@@@@@ @@@@@@@@@@ @@@@@@@@
@@@@@@@@@@@ @@@ @@@@@@@ @@@@@@@@@@@ @@@@@@@@
@@! @@! @@! @@! @@! @@! @@! @@! @@!
!@! !@! !@! !@! !@! !@! !@! !@! !@!
@!! !!@ @!@ !!@ @!! @!! !!@ @!@ @!!!:!
!@! ! !@! !!! !!! !@! ! !@! !!!!!:
!!: !!: !!: !!: !!: !!: !!:
:!: :!: :!: :!: :!: :!: :!:
::: :: :: :: ::: :: ::
: : : : : : :
"""
def get_banner():
banners = [banner1, banner2, banner3, banner4, banner5]
return random.choice(banners)
|
sechacking/MITMf
|
core/banners.py
|
Python
|
gpl-3.0
| 4,693 | 0.012703 |
"""Classification-based test and kernel two-sample test.
Author: Sandro Vega-Pons, Emanuele Olivetti.
"""
import os
import numpy as np
from sklearn.metrics import pairwise_distances, confusion_matrix
from sklearn.metrics import pairwise_kernels
from sklearn.svm import SVC
from sklearn.cross_validation import StratifiedKFold, KFold, cross_val_score
from sklearn.grid_search import GridSearchCV
from kernel_two_sample_test import MMD2u, compute_null_distribution
from kernel_two_sample_test import compute_null_distribution_given_permutations
import matplotlib.pylab as plt
from joblib import Parallel, delayed
def compute_rbf_kernel_matrix(X):
"""Compute the RBF kernel matrix with sigma2 as the median pairwise
distance.
"""
sigma2 = np.median(pairwise_distances(X, metric='euclidean'))**2
K = pairwise_kernels(X, X, metric='rbf', gamma=1.0/sigma2, n_jobs=-1)
return K
def balanced_accuracy_scoring(clf, X, y):
"""Scoring function that computes the balanced accuracy to be used
internally in the cross-validation procedure.
"""
y_pred = clf.predict(X)
conf_mat = confusion_matrix(y, y_pred)
bal_acc = 0.
for i in range(len(conf_mat)):
bal_acc += (float(conf_mat[i, i])) / np.sum(conf_mat[i])
bal_acc /= len(conf_mat)
return bal_acc
def compute_svm_cv(K, y, C=100.0, n_folds=5,
scoring=balanced_accuracy_scoring):
"""Compute cross-validated score of SVM with given precomputed kernel.
"""
cv = StratifiedKFold(y, n_folds=n_folds)
clf = SVC(C=C, kernel='precomputed', class_weight='auto')
scores = cross_val_score(clf, K, y,
scoring=scoring, cv=cv)
return scores.mean()
def compute_svm_subjects(K, y, n_folds=5):
"""
"""
cv = KFold(len(K)/2, n_folds)
scores = np.zeros(n_folds)
for i, (train, test) in enumerate(cv):
train_ids = np.concatenate((train, len(K)/2+train))
test_ids = np.concatenate((test, len(K)/2+test))
clf = SVC(kernel='precomputed')
clf.fit(K[train_ids, :][:, train_ids], y[train_ids])
scores[i] = clf.score(K[test_ids, :][:, train_ids], y[test_ids])
return scores.mean()
def permutation_subjects(y):
"""Permute class labels of Contextual Disorder dataset.
"""
y_perm = np.random.randint(0, 2, len(y)/2)
y_perm = np.concatenate((y_perm, np.logical_not(y_perm).astype(int)))
return y_perm
def permutation_subjects_ktst(y):
"""Permute class labels of Contextual Disorder dataset for KTST.
"""
yp = np.random.randint(0, 2, len(y)/2)
yp = np.concatenate((yp, np.logical_not(yp).astype(int)))
y_perm = np.arange(len(y))
for i in range(len(y)/2):
if yp[i] == 1:
y_perm[i] = len(y)/2+i
y_perm[len(y)/2+i] = i
return y_perm
def compute_svm_score_nestedCV(K, y, n_folds,
scoring=balanced_accuracy_scoring,
random_state=None,
param_grid=[{'C': np.logspace(-5, 5, 25)}]):
"""Compute cross-validated score of SVM using precomputed kernel.
"""
cv = StratifiedKFold(y, n_folds=n_folds, shuffle=True,
random_state=random_state)
scores = np.zeros(n_folds)
for i, (train, test) in enumerate(cv):
cvclf = SVC(kernel='precomputed')
y_train = y[train]
cvcv = StratifiedKFold(y_train, n_folds=n_folds,
shuffle=True,
random_state=random_state)
clf = GridSearchCV(cvclf, param_grid=param_grid, scoring=scoring,
cv=cvcv, n_jobs=1)
clf.fit(K[train, :][:, train], y_train)
# print clf.best_params_
scores[i] = clf.score(K[test, :][:, train], y[test])
return scores.mean()
def apply_svm(K, y, n_folds=5, iterations=10000, subjects=False, verbose=True,
random_state=None):
"""
Compute the balanced accuracy, its null distribution and the p-value.
Parameters:
----------
K: array-like
Kernel matrix
y: array_like
class labels
    n_folds: int
        Number of folds in the stratified cross-validation
    iterations: int
        Number of permutations used to estimate the null distribution
verbose: bool
Verbosity
Returns:
-------
acc: float
Average balanced accuracy.
acc_null: array
Null distribution of the balanced accuracy.
p_value: float
p-value
"""
# Computing the accuracy
param_grid = [{'C': np.logspace(-5, 5, 20)}]
if subjects:
acc = compute_svm_subjects(K, y, n_folds)
else:
acc = compute_svm_score_nestedCV(K, y, n_folds, param_grid=param_grid,
random_state=random_state)
if verbose:
print("Mean balanced accuracy = %s" % (acc))
print("Computing the null-distribution.")
# Computing the null-distribution
# acc_null = np.zeros(iterations)
# for i in range(iterations):
# if verbose and (i % 1000) == 0:
# print(i),
# stdout.flush()
# y_perm = np.random.permutation(y)
# acc_null[i] = compute_svm_score_nestedCV(K, y_perm, n_folds,
# param_grid=param_grid)
# if verbose:
# print ''
# Computing the null-distribution
if subjects:
yis = [permutation_subjects(y) for i in range(iterations)]
acc_null = Parallel(n_jobs=-1)(delayed(compute_svm_subjects)(K, yis[i], n_folds) for i in range(iterations))
else:
yis = [np.random.permutation(y) for i in range(iterations)]
acc_null = Parallel(n_jobs=-1)(delayed(compute_svm_score_nestedCV)(K, yis[i], n_folds, scoring=balanced_accuracy_scoring, param_grid=param_grid) for i in range(iterations))
# acc_null = Parallel(n_jobs=-1)(delayed(compute_svm_cv)(K, yis[i], C=100., n_folds=n_folds) for i in range(iterations))
    acc_null = np.array(acc_null)
    p_value = max(1.0 / iterations, (acc_null > acc).sum()
                  / float(iterations))
if verbose:
print("p-value ~= %s \t (resolution : %s)" % (p_value, 1.0/iterations))
return acc, acc_null, p_value
def apply_ktst(K, y, iterations=10000, subjects=False, verbose=True):
"""
Compute MMD^2_u, its null distribution and the p-value of the
kernel two-sample test.
Parameters:
----------
K: array-like
Kernel matrix
y: array_like
class labels
verbose: bool
Verbosity
Returns:
-------
mmd2u: float
MMD^2_u value.
    mmd2u_null: array
Null distribution of the MMD^2_u
p_value: float
p-value
"""
assert len(np.unique(y)) == 2, 'KTST only works on binary problems'
# Assuming that the first m rows of the kernel matrix are from one
# class and the other n rows from the second class.
m = len(y[y == 0])
n = len(y[y == 1])
mmd2u = MMD2u(K, m, n)
if verbose:
print("MMD^2_u = %s" % mmd2u)
print("Computing the null distribution.")
if subjects:
perms = [permutation_subjects_ktst(y) for i in range(iterations)]
mmd2u_null = compute_null_distribution_given_permutations(K, m, n,
perms,
iterations)
else:
mmd2u_null = compute_null_distribution(K, m, n, iterations,
verbose=verbose)
p_value = max(1.0/iterations, (mmd2u_null > mmd2u).sum()
/ float(iterations))
if verbose:
print("p-value ~= %s \t (resolution : %s)" % (p_value, 1.0/iterations))
return mmd2u, mmd2u_null, p_value
def plot_null_distribution(stats, stats_null, p_value, data_name='',
stats_name='$MMD^2_u$', save_figure=True):
"""Plot the observed value for the test statistic, its null
distribution and p-value.
"""
fig = plt.figure()
ax = fig.add_subplot(111)
prob, bins, patches = plt.hist(stats_null, bins=50, normed=True)
ax.plot(stats, prob.max()/30, 'w*', markersize=15,
markeredgecolor='k', markeredgewidth=2,
label="%s = %s" % (stats_name, stats))
ax.annotate('p-value: %s' % (p_value),
xy=(float(stats), prob.max()/9.), xycoords='data',
xytext=(-105, 30), textcoords='offset points',
bbox=dict(boxstyle="round", fc="1."),
arrowprops={"arrowstyle": "->",
"connectionstyle": "angle,angleA=0,angleB=90,rad=10"},
)
plt.xlabel(stats_name)
plt.ylabel('p(%s)' % stats_name)
plt.legend(numpoints=1)
plt.title('Data: %s' % data_name)
if save_figure:
save_dir = 'figures'
if not os.path.exists(save_dir):
os.makedirs(save_dir)
stn = 'ktst' if stats_name == '$MMD^2_u$' else 'clf'
fig_name = os.path.join(save_dir, '%s_%s.pdf' % (data_name, stn))
fig.savefig(fig_name)
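if __name__ == '__main__':
    # Illustrative usage sketch (not part of the original script); the synthetic
    # data below is made up purely to exercise the kernel and SVM helpers, and it
    # still requires the module's own imports (kernel_two_sample_test, joblib,
    # matplotlib) to be importable.
    np.random.seed(0)
    X = np.vstack((np.random.randn(20, 5), np.random.randn(20, 5) + 1.0))
    y = np.concatenate((np.zeros(20, dtype=int), np.ones(20, dtype=int)))
    K = compute_rbf_kernel_matrix(X)
    acc = compute_svm_cv(K, y, C=1.0, n_folds=5)
    print("Cross-validated balanced accuracy: %s" % acc)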
|
emanuele/jstsp2015
|
classif_and_ktst.py
|
Python
|
mit
| 9,044 | 0.000442 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'effect_size_locations_page.ui'
#
# Created: Tue Aug 27 16:49:55 2013
# by: PyQt4 UI code generator 4.10.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_wizardPage(object):
def setupUi(self, wizardPage):
wizardPage.setObjectName(_fromUtf8("wizardPage"))
wizardPage.resize(498, 195)
self.verticalLayout_3 = QtGui.QVBoxLayout(wizardPage)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.label_6 = QtGui.QLabel(wizardPage)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.verticalLayout_3.addWidget(self.label_6)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.trans_grp_box = QtGui.QGroupBox(wizardPage)
self.trans_grp_box.setObjectName(_fromUtf8("trans_grp_box"))
self.gridLayout = QtGui.QGridLayout(self.trans_grp_box)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.trans_var_cbo_box = QtGui.QComboBox(self.trans_grp_box)
self.trans_var_cbo_box.setObjectName(_fromUtf8("trans_var_cbo_box"))
self.gridLayout.addWidget(self.trans_var_cbo_box, 1, 1, 1, 1)
self.trans_effect_cbo_box = QtGui.QComboBox(self.trans_grp_box)
self.trans_effect_cbo_box.setObjectName(_fromUtf8("trans_effect_cbo_box"))
self.gridLayout.addWidget(self.trans_effect_cbo_box, 0, 1, 1, 1)
self.label = QtGui.QLabel(self.trans_grp_box)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.label_2 = QtGui.QLabel(self.trans_grp_box)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
self.verticalLayout.addWidget(self.trans_grp_box)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.horizontalLayout.addLayout(self.verticalLayout)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.raw_grp_box = QtGui.QGroupBox(wizardPage)
self.raw_grp_box.setObjectName(_fromUtf8("raw_grp_box"))
self.gridLayout_3 = QtGui.QGridLayout(self.raw_grp_box)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.label_3 = QtGui.QLabel(self.raw_grp_box)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout_3.addWidget(self.label_3, 0, 0, 1, 1)
self.raw_effect_cbo_box = QtGui.QComboBox(self.raw_grp_box)
self.raw_effect_cbo_box.setObjectName(_fromUtf8("raw_effect_cbo_box"))
self.gridLayout_3.addWidget(self.raw_effect_cbo_box, 0, 1, 1, 1)
self.label_4 = QtGui.QLabel(self.raw_grp_box)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout_3.addWidget(self.label_4, 1, 0, 1, 1)
self.raw_lower_cbo_box = QtGui.QComboBox(self.raw_grp_box)
self.raw_lower_cbo_box.setObjectName(_fromUtf8("raw_lower_cbo_box"))
self.gridLayout_3.addWidget(self.raw_lower_cbo_box, 1, 1, 1, 1)
self.label_5 = QtGui.QLabel(self.raw_grp_box)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.gridLayout_3.addWidget(self.label_5, 2, 0, 1, 1)
self.raw_upper_cbo_box = QtGui.QComboBox(self.raw_grp_box)
self.raw_upper_cbo_box.setObjectName(_fromUtf8("raw_upper_cbo_box"))
self.gridLayout_3.addWidget(self.raw_upper_cbo_box, 2, 1, 1, 1)
self.verticalLayout_2.addWidget(self.raw_grp_box)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem1)
self.horizontalLayout.addLayout(self.verticalLayout_2)
self.verticalLayout_3.addLayout(self.horizontalLayout)
self.retranslateUi(wizardPage)
QtCore.QMetaObject.connectSlotsByName(wizardPage)
def retranslateUi(self, wizardPage):
wizardPage.setWindowTitle(_translate("wizardPage", "WizardPage", None))
wizardPage.setTitle(_translate("wizardPage", "Effect Size Column Locations", None))
self.label_6.setText(_translate("wizardPage", "Where is your data located?", None))
self.trans_grp_box.setTitle(_translate("wizardPage", "Transformed Scale", None))
self.label.setText(_translate("wizardPage", "Effect Size:", None))
self.label_2.setText(_translate("wizardPage", "Variance:", None))
self.raw_grp_box.setTitle(_translate("wizardPage", "Raw Scale", None))
self.label_3.setText(_translate("wizardPage", "Effect Size:", None))
self.label_4.setText(_translate("wizardPage", "CI Lower Bound:", None))
self.label_5.setText(_translate("wizardPage", "CI Upper Bound:", None))
|
gdietz/OpenMEE
|
common_wizard_pages/ui_effect_size_locations_page.py
|
Python
|
gpl-3.0
| 5,558 | 0.001799 |
# Imports environment-specific settings.
import os
import sys
try:
from colorama import init as colorama_init
except ImportError:
def colorama_init(autoreset=False, convert=None, strip=None, wrap=True):
"""
Fallback function that initializes colorama.
"""
pass
try:
from termcolor import colored
except ImportError:
def colored(text, color=None, on_color=None, attrs=None):
"""
Fallback function to colorize text when termcolor is not installed.
"""
return text
# Use production settings by default as it is the secure setup. To use local
# settings: $ export PRODUCTION=0
production = 'PRODUCTION' not in os.environ or os.environ['PRODUCTION'].lower() in ['true', 'y', 'yes', '1']
local = not production
platform = sys.platform
linux = platform == 'linux2'
os_x = platform == 'darwin'
win32 = platform == 'win32'
# Don't initialize colorama when on Windows and running the shell because the
# ipython colors get confused.
if not win32 or not 'shell' in sys.argv:
colorama_init()
current_settings = []
if production:
current_settings.append(colored('Production', 'green', attrs=['bold']))
from production_settings import *
if local:
current_settings.append(colored('Local', 'yellow', attrs=['bold']))
from local_settings import *
if linux:
current_settings.append(colored('Linux', 'blue', attrs=['bold']))
from linux_settings import *
if os_x:
current_settings.append(colored('OS X', 'blue', attrs=['bold']))
from os_x_settings import *
if win32:
current_settings.append(colored('Windows', 'blue', attrs=['bold']))
from win32_settings import *
if 'runserver' in sys.argv:
print '-' * 80
print ' :: '.join(current_settings)
print '-' * 80
color = '[1;93m' # Bold High Intensity Yellow + Underline
version = 'Development'
if production:
color = '[1;92m' # Bold High Intensity Green + Underline
version = 'Production'
print '\n{star} \x1b{color}{version}\x1b[0m {star}\n'.format(color=color,
star='\xE2\x98\x85',
version=version)
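# Illustrative sketch (not part of the original module): the active settings
# module is picked at import time from the PRODUCTION environment variable, e.g.
#
#     $ export PRODUCTION=0    # use local_settings on the next run
#     $ export PRODUCTION=1    # use production_settings (also the default when unset)
#     $ python manage.py runserver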
|
django-settings/django-settings
|
myproject/myproject/user_settings.py
|
Python
|
unlicense
| 2,239 | 0.003126 |
import cupy as cp
def read_code(code_filename, params):
with open(code_filename, 'r') as f:
code = f.read()
for k, v in params.items():
code = '#define ' + k + ' ' + str(v) + '\n' + code
return code
def benchmark(func, args, n_run):
times = []
for _ in range(n_run):
start = cp.cuda.Event()
end = cp.cuda.Event()
start.record()
func(*args)
end.record()
end.synchronize()
times.append(cp.cuda.get_elapsed_time(start, end)) # milliseconds
return times
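if __name__ == '__main__':
    # Illustrative usage sketch (not part of the original helpers); it times a
    # plain CuPy matrix multiplication rather than the custom kernel that the
    # surrounding GEMM example builds with read_code().
    a = cp.random.rand(1024, 1024, dtype=cp.float32)
    b = cp.random.rand(1024, 1024, dtype=cp.float32)
    times = benchmark(cp.matmul, (a, b), n_run=10)
    print('matmul: {:.3f} ms (best of {} runs)'.format(min(times), len(times)))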
|
cupy/cupy
|
examples/gemm/utils.py
|
Python
|
mit
| 551 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
try:
from library.modules.bigip_device_facts import Parameters
from library.modules.bigip_device_facts import VirtualAddressesFactManager
from library.modules.bigip_device_facts import VirtualAddressesParameters
from library.modules.bigip_device_facts import ArgumentSpec
from library.modules.bigip_device_facts import ModuleManager
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_device_facts import Parameters
from ansible.modules.network.f5.bigip_device_facts import VirtualAddressesFactManager
from ansible.modules.network.f5.bigip_device_facts import VirtualAddressesParameters
from ansible.modules.network.f5.bigip_device_facts import ArgumentSpec
from ansible.modules.network.f5.bigip_device_facts import ModuleManager
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class FakeVirtualAddress:
def __init__(self, *args, **kwargs):
attrs = kwargs.pop('params', {})
for key, value in iteritems(attrs):
setattr(self, key, value)
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
gather_subset=['virtual-servers'],
)
p = Parameters(params=args)
assert p.gather_subset == ['virtual-servers']
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
try:
self.p1 = patch('library.modules.bigip_device_facts.modules_provisioned')
self.m1 = self.p1.start()
self.m1.return_value = ['ltm', 'gtm', 'asm']
except Exception:
self.p1 = patch('ansible.modules.network.f5.bigip_device_facts.modules_provisioned')
self.m1 = self.p1.start()
self.m1.return_value = ['ltm', 'gtm', 'asm']
def tearDown(self):
self.p1.stop()
    def test_get_virtual_address_facts(self, *args):
set_module_args(dict(
gather_subset=['virtual-addresses'],
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
fixture1 = load_fixture('load_ltm_virtual_address_collection_1.json')
collection = fixture1['items']
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
tm = VirtualAddressesFactManager(module=module)
tm.read_collection_from_device = Mock(return_value=collection)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.get_manager = Mock(return_value=tm)
results = mm.exec_module()
assert results['changed'] is True
assert 'virtual_addresses' in results
assert len(results['virtual_addresses']) > 0
|
alxgu/ansible
|
test/units/modules/network/f5/test_bigip_device_facts.py
|
Python
|
gpl-3.0
| 4,137 | 0.001209 |
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.IEC61970.Core.IdentifiedObject import IdentifiedObject
class Medium(IdentifiedObject):
"""A substance that either (1) provides the means of transmission of a force or effect, such as hydraulic fluid, or (2) is used for a surrounding or enveloping substance, such as oil in a transformer or circuit breaker.A substance that either (1) provides the means of transmission of a force or effect, such as hydraulic fluid, or (2) is used for a surrounding or enveloping substance, such as oil in a transformer or circuit breaker.
"""
def __init__(self, kind="gas", volumeSpec=0.0, Specification=None, Assets=None, *args, **kw_args):
"""Initialises a new 'Medium' instance.
@param kind: Kind of this medium. Values are: "gas", "liquid", "solid"
        @param volumeSpec: The volume of the medium specified for this application. Note that the actual volume is a type of measurement associated with the asset.
@param Specification:
@param Assets:
"""
#: Kind of this medium. Values are: "gas", "liquid", "solid"
self.kind = kind
        #: The volume of the medium specified for this application. Note that the actual volume is a type of measurement associated with the asset.
self.volumeSpec = volumeSpec
self._Specification = None
self.Specification = Specification
self._Assets = []
self.Assets = [] if Assets is None else Assets
super(Medium, self).__init__(*args, **kw_args)
_attrs = ["kind", "volumeSpec"]
_attr_types = {"kind": str, "volumeSpec": float}
_defaults = {"kind": "gas", "volumeSpec": 0.0}
_enums = {"kind": "MediumKind"}
_refs = ["Specification", "Assets"]
_many_refs = ["Assets"]
def getSpecification(self):
return self._Specification
def setSpecification(self, value):
if self._Specification is not None:
filtered = [x for x in self.Specification.Mediums if x != self]
self._Specification._Mediums = filtered
self._Specification = value
if self._Specification is not None:
if self not in self._Specification._Mediums:
self._Specification._Mediums.append(self)
Specification = property(getSpecification, setSpecification)
def getAssets(self):
return self._Assets
def setAssets(self, value):
for p in self._Assets:
filtered = [q for q in p.Mediums if q != self]
            p._Mediums = filtered
for r in value:
if self not in r._Mediums:
r._Mediums.append(self)
self._Assets = value
Assets = property(getAssets, setAssets)
def addAssets(self, *Assets):
for obj in Assets:
if self not in obj._Mediums:
obj._Mediums.append(self)
self._Assets.append(obj)
def removeAssets(self, *Assets):
for obj in Assets:
if self in obj._Mediums:
obj._Mediums.remove(self)
self._Assets.remove(obj)
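# Illustrative usage sketch (not part of the original class). A real CIM Asset
# would normally be used here; the minimal stand-in below only provides the
# _Mediums list that the bidirectional reference handling above relies on.
#
#     class FakeAsset(object):
#         def __init__(self):
#             self._Mediums = []
#
#     oil = Medium(kind="liquid", volumeSpec=0.5)
#     breaker = FakeAsset()
#     oil.addAssets(breaker)        # keeps both sides of the relation in sync
#     assert oil in breaker._Mediums
#     oil.removeAssets(breaker)
#     assert oil not in breaker._Mediums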
|
rwl/PyCIM
|
CIM15/IEC61970/Informative/InfAssets/Medium.py
|
Python
|
mit
| 4,176 | 0.002155 |
"""
Tests of the neo.io.pickleio.PickleIO class
"""
import os
import unittest
import numpy as np
import quantities as pq
from neo.core import Block, Segment, AnalogSignal, SpikeTrain, Epoch, Event, \
IrregularlySampledSignal, Group
from neo.io import PickleIO
from numpy.testing import assert_array_equal
from neo.test.tools import assert_arrays_equal, assert_file_contents_equal
from neo.test.iotest.common_io_test import BaseTestIO
NCELLS = 5
class CommonTestPickleIO(BaseTestIO, unittest.TestCase):
ioclass = PickleIO
class TestPickleIO(unittest.TestCase):
def test__issue_285(self):
# Spiketrain
train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
unit = Group()
unit.add(train)
epoch = Epoch(np.array([0, 10, 20]),
np.array([2, 2, 2]),
np.array(["a", "b", "c"]),
units="ms")
blk = Block()
seg = Segment()
seg.spiketrains.append(train)
seg.epochs.append(epoch)
epoch.segment = seg
blk.segments.append(seg)
reader = PickleIO(filename="blk.pkl")
reader.write(blk)
reader = PickleIO(filename="blk.pkl")
r_blk = reader.read_block()
r_seg = r_blk.segments[0]
self.assertIsInstance(r_seg.epochs[0], Epoch)
os.remove('blk.pkl')
# Epoch
epoch = Epoch(times=np.arange(0, 30, 10) * pq.s,
durations=[10, 5, 7] * pq.ms,
labels=np.array(['btn0', 'btn1', 'btn2'], dtype='U'))
epoch.segment = Segment()
blk = Block()
seg = Segment()
seg.epochs.append(epoch)
blk.segments.append(seg)
reader = PickleIO(filename="blk.pkl")
reader.write(blk)
reader = PickleIO(filename="blk.pkl")
r_blk = reader.read_block()
r_seg = r_blk.segments[0]
self.assertIsInstance(r_seg.epochs[0].segment, Segment)
os.remove('blk.pkl')
# Event
event = Event(np.arange(0, 30, 10) * pq.s,
labels=np.array(['trig0', 'trig1', 'trig2'], dtype='U'))
event.segment = Segment()
blk = Block()
seg = Segment()
seg.events.append(event)
blk.segments.append(seg)
reader = PickleIO(filename="blk.pkl")
reader.write(blk)
reader = PickleIO(filename="blk.pkl")
r_blk = reader.read_block()
r_seg = r_blk.segments[0]
self.assertIsInstance(r_seg.events[0].segment, Segment)
os.remove('blk.pkl')
# IrregularlySampledSignal
signal = IrregularlySampledSignal(
[0.0, 1.23, 6.78], [1, 2, 3], units='mV', time_units='ms')
signal.segment = Segment()
blk = Block()
seg = Segment()
seg.irregularlysampledsignals.append(signal)
blk.segments.append(seg)
blk.segments[0].block = blk
reader = PickleIO(filename="blk.pkl")
reader.write(blk)
reader = PickleIO(filename="blk.pkl")
r_blk = reader.read_block()
r_seg = r_blk.segments[0]
self.assertIsInstance(r_seg.irregularlysampledsignals[0].segment, Segment)
os.remove('blk.pkl')
if __name__ == '__main__':
unittest.main()
|
samuelgarcia/python-neo
|
neo/test/iotest/test_pickleio.py
|
Python
|
bsd-3-clause
| 3,272 | 0.000306 |
# Time: O(m * n)
# Space: O(1)
# A 3 x 3 magic square is a 3 x 3 grid filled with
# distinct numbers from 1 to 9 such that each row, column,
# and both diagonals all have the same sum.
#
# Given a grid of integers, how many 3 x 3 "magic square" subgrids are there?
# (Each subgrid is contiguous).
#
# Example 1:
#
# Input: [[4,3,8,4],
# [9,5,1,9],
# [2,7,6,2]]
# Output: 1
# Explanation:
# The following subgrid is a 3 x 3 magic square:
# 438
# 951
# 276
#
# while this one is not:
# 384
# 519
# 762
#
# In total, there is only one magic square inside the given grid.
# Note:
# - 1 <= grid.length <= 10
# - 1 <= grid[0].length <= 10
# - 0 <= grid[i][j] <= 15
try:
xrange # Python 2
except NameError:
xrange = range # Python 3
class Solution(object):
def numMagicSquaresInside(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
def magic(grid, r, c):
expect = k * (k**2+1) // 2
nums = set()
min_num = float("inf")
sum_diag, sum_anti = 0, 0
for i in xrange(k):
sum_diag += grid[r+i][c+i]
sum_anti += grid[r+i][c+k-1-i]
sum_r, sum_c = 0, 0
for j in xrange(k):
min_num = min(min_num, grid[r+i][c+j])
nums.add(grid[r+i][c+j])
sum_r += grid[r+i][c+j]
sum_c += grid[r+j][c+i]
if not (sum_r == sum_c == expect):
return False
return sum_diag == sum_anti == expect and \
len(nums) == k**2 and \
min_num == 1
k = 3
result = 0
for r in xrange(len(grid)-k+1):
for c in xrange(len(grid[r])-k+1):
if magic(grid, r, c):
result += 1
return result
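if __name__ == '__main__':
    # Illustrative usage sketch (not part of the original solution), running the
    # grid from Example 1 in the problem statement above.
    grid = [[4, 3, 8, 4],
            [9, 5, 1, 9],
            [2, 7, 6, 2]]
    print(Solution().numMagicSquaresInside(grid))  # expected output: 1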
|
kamyu104/LeetCode
|
Python/magic-squares-in-grid.py
|
Python
|
mit
| 1,889 | 0 |
#! /usr/bin/env python
#
# pyfacebook - Python bindings for the Facebook API
#
# Copyright (c) 2008, Samuel Cormier-Iijima
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY <copyright holder> ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <copyright holder> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Python bindings for the Facebook API (pyfacebook - http://code.google.com/p/pyfacebook)
PyFacebook is a client library that wraps the Facebook API.
For more information, see
Home Page: http://code.google.com/p/pyfacebook
Developer Wiki: http://wiki.developers.facebook.com/index.php/Python
Facebook IRC Channel: #facebook on irc.freenode.net
PyFacebook can use simplejson if it is installed, which
is much faster than XML and also uses less bandwith. Go to
http://undefined.org/python/#simplejson to download it, or do
apt-get install python-simplejson on a Debian-like system.
"""
import md5
import sys
import time
import urllib
import urllib2
import httplib
import mimetypes
# try to use simplejson first, otherwise fallback to XML
try:
import simplejson
RESPONSE_FORMAT = 'JSON'
except ImportError:
try:
from django.utils import simplejson
RESPONSE_FORMAT = 'JSON'
except ImportError:
from xml.dom import minidom
RESPONSE_FORMAT = 'XML'
# support Google App Engine. GAE does not have a working urllib.urlopen.
try:
from google.appengine.api import urlfetch
def urlread(url, data=None):
if data is not None:
headers = {"Content-type": "application/x-www-form-urlencoded"}
method = urlfetch.POST
else:
headers = {}
method = urlfetch.GET
result = urlfetch.fetch(url, method=method,
payload=data, headers=headers)
if result.status_code == 200:
return result.content
else:
raise urllib2.URLError("fetch error url=%s, code=%d" % (url, result.status_code))
except ImportError:
def urlread(url, data=None):
res = urllib2.urlopen(url, data=data)
return res.read()
__all__ = ['Facebook']
VERSION = '0.1'
# REST URLs
# Change these to /bestserver.php to use the bestserver.
FACEBOOK_URL = 'http://api.facebook.com/restserver.php'
FACEBOOK_SECURE_URL = 'https://api.facebook.com/restserver.php'
class json(object): pass
# simple IDL for the Facebook API
METHODS = {
# feed methods
'feed': {
'publishStoryToUser': [
('title', str, []),
('body', str, ['optional']),
('image_1', str, ['optional']),
('image_1_link', str, ['optional']),
('image_2', str, ['optional']),
('image_2_link', str, ['optional']),
('image_3', str, ['optional']),
('image_3_link', str, ['optional']),
('image_4', str, ['optional']),
('image_4_link', str, ['optional']),
('priority', int, ['optional']),
],
'publishActionOfUser': [
('title', str, []),
('body', str, ['optional']),
('image_1', str, ['optional']),
('image_1_link', str, ['optional']),
('image_2', str, ['optional']),
('image_2_link', str, ['optional']),
('image_3', str, ['optional']),
('image_3_link', str, ['optional']),
('image_4', str, ['optional']),
('image_4_link', str, ['optional']),
('priority', int, ['optional']),
],
'publishTemplatizedAction': [
# facebook expects title_data and body_data to be JSON
# simplejson.dumps({'place':'Florida'}) would do fine
# actor_id is now deprecated, use page_actor_id instead
('title_template', str, []),
('title_data', str, ['optional']),
('page_actor_id', int, ['optional']),
('body_template', str, ['optional']),
('body_data', str, ['optional']),
('body_general', str, ['optional']),
('image_1', str, ['optional']),
('image_1_link', str, ['optional']),
('image_2', str, ['optional']),
('image_2_link', str, ['optional']),
('image_3', str, ['optional']),
('image_3_link', str, ['optional']),
('image_4', str, ['optional']),
('image_4_link', str, ['optional']),
('target_ids', list, ['optional']),
],
'registerTemplateBundle': [
('one_line_story_template', str, []),
('short_story_template', json, ['optional']),
('full_story_template', json, ['optional']),
],
'getRegisteredTemplateBundles': [],
'getRegisteredTemplateBundleByID': [
('template_bundle_id', str, []),
],
'publishUserAction': [
('template_bundle_id', str, []),
('template_data', json, ['optional']),
('target_ids', list, ['optional']),
('body_general', str, ['optional']),
],
},
# fql methods
'fql': {
'query': [
('query', str, []),
],
},
# friends methods
'friends': {
'areFriends': [
('uids1', list, []),
('uids2', list, []),
],
'get': [],
'getAppUsers': [],
},
# notifications methods
'notifications': {
'get': [],
'send': [
('to_ids', list, []),
('notification', str, []),
('email', str, ['optional']),
],
'sendRequest': [
('to_ids', list, []),
('type', str, []),
('content', str, []),
('image', str, []),
('invite', bool, []),
],
'sendEmail': [
('recipients', list, []),
('subject', str, []),
('text', str, ['optional']),
('fbml', str, ['optional']),
]
},
# profile methods
'profile': {
'setFBML': [
('markup', str, ['optional']),
('uid', int, ['optional']),
('profile', str, ['optional']),
('profile_action', str, ['optional']),
('mobile_fbml', str, ['optional']),
],
'getFBML': [
('uid', int, ['optional']),
],
'setInfo': [
('title', str, []),
('type', int, []),
('info_fields', json, []),
('uid', int, []),
],
'getInfo': [
('uid', int, []),
],
'setInfoOptions': [
('field', str, []),
('options', json, []),
],
'getInfoOptions': [
('field', str, []),
],
},
# users methods
'users': {
'getInfo': [
('uids', list, []),
('fields', list, [('default', ['name'])]),
],
'getLoggedInUser': [],
'isAppAdded': [],
'hasAppPermission': [
('ext_perm', str, []),
],
'setStatus': [
('status', str, []),
('clear', bool, []),
],
},
# events methods
'events': {
'get': [
('uid', int, ['optional']),
('eids', list, ['optional']),
('start_time', int, ['optional']),
('end_time', int, ['optional']),
('rsvp_status', str, ['optional']),
],
'getMembers': [
('eid', int, []),
],
},
# update methods
'update': {
'decodeIDs': [
('ids', list, []),
],
},
# groups methods
'groups': {
'get': [
('uid', int, ['optional']),
('gids', list, ['optional']),
],
'getMembers': [
('gid', int, []),
],
},
# marketplace methods
'marketplace': {
'createListing': [
('listing_id', int, []),
('show_on_profile', bool, []),
('listing_attrs', str, []),
],
'getCategories': [],
'getListings': [
('listing_ids', list, []),
('uids', list, []),
],
'getSubCategories': [
('category', str, []),
],
'removeListing': [
('listing_id', int, []),
('status', str, []),
],
'search': [
('category', str, ['optional']),
('subcategory', str, ['optional']),
('query', str, ['optional']),
],
},
# pages methods
'pages': {
'getInfo': [
('page_ids', list, ['optional']),
('uid', int, ['optional']),
],
'isAdmin': [
('page_id', int, []),
],
'isAppAdded': [
('page_id', int, []),
],
'isFan': [
('page_id', int, []),
('uid', int, []),
],
},
# photos methods
'photos': {
'addTag': [
('pid', int, []),
('tag_uid', int, [('default', 0)]),
('tag_text', str, [('default', '')]),
('x', float, [('default', 50)]),
('y', float, [('default', 50)]),
('tags', str, ['optional']),
],
'createAlbum': [
('name', str, []),
('location', str, ['optional']),
('description', str, ['optional']),
],
'get': [
('subj_id', int, ['optional']),
('aid', int, ['optional']),
('pids', list, ['optional']),
],
'getAlbums': [
('uid', int, ['optional']),
('aids', list, ['optional']),
],
'getTags': [
('pids', list, []),
],
},
# fbml methods
'fbml': {
'refreshImgSrc': [
('url', str, []),
],
'refreshRefUrl': [
('url', str, []),
],
'setRefHandle': [
('handle', str, []),
('fbml', str, []),
],
},
'data': {
'getCookies': [
('uid', int, []),
('string', str, []),
],
'setCookie': [
('uid', int, []),
('name', str, []),
('value', str, []),
('expires', int, ['optional']),
('path', str, ['optional']),
],
},
}
class Proxy(object):
"""Represents a "namespace" of Facebook API calls."""
def __init__(self, client, name):
self._client = client
self._name = name
def __call__(self, method, args=None, add_session_args=True):
if add_session_args:
self._client._add_session_args(args)
return self._client('%s.%s' % (self._name, method), args)
# generate the Facebook proxies
def __generate_proxies():
for namespace in METHODS:
methods = {}
for method in METHODS[namespace]:
params = ['self']
body = ['args = {}']
for param_name, param_type, param_options in METHODS[namespace][method]:
param = param_name
for option in param_options:
if isinstance(option, tuple) and option[0] == 'default':
if param_type == list:
param = '%s=None' % param_name
body.append('if %s is None: %s = %s' % (param_name, param_name, repr(option[1])))
else:
param = '%s=%s' % (param_name, repr(option[1]))
if 'optional' in param_options:
param = '%s=None' % param_name
body.append('if %s is not None: args[\'%s\'] = %s' % (param_name, param_name, param_name))
else:
body.append('args[\'%s\'] = %s' % (param_name, param_name))
params.append(param)
# simple docstring to refer them to Facebook API docs
body.insert(0, '"""Facebook API call. See http://developers.facebook.com/documentation.php?v=1.0&method=%s.%s"""' % (namespace, method))
body.insert(0, 'def %s(%s):' % (method, ', '.join(params)))
body.append('return self(\'%s\', args)' % method)
exec('\n '.join(body))
methods[method] = eval(method)
proxy = type('%sProxy' % namespace.title(), (Proxy, ), methods)
globals()[proxy.__name__] = proxy
__generate_proxies()
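# For illustration (not part of the original module): given METHODS['fql']['query'],
# the loop above effectively generates a method of this shape on FqlProxy:
#
#     def query(self, query):
#         """Facebook API call. See http://developers.facebook.com/documentation.php?v=1.0&method=fql.query"""
#         args = {}
#         args['query'] = query
#         return self('query', args)
#
# Proxy.__call__ then prefixes the namespace, so the call goes out as
# 'facebook.fql.query' with the session arguments added.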
class FacebookError(Exception):
"""Exception class for errors received from Facebook."""
def __init__(self, code, msg, args=None):
self.code = code
self.msg = msg
self.args = args
def __str__(self):
return 'Error %s: %s' % (self.code, self.msg)
class AuthProxy(Proxy):
"""Special proxy for facebook.auth."""
def getSession(self):
"""Facebook API call. See http://developers.facebook.com/documentation.php?v=1.0&method=auth.getSession"""
args = {}
try:
args['auth_token'] = self._client.auth_token
except AttributeError:
raise RuntimeError('Client does not have auth_token set.')
result = self._client('%s.getSession' % self._name, args)
self._client.session_key = result['session_key']
self._client.uid = result['uid']
self._client.secret = result.get('secret')
self._client.session_key_expires = result['expires']
return result
def createToken(self):
"""Facebook API call. See http://developers.facebook.com/documentation.php?v=1.0&method=auth.createToken"""
token = self._client('%s.createToken' % self._name)
self._client.auth_token = token
return token
class FriendsProxy(FriendsProxy):
"""Special proxy for facebook.friends."""
def get(self):
"""Facebook API call. See http://developers.facebook.com/documentation.php?v=1.0&method=friends.get"""
if self._client._friends:
return self._client._friends
return super(FriendsProxy, self).get()
class PhotosProxy(PhotosProxy):
"""Special proxy for facebook.photos."""
def upload(self, image, aid=None, caption=None, size=(604, 1024)):
"""Facebook API call. See http://developers.facebook.com/documentation.php?v=1.0&method=photos.upload
size -- an optional size (width, height) to resize the image to before uploading. Resizes by default
to Facebook's maximum display width of 604.
"""
args = {}
if aid is not None:
args['aid'] = aid
if caption is not None:
args['caption'] = caption
args = self._client._build_post_args('facebook.photos.upload', self._client._add_session_args(args))
try:
import cStringIO as StringIO
except ImportError:
import StringIO
try:
import Image
except ImportError:
data = StringIO.StringIO(open(image, 'rb').read())
else:
img = Image.open(image)
if size:
img.thumbnail(size, Image.ANTIALIAS)
data = StringIO.StringIO()
img.save(data, img.format)
content_type, body = self.__encode_multipart_formdata(list(args.iteritems()), [(image, data)])
h = httplib.HTTP('api.facebook.com')
h.putrequest('POST', '/restserver.php')
h.putheader('Content-Type', content_type)
h.putheader('Content-Length', str(len(body)))
h.putheader('MIME-Version', '1.0')
h.putheader('User-Agent', 'PyFacebook Client Library')
h.endheaders()
h.send(body)
reply = h.getreply()
if reply[0] != 200:
raise Exception('Error uploading photo: Facebook returned HTTP %s (%s)' % (reply[0], reply[1]))
response = h.file.read()
return self._client._parse_response(response, 'facebook.photos.upload')
def __encode_multipart_formdata(self, fields, files):
"""Encodes a multipart/form-data message to upload an image."""
boundary = '-------tHISiStheMulTIFoRMbOUNDaRY'
crlf = '\r\n'
l = []
for (key, value) in fields:
l.append('--' + boundary)
l.append('Content-Disposition: form-data; name="%s"' % str(key))
l.append('')
l.append(str(value))
for (filename, value) in files:
l.append('--' + boundary)
l.append('Content-Disposition: form-data; filename="%s"' % (str(filename), ))
l.append('Content-Type: %s' % self.__get_content_type(filename))
l.append('')
l.append(value.getvalue())
l.append('--' + boundary + '--')
l.append('')
body = crlf.join(l)
content_type = 'multipart/form-data; boundary=%s' % boundary
return content_type, body
def __get_content_type(self, filename):
"""Returns a guess at the MIME type of the file from the filename."""
return str(mimetypes.guess_type(filename)[0]) or 'application/octet-stream'
class Facebook(object):
"""
Provides access to the Facebook API.
Instance Variables:
added
True if the user has added this application.
api_key
Your API key, as set in the constructor.
app_name
Your application's name, i.e. the APP_NAME in http://apps.facebook.com/APP_NAME/ if
this is for an internal web application. Optional, but useful for automatic redirects
to canvas pages.
auth_token
The auth token that Facebook gives you, either with facebook.auth.createToken,
or through a GET parameter.
callback_path
The path of the callback set in the Facebook app settings. If your callback is set
to http://www.example.com/facebook/callback/, this should be '/facebook/callback/'.
Optional, but useful for automatic redirects back to the same page after login.
desktop
True if this is a desktop app, False otherwise. Used for determining how to
authenticate.
in_canvas
True if the current request is for a canvas page.
internal
True if this Facebook object is for an internal application (one that can be added on Facebook)
page_id
Set to the page_id of the current page (if any)
secret
Secret that is used after getSession for desktop apps.
secret_key
Your application's secret key, as set in the constructor.
session_key
The current session key. Set automatically by auth.getSession, but can be set
manually for doing infinite sessions.
session_key_expires
The UNIX time of when this session key expires, or 0 if it never expires.
uid
After a session is created, you can get the user's UID with this variable. Set
automatically by auth.getSession.
----------------------------------------------------------------------
"""
def __init__(self, api_key, secret_key, auth_token=None, app_name=None, callback_path=None, internal=None):
"""
Initializes a new Facebook object which provides wrappers for the Facebook API.
If this is a desktop application, the next couple of steps you might want to take are:
facebook.auth.createToken() # create an auth token
facebook.login() # show a browser window
wait_login() # somehow wait for the user to log in
facebook.auth.getSession() # get a session key
For web apps, if you are passed an auth_token from Facebook, pass that in as a named parameter.
Then call:
facebook.auth.getSession()
"""
self.api_key = api_key
self.secret_key = secret_key
self.session_key = None
self.session_key_expires = None
self.auth_token = auth_token
self.secret = None
self.uid = None
self.page_id = None
self.in_canvas = False
self.added = False
self.app_name = app_name
self.callback_path = callback_path
self.internal = internal
self._friends = None
for namespace in METHODS:
self.__dict__[namespace] = eval('%sProxy(self, \'%s\')' % (namespace.title(), 'facebook.%s' % namespace))
self.auth = AuthProxy(self, 'facebook.auth')
def _hash_args(self, args, secret=None):
"""Hashes arguments by joining key=value pairs, appending a secret, and then taking the MD5 hex digest."""
hasher = md5.new(''.join(['%s=%s' % (x, args[x]) for x in sorted(args.keys())]))
if secret:
hasher.update(secret)
elif self.secret:
hasher.update(self.secret)
else:
hasher.update(self.secret_key)
return hasher.hexdigest()
def _parse_response_item(self, node):
"""Parses an XML response node from Facebook."""
if node.nodeType == node.DOCUMENT_NODE and \
node.childNodes[0].hasAttributes() and \
node.childNodes[0].hasAttribute('list') and \
node.childNodes[0].getAttribute('list') == "true":
return {node.childNodes[0].nodeName: self._parse_response_list(node.childNodes[0])}
elif node.nodeType == node.ELEMENT_NODE and \
node.hasAttributes() and \
node.hasAttribute('list') and \
node.getAttribute('list')=="true":
return self._parse_response_list(node)
elif len(filter(lambda x: x.nodeType == x.ELEMENT_NODE, node.childNodes)) > 0:
return self._parse_response_dict(node)
else:
return ''.join(node.data for node in node.childNodes if node.nodeType == node.TEXT_NODE)
def _parse_response_dict(self, node):
"""Parses an XML dictionary response node from Facebook."""
result = {}
for item in filter(lambda x: x.nodeType == x.ELEMENT_NODE, node.childNodes):
result[item.nodeName] = self._parse_response_item(item)
if node.nodeType == node.ELEMENT_NODE and node.hasAttributes():
if node.hasAttribute('id'):
result['id'] = node.getAttribute('id')
return result
def _parse_response_list(self, node):
"""Parses an XML list response node from Facebook."""
result = []
for item in filter(lambda x: x.nodeType == x.ELEMENT_NODE, node.childNodes):
result.append(self._parse_response_item(item))
return result
def _check_error(self, response):
"""Checks if the given Facebook response is an error, and then raises the appropriate exception."""
if type(response) is dict and response.has_key('error_code'):
raise FacebookError(response['error_code'], response['error_msg'], response['request_args'])
def _build_post_args(self, method, args=None):
"""Adds to args parameters that are necessary for every call to the API."""
if args is None:
args = {}
for arg in args.items():
if type(arg[1]) == list:
args[arg[0]] = ','.join(str(a) for a in arg[1])
elif type(arg[1]) == unicode:
args[arg[0]] = arg[1].encode("UTF-8")
args['method'] = method
args['api_key'] = self.api_key
args['v'] = '1.0'
args['format'] = RESPONSE_FORMAT
args['sig'] = self._hash_args(args)
return args
def _add_session_args(self, args=None):
"""Adds 'session_key' and 'call_id' to args, which are used for API calls that need sessions."""
if args is None:
args = {}
if not self.session_key:
return args
#some calls don't need a session anymore. this might be better done in the markup
#raise RuntimeError('Session key not set. Make sure auth.getSession has been called.')
args['session_key'] = self.session_key
args['call_id'] = str(int(time.time() * 1000))
return args
def _parse_response(self, response, method, format=None):
"""Parses the response according to the given (optional) format, which should be either 'JSON' or 'XML'."""
if not format:
format = RESPONSE_FORMAT
if format == 'JSON':
result = simplejson.loads(response)
self._check_error(result)
elif format == 'XML':
dom = minidom.parseString(response)
result = self._parse_response_item(dom)
dom.unlink()
if 'error_response' in result:
self._check_error(result['error_response'])
result = result[method[9:].replace('.', '_') + '_response']
else:
raise RuntimeError('Invalid format specified.')
return result
def __call__(self, method, args=None, secure=False):
"""Make a call to Facebook's REST server."""
post_data = urllib.urlencode(self._build_post_args(method, args))
if secure:
response = urlread(FACEBOOK_SECURE_URL, post_data)
else:
response = urlread(FACEBOOK_URL, post_data)
return self._parse_response(response, method)
# URL helpers
def get_url(self, page, **args):
"""
Returns one of the Facebook URLs (www.facebook.com/SOMEPAGE.php).
Named arguments are passed as GET query string parameters.
"""
return 'http://www.facebook.com/%s.php?%s' % (page, urllib.urlencode(args))
def get_app_url(self, path=''):
"""
Returns the URL for this app's canvas page, according to app_name.
"""
return 'http://apps.facebook.com/%s/%s' % (self.app_name, path)
def get_add_url(self, next=None):
"""
Returns the URL that the user should be redirected to in order to add the application.
"""
args = {'api_key': self.api_key, 'v': '1.0'}
if next is not None:
args['next'] = next
return self.get_url('install', **args)
def get_authorize_url(self, next=None, next_cancel=None):
"""
        Returns the URL that the user should be redirected to in order to authorize certain actions for the application.
"""
args = {'api_key': self.api_key, 'v': '1.0'}
if next is not None:
args['next'] = next
if next_cancel is not None:
args['next_cancel'] = next_cancel
return self.get_url('authorize', **args)
def get_login_url(self, next=None, popup=False, canvas=True):
"""
Returns the URL that the user should be redirected to in order to login.
next -- the URL that Facebook should redirect to after login
"""
args = {'api_key': self.api_key, 'v': '1.0'}
if next is not None:
args['next'] = next
if canvas is True:
args['canvas'] = 1
if popup is True:
args['popup'] = 1
if self.auth_token is not None:
args['auth_token'] = self.auth_token
return self.get_url('login', **args)
def login(self, popup=False):
"""Open a web browser telling the user to login to Facebook."""
import webbrowser
webbrowser.open(self.get_login_url(popup=popup))
def check_session(self, request):
"""
Checks the given Django HttpRequest for Facebook parameters such as
POST variables or an auth token. If the session is valid, returns True
and this object can now be used to access the Facebook API. Otherwise,
it returns False, and the application should take the appropriate action
(either log the user in or have him add the application).
"""
self.in_canvas = (request.POST.get('fb_sig_in_canvas') == '1')
if self.session_key and (self.uid or self.page_id):
return True
if request.method == 'POST':
params = self.validate_signature(request.POST)
else:
if 'installed' in request.GET:
self.added = True
if 'fb_page_id' in request.GET:
self.page_id = request.GET['fb_page_id']
if 'auth_token' in request.GET:
self.auth_token = request.GET['auth_token']
try:
self.auth.getSession()
except FacebookError, e:
self.auth_token = None
return False
return True
params = self.validate_signature(request.GET)
if not params:
return False
if params.get('in_canvas') == '1':
self.in_canvas = True
if params.get('added') == '1':
self.added = True
if params.get('expires'):
self.session_key_expires = int(params['expires'])
if 'friends' in params:
if params['friends']:
self._friends = params['friends'].split(',')
else:
self._friends = []
if 'session_key' in params:
self.session_key = params['session_key']
if 'user' in params:
self.uid = params['user']
elif 'page_id' in params:
self.page_id = params['page_id']
else:
return False
else:
return False
return True
def validate_signature(self, post, prefix='fb_sig', timeout=None):
"""
Validate parameters passed to an internal Facebook app from Facebook.
"""
args = post.copy()
if prefix not in args:
return None
del args[prefix]
if timeout and '%s_time' % prefix in post and time.time() - float(post['%s_time' % prefix]) > timeout:
return None
args = dict([(key[len(prefix + '_'):], value) for key, value in args.items() if key.startswith(prefix)])
hash = self._hash_args(args)
if hash == post[prefix]:
return args
else:
return None
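# --- Hedged example (editor's addition, not part of the original library) ---
# A minimal sketch of the web/canvas flow described in Facebook.check_session():
# build the wrapper per request, validate the signed parameters, and send the user
# to the login URL when no valid session exists. The api_key/secret_key defaults
# are placeholders; swap in your own settings.
def example_check_session_view(request, api_key='YOUR_API_KEY', secret_key='YOUR_SECRET_KEY'):
    from django.http import HttpResponse, HttpResponseRedirect
    fb = Facebook(api_key, secret_key)
    if not fb.check_session(request):
        # No valid session yet: bounce the user to Facebook's login page.
        return HttpResponseRedirect(fb.get_login_url(next=request.path))
    name = fb.users.getInfo([fb.uid], ['name'])[0]['name']
    return HttpResponse('Hello, %s' % name)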
if __name__ == '__main__':
# sample desktop application
api_key = ''
secret_key = ''
facebook = Facebook(api_key, secret_key)
facebook.auth.createToken()
# Show login window
# Set popup=True if you want login without navigational elements
facebook.login()
# Login to the window, then press enter
print 'After logging in, press enter...'
raw_input()
facebook.auth.getSession()
print 'Session Key: ', facebook.session_key
print 'Your UID: ', facebook.uid
info = facebook.users.getInfo([facebook.uid], ['name', 'birthday', 'affiliations', 'sex'])[0]
print 'Your Name: ', info['name']
print 'Your Birthday: ', info['birthday']
print 'Your Gender: ', info['sex']
friends = facebook.friends.get()
friends = facebook.users.getInfo(friends[0:5], ['name', 'birthday', 'relationship_status'])
for friend in friends:
print friend['name'], 'has a birthday on', friend['birthday'], 'and is', friend['relationship_status']
arefriends = facebook.friends.areFriends([friends[0]['uid']], [friends[1]['uid']])
photos = facebook.photos.getAlbums(facebook.uid)
|
JustinTulloss/harmonize.fm
|
libs.py/facebook/__init__.py
|
Python
|
mit
| 32,593 | 0.002976 |
def __bootstrap__():
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__, '_unpacker.cp35-win32.pyd')
__loader__ = None; del __bootstrap__, __loader__
imp.load_dynamic(__name__,__file__)
__bootstrap__()
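# Note: the above is the auto-generated loader stub that packaging tools emit for
# compiled extension modules -- importing this module simply loads the platform-specific
# binary (_unpacker.cp35-win32.pyd) in its place. It is not meant to be edited by hand.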
|
stevenzhang18/Indeed-Flask
|
lib/pandas/msgpack/_unpacker.py
|
Python
|
apache-2.0
| 297 | 0.016835 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Vaucher
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from lxml import etree
from openerp import models, fields, api
class PostlogisticsLicense(models.Model):
_name = 'postlogistics.license'
_description = 'PostLogistics Franking License'
_order = 'sequence'
name = fields.Char(string='Description',
translate=True,
required=True)
number = fields.Char(string='Number',
required=True)
company_id = fields.Many2one(comodel_name='res.company',
string='Company',
required=True)
sequence = fields.Integer(
string='Sequence',
help="Gives the sequence on company to define priority on license "
"when multiple licenses are available for the same group of "
"service."
)
class PostlogisticsServiceGroup(models.Model):
_name = 'postlogistics.service.group'
_description = 'PostLogistics Service Group'
name = fields.Char(string='Description', translate=True, required=True)
group_extid = fields.Integer(string='Group ID', required=True)
postlogistics_license_ids = fields.Many2many(
comodel_name='postlogistics.license',
relation='postlogistics_license_service_groups_rel',
column1='license_id',
column2='group_id',
string='PostLogistics Franking License')
_sql_constraints = [
('group_extid_uniq', 'unique(group_extid)',
"A service group ID must be unique.")
]
POSTLOGISTIC_TYPES = [
('label_layout', 'Label Layout'),
('output_format', 'Output Format'),
('resolution', 'Output Resolution'),
('basic', 'Basic Service'),
('additional', 'Additional Service'),
('delivery', 'Delivery Instructions')
]
class DeliveryCarrierTemplateOption(models.Model):
""" Set name translatable and add service group """
_inherit = 'delivery.carrier.template.option'
name = fields.Char(translate=True)
postlogistics_service_group_id = fields.Many2one(
comodel_name='postlogistics.service.group',
string='PostLogistics Service Group',
)
postlogistics_type = fields.Selection(
selection=POSTLOGISTIC_TYPES,
string="PostLogistics option type",
)
    # relation tables to manage compatibility between basic services
# and other services
postlogistics_basic_service_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
relation='postlogistics_compatibility_service_rel',
column1='service_id',
column2='basic_service_id',
string="Basic Services",
domain=[('postlogistics_type', '=', 'basic')],
help="List of basic service for which this service is compatible",
)
postlogistics_additonial_service_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
relation='postlogistics_compatibility_service_rel',
column1='basic_service_id',
column2='service_id',
string="Compatible Additional Services",
domain=[('postlogistics_type', '=', 'additional')],
)
postlogistics_delivery_instruction_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
relation='postlogistics_compatibility_service_rel',
column1='basic_service_id',
column2='service_id',
string="Compatible Delivery Instructions",
domain=[('postlogistics_type', '=', 'delivery')],
)
class DeliveryCarrierOption(models.Model):
""" Set name translatable and add service group """
_inherit = 'delivery.carrier.option'
name = fields.Char(translate=True)
def fields_view_get(self, cr, uid, view_id=None, view_type='form',
context=None, toolbar=False, submenu=False):
_super = super(DeliveryCarrierOption, self)
result = _super.fields_view_get(cr, uid, view_id=view_id,
view_type=view_type, context=context,
toolbar=toolbar, submenu=submenu)
xmlid = 'delivery_carrier_label_postlogistics.postlogistics'
ref = self.pool['ir.model.data'].xmlid_to_object
postlogistics_partner = ref(cr, uid, xmlid, context=context)
if context.get('default_carrier_id'):
carrier_obj = self.pool['delivery.carrier']
carrier = carrier_obj.browse(cr, uid,
context['default_carrier_id'],
context=context)
if carrier.partner_id == postlogistics_partner:
arch = result['arch']
doc = etree.fromstring(arch)
for node in doc.xpath("//field[@name='tmpl_option_id']"):
node.set(
'domain',
"[('partner_id', '=', %s), "
" ('id', 'in', parent.allowed_option_ids[0][2])]" %
postlogistics_partner.id
)
result['arch'] = etree.tostring(doc)
return result
class DeliveryCarrier(models.Model):
""" Add service group """
_inherit = 'delivery.carrier'
@api.model
def _get_carrier_type_selection(self):
""" Add postlogistics carrier type """
res = super(DeliveryCarrier, self)._get_carrier_type_selection()
res.append(('postlogistics', 'Postlogistics'))
return res
@api.depends('partner_id',
'available_option_ids',
'available_option_ids.tmpl_option_id',
'available_option_ids.postlogistics_type',
)
def _get_basic_service_ids(self):
""" Search in all options for PostLogistics basic services if set """
xmlid = 'delivery_carrier_label_postlogistics.postlogistics'
postlogistics_partner = self.env.ref(xmlid)
for carrier in self:
if carrier.partner_id != postlogistics_partner:
continue
options = carrier.available_option_ids.filtered(
lambda option: option.postlogistics_type == 'basic'
).mapped('tmpl_option_id')
if not options:
continue
            carrier.postlogistics_basic_service_ids = options
@api.depends('partner_id',
'postlogistics_service_group_id',
                 'postlogistics_basic_service_ids',
'available_option_ids',
'available_option_ids.postlogistics_type',
)
def _get_allowed_option_ids(self):
""" Return a list of possible options
A domain would be too complicated.
We do this to ensure the user first select a basic service. And
then he adds additional services.
"""
option_template_obj = self.env['delivery.carrier.template.option']
xmlid = 'delivery_carrier_label_postlogistics.postlogistics'
postlogistics_partner = self.env.ref(xmlid)
for carrier in self:
allowed = option_template_obj.browse()
if carrier.partner_id != postlogistics_partner:
continue
service_group = carrier.postlogistics_service_group_id
if service_group:
basic_services = carrier.postlogistics_basic_service_ids
services = option_template_obj.search(
[('postlogistics_service_group_id', '=', service_group.id)]
)
allowed |= services
if basic_services:
related_services = option_template_obj.search(
[('postlogistics_basic_service_ids', 'in',
basic_services.ids)]
)
allowed |= related_services
# Allows to set multiple optional single option in order to
# let the user select them
single_option_types = [
'label_layout',
'output_format',
'resolution',
]
selected_single_options = [
opt.tmpl_option_id.postlogistics_type
for opt in carrier.available_option_ids
if opt.postlogistics_type in single_option_types and
opt.mandatory]
if selected_single_options != single_option_types:
services = option_template_obj.search(
[('postlogistics_type', 'in', single_option_types),
('postlogistics_type', 'not in',
selected_single_options)],
)
allowed |= services
carrier.allowed_option_ids = allowed
postlogistics_license_id = fields.Many2one(
comodel_name='postlogistics.license',
string='PostLogistics Franking License',
)
postlogistics_service_group_id = fields.Many2one(
comodel_name='postlogistics.service.group',
string='PostLogistics Service Group',
help="Service group defines the available options for "
"this delivery method.",
)
postlogistics_basic_service_ids = fields.One2many(
comodel_name='delivery.carrier.template.option',
compute='_get_basic_service_ids',
string='PostLogistics Service Group',
help="Basic Service defines the available "
"additional options for this delivery method",
)
allowed_option_ids = fields.Many2many(
comodel_name='delivery.carrier.template.option',
compute='_get_allowed_option_ids',
string='Allowed options',
help="Compute allowed options according to selected options.",
)
|
Antiun/carrier-delivery
|
delivery_carrier_label_postlogistics/delivery.py
|
Python
|
agpl-3.0
| 10,691 | 0 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
def get_config():
return tf.contrib.training.HParams(**{
'total_bs': 64,
'eval_total_bs': 16,
'dataset_name': 'imagenet32',
'dataset_config': tf.contrib.training.HParams(),
'model_name': 'SlicedChannelModel',
'model_config': tf.contrib.training.HParams(**{
'optim': tf.contrib.training.HParams(**{
'max_lr': 1e-4,
'warmup': 5000,
'grad_clip_norm': 1.0,
'ema': 0.99995,
'optimizer': 'adam',
'adam_beta1': 0.9,
'adam_beta2': 0.999,
}),
'dropout': 0.04,
'img_size': 32,
'ardec': tf.contrib.training.HParams(**{
'emb_dim': 1536,
'hdim_factor': 1,
'emb_init_scale': 5.0,
'num_heads': 16,
'num_exterior_layers': 8,
'num_outer_layers': 8,
'num_inner_layers': 8,
'res_init_scale': 1e-10,
}),
})
})
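# Hedged usage sketch (editor's addition): how the nested HParams above can be read
# by a training script. Attribute access mirrors the keys set in get_config().
if __name__ == '__main__':
    config = get_config()
    print(config.total_bs)              # 64
    print(config.model_config.dropout)  # 0.04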
|
google-research/google-research
|
axial/config_imagenet32.py
|
Python
|
apache-2.0
| 1,760 | 0.000568 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Glance Image Cache Invalid Cache Entry and Stalled Image cleaner
This is meant to be run as a periodic task from cron.
If something goes wrong while we're caching an image (for example the fetch
times out, or an exception is raised), we create an 'invalid' entry. These
entires are left around for debugging purposes. However, after some period of
time, we want to clean these up.
Also, if an incomplete image hangs around past the image_cache_stall_time
period, we automatically sweep it up.
"""
import os
import sys
from oslo_log import log as logging
# If ../glance/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
sys.path.insert(0, possible_topdir)
from daisy.common import config
from daisy.image_cache import cleaner
CONF = config.CONF
logging.register_options(CONF)
def main():
try:
config.parse_cache_args()
logging.setup(CONF, 'glance')
app = cleaner.Cleaner()
app.run()
except RuntimeError as e:
sys.exit("ERROR: %s" % e)
|
OpenDaisy/daisy-api
|
daisy/cmd/cache_cleaner.py
|
Python
|
apache-2.0
| 2,104 | 0.000951 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from paypal.pro.fields import CreditCardField, CreditCardExpiryField, CreditCardCVV2Field, CountryField
class PaymentForm(forms.Form):
"""Form used to process direct payments."""
firstname = forms.CharField(255, label="First Name")
lastname = forms.CharField(255, label="Last Name")
street = forms.CharField(255, label="Street Address")
city = forms.CharField(255, label="City")
state = forms.CharField(255, label="State")
countrycode = CountryField(label="Country", initial="US")
zip = forms.CharField(32, label="Postal / Zip Code")
acct = CreditCardField(label="Credit Card Number")
expdate = CreditCardExpiryField(label="Expiration Date")
cvv2 = CreditCardCVV2Field(label="Card Security Code")
def process(self, request, item):
"""Process a PayPal direct payment."""
from paypal.pro.helpers import PayPalWPP
wpp = PayPalWPP(request)
params = self.cleaned_data
params['creditcardtype'] = self.fields['acct'].card_type
params['expdate'] = self.cleaned_data['expdate'].strftime("%m%Y")
params['ipaddress'] = request.META.get("REMOTE_ADDR", "")
params.update(item)
# Create single payment:
if 'billingperiod' not in params:
response = wpp.doDirectPayment(params)
# Create recurring payment:
else:
response = wpp.createRecurringPaymentsProfile(params, direct=True)
return response
class ConfirmForm(forms.Form):
"""Hidden form used by ExpressPay flow to keep track of payer information."""
token = forms.CharField(max_length=255, widget=forms.HiddenInput())
PayerID = forms.CharField(max_length=255, widget=forms.HiddenInput())
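# --- Hedged usage sketch (editor's addition, not part of the original module) ---
# A minimal view that could drive PaymentForm above. The keys in `item` follow
# PayPal NVP naming (amt, desc, paymentaction); the template name and amounts are
# illustrative assumptions.
def example_payment_view(request):
    from django.shortcuts import render
    item = {'amt': '10.00', 'desc': 'Example item', 'paymentaction': 'Sale'}
    form = PaymentForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        response = form.process(request, item)
        # Inspect `response` for the PayPal ACK status before confirming the order.
    return render(request, 'payment.html', {'form': form})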
|
bluestemscott/librarygadget
|
librarygadget/paypal/pro/forms.py
|
Python
|
mit
| 1,840 | 0.003261 |
KEYS = (
'#',
'S-', 'T-', 'K-', 'P-', 'W-', 'H-', 'R-',
'A-', 'O-',
'*',
'-E', '-U',
'-F', '-R', '-P', '-B', '-L', '-G', '-T', '-S', '-D', '-Z',
)
IMPLICIT_HYPHEN_KEYS = ('A-', 'O-', '5-', '0-', '-E', '-U', '*')
SUFFIX_KEYS = ('-S', '-G', '-Z', '-D')
NUMBER_KEY = '#'
NUMBERS = {
'S-': '1-',
'T-': '2-',
'P-': '3-',
'H-': '4-',
'A-': '5-',
'O-': '0-',
'-F': '-6',
'-P': '-7',
'-L': '-8',
'-T': '-9',
}
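# e.g. when the number bar '#' is part of a stroke, the keys above are relabelled:
# 'T-' is read as '2-', '-P' as '-7', and so on; keys with no entry keep their
# letter value.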
UNDO_STROKE_STENO = '*'
ORTHOGRAPHY_RULES = [
# == +ly ==
# artistic + ly = artistically
(r'^(.*[aeiou]c) \^ ly$', r'\1ally'),
# == +ry ==
# statute + ry = statutory
(r'^(.*t)e \^ ry$', r'\1ory'),
# == t +cy ==
# frequent + cy = frequency (tcy/tecy removal)
(r'^(.*[naeiou])te? \^ cy$', r'\1cy'),
# == +s ==
# establish + s = establishes (sibilant pluralization)
(r'^(.*(?:s|sh|x|z|zh)) \^ s$', r'\1es'),
# speech + s = speeches (soft ch pluralization)
(r'^(.*(?:oa|ea|i|ee|oo|au|ou|l|n|(?<![gin]a)r|t)ch) \^ s$', r'\1es'),
# cherry + s = cherries (consonant + y pluralization)
(r'^(.+[bcdfghjklmnpqrstvwxz])y \^ s$', r'\1ies'),
# == y ==
# die+ing = dying
(r'^(.+)ie \^ ing$', r'\1ying'),
# metallurgy + ist = metallurgist
(r'^(.+[cdfghlmnpr])y \^ ist$', r'\1ist'),
# beauty + ful = beautiful (y -> i)
(r'^(.+[bcdfghjklmnpqrstvwxz])y \^ ([a-hj-xz].*)$', r'\1i\2'),
# == e ==
# write + en = written
(r'^(.+)te \^ en$', r'\1tten'),
# free + ed = freed
(r'^(.+e)e \^ (e.+)$', r'\1\2'),
# narrate + ing = narrating (silent e)
(r'^(.+[bcdfghjklmnpqrstuvwxz])e \^ ([aeiouy].*)$', r'\1\2'),
# == misc ==
# defer + ed = deferred (consonant doubling) XXX monitor(stress not on last syllable)
(r'^(.*(?:[bcdfghjklmnprstvwxyz]|qu)[aeiou])([bcdfgklmnprtvz]) \^ ([aeiouy].*)$', r'\1\2\2\3'),
]
ORTHOGRAPHY_RULES_ALIASES = {
'able': 'ible',
}
ORTHOGRAPHY_WORDLIST = 'american_english_words.txt'
KEYMAPS = {
'Gemini PR': {
'#' : ('#1', '#2', '#3', '#4', '#5', '#6', '#7', '#8', '#9', '#A', '#B', '#C'),
'S-' : ('S1-', 'S2-'),
'T-' : 'T-',
'K-' : 'K-',
'P-' : 'P-',
'W-' : 'W-',
'H-' : 'H-',
'R-' : 'R-',
'A-' : 'A-',
'O-' : 'O-',
'*' : ('*1', '*2', '*3', '*4'),
'-E' : '-E',
'-U' : '-U',
'-F' : '-F',
'-R' : '-R',
'-P' : '-P',
'-B' : '-B',
'-L' : '-L',
'-G' : '-G',
'-T' : '-T',
'-S' : '-S',
'-D' : '-D',
'-Z' : '-Z',
'no-op' : ('Fn', 'pwr', 'res1', 'res2'),
},
'Keyboard': {
'#' : ('1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '-', '='),
'S-' : ('a', 'q'),
'T-' : 'w',
'K-' : 's',
'P-' : 'e',
'W-' : 'd',
'H-' : 'r',
'R-' : 'f',
'A-' : 'c',
'O-' : 'v',
'*' : ('t', 'g', 'y', 'h'),
'-E' : 'n',
'-U' : 'm',
'-F' : 'u',
'-R' : 'j',
'-P' : 'i',
'-B' : 'k',
'-L' : 'o',
'-G' : 'l',
'-T' : 'p',
'-S' : ';',
'-D' : '[',
'-Z' : '\'',
'arpeggiate': 'space',
# Suppress adjacent keys to prevent miss-strokes.
'no-op' : ('z', 'x', 'b', ',', '.', '/', ']', '\\'),
},
'Passport': {
'#' : '#',
'S-' : ('S', 'C'),
'T-' : 'T',
'K-' : 'K',
'P-' : 'P',
'W-' : 'W',
'H-' : 'H',
'R-' : 'R',
'A-' : 'A',
'O-' : 'O',
'*' : ('~', '*'),
'-E' : 'E',
'-U' : 'U',
'-F' : 'F',
'-R' : 'Q',
'-P' : 'N',
'-B' : 'B',
'-L' : 'L',
'-G' : 'G',
'-T' : 'Y',
'-S' : 'X',
'-D' : 'D',
'-Z' : 'Z',
'no-op': ('!', '^', '+'),
},
'Stentura': {
'#' : '#',
'S-' : 'S-',
'T-' : 'T-',
'K-' : 'K-',
'P-' : 'P-',
'W-' : 'W-',
'H-' : 'H-',
'R-' : 'R-',
'A-' : 'A-',
'O-' : 'O-',
'*' : '*',
'-E' : '-E',
'-U' : '-U',
'-F' : '-F',
'-R' : '-R',
'-P' : '-P',
'-B' : '-B',
'-L' : '-L',
'-G' : '-G',
'-T' : '-T',
'-S' : '-S',
'-D' : '-D',
'-Z' : '-Z',
'no-op': '^',
},
'TX Bolt': {
'#' : '#',
'S-' : 'S-',
'T-' : 'T-',
'K-' : 'K-',
'P-' : 'P-',
'W-' : 'W-',
'H-' : 'H-',
'R-' : 'R-',
'A-' : 'A-',
'O-' : 'O-',
'*' : '*',
'-E' : '-E',
'-U' : '-U',
'-F' : '-F',
'-R' : '-R',
'-P' : '-P',
'-B' : '-B',
'-L' : '-L',
'-G' : '-G',
'-T' : '-T',
'-S' : '-S',
'-D' : '-D',
'-Z' : '-Z',
},
'Treal': {
'#' : ('#1', '#2', '#3', '#4', '#5', '#6', '#7', '#8', '#9', '#A', '#B'),
'S-' : ('S1-', 'S2-'),
'T-' : 'T-',
'K-' : 'K-',
'P-' : 'P-',
'W-' : 'W-',
'H-' : 'H-',
'R-' : 'R-',
'A-' : 'A-',
'O-' : 'O-',
'*' : ('*1', '*2'),
'-E' : '-E',
'-U' : '-U',
'-F' : '-F',
'-R' : '-R',
'-P' : '-P',
'-B' : '-B',
'-L' : '-L',
'-G' : '-G',
'-T' : '-T',
'-S' : '-S',
'-D' : '-D',
'-Z' : '-Z',
'no-op': ('X1-', 'X2-', 'X3'),
},
}
DICTIONARIES_ROOT = 'asset:plover:assets'
DEFAULT_DICTIONARIES = ('main.json', 'commands.json', 'user.json')
|
Germanika/plover
|
plover/system/english_stenotype.py
|
Python
|
gpl-2.0
| 6,232 | 0.024069 |
#!/usr/bin/python
r"""
PYTHONRC
========
Initialization script for the interactive Python interpreter. Its main purpose
is to enhance the overall user experience when working in such an environment
by adding some niceties to the standard console.
It also works with IPython and BPython, although its utility in that kind of
scenarios can be argued.
Tested in GNU/Linux with Python versions 2.7 and 3.4.
Please read the Installation section below.
Features
--------
- User input completion
+ Introduces a completion mechanism for inputted commands in Python 2.
+ In Python 3, where the standard console is a lot nicer, it just
impersonates the default completion machinery to keep the consistency with
the behavior in Python 2 (and so it's still possible to adapt it to the
user's needs).
- Command History
+ Creates a callable, singleton object called `history`, placing it into
the `__builtins__` object to make it easily available, which enables the
handling of the command history (saving some input lines to a file of your
choice, listing the commands introduced so far, etc.). Try simply
`history()` on the Python prompt to see it in action; inspect its members
(with `dir(history)` or `help(history.write)`) for more information.
- Color prompt
+ Puts a colorful prompt in place, if the terminal supports it.
- Implementation of a clone of bash's "operate-and-get-next"
+ Enables a quick re-edition of a code block from the history by
successive keypresses of the `Ctrl-o` hotkey.
Installation
------------
- You must define in your environment (in GNU/Linux and MacOS X that usually
means your `~/.bashrc` file) the variable 'PYTHONSTARTUP' containing the path
to `pythonrc.py`.
- It is also highly recommended to define the variable 'PYTHON_HISTORY_FILE'.
Remember that BPython (unlike the standard interpreter or IPython) ignores that
variable, so you'll have to configure it as well by other means to be able to
use the same history file there (for instance, in Linux, the file
`~/.config/bpython/config` is a good place to start, but please read BPython's
documentation).
### Example configurations
- Extract of `~/.bashrc`
```sh
# python
export PYTHONSTARTUP=~/.python/pythonrc.py
export PYTHON_HISTORY_FILE=~/.python/.python_history
## You may also want to uncomment some of these lines if using an old
## version of virtualenvwrapper
# export VIRTUALENVWRAPPER_PYTHON=/usr/bin/python3.4
# export WORKON_HOME=~/.python/virtualenvs
# source $(which virtualenvwrapper.sh)
```
- Extract of `~/.config/bpython/config`
```
[general]
color_scheme = default
hist_file = ~/.python/.python_history
hist_lenght = 1000
```
Bugs / Caveats / Future enhancements
------------------------------------
- No module/package introspection for the last argument in commands of the form
`from <package> import <not_completing_this>` (this, in fact, could be a not so
bad thing, because it doesn't execute side effects, e.g. modules' init code).
- Depending on the user's system, the compilation of the packages' and modules'
list for completing `import ...` and `from ... import ...` commands can take a
long time, especially the first time it is invoked.
- When completing things like a method's name, the default is to also include
the closing parenthesis along with the opening one, but the cursor is placed
after it no matter what, instead of between them. This is because of the
python module `readline`'s limitations.
You can turn off the inclusion of the closing parenthesis; if you do so, you
might be also interested in modifying the variable called
`dict_keywords_postfix` (especially the strings that act as that dictionary's
indexes).
- IPython has its own `%history` magic. I did my best to not interfere with
it, but I don't know the actual consequences. Also, it's debatable if it
even makes sense to use this file with IPython and/or BPython (though having
a unified history for all the environments is really nice).
You could define some bash aliases like
```sh
alias ipython='PYTHONSTARTUP="" ipython'
alias bpython='PYTHONSTARTUP="" bpython'
```
to be on the safer side.
- Could have used the module `six` for better clarity. Right now it uses my own
made up stubs to work on both Python 2 and 3.
- Needs better comments and documentation, especially the part on history
handling.
- Probably a lot more. Feel free to file bug reports ;-)
"""
def init():
# color prompt
import sys
import os
term_with_colors = ['xterm', 'xterm-color', 'xterm-256color', 'linux',
'screen', 'screen-256color', 'screen-bce']
red = ''
green = ''
reset = ''
if os.environ.get('TERM') in term_with_colors:
escapes_pattern = '\001\033[%sm\002' # \001 and \002 mark non-printing
red = escapes_pattern % '31'
green = escapes_pattern % '32'
reset = escapes_pattern % '0'
sys.ps1 = red + '>>> ' + reset
sys.ps2 = green + '... ' + reset
red = red.strip('\001\002')
green = green.strip('\001\002')
reset = reset.strip('\001\002')
# readline (tab-completion, history)
try:
import readline
except ImportError:
print(red + "Module 'readline' not available. Skipping user customizations." + reset)
return
import rlcompleter
import atexit
from pwd import getpwall
from os.path import isfile, isdir, expanduser, \
join as joinpath, split as splitpath, sep as pathsep
default_history_file = '~/.pythonhist'
majver = sys.version_info[0]
# Both BPython and Django shell change the nature of the __builtins__
# object. This hack workarounds that:
def builtin_setattr(attr, value):
if hasattr(__builtins__, '__dict__'):
setattr(__builtins__, attr, value)
else:
__builtins__[attr] = value
def builtin_getattr(attr):
if hasattr(__builtins__, '__dict__'):
return getattr(__builtins__, attr)
else:
return __builtins__[attr]
# My own "six" library, where I define the following stubs:
# * myrange for xrange() (python2) / range() (python3)
# * exec_stub for exec()
# * iteritems for dict.iteritems() (python2) / list(dict.items()) (python3)
# I could have done "from six import iteritems" and such instead of this
if majver == 2:
myrange = xrange
def exec_stub(textcode, globalz=None, localz=None):
# the parenthesis make it valid python3 syntax, do nothing at all
exec (textcode) in globalz, localz
def iteritems(d):
return d.iteritems()
elif majver == 3:
myrange = range
# def exec_stub(textcode, globalz=None, localz=None):
# # the "in" & "," make it valid python2 syntax, do nothing useful
# exec(textcode, globalz, localz) in globalz #, localz
# the three previous lines work, but this is better
exec_stub = builtin_getattr('exec')
def iteritems(d):
return list(d.items())
# AUXILIARY CLASSES
# History management
class History:
set_length = readline.set_history_length
get_length = readline.get_history_length
get_current_length = readline.get_current_history_length
get_item = readline.get_history_item
write = readline.write_history_file
def __init__(self, path=default_history_file, length=500):
self.path = path
self.reload(path)
self.set_length(length)
def __exit__(self):
print("Saving history (%s)..." % self.path)
self.write(expanduser(self.path))
def __repr__(self):
"""print out current history information"""
# length = self.get_current_length()
# command = self.get_item(length)
# if command == 'history':
# return "\n".join(self.get_item(i)
# for i in myrange(1, length+1))
# else:
# return '<%s instance>' % str(self.__class__)
return '<%s instance>' % str(self.__class__)
def __call__(self, pos=None, end=None):
"""print out current history information with line number"""
if not pos:
pos = 1
elif not end:
end = pos
for i, item in self.iterator(pos, end, enumerate_it=True):
print('%i:\t%s' % (i, item))
def iterator(self, pos, end, enumerate_it=False):
length = self.get_current_length()
if not pos:
pos = 1
if not end:
end = length
pos = min(pos, length)
if pos < 0:
pos = max(1, pos + length + 1)
end = min(end, length)
if end < 0:
end = max(1, end + length + 1)
if enumerate_it:
return ((i, self.get_item(i)) for i in myrange(pos, end + 1))
else:
return (self.get_item(i) for i in myrange(pos, end + 1))
def reload(self, path=""):
"""clear the current history and reload it from saved"""
readline.clear_history()
if isfile(path):
self.path = path
readline.read_history_file(expanduser(self.path))
def save(self, filename, pos=None, end=None):
"""write history number from pos to end into filename file"""
with open(filename, 'w') as f:
for item in self.iterator(pos, end):
f.write(item)
f.write('\n')
def execute(self, pos, end=None):
"""execute history number from pos to end"""
if not end:
end = pos
commands = []
for item in self.iterator(pos, end):
commands.append(item)
readline.add_history(item)
exec_stub("\n".join(commands), globals())
# comment the previous two lines and uncomment those below
# if you prefer to re-add to history just the commands that
# executed without problems
# try:
# exec_stub("\n".join(commands), globals())
# except:
# raise
# else:
# for item in commands:
# readline.add_history(cmdlist)
# Activate completion and make it smarter
class Irlcompleter(rlcompleter.Completer):
"""
This class enables the insertion of "indentation" if there's no text
for completion.
The default "indentation" is four spaces. You can initialize with '\t'
as the tab if you wish to use a genuine tab.
Also, compared to the default rlcompleter, this one performs some
additional useful things, like file completion for string constants
and addition of some decorations to keywords (namely, closing
parenthesis, and whatever you've defined in dict_keywords_postfix --
spaces, colons, etc.)
"""
def __init__(
self,
indent_str=' ',
delims=readline.get_completer_delims(),
binds=('tab: complete', ),
dict_keywords_postfix={" ": ["import", "from"], },
add_closing_parenthesis=True
):
rlcompleter.Completer.__init__(self, namespace=globals())
readline.set_completer_delims(delims)
self.indent_str_list = [indent_str, None]
for bind in binds:
readline.parse_and_bind(bind)
self.dict_keywords_postfix = dict_keywords_postfix
self.add_closing_parenthesis = add_closing_parenthesis
def complete(self, text, state):
line = readline.get_line_buffer()
stripped_line = line.lstrip()
# libraries
if stripped_line.startswith('import '):
value = self.complete_libs(text, state)
elif stripped_line.startswith('from '):
pos = readline.get_begidx()
# end = readline.get_endidx()
if line[:pos].strip() == 'from':
value = self.complete_libs(text, state) + " "
elif state == 0 and line.find(' import ') == -1:
value = 'import '
else:
# Here we could do module introspection (ugh)
value = None
# indentation, files and keywords/identifiers
elif text == '':
value = self.indent_str_list[state]
elif text[0] in ('"', "'"):
value = self.complete_files(text, state)
else:
value = self.complete_keywords(text, state)
return value
def complete_keywords(self, text, state):
txt = rlcompleter.Completer.complete(self, text, state)
if txt is None:
return None
if txt.endswith('('):
if self.add_closing_parenthesis:
return txt + ')'
else:
return txt
for postfix, words in iteritems(self.dict_keywords_postfix):
if txt in words:
return txt + postfix
return txt
def complete_files(self, text, state):
str_delim = text[0]
path = text[1:]
if path.startswith("~/"):
path = expanduser("~/") + path[2:]
elif path.startswith("~"):
i = path.find(pathsep)
if i > 0:
path = expanduser(path[:i]) + path[i:]
else:
return [
str_delim + "~" + i[0] + pathsep
for i in getpwall()
if i[0].startswith(path[1:])
][state]
dir, fname = splitpath(path)
if not dir:
dir = os.curdir
return [
str_delim + joinpath(dir, i)
for i in os.listdir(dir)
if i.startswith(fname)
][state]
def complete_libs(self, text, state):
libs = {}
for i in sys.path:
try:
if i == '':
i = os.curdir
files = os.listdir(i)
for j in files:
filename = joinpath(i, j)
if isfile(filename):
for s in [".py", ".pyc", ".so"]:
if j.endswith(s):
j = j[:-len(s)]
pos = j.find(".")
if pos > 0:
j = j[:pos]
libs[j] = None
break
elif isdir(filename):
for s in ["__init__.py", "__init__.pyc"]:
if isfile(joinpath(filename, s)):
libs[j] = None
except OSError:
pass
for j in sys.builtin_module_names:
libs[j] = None
libs = sorted(j for j in libs.keys() if j.startswith(text))
return libs[state]
# DEFINITIONS:
# history file path and length
history_length = 1000
history_path = os.getenv("PYTHON_HISTORY_FILE", default_history_file)
# bindings for readline (assign completion key, etc.)
# readline_binds = (
# 'tab: tab_complete',
# '"\C-o": operate-and-get-next', # exists in bash but not in readline
# )
# completion delimiters
# we erase ", ', ~ and / so file completion works
# readline_delims = ' \t\n`!@#$%^&*()-=+[{]}\\|;:,<>?'
readline_delims = readline.get_completer_delims()\
.replace("~", "", 1)\
.replace("/", "", 1)\
.replace("'", "", 1)\
.replace('"', '', 1)
# dictionary of keywords to be postfixed by a string
dict_keywords_postfix = {
":": ["else", "try", "finally", ],
" ": ["import", "from", "or", "and", "not", "if", "elif", ],
" ():": ["def", ] # "class", ]
}
# DO IT
completer = Irlcompleter(delims=readline_delims, # binds=readline_binds,
dict_keywords_postfix=dict_keywords_postfix)
readline.set_completer(completer.complete)
if not os.access(history_path, os.F_OK):
print(green + 'History file %s does not exist. Creating it...' % history_path + reset)
with open(history_path, 'w') as f:
pass
elif not os.access(history_path, os.R_OK|os.W_OK):
print(red + 'History file %s has wrong permissions!' % history_path + reset)
history = History(history_path, history_length)
#
# Hack: Implementation of bash-like "operate-and-get-next" (Ctrl-o)
#
try:
# We'll hook the C functions that we need from the underlying
# libreadline implementation that aren't exposed by the readline
# python module.
from ctypes import CDLL, CFUNCTYPE, c_int
librl = CDLL(readline.__file__)
rl_callback = CFUNCTYPE(c_int, c_int, c_int)
rl_int_void = CFUNCTYPE(c_int)
readline.add_defun = librl.rl_add_defun # didn't bother to define args
readline.accept_line = rl_callback(librl.rl_newline)
readline.previous_history = rl_callback(librl.rl_get_previous_history)
readline.where_history = rl_int_void(librl.where_history)
def pre_input_hook_factory(offset, char):
def rewind_history_pre_input_hook():
# Uninstall this hook, rewind history and redisplay
readline.set_pre_input_hook(None)
result = readline.previous_history(offset, char)
readline.redisplay()
return result
return rewind_history_pre_input_hook
@rl_callback
def operate_and_get_next(count, char):
current_line = readline.where_history()
offset = readline.get_current_history_length() - current_line
# Accept the current line and set the hook to rewind history
result = readline.accept_line(1, char)
readline.set_pre_input_hook(pre_input_hook_factory(offset, char))
return result
# Hook our function to Ctrl-o, and hold a reference to it to avoid GC
readline.add_defun('operate-and-get-next', operate_and_get_next, ord("O") & 0x1f)
history._readline_functions = [operate_and_get_next]
except (ImportError, OSError, AttributeError) as e:
print(red + """
Couldn't either bridge the needed methods from binary 'readline'
or properly install our implementation of 'operate-and-get-next'.
Skipping the hack. Underlying error:
""" + reset + repr(e))
builtin_setattr('history', history)
atexit.register(history.__exit__)
# run the initialization and clean up the environment afterwards
init()
del init
|
0xf4/pythonrc
|
pythonrc.py
|
Python
|
mit
| 19,310 | 0.000414 |
#!/usr/bin/python
"""
@package nilib
@file nicl.py
@brief Basic command line client for NI names, make 'em and check 'em
@version $Revision: 0.04 $ $Author: elwynd $
@version Copyright (C) 2012 Trinity College Dublin
This is an adjunct to the NI URI library developed as
part of the SAIL project. (http://sail-project.eu)
Specification(s) - note, versions may change
- http://tools.ietf.org/html/draft-farrell-decade-ni-10
- http://tools.ietf.org/html/draft-hallambaker-decade-ni-params-03
- http://tools.ietf.org/html/draft-kutscher-icnrg-netinf-proto-00
Copyright 2012 Trinity College Dublin
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================================================
@code
Revision History
================
Version Date Author Notes
0.4 16/06/2012 Elwyn Davies Completed revision history and ref number.
0.3 12/10/2012 Elwyn Davies Renamed main routine for convenience with
setuputils in nilib package.
0.2 12/10/2012 Elwyn Davies Updated coments and specification refs.
0.1 01/06/2012 Elwyn Davies Updated to provide -w, -m and -b and
cope with nih: scheme.
0.0 12/02/2012 Elwyn Davies Created for NetInf codsprint.
@endcode
"""
import sys
import base64
from optparse import OptionParser
from ni import ni_errs, ni_errs_txt, NIname, NIproc
def py_nicl():
"""
@brief Command line program to generate and validate digests in ni: URLs.
Uses NIproc global instance of NI operations class
Run:
> nicl.py --help
to see usage and options.
"""
# Options parsing and verification stuff
usage = "%prog [-g|-w|-v] -n <name> -f <pathname of content file> [-V]\n"
usage = usage + " %prog -m -n <name> [-V]\n"
usage = usage + " %prog -b -s <suite_number> -f <pathname of content file> [-V]\n"
usage = usage + " The name can be either an ni: or nih: scheme URI\n"
usage = usage + " Return code: success 0, failure non-zero (-V for more info)\n"
usage = usage + " Available hashalg (suite number) options:\n"
usage = usage + " %s" % NIname.list_algs()
parser = OptionParser(usage)
parser.add_option("-g", "--generate", default=False,
action="store_true", dest="generate",
help="Generate hash based on content file, " + \
"and output name with encoded hash after the hashalg string")
parser.add_option("-w", "--well-known", default=False,
action="store_true", dest="well_known",
help="Generate hash based on content file, " + \
"and output name with encoded hash in the .well_known URL " + \
"after the hashalg string. Applies to ni: scheme only.")
parser.add_option("-v", "--verify", default=False,
action="store_true", dest="verify",
help="Verify hash in name is correct for content file")
parser.add_option("-m", "--map", default=False,
action="store_true", dest="map_wkn",
help="Maps from an ni: name to a .well-known URL")
parser.add_option("-b", "--binary", default=False,
action="store_true", dest="bin",
help="Outputs the name in binary format for a given suite number")
parser.add_option("-V", "--verbose", default=False,
action="store_true", dest="verbose",
help="Be more long winded.")
parser.add_option("-n", "--ni-name", dest="ni_name",
type="string",
help="The ni name template for (-g) or ni name matching (-v) content file.")
parser.add_option("-f", "--file", dest="file_name",
type="string",
help="File with content data named by ni name.")
parser.add_option("-s", "--suite-no", dest="suite_no",
type="int",
help="Suite number for hash algorithm to use.")
(opts, args) = parser.parse_args()
if not (opts.generate or opts.well_known or opts.verify or
opts.map_wkn or opts.bin ):
parser.error( "Must specify one of -g/--generate, -w/--well-known, -v/--verify, -m/--map or -b/--binary.")
if opts.generate or opts.well_known or opts.verify:
if (opts.ni_name == None) or (opts.file_name == None):
parser.error("Must specify both name and content file name for -g, -w or -v.")
if opts.map_wkn:
if (opts.ni_name == None):
parser.error("Must specify ni name for -m.")
if opts.bin:
if (opts.suite_no == None) or (opts.file_name == None):
parser.error("Must specify both suite number and content file name for -b.")
if len(args) != 0:
parser.error("Too many or unrecognised arguments specified")
# Execute requested action
if opts.generate:
n = NIname(opts.ni_name)
ret = NIproc.makenif(n, opts.file_name)
if ret == ni_errs.niSUCCESS:
if opts.verbose:
print("Name generated successfully.")
print "%s" % n.get_url()
sys.exit(0)
if opts.verbose:
print "Name could not be successfully generated."
elif opts.well_known:
n = NIname(opts.ni_name)
if n.get_scheme() == "nih":
if opts.verbose:
print "Only applicable to ni: scheme names."
sys.exit(1)
ret = NIproc.makenif(n, opts.file_name)
if ret == ni_errs.niSUCCESS:
if opts.verbose:
print("Name generated successfully.")
print "%s" % n.get_wku_transform()
sys.exit(0)
if opts.verbose:
print "Name could not be successfully generated"
elif opts.verify:
n = NIname(opts.ni_name)
ret = NIproc.checknif(n, opts.file_name)
if ret == ni_errs.niSUCCESS:
if opts.verbose:
print("Name matches content file.")
print "%s" % n.get_url()
sys.exit(0)
if opts.verbose:
print "Check of name against content failed."
elif opts.map_wkn:
n = NIname(opts.ni_name)
ret = n.validate_ni_url(has_params = True)
if ret == ni_errs.niSUCCESS:
if n.get_scheme() == "nih":
if opts.verbose:
print "Only applicable to ni: scheme names."
sys.exit(1)
if opts.verbose:
print("Name validated successfully.")
print "%s" % n.get_wku_transform()
sys.exit(0)
else:
if opts.verbose:
print "Name could not be successfully validated."
elif opts.bin:
(ret, bin_name) = NIproc.makebnf(opts.suite_no, opts.file_name)
if ret == ni_errs.niSUCCESS:
if opts.verbose:
print("Name generated successfully.")
print base64.b16encode(str(bin_name))
sys.exit(0)
else:
if opts.verbose:
print "Name could not be successfully generated."
else:
print"Should not have happened"
sys.exit(2)
# Print appropriate error message
if opts.verbose:
print "Error: %s" % ni_errs_txt[ret]
sys.exit(1)
sys.exit(0)
#-------------------------------------------------------------------------------
if __name__ == "__main__":
py_nicl()
|
skunkwerks/netinf
|
python/nilib/nicl.py
|
Python
|
apache-2.0
| 8,160 | 0.003922 |
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
__author__ = """Co-Pierre Georg (co-pierre.georg@uct.ac.za)"""
import sys
from src.paralleltools import Parallel
#-------------------------------------------------------------------------
#
# conftools.py is a simple module to manage .xml configuration files
#
#-------------------------------------------------------------------------
if __name__ == '__main__':
"""
VARIABLES
"""
args = sys.argv
config_file_name = args[1]
"""
CODE
"""
parallel = Parallel()
parallel.create_config_files(config_file_name)
|
cogeorg/econlib
|
test_paralleltools.py
|
Python
|
gpl-3.0
| 601 | 0.003328 |
"""
Summary:
Container and main interface for accessing the Tuflow model and a class
for containing the main tuflow model files (Tcf, Tgc, etc).
There are several other classes in here that are used to determine the
order of the files in the model and key words for reading in the files.
Author:
Duncan Runnacles
Created:
01 Apr 2016
Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
from __future__ import unicode_literals
from itertools import chain
from ship.tuflow.tuflowfilepart import TuflowFile, TuflowKeyValue, TuflowUserVariable, TuflowModelVariable
from ship.tuflow import FILEPART_TYPES as fpt
from ship.utils import utilfunctions as uf
import logging
logger = logging.getLogger(__name__)
"""logging references with a __name__ set to this module."""
class TuflowModel(object):
"""Container for the entire loaded tuflow model.
"""
def __init__(self, root):
"""Initialise constants and dictionaries.
"""
self.control_files = {}
"""Tuflow Control File objects.
All types of Tuflow Control file are stored here under the type header.
Types are: TCF, TGC, TBC, ECF, TEF.
TCF is slightly different to the others as it contains an additional
member variable 'main_file_hash' to identify the main tcf file that
was called to load the model.
"""
self._root = ''
"""The current directory path used to reach the run files in the model"""
self.missing_model_files = []
"""Contains any tcf, tgs, etc files that could not be loaded."""
self.bc_event = {}
"""Contains the currently acitve BC Event variables."""
self.user_variables = None
"""Class containing the scenario/event/variable keys and values."""
@property
def root(self):
return self._root
@root.setter
def root(self, value):
self._root = value
self.updateRoot(value)
def checkPathsExist(self):
"""Test that all of the filepaths in the TuflowModel exist."""
failed = []
for file_type, file in self.control_files.items():
failed.extend(file.checkPathsExist())
return failed
def updateRoot(self, root):
"""Update the root variable in all TuflowFile's in the model.
The root variable (TuflowModel.root) is the directory that the main
.tcf file is in. This is used to define the location of all other files
which are usually referenced relative to each other.
Note:
This method will be called automatically when setting the
TuflowModel.root variable.
Args:
root(str): the new root to set.
"""
for c in self.control_files.values():
c.updateRoot(root)
def customPartSearch(self, control_callback, tuflow_callback=None,
include_unknown=False):
"""Return TuflowPart's based on the return value of the callbacks.
control_callback will be used as an argument in each of
self.control_files' customPartSearch() methods. The tuflow_callback
will be called on the combined generators returned from that method.
See Also:
ControlFile.customPartSearch
Continuing the example in the ControlFile.customPartSearch method. This
time the additinal tuflow_callback function is defined as well.
callback_func must accept a TuflowPart and return a tuple of:
keep-status and the return value. For example::
# This is the callback_func that we test the TuflowPart. It is
# defined in your script
def callback_func(part):
# In this case we check for GIS parts and return a tuple of:
# - bool(keep-status): True if it is a GIS filepart_type
# - tuple: filename and parent.model_type. This can be
# whatever you want though
if part.filepart_type == fpt.GIS:
return True, (part.filename, part.associates.parent.model_type)
# Any TuflowPart's that you don't want included must return
# a tuple of (False, None)
else:
return False, None
# Here we define a function to run after the generators are returned
# from callback_func. In the funcion above the return type is a
# tuple, so we accept that as the arg in this function, but it will
# be whatever you return from callback_func above.
# This function checks to see if there are any duplicate filename's.
# Note that it must return the same tuple as the other callback.
# i.e. keep-status, result
def tuflow_callback(part_tuple):
found = []
if part_tuple[0] in found:
return False, None
else:
return True, part_tuple[0]
# Both callback's given this time
results = tuflow.customPartSearch(callback,
tuflow_callback=tuflowCallback)
            # You can now iterate the results
for r in results:
print (str(r))
Args:
callback_func(func): a function to run for each TuflowPart in
this ControlFile's PartHolder.
include_unknown=False(bool): If False any UnknownPart's will be
                ignored. If set to True it is the responsibility of the
callback_func to check for this and deal with it.
Return:
generator - containing the results of the search.
"""
gens = []
for c in self.control_files.values():
gens.append(
c.customPartSearch(control_callback, include_unknown)
)
        all_gens = chain(gens)
for a in all_gens:
for val in a:
if tuflow_callback:
take, value = tuflow_callback(val)
if take:
                        yield [value]
else:
yield [val]
def removeTcfModelFile(self, model_file):
"""Remove an existing ModelFile from 'TCF' and update ControlFile.
Note:
You can call this function directly if you want to, but it is also
hooked into a callback in the TCF ControlFile. This means that when
you use the standard ControlFile add/remove/replaceControlFile()
methods these will be called automatically.
Args:
model_files(ModelFile): the ModelFile being removed.
"""
if not model_file in self.control_files[model_file.model_type].control_files:
raise AttributeError("model_file doesn't exists in %s control_files" % model_file.model_type)
self.control_files[model_file.model_type].removeControlFile(model_file)
self.control_files['TCF'].parts.remove(model_file)
def replaceTcfModelFile(self, model_file, control_file, replace_file):
"""Replace an existing ModelFile in 'TCF' and update ControlFile.
Note:
You can call this function directly if you want to, but it is also
hooked into a callback in the TCF ControlFile. This means that when
you use the standard ControlFile add/remove/replaceControlFile()
methods these will be called automatically.
Args:
model_file(ModelFile): the replacement TuflowPart.
control_file(ControlFile): containing the contents to replace the
existing ControlFile.
replace_file(ModelFile): the TuflowPart to be replaced.
"""
if model_file in self.control_files[model_file.model_type].control_files:
raise AttributeError('model_file already exists in this ControlFile')
self.control_files[replace_file.model_type].replaceControlFile(
model_file, control_file, replace_file)
self.control_files['TCF'].parts.replace(model_file, replace_file)
def addTcfModelFile(self, model_file, control_file, **kwargs):
"""Add a new ModelFile instance to a TCF type ControlFile.
Note:
You can call this function directly if you want to, but it is also
hooked into a callback in the TCF ControlFile. This means that when
you use the standard ControlFile add/remove/replaceControlFile()
methods these will be called automatically.
**kwargs:
after(TuflowPart): the part to add the new ModelFile after.
before(TuflowPart): the part to add the new ModelFile before.
Either after or before kwargs must be given. If both are provided after
will take precedence.
Args:
            model_file(ModelFile): the new ModelFile TuflowPart to add.
            control_file(ControlFile): the ControlFile containing the contents
                of the new ModelFile.
"""
if not 'after' in kwargs.keys() and not 'before' in kwargs.keys():
raise AttributeError("Either 'before' or 'after' TuflowPart kwarg must be given")
if model_file in self.control_files[model_file.model_type].control_files:
raise AttributeError('model_file already exists in this ControlFile')
self.control_files[model_file.model_type].addControlFile(
model_file, control_file, **kwargs)
self.control_files['TCF'].parts.add(model_file, **kwargs)
# class TuflowUtils(object):
# """Utility functions for dealing with TuflowModel outputs."""
#
# def __init__(self):
# pass
#
# @staticmethod
# def resultsByParent(results):
# """
# """
class UserVariables(object):
"""Container for all user defined variables.
    Includes variables set in the control files with 'Set somevar ==' and the
scenario and event variables.
Note:
Only the currently active scenario and event variables will be stored
in this class.
"""
def __init__(self):
self.variable = {}
self.scenario = {}
self.event = {}
self._names = []
self.has_cmd_args = False
def add(self, filepart, vtype=None):
"""Add a new variables to the class.
Args:
filepart(TuflowModelVariables or TuflowUserVariable):
Raises:
TypeError - if filepart is not a TuflowModelVariable or TuflowUserVariable.
ValueError - if filepart already exists.
"""
if filepart._variable_name in self._names:
raise ValueError('variable already exists with that name - use replace instead')
if isinstance(filepart, TuflowUserVariable):
self.variable[filepart.variable_name] = filepart
self._names.append(filepart.variable_name)
elif isinstance(filepart, TuflowModelVariable):
if filepart._variable_type == 'scenario':
if filepart._variable_name == 's1' or filepart._variable_name == 's':
if 's' in self._names or 's1' in self._names:
raise ValueError("variable already exists with that " +
"name - use replace instead\n" +
"note 's' and 's1' are treated the same.")
self.scenario[filepart._variable_name] = filepart
self.variable[filepart._variable_name] = filepart
self._names.append(filepart.variable_name)
else:
if filepart._variable_name == 'e1' or filepart._variable_name == 'e':
if 'e' in self._names or 'e1' in self._names:
raise ValueError("variable already exists with that " +
"name - use replace instead\n" +
"note 'e' and 'e1' are treated the same.")
self.event[filepart._variable_name] = filepart
self.variable[filepart._variable_name] = filepart
self._names.append(filepart.variable_name)
else:
raise TypeError('filepart must be of type TuflowUserVariable or TuflowModelVariable')
def replace(self, filepart):
"""Replace an existing variable.
Args:
            filepart(TuflowModelVariable or TuflowUserVariable):
Raises:
TypeError - if filepart is not a TuflowModelVariable or TuflowUserVariable.
ValueError - if filepart doesn't already exist.
"""
# Make sure it actually already exists.
# s & s1 and e & e1 are treated as the same name - same as tuflow
temp_name = filepart._variable_name
if temp_name == 's' or temp_name == 's1':
if not 's' in self._names and not 's1' in self._names:
raise ValueError("filepart doesn't seem to exist in UserVariables.")
elif temp_name == 'e' or temp_name == 'e1':
if not 'e' in self._names and not 'e1' in self._names:
raise ValueError("filepart doesn't seem to exist in UserVariables.")
elif not filepart._variable_name in self._names:
raise ValueError("filepart doesn't seem to exist in UserVariables.")
        # Delete the old one (including its entry in _names) and call add()
        # with the new one
        if temp_name == 's' or temp_name == 's1':
            if 's' in self.scenario.keys():
                del self.scenario['s']
                del self.variable['s']
                self._names.remove('s')
            if 's1' in self.scenario.keys():
                del self.scenario['s1']
                del self.variable['s1']
                self._names.remove('s1')
            self.add(filepart, 'scenario')
        elif temp_name == 'e' or temp_name == 'e1':
            if 'e' in self.event.keys():
                del self.event['e']
                del self.variable['e']
                self._names.remove('e')
            if 'e1' in self.event.keys():
                del self.event['e1']
                del self.variable['e1']
                self._names.remove('e1')
            self.add(filepart, 'event')
        else:
            del self.variable[temp_name]
            self._names.remove(temp_name)
            self.add(filepart)
def variablesToDict(self):
"""Get the values of the variables.
        Note that, like tuflow, scenario and event values will be included in
        the variables dict returned.
        {'name1': var1, 'name2': var2, 'nameN': varN}
Return:
dict - with variables names as key and values as values.
"""
out = {}
for vkey, vval in self.variable.items():
out[vkey] = vval.variable
return out
def seValsToDict(self):
"""Get the values of the scenario and event variables.
Returns the currently active scenario and event values only - not the
placeholder keys - in a dictionary in the format::
{'scenario': [val1, val2, valN], 'event': [val1, val2, valN]}
Return:
dict - of scenario and event values.
"""
scenario = [s.variable for s in self.scenario.values()]
event = [e.variable for e in self.event.values()]
return {'scenario': scenario, 'event': event}
def remove(self, key):
"""Remove the variable stored at the given key.
Args:
key(str): key for either the scenario, event, or variables dict.
"""
        if key in self.scenario.keys():
            if self.scenario[key]._variable_name in self._names:
                self._names.remove(self.scenario[key]._variable_name)
            del self.scenario[key]
        if key in self.event.keys():
            if self.event[key]._variable_name in self._names:
                self._names.remove(self.event[key]._variable_name)
            del self.event[key]
        if key in self.variable.keys():
            if self.variable[key]._variable_name in self._names:
                self._names.remove(self.variable[key]._variable_name)
            del self.variable[key]
def get(self, key, vtype=None):
"""Return the TuflowPart at the given key.
Args:
key(str): the key associated with the required TuflowPart.
vtype=None(str): the type of part to return. If None it will return
a 'variable' type. Other options are 'scenario' and 'event'.
Return:
TuflowPart - TuflowModelVariable or TuflowUserVariable type.
"""
if vtype == 'scenario':
if not key in self.scenario.keys():
raise KeyError('key %s is not in scenario keys' % key)
return self.scenario[key]
elif vtype == 'event':
if not key in self.event.keys():
raise KeyError('key %s is not in event keys' % key)
return self.event[key]
else:
if not key in self.variable.keys():
raise KeyError('key %s is not in variable keys' % key)
return self.variable[key]
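# A minimal usage sketch (added for illustration; the variable objects shown
# are assumptions, not values defined in this module):
#
#   user_vars = UserVariables()
#   user_vars.add(some_user_var)                 # a TuflowUserVariable
#   user_vars.add(some_scenario_var)             # a TuflowModelVariable ('s1')
#   all_vals = user_vars.variablesToDict()       # {'name': value, ...}
#   se_vals = user_vars.seValsToDict()           # {'scenario': [...], 'event': [...]}
#   s1_part = user_vars.get('s1', vtype='scenario')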
class TuflowFilepartTypes(object):
"""Contains key words from Tuflow files for lookup.
This acts as a lookup table for the TuflowLoader class more than anything
else. It is kept here as that seems to be most sensible.
Contains methods for identifying whether a command given to it is known
to the library and what type it is. i.e. what UNIT_CATEGORY it falls into.
"""
def __init__(self):
"""Initialise the categories and known keywords"""
self.ambiguous = {
'WRITE CHECK FILES': [
['WRITE CHECK FILES INCLUDE', fpt.VARIABLE],
['WRITE CHECK FILES EXCLUDE', fpt.VARIABLE]
],
# 'WRITE CHECK FILES INCLUDE': ['WRITE CHECK FILES', fpt.RESULT],
# 'WRITE CHECK FILES EXCLUDE': ['WRITE CHECK FILES', fpt.RESULT],
'DEFINE EVENT': [['DEFINE OUTPUT ZONE', fpt.SECTION_LOGIC]],
'DEFINE OUTPUT ZONE': [['DEFINE EVENT', fpt.EVENT_LOGIC]],
# 'START 1D DOMAIN': ['START 2D DOMAIN', fpt.SECTION_LOGIC],
# 'START 2D DOMAIN': ['START 1D DOMAIN', fpt.SECTION_LOGIC],
}
self.ambiguous_keys = self.ambiguous.keys()
self.types = {}
self.types[fpt.MODEL] = [
'GEOMETRY CONTROL FILE', 'BC CONTROL FILE',
'READ GEOMETRY CONTROL FILE', 'READ BC CONTROL FILE',
'READ FILE', 'ESTRY CONTROL FILE',
'EVENT FILE'
]
self.types[fpt.RESULT] = [
'OUTPUT FOLDER', 'WRITE CHECK FILES', 'LOG FOLDER'
]
self.types[fpt.GIS] = [
'READ MI', 'READ GIS', 'READ GRID', 'SHP PROJECTION',
'MI PROJECTION'
]
self.types[fpt.DATA] = ['READ MATERIALS FILE', 'BC DATABASE']
self.types[fpt.VARIABLE] = [
'START TIME', 'END TIME', 'TIMESTEP', 'SET IWL',
'MAP OUTPUT INTERVAL', 'MAP OUTPUT DATA TYPES', 'CELL WET/DRY DEPTH',
'CELL SIDE WET/DRY DEPTH', 'SET IWL', 'TIME SERIES OUTPUT INTERVAL',
'SCREEN/LOG DISPLAY INTERVAL', 'CSV TIME', 'START OUTPUT',
'OUTPUT INTERVAL', 'STRUCTURE LOSSES', 'WLL APPROACH',
'WLL ADJUST XS WIDTH', 'WLL ADDITIONAL POINTS',
'DEPTH LIMIT FACTOR', 'CELL SIZE', 'SET CODE', 'GRID SIZE (X,Y)',
'SET ZPTS', 'SET MAT', 'MASS BALANCE OUTPUT', 'GIS FORMAT',
'MAP OUTPUT FORMATS', 'END MAT OUTPUT', 'ASC START MAP OUTPUT',
'ASC END MAP OUTPUT', 'XMDF MAP OUTPUT DATA TYPES',
'WRITE PO ONLINE', 'ASC MAP OUTPUT DATA TYPES',
'WRITE CHECK FILES INCLUDE', 'WRITE CHECK FILES EXCLUDE',
'STORE MAXIMUMS AND MINIMUMS'
]
self.types[fpt.IF_LOGIC] = [
'IF SCENARIO', 'ELSE IF SCENARIO', 'IF EVENT',
'ELSE IF EVENT', 'END IF', 'ELSE'
]
self.types[fpt.EVENT_LOGIC] = ['DEFINE EVENT', 'END DEFINE']
self.types[fpt.SECTION_LOGIC] = ['DEFINE OUTPUT ZONE', 'END DEFINE']
self.types[fpt.DOMAIN_LOGIC] = [
'START 1D DOMAIN', 'END 1D DOMAIN', 'START 2D DOMAIN',
'END 2D DOMAIN'
]
self.types[fpt.USER_VARIABLE] = ['SET VARIABLE']
self.types[fpt.EVENT_VARIABLE] = [
'BC EVENT TEXT', 'BC EVENT NAME',
'BC EVENT SOURCE',
]
self.types[fpt.MODEL_VARIABLE] = ['MODEL SCENARIOS', 'MODEL EVENTS', ]
def find(self, find_val, file_type='*'):
"""Checks if the given value is known or not.
        The given value doesn't have to be an exact match to a known keyword;
        it only has to start with one. This means that we don't need to
        know whether it is a 'command == something' or just 'command something'
        (like: 'Estry Control File Auto') at this point.
        This helps to avoid unnecessary repetition. i.e. many files are like:
'READ GIS' + another word. All of them are GIS type files so they all
get dealt with in the same way.
        In some edge cases there are commands that start the same. These are
        dealt with by a secondary check to see if the next character is '=' or
not.
Args:
            find_val (str): the value to attempt to find in the lookup table.
            file_type (int): Optional - reduce the lookup time by providing
                the type (category) to look for the value in. These are the
constants (MODEL, GIS, etc).
Returns:
Tuple (Bool, int) True if found. Int is the class constant
indicating what type the value was found under.
"""
find_val = find_val.upper()
if file_type == '*':
for key, part_type in self.types.items():
found = [i for i in part_type if find_val.startswith(i)]
if found:
retval = key
if found[0] in self.ambiguous_keys:
retval = self._checkAmbiguity(found[0], find_val, key)
return True, retval
return (False, None)
else:
found = [i for i in self.types[file_type] if find_val.startswith(i)]
if found:
return True, file_type
return (False, None)
def _checkAmbiguity(self, found, find_val, key):
"""Resolves any ambiguity in the keys."""
f = find_val.replace(' ', '')
f2 = found.replace(' ', '') + '='
if f.startswith(f2):
return key
else:
alternatives = self.ambiguous[found]
for i, a in enumerate(alternatives):
if find_val.startswith(a[0]):
return self.ambiguous[found][i][1]
return key
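# A minimal usage sketch for the lookup table above (added for illustration;
# the command strings are assumptions, the fpt constants are the ones imported
# by this module):
#
#   fpt_types = TuflowFilepartTypes()
#   found, part_type = fpt_types.find('READ GIS Z SHAPE')      # (True, fpt.GIS)
#   found, part_type = fpt_types.find('SOME UNKNOWN COMMAND')  # (False, None)
#   # Restricting the search to a single category is quicker:
#   found, part_type = fpt_types.find('BC DATABASE', fpt.DATA)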
|
duncan-r/SHIP
|
ship/tuflow/tuflowmodel.py
|
Python
|
mit
| 22,689 | 0.003261 |
import json
import unittest2
from google.appengine.ext import testbed
from consts.media_type import MediaType
from helpers.media_helper import MediaParser
from helpers.webcast_helper import WebcastParser
class TestMediaUrlParser(unittest2.TestCase):
def setUp(cls):
cls.testbed = testbed.Testbed()
cls.testbed.activate()
cls.testbed.init_urlfetch_stub()
def tearDown(cls):
cls.testbed.deactivate()
def test_youtube_parse(self):
yt_long = MediaParser.partial_media_dict_from_url("http://www.youtube.com/watch?v=I-IrVbsl_K8")
self.assertEqual(yt_long['media_type_enum'], MediaType.YOUTUBE_VIDEO)
self.assertEqual(yt_long['foreign_key'], "I-IrVbsl_K8")
yt_short = MediaParser.partial_media_dict_from_url("http://youtu.be/I-IrVbsl_K8")
self.assertEqual(yt_short['media_type_enum'], MediaType.YOUTUBE_VIDEO)
self.assertEqual(yt_short['foreign_key'], "I-IrVbsl_K8")
yt_from_playlist = MediaParser.partial_media_dict_from_url("https://www.youtube.com/watch?v=VP992UKFbko&index=1&list=PLZT9pIgNOV6ZE0EgstWeoRWGWT3uoaszm")
self.assertEqual(yt_from_playlist['media_type_enum'], MediaType.YOUTUBE_VIDEO)
self.assertEqual(yt_from_playlist['foreign_key'], 'VP992UKFbko')
# def test_cdphotothread_parsetest_cdphotothread_parse(self):
# cd = MediaParser.partial_media_dict_from_url(
# "https://www.chiefdelphi.com/media/photos/41999")
# self.assertEqual(cd['media_type_enum'], MediaType.CD_PHOTO_THREAD)
# self.assertEqual(cd['foreign_key'], "41999")
# self.assertTrue(cd['details_json'])
# details = json.loads(cd['details_json'])
# self.assertEqual(details['image_partial'], "a88/a880fa0d65c6b49ddb93323bc7d2e901_l.jpg")
def test_imgur_parse(self):
imgur_img = MediaParser.partial_media_dict_from_url("http://imgur.com/zYqWbBh")
self.assertEqual(imgur_img['media_type_enum'], MediaType.IMGUR)
self.assertEqual(imgur_img['foreign_key'], "zYqWbBh")
imgur_img = MediaParser.partial_media_dict_from_url("http://i.imgur.com/zYqWbBh.png")
self.assertEqual(imgur_img['media_type_enum'], MediaType.IMGUR)
self.assertEqual(imgur_img['foreign_key'], "zYqWbBh")
self.assertEqual(MediaParser.partial_media_dict_from_url("http://imgur.com/r/aww"), None)
self.assertEqual(MediaParser.partial_media_dict_from_url("http://imgur.com/a/album"), None)
def test_fb_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("http://facebook.com/theuberbots")
self.assertEqual(result['media_type_enum'], MediaType.FACEBOOK_PROFILE)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], 'theuberbots')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.FACEBOOK_PROFILE])
self.assertEqual(result['profile_url'], 'https://www.facebook.com/theuberbots')
def test_twitter_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("https://twitter.com/team1124")
self.assertEqual(result['media_type_enum'], MediaType.TWITTER_PROFILE)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], 'team1124')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.TWITTER_PROFILE])
self.assertEqual(result['profile_url'], 'https://twitter.com/team1124')
def test_youtube_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("https://www.youtube.com/Uberbots1124")
self.assertEqual(result['media_type_enum'], MediaType.YOUTUBE_CHANNEL)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], 'uberbots1124')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.YOUTUBE_CHANNEL])
self.assertEqual(result['profile_url'], 'https://www.youtube.com/uberbots1124')
short_result = MediaParser.partial_media_dict_from_url("https://www.youtube.com/Uberbots1124")
self.assertEqual(short_result['media_type_enum'], MediaType.YOUTUBE_CHANNEL)
self.assertEqual(short_result['is_social'], True)
self.assertEqual(short_result['foreign_key'], 'uberbots1124')
self.assertEqual(short_result['site_name'], MediaType.type_names[MediaType.YOUTUBE_CHANNEL])
self.assertEqual(short_result['profile_url'], 'https://www.youtube.com/uberbots1124')
gapps_result = MediaParser.partial_media_dict_from_url("https://www.youtube.com/c/tnt3102org")
self.assertEqual(gapps_result['media_type_enum'], MediaType.YOUTUBE_CHANNEL)
self.assertEqual(gapps_result['is_social'], True)
self.assertEqual(gapps_result['foreign_key'], 'tnt3102org')
self.assertEqual(gapps_result['site_name'], MediaType.type_names[MediaType.YOUTUBE_CHANNEL])
self.assertEqual(gapps_result['profile_url'], 'https://www.youtube.com/tnt3102org')
def test_github_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("https://github.com/frc1124")
self.assertEqual(result['media_type_enum'], MediaType.GITHUB_PROFILE)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], 'frc1124')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.GITHUB_PROFILE])
self.assertEqual(result['profile_url'], 'https://github.com/frc1124')
def test_instagram_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("https://www.instagram.com/4hteamneutrino")
self.assertEqual(result['media_type_enum'], MediaType.INSTAGRAM_PROFILE)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], '4hteamneutrino')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.INSTAGRAM_PROFILE])
self.assertEqual(result['profile_url'], 'https://www.instagram.com/4hteamneutrino')
def test_periscope_profile_parse(self):
result = MediaParser.partial_media_dict_from_url("https://www.periscope.tv/evolution2626")
self.assertEqual(result['media_type_enum'], MediaType.PERISCOPE_PROFILE)
self.assertEqual(result['is_social'], True)
self.assertEqual(result['foreign_key'], 'evolution2626')
self.assertEqual(result['site_name'], MediaType.type_names[MediaType.PERISCOPE_PROFILE])
self.assertEqual(result['profile_url'], 'https://www.periscope.tv/evolution2626')
def test_grabcad_link(self):
result = MediaParser.partial_media_dict_from_url("https://grabcad.com/library/2016-148-robowranglers-1")
self.assertEqual(result['media_type_enum'], MediaType.GRABCAD)
self.assertEqual(result['is_social'], False)
self.assertEqual(result['foreign_key'], '2016-148-robowranglers-1')
details = json.loads(result['details_json'])
self.assertEqual(details['model_name'], '2016 | 148 - Robowranglers')
self.assertEqual(details['model_description'], 'Renegade')
self.assertEqual(details['model_image'], 'https://d2t1xqejof9utc.cloudfront.net/screenshots/pics/bf832651cc688c27a78c224fbd07d9d7/card.jpg')
self.assertEqual(details['model_created'], '2016-09-19T11:52:23Z')
# 2020-12-31 zach - I'm disabling this test because 1) it's failing and 2) we shouldn't be hitting the network during unit tests
# def test_instagram_image(self):
# result = MediaParser.partial_media_dict_from_url("https://www.instagram.com/p/BUnZiriBYre/")
# self.assertEqual(result['media_type_enum'], MediaType.INSTAGRAM_IMAGE)
# self.assertEqual(result['foreign_key'], "BUnZiriBYre")
# details = json.loads(result['details_json'])
# self.assertEqual(details['title'], "FRC 195 @ 2017 Battlecry @ WPI")
# self.assertEqual(details['author_name'], '1stroboticsrocks')
# self.assertIsNotNone(details.get('thumbnail_url', None))
def test_unsupported_url_parse(self):
self.assertEqual(MediaParser.partial_media_dict_from_url("http://foo.bar"), None)
class TestWebcastUrlParser(unittest2.TestCase):
@classmethod
def setUpClass(cls):
cls.testbed = testbed.Testbed()
cls.testbed.activate()
cls.testbed.init_urlfetch_stub()
@classmethod
def tearDownClass(cls):
cls.testbed.deactivate()
def testTwitchUrl(self):
res = WebcastParser.webcast_dict_from_url("http://twitch.tv/frcgamesense")
self.assertIsNotNone(res)
self.assertEqual(res['type'], 'twitch')
self.assertEqual(res['channel'], 'frcgamesense')
unknown = WebcastParser.webcast_dict_from_url("http://twitch.tv/")
self.assertIsNone(unknown)
def testYouTubeUrl(self):
yt_long = WebcastParser.webcast_dict_from_url("http://www.youtube.com/watch?v=I-IrVbsl_K8")
self.assertIsNotNone(yt_long)
self.assertEqual(yt_long['type'], 'youtube')
self.assertEqual(yt_long['channel'], 'I-IrVbsl_K8')
yt_short = WebcastParser.webcast_dict_from_url("http://youtu.be/I-IrVbsl_K8")
self.assertIsNotNone(yt_short)
self.assertEqual(yt_short['type'], 'youtube')
self.assertEqual(yt_short['channel'], 'I-IrVbsl_K8')
bad_long = WebcastParser.webcast_dict_from_url('"http://www.youtube.com/')
self.assertIsNone(bad_long)
bad_short = WebcastParser.webcast_dict_from_url("http://youtu.be/")
self.assertIsNone(bad_short)
"""
No webcasts are ustream anymore anyway...
def testUstream(self):
res = WebcastParser.webcast_dict_from_url('http://www.ustream.tv/decoraheagles')
self.assertIsNotNone(res)
self.assertEqual(res['type'], 'ustream')
self.assertEqual(res['channel'], '3064708')
bad = WebcastParser.webcast_dict_from_url('http://ustream.tv/')
self.assertIsNone(bad)
"""
def testUnknownUrl(self):
bad = WebcastParser.webcast_dict_from_url("http://mywebsite.somewebcast")
self.assertIsNone(bad)
|
fangeugene/the-blue-alliance
|
tests/suggestions/test_media_url_parse.py
|
Python
|
mit
| 10,138 | 0.004636 |
"""
General utility functions and classes for Topographica that require numpy.
"""
import re
from numpy import sqrt,dot,arctan2,array2string,fmod,floor,array, \
unravel_index,concatenate,set_printoptions,divide,maximum,minimum
from numpy import ufunc
import param
# Ask numpy to print even relatively large arrays by default
set_printoptions(threshold=200*200)
def ufunc_script_repr(f,imports,prefix=None,settings=None):
"""
Return a runnable representation of the numpy ufunc f, and an
import statement for its module.
"""
# (could probably be generalized if required, because module is
# f.__class__.__module__)
imports.append('import numpy')
return 'numpy.'+f.__name__
from param import parameterized
parameterized.script_repr_reg[ufunc]=ufunc_script_repr
def L2norm(v):
"""
Return the L2 norm of the vector v.
"""
return sqrt(dot(v,v))
def divisive_normalization(weights):
"""Divisively normalize an array to sum to 1.0"""
s = weights.sum()
if s != 0:
factor = 1.0/s
weights *= factor
def add_border(matrix,width=1,value=0.0):
"""
Returns a new matrix consisting of the given matrix with a border
or margin of the given width filled with the given value.
"""
rows,cols = matrix.shape
hborder = array([ [value]*(cols+2*width) ]*width)
vborder = array([ [value]*width ] * rows)
temp = concatenate( (vborder,matrix,vborder), axis=1)
return concatenate( (hborder,temp,hborder) )
def arg(z):
"""
Return the complex argument (phase) of z.
    (The result is in radians.)
"""
z = z + complex(0,0) # so that arg(z) also works for real z
return arctan2(z.imag, z.real)
def octave_str(mat,name="mat",owner=""):
"""
Print the given Numpy matrix in Octave format, listing the given
matrix name and the object that owns it (if any).
"""
# This just prints the string version of the matrix and does search/replace
# to convert it; there may be a faster or easier way.
mstr=array2string(mat)
mstr=re.sub('\n','',mstr)
mstr=re.sub('[[]','',mstr)
mstr=re.sub('[]]','\n',mstr)
return ("# Created from %s %s\n# name: %s\n# type: matrix\n# rows: %s\n# columns: %s\n%s" %
(owner,name,name,mat.shape[0],mat.shape[1],mstr))
def octave_output(filename,mat,name="mat",owner=""):
"""Writes the given matrix to a new file of the given name, in Octave format."""
f = open(filename,'w')
f.write(octave_str(mat,name,owner))
f.close()
def centroid(array_2D):
"""Return the centroid (center of gravity) for a 2D array."""
rows,cols = array_2D.shape
rsum=0
csum=0
rmass_sum=0
cmass_sum=0
for r in xrange(rows):
row_sum = array_2D[r,:].sum()
rsum += r*row_sum
rmass_sum += row_sum
for c in xrange(cols):
col_sum = array_2D[:,c].sum()
csum += c*col_sum
cmass_sum += col_sum
row_centroid= rsum/rmass_sum
col_centroid= csum/cmass_sum
return row_centroid, col_centroid
def clip_lower(arr,lower_bound):
"""
In-place, one-sided version of numpy.clip().
i.e. numpy.clip(arr,a_min=lower_bound,out=arr) if it existed.
"""
maximum(arr,lower_bound,arr)
def clip_upper(arr,upper_bound):
"""
In-place, one-sided version of numpy.clip().
i.e. numpy.clip(arr,a_max=upper_bound,out=arr) if it existed.
"""
minimum(arr,upper_bound,arr)
def wrap(lower, upper, x):
"""
Circularly alias the numeric value x into the range [lower,upper).
Valid for cyclic quantities like orientations or hues.
"""
#I have no idea how I came up with this algorithm; it should be simplified.
#
# Note that Python's % operator works on floats and arrays;
# usually one can simply use that instead. E.g. to wrap array or
# scalar x into 0,2*pi, just use "x % (2*pi)".
range_=upper-lower
return lower + fmod(x-lower + 2*range_*(1-floor(x/(2*range_))), range_)
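# Worked example (added for clarity): wrap(0, 360, 370) == 10.0 and
# wrap(0, 360, -10) == 350.0, which matches Python's modulo: (-10) % 360 == 350.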
def array_argmax(arr):
"Returns the coordinates of the maximum element in the given array."
return unravel_index(arr.argmax(),arr.shape)
# CB: Is this of general interest? Used in gcal.ty.
class DivideWithConstant(param.Parameterized):
"""
Divide two scalars or arrays with a constant (c) offset on the
denominator to allow setting the gain or to avoid divide-by-zero
issues. The non-constant part of the denominator (y) is clipped
to ensure that it has only positive values.
"""
c = param.Number(default=1.0)
def __call__(self, x, y):
return divide(x,maximum(y,0)+self.c)
|
Tasignotas/topographica_mirror
|
topo/base/arrayutil.py
|
Python
|
bsd-3-clause
| 4,625 | 0.017514 |
'''
Learning-curve test functionality.
Author: Pontus Stenetorp <pontus stenetorp se>
Version: 2011-08-29
'''
from collections import defaultdict
from itertools import chain, izip
from operator import itemgetter
from os.path import join as path_join
from random import sample, seed
from sys import stderr
from common import compress, simstring_caching
from maths import mean, stddev
from scoring import score_classifier_by_tup, score_classifier_by_tup_ranked
try:
from cPickle import dump as pickle_dump, load as pickle_load
except ImportError:
from pickle import dump as pickle_dump, load as pickle_load
def __train_fold(args):
return _train_fold(*args)
def _train_fold(classifier, train_fold):
train_fold_lbls = [lbl for lbl, _ in train_fold]
train_fold_vecs = [vec for _, vec in train_fold]
assert len(train_fold_lbls) == len(train_fold_vecs)
classifier._train(train_fold_lbls, train_fold_vecs)
return len(train_fold_vecs), classifier
def _score_classifier(classifier, test_lbls, test_vecs):
score = score_classifier_by_tup(classifier,
(test_lbls, test_vecs))
# XXX: Hooking new scores into the old learning
new_score = score_classifier_by_tup_ranked(classifier,
(test_lbls, test_vecs), unseen=True)
return score, new_score
def _train_fold_gen(data_set, min_perc, max_perc, step_perc, it_factor):
set_size = len(data_set)
# Start with the largest folds, they take longer to process
for p in xrange(max_perc, min_perc - 1, -step_perc):
# Sample size for this iteration
sample_size = int((p / 100.0) * set_size)
if it_factor is not None:
folds = int(int(set_size / float(sample_size)) * it_factor)
else:
folds = 1
if p == 100:
# We can't sample when we use the whole set...
folds = 1
# Heuristic to keep us from having too low of a sample
elif folds < 4:
folds = 4
for _ in xrange(folds * 2):
yield sample(data_set, sample_size)
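# Worked example (added for clarity; the numbers are illustrative): with
# set_size=1000, it_factor=1 and p=10, sample_size = int(0.10 * 1000) = 100 and
# folds = int(int(1000 / 100.0) * 1) = 10, so the loop yields folds * 2 = 20
# random samples of 100 items for that percentage step.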
def _learning_curve_test_data_set(classifiers, train, test,
worker_pool, verbose=False, no_simstring_cache=False,
use_test_set=False, folds=10, min_perc=5, max_perc=100, step_perc=5,
it_factor=1):
# XXX: Not necessary any more!
if verbose:
print >> stderr, 'Calculating train set size...',
train_size = 0
for d in train:
for s in d:
for a in s:
train_size += 1
if verbose:
print >> stderr, 'Done!'
# XXX:
if not no_simstring_cache:
simstring_caching(classifiers, (train, test), verbose=verbose)
# Collect the seen type to iterate over later
seen_types = set()
results_by_classifier = {}
for classifier_id, classifier_class in classifiers.iteritems():
if verbose:
print >> stderr, 'Classifier:', classifier_id, '...',
from classifier.liblinear import hashabledict
classifier = classifier_class()
if verbose:
print >> stderr, 'featurising train:', '...',
train_lbls, train_vecs = classifier._gen_lbls_vecs(train)
train_set = [e for e in izip(train_lbls, train_vecs)]
assert len(train_lbls) == train_size, '{} != {}'.format(
len(train_lbls), train_size)
assert len(train_vecs) == train_size, '{} != {}'.format(
len(train_vecs), train_size)
assert len(train_set) == train_size, '{} != {}'.format(
len(train_set), train_size)
del train_lbls
del train_vecs
if verbose:
print >> stderr, 'Done!',
print >> stderr, 'featurising test', '...',
test_lbls, test_vecs = classifier._gen_lbls_vecs(test)
test_vecs = [hashabledict(d) for d in test_vecs]
if verbose:
print >> stderr, 'Done!',
# Fix the seed so that we get comparable folds
seed(0xd5347d33)
args = ((classifier, fold) for fold in _train_fold_gen(train_set,
min_perc, max_perc, step_perc, it_factor))
if worker_pool is None:
res_it = (_train_fold(*arg) for arg in args)
else:
res_it = worker_pool.imap(__train_fold, args)
classifier_results = defaultdict(list)
print >> stderr, 'Training and evaluating models: ...',
i = 0
for sample_size, fold_classifier in res_it:
score, new_score = _score_classifier(fold_classifier, test_lbls,
test_vecs)
classifier_results[sample_size].append((score, new_score))
i += 1
if i % 10 == 0:
print >> stderr, i, '...',
print >> stderr, 'Done!'
# Process the results
for sample_size in sorted(e for e in classifier_results):
results = classifier_results[sample_size]
scores = [score for score, _ in results]
new_scores = [new_score for _, new_score in results]
macro_scores = [ms for ms, _, _, _, _ in scores]
micro_scores = [ms for _, ms, _, _, _ in scores]
tps = [tp for _, _, tp, _, _ in scores]
fns = [fn for _, _, _, fn, _ in scores]
res_dics = [d for _, _, _, _, d in scores]
# New metrics
ranks = [mean(rs) for rs, _, _ in new_scores]
ambiguities = [mean(ambs) for _, ambs, _ in new_scores]
recalls = [r for _, _, r in new_scores]
# These are means of means
ranks_mean = mean(ranks)
ranks_stddev = stddev(ranks)
ambiguities_mean = mean(ambiguities)
ambiguities_stddev = stddev(ambiguities)
recalls_mean = mean(recalls)
recalls_stddev = stddev(recalls)
classifier_result = (
mean(macro_scores), stddev(macro_scores),
mean(micro_scores), stddev(micro_scores),
mean(tps), stddev(tps),
mean(fns), stddev(fns),
res_dics,
# New metrics
ranks_mean, ranks_stddev,
ambiguities_mean, ambiguities_stddev,
recalls_mean, recalls_stddev
)
classifier_results[sample_size] = classifier_result
if verbose:
res_str = ('Results {size}: '
'MACRO: {0:.3f} MACRO_STDDEV: {1:.3f} '
'MICRO: {2:.3f} MICRO_STDDEV: {3:.3f} '
'TP: {4:.3f} FP: {5:.3f} '
'MEAN_RANK: {mean_rank:.3f} MEAN_RANK_STDDEV: {mean_rank_stddev:.3f} '
'AVG_AMB: {avg_amb:.3f} AVG_AMB_STDDEV: {avg_amb_stddev:.3f} '
'RECALL: {recall:.3f} RECALL_STDDEV: {recall_stddev:.3f}'
).format(*classifier_result,
size=sample_size,
mean_rank=ranks_mean,
mean_rank_stddev=ranks_stddev,
avg_amb=ambiguities_mean,
avg_amb_stddev=ambiguities_stddev,
recall=recalls_mean,
recall_stddev=recalls_stddev
)
print res_str
results_by_classifier[classifier_id] = classifier_results
return results_by_classifier
def learning_curve_test(classifiers, datasets, outdir,
verbose=False, no_simstring_cache=False, folds=10, worker_pool=None,
min_perc=5, max_perc=100, step_perc=5, it_factor=1,
pickle_name='learning', use_test_set=False
):
### This part is really generic
# TODO: We could keep old results... But dangerous, mix-up
results_file_path = _get_learning_pickle_path(outdir, pickle_name)
#XXX: RESUME GOES HERE!
results_by_dataset = {}
for dataset_id, dataset_getter in datasets.iteritems():
if verbose:
print >> stderr, 'Data set:', dataset_id
if verbose:
print >> stderr, 'Caching vectorised data...',
train_set, dev_set, test_set = dataset_getter()
if use_test_set:
train, test = list(chain(train_set, dev_set)), list(test_set)
else:
train, test = list(train_set), list(dev_set)
del train_set, dev_set, test_set
if verbose:
print >> stderr, 'Done!'
results_by_dataset[dataset_id] = _learning_curve_test_data_set(
classifiers, train, test, worker_pool,
verbose=verbose, no_simstring_cache=no_simstring_cache,
use_test_set=use_test_set, folds=folds, min_perc=min_perc,
max_perc=max_perc, step_perc=step_perc, it_factor=it_factor)
### HACK TO GET INTERMEDIATE!
with open(results_file_path, 'w') as results_file:
pickle_dump(results_by_dataset, results_file)
if verbose:
print >> stderr, 'Results written to:', results_file_path
def _get_learning_pickle_path(outdir, name='learning'):
return path_join(outdir, '{0}_results.pickle'.format(name))
# Nice table-able number for a curve
def learning_curve_avg(classifiers, datasets, outdir, pickle_name='learning'):
with open(_get_learning_pickle_path(outdir, name=pickle_name), 'r') as results_file:
results = pickle_load(results_file)
for dataset in datasets:
print 'Dataset:', dataset
for classifier in classifiers:
print 'Classifier:', classifier
macro_avg = mean([res_tup[0] for res_tup
in results[dataset][classifier].itervalues()]) * 100
macro_tip = sorted((size, res_tup[0]) for size, res_tup
in results[dataset][classifier].iteritems())[-1][1] * 100
amb_avg = mean([res_tup[11] for res_tup
in results[dataset][classifier].itervalues()])
amb_tip = sorted((size, res_tup[11]) for size, res_tup
in results[dataset][classifier].iteritems())[-1][1]
rec_avg = mean([res_tup[13] for res_tup
in results[dataset][classifier].itervalues()]) * 100
rec_tip = sorted((size, res_tup[13]) for size, res_tup
in results[dataset][classifier].iteritems())[-1][1] * 100
print ('{:.2f}/{:.2f}/{:.2f}/{:.2f}/{:.2f}/{:.2f} '
'MACROAVG/MACROTIP/AMBAVG/AMBTIP/RECAVG/RECTIP').format(
macro_avg, macro_tip, amb_avg, amb_tip, rec_avg, rec_tip)
### Plot constants
# Default is black
LINE_COLOUR_BY_CLASSIFIER = defaultdict(lambda : 'k')
LINE_COLOUR_BY_CLASSIFIER.update({
'NAIVE': 'm',
#'MAXVOTE': 'y',
'INTERNAL': 'r',
'SIMSTRING': 'y',
'INTERNAL-SIMSTRING': 'b',
#'SIMPLE-INTERNAL-ENSEMBLE': 'g',
'GAZETTER': 'c',
'INTERNAL-GAZETTER': 'g',
#'SIMSTRING-COMPETITIVE': 'm',
#'COMPETITIVE': 'k',
})
# NOTE: Turning them all black
for k in LINE_COLOUR_BY_CLASSIFIER:
LINE_COLOUR_BY_CLASSIFIER[k] = 'k'
LINE_COLOUR_BY_DATASET = defaultdict(lambda : 'k')
LINE_STYLE_BY_CLASSIFIER = defaultdict(lambda : '-')
LINE_STYLE_BY_CLASSIFIER.update({
'NAIVE': '-:',
#'MAXVOTE': 'y',
'INTERNAL': 'default-.',
'SIMSTRING': 'steps-pre-.',
'INTERNAL-SIMSTRING': '-',
#'SIMPLE-INTERNAL-ENSEMBLE': 'g',
'GAZETTER': 'c',
'INTERNAL-GAZETTER': '--',
#'SIMSTRING-COMPETITIVE': 'm',
#'COMPETITIVE': 'k',
})
LINE_STYLE_BY_DATASET = defaultdict(lambda : '-')
LINE_MARKER_BY_CLASSIFIER = defaultdict(lambda : 'None')
LINE_MARKER_BY_CLASSIFIER.update({
})
LINE_MARKER_BY_DATASET = defaultdict(lambda : 'None')
LINE_MARKER_BY_DATASET.update({
'BioNLP-ST-2011-Epi_and_PTM': 'o',
'BioNLP-ST-2011-Infectious_Diseases': '|',
'BioNLP-ST-2011-genia': 's',
'CALBC_CII': '*',
'NLPBA': '^',
'SUPER_GREC': 'x',
})
# Res-tup handling (yuck):
#res_tups = [(size_value, res_tup[0], res_tup[1], res_tup[2],
# res_tup[3], res_tup[11], res_tup[12], res_tup[13], res_tup[14])
# for size_value, res_tup in classifier_results.iteritems()]
#res_tups.sort()
def _get_ambiguity_data(classifier_results):
# Get the ambiguity data for the res_tups (yuck...)
res_tups = [(size_value, res_tup[11], res_tup[12])
for size_value, res_tup in classifier_results.iteritems()]
res_tups.sort()
sample_sizes = [t[0] for t in res_tups]
ambiguity_means = [t[1] for t in res_tups]
ambiguity_stds = [t[2] for t in res_tups]
return sample_sizes, ambiguity_means, ambiguity_stds
def _ambiguity_plot_gen(results, classifiers, datasets):
#plt.ylabel('Accuracy')
#plt.xlabel('Training Examples')
from matplotlib.figure import Figure
import matplotlib.pyplot as plt
# Plot for each dataset and classifier
for dataset in datasets:
for classifier in classifiers:
classifier_results = results[dataset][classifier]
sample_sizes, ambiguity_means, ambiguity_stds = (
_get_ambiguity_data(classifier_results))
fig = Figure()
plt.errorbar(sample_sizes, ambiguity_means,
yerr=ambiguity_stds,
figure=fig,
label=classifier,
# Here we use style by classifier, rather than dataset
linestyle=LINE_STYLE_BY_CLASSIFIER[classifier],
color=LINE_COLOUR_BY_CLASSIFIER[classifier],
)
yield 'amb_{}_{}'.format(classifier.lower(), dataset.lower()), fig
plt.clf()
# Plot by dataset and then all classifiers
for dataset in datasets:
fig = Figure()
for classifier in classifiers:
classifier_results = results[dataset][classifier]
sample_sizes, ambiguity_means, ambiguity_stds = (
_get_ambiguity_data(classifier_results))
plt.errorbar(sample_sizes, ambiguity_means,
yerr=ambiguity_stds,
figure=fig,
label=classifier,
# Here we use style by classifier, rather than dataset
linestyle=LINE_STYLE_BY_CLASSIFIER[classifier],
marker=LINE_MARKER_BY_CLASSIFIER[classifier],
color=LINE_COLOUR_BY_CLASSIFIER[classifier],
)
yield 'amb_by_classifier', fig
plt.clf()
# XXX: Cut-off since some datasets are "too" big
dataset_size_cut_off = True
# Plot by classifier for all datasets
for classifier in classifiers:
fig = Figure()
added = []
for dataset in datasets:
classifier_results = results[dataset][classifier]
sample_sizes, ambiguity_means, ambiguity_stds = (
_get_ambiguity_data(classifier_results))
line = plt.errorbar(sample_sizes, ambiguity_means,
# NOTE: Becomes pretty much unreadable
#yerr=ambiguity_stds,
figure=fig,
label=dataset,
# Here we use style by dataset, rather than classifier
linestyle=LINE_STYLE_BY_DATASET[dataset],
marker=LINE_MARKER_BY_DATASET[dataset],
color=LINE_COLOUR_BY_DATASET[dataset],
)
added.append((dataset, line))
# Legend handling (yuck...)
#print fig.get_axes()
#ax = fig.get_axes()[0]
#handles, labels = ax.get_legend_handles_labels()
labels, handles = zip(*sorted(added))
plt.legend(handles, labels)
#ax.legend(handles, labels, loc=4)
plt.xlim(xmax=10000)
yield 'amb_by_dataset', fig
plt.clf()
# XXX: God damn it, how do you do non-stateful matplotlib...!
def plot_learning_curve_results(classifiers, datasets, outdir,
pickle_name='learning', verbose=False):
from matplotlib import pyplot as plt
if not classifiers and not datasets:
print >> stderr, 'No classifiers or datasets specified, exiting'
return
# Load the results to be plotted
if verbose:
print >> stderr, 'Loading results...',
with open(_get_learning_pickle_path(outdir, name=pickle_name), 'r'
) as results_file:
results = pickle_load(results_file)
if verbose:
print >> stderr, 'Done!'
image_formats = ('svg', )
for fig_name, fig in _ambiguity_plot_gen(results, classifiers, datasets):
for image_format in image_formats:
plt.savefig(path_join(outdir, fig_name + '.' + image_format),
format=image_format, figure=fig)
# Over all classifiers for one dataset
# Over all datasets for one classifier
# Same as the above for ambiguity and recall
#_plot_curve(outdir, results, pickle_name)
#_plot_curve(outdir, results, pickle_name + '_ambiguity', new_metric=True)
#_plot_curve(outdir, results, pickle_name + '_recall', new_metric=True, recall=True)
|
ninjin/simsem
|
experiment/learning.py
|
Python
|
isc
| 17,339 | 0.006863 |
# YouTube Video: https://www.youtube.com/watch?v=RRK0gd77Ln0
# Given a string, calculate the length of the string.
input_str = "LucidProgramming"
# Standard Pythonic way:
# print(len(input_str))
# Iterative length calculation: O(n)
def iterative_str_len(input_str):
input_str_len = 0
for i in range(len(input_str)):
input_str_len += 1
return input_str_len
# Recursive length calculation: O(n) calls (each slice copies the string, so ~O(n^2) time)
def recursive_str_len(input_str):
if input_str == '':
return 0
return 1 + recursive_str_len(input_str[1:])
print(iterative_str_len(input_str))
print(recursive_str_len(input_str))
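# Added note: recursive_str_len() uses one stack frame per character, so very
# long strings can hit Python's default recursion limit (roughly 1000 frames)
# and raise a RecursionError. A rough, illustrative guard:
#
# import sys
# if len(input_str) < sys.getrecursionlimit() - 100:
#     print(recursive_str_len(input_str))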
|
vprusso/youtube_tutorials
|
algorithms/recursion/str_len.py
|
Python
|
gpl-3.0
| 615 | 0.00813 |
"""
Written by Matthew Cook
Created August 4, 2016
mattheworion.cook@gmail.com
Script to do basic syntax conversion between Python 3 and R syntax.
What it does convert:
-assignment operator
-function definitions
-filename
-inline arithmetic (*=, +=, -=, **)
-':' to '{'
-'not' to '!'
-if statments
-add closing brackets on a newline
What it doesn't do:
-Python specific functions to R specific functions
-Add closing brackets with perfect indentation
TODO: Closing brackets indentation issue
"""
from os import path
from shutil import copyfile
# Define changes to make
simple_change = {"**" : "^",
" = " : " <- ",
":\n" : "{\n",
"not" : "!"}
complex_change = {"def " : "",
"+=" : '+',
"-=" : '-',
"*=" : '*'}
# Create flag stacks
flags = {}
flags['comment'] = [] # Multi-line Comment flag
flags['bracket'] = [] # Code block start flag
flags['b_start'] = [] # Indentation at code block start
# Create indent dictionary
indents = {}
# Define special characters to prevent ';' addition to line
s_char = ('}\n', '{\n', ':\n', '"""\n')
def createRfile():
"""Creates a new file named "filename.R" by removing .py extension"""
# Provide path to file
# Note: The new R file will be placed in the same directory
filename = input("Please copy and paste the path to your file here: ")
if not path.isfile(filename):
print("Filename was invalid")
filename = input("Please copy and paste the path to your file here: ")
# Test for valid file path
if path.exists(filename) :
# Strip the directory from the filename
new_name = path.basename(filename)
# Replace python extention with R extension
new_name = filename.replace(".py", ".R")
doesexist = path.exists(new_name)
if doesexist:
print("""The file already exists.
Creating a backup copy in current directory""")
# Split name at . and insert -copy. before extension
cpy_temp = new_name.split(sep=".",)
cpy_temp[0] += "-copy."
cpy = cpy_temp[0] + cpy_temp[1]
copyfile(new_name, cpy)
print("Copy created as: ", cpy)
return filename, new_name
# Create R file
elif not doesexist:
file = open(new_name, "w")
print(new_name, " was successfully created.")
file.close()
return filename, new_name
else:
print(new_name, " could not be created. Check your permissions?")
exit(0)
else:
print("No valid file selected... Quitting script")
def find_all(tofind, string):
"""Returns number of times a certain substring is found"""
found = [i for i in range(len(string)) if string.startswith(tofind, i)]
num_found = len(found)
return num_found
def complexchange(line):
"""Completes multi-step line changes """
for key in complex_change:
if key in line:
if key == 'def ' and line.lstrip().startswith(key):
#remove 'def' keyword
change = complex_change[key]
line_r = ignoreStrReplace(line, key, change)
#split function definition at '('
lsplit = line_r.split('(', maxsplit=1)
fname = lsplit[0]
params = '(' + lsplit[1]
# create R style function def "fname <- function(params)"
line = fname + ' <- function' + params
else:
line = opfunc(line, key)
return line
# TESTED-Works
def chglinecontent(line):
"""Changes content contained within a single line"""
# Perform simple changes
for key in simple_change.keys():
# Ignore if string version exists in line
check_str_s, check_str_d = stringify(key)
if not check_str_d in line or not check_str_s in line:
line = ignoreStrReplace(line, key, simple_change[key])
line = complexchange(line)
line = statement(line)
return line
def indentation(s, tabsize=4):
"""Generator to return level of indentation"""
sx = s.expandtabs(tabsize)
# if line is empty yields 0
return 0 if sx.isspace() else len(sx) - len(sx.lstrip())
def opfunc(line, op):
"""
    Replaces Python in-place operations (e.g. '*=') in the given line with R style assignments.
"""
#Check if the operation is contained in a string, don't modify if true.
check_str_s, check_str_d = stringify(op)
if not check_str_d in line and not check_str_s in line:
# Get R style operation
rop = complex_change[op]
# Split line once at python operand
linesplit = line.split(op)
# Store variable (left side) and right side of operand
ls = linesplit[0] + ' <- '
rs = rop + linesplit[1]
# Prepend variable(ls) to right side and convert assignment variable
rs = linesplit[0].lstrip() + rs
# Strip whitespace from right of ls and create R style equation
line = ls + rs
return line
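# Worked example (added for clarity): opfunc("    x *= 2\n", '*=') splits the
# line at '*=' and rebuilds it as "    x  <- x * 2\n", i.e. an R-style
# assignment equivalent to "x <- x * 2" (modulo whitespace).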
def stringify(sub):
"""Returns python string versions ('' and "") of original substring"""
check_str_s = "'" + sub + "'"
check_str_d = '"' + sub + '"'
return check_str_s, check_str_d
def setflags(line, indents):
"""Scans line to set/unset flags for further processing"""
# For multi-line comments
if 1 == (find_all('"""', line) % 2):
if not flags['comment']:
flags['comment'].append('"""')
else:
flags['comment'].pop()
# For code blocks
if line.rstrip().endswith(':'):
flags['bracket'].append("}")
flags['b_start'].append(indentation(line))
def standind(line, cur_ind):
"""Standardizes indentation"""
devfromstd = cur_ind % 4
if not devfromstd == 0:
line = (devfromstd * '') + line
return indentation(line), line
#TESTED-WORKS
def statement(line):
"""Converts if statements"""
if "if " in line and not 'elif' in line:
lsplit = line.split('if ', maxsplit=1)
ls = lsplit[0] + 'if '
rs = lsplit[1]
# Replace the ':' at the end of the statement
rs = lsplit[1].replace(':','{')
rs = '(' + rs
rs = rs.replace('{', '){')
line = ls + rs
if 'elif ' in line:
lsplit = line.split('if ', maxsplit=1)
ls = lsplit[0] + 'else if '
rs = lsplit[1]
# Replace the ':' at the end of the statement
rs = lsplit[1].replace(':','{')
rs = '(' + rs
rs = rs.replace('{', '){')
line = ls + rs
return line
def ignoreStrReplace(line, cur, rep):
"""Wrapper for str.replace to ignore strings"""
if '"' in line:
#Split string at quotation marks
lsplit = line.split('"')
#Replace items contained within even partitions
lsplit[::2] = [spl.replace(cur, rep) for spl in lsplit[::2]]
#Rejoin the partitions
line = '"'.join(lsplit)
elif "'" in line:
#Split string at quotation marks
lsplit = line.split("'")
#Replace items contained within even partitions
lsplit[::2] = [spl.replace(cur, rep) for spl in lsplit[::2]]
#Rejoin the partitions
line = '"'.join(lsplit)
else:
line = line.replace(cur, rep)
return line
def closeBrackets(file):
"""Attempts to find and close the opened brackets"""
for i in range(len(file)):
# Doing this to be able to randomly access variables
line = file[i]
# Set boolean to check for change in block
sameBlock = True
# Ignore lines with only whitespace
if not line.isspace():
#Look for opening brackets if closing brackets remain
if '{\n' in line and flags['bracket']:
# Store current index for later
i_temp = i
# Get starting indentation
start_indent = indentation(line)
while i+1 < len(file) and sameBlock:
#Get next line, and strip trailing whitespace
nextline = file[i+1].rstrip()
#Get its indentation
next_ind = indentation(nextline)
# Check for decreased indentation and comma continuation
if start_indent >= next_ind and not line.endswith(','):
sameBlock = False
else:
i += 1
# Append final line with bracket closure and new line
file[i] = file[i] + (start_indent * ' ')
file[i] = file[i] + flags['bracket'].pop()
file[i] = file[i] + '\n'
# Reset to previous index + 1
i = i_temp + 1
return file
def main():
pyfile, rfile = createRfile()
with open(pyfile, "r") as infile, open(rfile, "w") as outfile:
# Read each line into lines[]
lines = infile.readlines()
# Close Python file
infile.close()
for line in lines:
            # Get the current indentation before adjustment
indents['cur'] = indentation(line)
#Adjust to standard indent
indents['cur'], line = standind(line, indents['cur'])
#Strip whitespace and check for python comment or import
if not line.lstrip().startswith(("#","import","from")):
# Set the flags for further changes
setflags(line, indents)
if not flags['comment']:
# Perform line changes
line = chglinecontent(line)
#statement() may need two passes to correctly modify the line
line = statement(line)
# If the line isn't whitespace, write it to the file
if not line.isspace():
# Write modified line to file!
outfile.write(line)
# Close R file
outfile.close()
if flags['bracket']:
print("This functionality may not work perfectly... ")
toClose = input("""
Do you want to try to close the still open brackets?
(yes/no)
""")
if 'yes' == toClose.lower():
# Look for possible ways to close opened brackets in outfile
with open(rfile, "r") as file:
lines = file.readlines()
lines = closeBrackets(lines)
file.close()
with open(rfile, "w") as file:
# Overwrite old files
for line in lines:
file.write(line)
if __name__ == '__main__':
main()
|
mcook42/Py_R_Converters
|
py2r-syntax-converter.py
|
Python
|
mit
| 11,063 | 0.003073 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for unified pooling functionality in tensorflow.ops.nn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
def pool_direct_single_axis(
input, # pylint: disable=redefined-builtin
axis,
window_size,
pooling_type,
padding,
dilation_rate,
stride):
"""Numpy implementation of pooling along a single axis.
This is intended for testing only, and therefore isn't particularly efficient.
See pool_direct below for the meaning of the arguments.
Args:
input: numpy array.
axis: axis along which to perform pooling.
window_size: int >= 1. Size of pooling window within axis.
pooling_type: either "MAX" or "AVG".
padding: either "SAME" or "VALID".
dilation_rate: int >= 1. Dilation factor for window, i.e. stride at which
to sample input.
stride: int >= 1. Stride at which to generate output.
Returns:
pooling output array of rank N+2.
Raises:
ValueError: if arguments are invalid.
"""
effective_window_size = (window_size - 1) * dilation_rate + 1
input_size = input.shape[axis]
if padding == "SAME":
output_size = int(math.ceil(input_size / stride))
total_padding_amount = max(
0, (output_size - 1) * stride + effective_window_size - input_size)
before_padding = total_padding_amount // 2
elif padding == "VALID":
output_size = int(
math.ceil((input_size - effective_window_size + 1) / stride))
before_padding = 0
else:
raise ValueError("Unsupported padding type: %r" % (padding,))
output_shape = input.shape[:axis] + (output_size,) + input.shape[axis + 1:]
output = np.zeros(output_shape, input.dtype)
initial_dim_selector = tuple(np.s_[:] for _ in range(axis))
if pooling_type == "MAX":
pooling_func = np.max
elif pooling_type == "AVG":
pooling_func = np.mean
else:
raise ValueError("Unsupported pooling type: %r" % (pooling_type,))
for output_pos in range(output_size):
input_start_pos = output_pos * stride - before_padding
input_end_pos = min(input_start_pos + effective_window_size, input_size)
if input_start_pos < 0:
input_start_pos += dilation_rate
input_slice = np.s_[input_start_pos:input_end_pos:dilation_rate]
output[initial_dim_selector + (output_pos,)] = pooling_func(
input[initial_dim_selector + (input_slice,)], axis=axis)
return output
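# Worked example (added for clarity): with input_size=10, window_size=3,
# dilation_rate=1 and stride=2 the effective window size is 3, so
#   "VALID": output_size = ceil((10 - 3 + 1) / 2) = 4
#   "SAME":  output_size = ceil(10 / 2) = 5, with
#            total_padding_amount = max(0, (5 - 1) * 2 + 3 - 10) = 1.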
def pool_direct(
input, # pylint: disable=redefined-builtin
window_shape,
pooling_type,
padding, # pylint: disable=redefined-builtin
dilation_rate,
strides,
data_format=None):
"""Numpy implementation of pooling.
This is intended for testing only, and therefore isn't particularly efficient.
See tensorflow.nn.pool.
Args:
input: numpy array of rank N+2.
window_shape: Sequence of N ints >= 1.
pooling_type: either "MAX" or "AVG".
padding: either "SAME" or "VALID".
dilation_rate: Sequence of N ints >= 1.
strides: Sequence of N ints >= 1.
data_format: If specified and starts with "NC", indicates that second
dimension, rather than the last dimension, specifies the channel.
Returns:
pooling output array of rank N+2.
Raises:
ValueError: if arguments are invalid.
"""
if data_format is None or not data_format.startswith("NC"):
spatial_start_dim = 1
else:
spatial_start_dim = 2
output = input
for i in range(len(window_shape)):
output = pool_direct_single_axis(
input=output,
axis=i + spatial_start_dim,
window_size=window_shape[i],
pooling_type=pooling_type,
padding=padding,
dilation_rate=dilation_rate[i],
stride=strides[i])
return output
class PoolingTest(test.TestCase):
def _test(self, input_shape, **kwargs):
# Use negative numbers to make sure there isn't any zero padding getting
# used.
x = -np.arange(
np.prod(input_shape), dtype=np.float32).reshape(input_shape) - 1
y1 = pool_direct(input=x, **kwargs)
y2 = nn_ops.pool(input=x, **kwargs)
self.assertAllClose(y1, self.evaluate(y2), rtol=1e-2, atol=1e-2)
def testPoolSimple(self):
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["MAX", "AVG"]:
self._test(
input_shape=[1, 1, 10, 1],
window_shape=[1, 3],
padding=padding,
pooling_type=pooling_type,
dilation_rate=[1, 1],
strides=[1, 2])
def testPool1D(self):
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["MAX", "AVG"]:
for input_shape in [[2, 9, 2], [2, 10, 2]]:
for window_shape in [[1], [2], [3]]:
if padding != "SAME":
for dilation_rate in [[1], [2], [3]]:
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=dilation_rate,
strides=[1])
for strides in [[1], [2], [3]]:
if np.any(np.array(strides) > window_shape):
continue
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=[1],
strides=strides)
def testPool2D(self):
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["MAX", "AVG"]:
for input_shape in [[2, 9, 10, 2], [2, 10, 9, 2]]:
for window_shape in [[1, 1], [2, 1], [2, 3]]:
if padding != "SAME":
for dilation_rate in [[1, 1], [2, 1], [1, 2], [2, 3]]:
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=dilation_rate,
strides=[1, 1])
for strides in [[1, 1], [2, 1], [1, 2], [2, 3]]:
if np.any(np.array(strides) > window_shape):
continue
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=[1, 1],
strides=strides)
def testPool3D(self):
if test.is_built_with_rocm():
self.skipTest("Pooling with 3D tensors is not supported in ROCm")
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["MAX", "AVG"]:
for input_shape in [[2, 9, 10, 11, 2], [2, 10, 9, 11, 2]]:
for window_shape in [[1, 1, 1], [2, 1, 2], [2, 3, 2]]:
if padding != "SAME":
for dilation_rate in [[1, 1, 1], [2, 1, 2], [1, 2, 2],
[2, 3, 3]]:
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=dilation_rate,
strides=[1, 1, 1])
for strides in [[1, 1, 1], [2, 1, 2], [1, 2, 2], [2, 3, 3]]:
if np.any(np.array(strides) > window_shape):
continue
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=[1, 1, 1],
strides=strides)
def testPoolNC(self):
if test.is_gpu_available(cuda_only=True):
# "NC*" format is currently only supported on CUDA.
with self.session(use_gpu=True):
for padding in ["SAME", "VALID"]:
self._test(
input_shape=[2, 2, 9],
window_shape=[2],
padding=padding,
pooling_type="MAX",
strides=[1],
dilation_rate=[1],
data_format="NCW")
self._test(
input_shape=[2, 2, 9],
window_shape=[2],
padding=padding,
pooling_type="MAX",
strides=[2],
dilation_rate=[1],
data_format="NCW")
self._test(
input_shape=[2, 2, 7, 9],
window_shape=[2, 2],
padding=padding,
pooling_type="MAX",
strides=[1, 2],
dilation_rate=[1, 1],
data_format="NCHW")
if test.is_built_with_rocm():
# Pooling with 3D tensors is not supported in ROCm
continue
self._test(
input_shape=[2, 2, 7, 5, 3],
window_shape=[2, 2, 2],
padding=padding,
pooling_type="MAX",
strides=[1, 2, 1],
dilation_rate=[1, 1, 1],
data_format="NCDHW")
self._test(
input_shape=[2, 2, 7, 9],
window_shape=[2, 2],
padding="VALID",
pooling_type="MAX",
strides=[1, 1],
dilation_rate=[2, 2],
data_format="NCHW")
def _test_gradient(self, input_shape, **kwargs):
x_val = -np.arange(
np.prod(input_shape), dtype=np.float32).reshape(input_shape) - 1
x = constant_op.constant(x_val, name="x", dtype=dtypes.float32)
output = nn_ops.pool(input=x, **kwargs)
y_shape = output.get_shape().as_list()
err = gradient_checker.compute_gradient_error(
[x], [input_shape], output, y_shape, x_init_value=[x_val])
err_tolerance = 1e-2
self.assertLess(err, err_tolerance)
@test_util.run_deprecated_v1
def testGradient1D(self):
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["AVG", "MAX"]:
for input_shape in [[2, 5, 2], [1, 4, 1]]:
for window_shape in [[1], [2]]:
if padding != "SAME":
for dilation_rate in [[1], [2]]:
self._test_gradient(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=dilation_rate,
strides=[1])
for strides in [[1], [2]]:
if np.any(np.array(strides) > window_shape):
continue
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=[1],
strides=strides)
@test_util.run_deprecated_v1
def testGradient2D(self):
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["AVG", "MAX"]:
for input_shape in [[2, 4, 5, 2], [1, 5, 4, 1]]:
for window_shape in [[1, 1], [2, 1], [2, 2]]:
if padding != "SAME":
for dilation_rate in [[1, 1], [2, 1], [2, 2]]:
self._test_gradient(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=dilation_rate,
strides=[1, 1])
for strides in [[1, 1], [2, 1], [1, 2], [2, 2]]:
if np.any(np.array(strides) > window_shape):
continue
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=[1, 1],
strides=strides)
@test_util.run_deprecated_v1
def testGradient3D(self):
if test.is_built_with_rocm():
self.skipTest("Pooling with 3D tensors is not supported in ROCm")
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:
for pooling_type in ["AVG", "MAX"]:
for input_shape in [[1, 3, 5, 4, 1], [1, 5, 4, 3, 1]]:
for window_shape in [[1, 1, 1], [2, 1, 2], [2, 2, 2]]:
if padding != "SAME":
for dilation_rate in [[1, 1, 1], [2, 1, 2], [2, 2, 2]]:
self._test_gradient(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=dilation_rate,
strides=[1, 1, 1])
for strides in [[1, 1, 1], [2, 1, 2], [2, 2, 2]]:
if np.any(np.array(strides) > window_shape):
continue
self._test(
input_shape=input_shape,
window_shape=window_shape,
padding=padding,
pooling_type=pooling_type,
dilation_rate=[1, 1, 1],
strides=strides)
if __name__ == "__main__":
test.main()
|
gunan/tensorflow
|
tensorflow/python/kernel_tests/pool_test.py
|
Python
|
apache-2.0
| 14,854 | 0.00579 |
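The tests above drive nn_ops.pool() through combinations of padding, window shape, dilation rate, strides and data format. A minimal stand-alone sketch of the underlying call, using the same internal modules the test file imports; the input shape and parameter values are illustrative assumptions, not taken from the test file:

# Minimal sketch of the nn_ops.pool() call exercised by _test()/_test_gradient().
# Input shape and pooling parameters are illustrative assumptions.
import numpy as np
from tensorflow.python.framework import constant_op, dtypes
from tensorflow.python.ops import nn_ops

x_val = np.arange(2 * 9 * 2, dtype=np.float32).reshape([2, 9, 2])
x = constant_op.constant(x_val, dtype=dtypes.float32)  # NWC: [batch, width, channels]
y = nn_ops.pool(
    input=x,
    window_shape=[2],     # pooling window along the spatial (width) axis
    pooling_type="MAX",   # or "AVG"
    padding="SAME",       # the tests combine dilation_rate > 1 only with "VALID"
    dilation_rate=[1],
    strides=[2])
print(y.shape)            # (2, 5, 2): ceil(9 / 2) output positions with SAME padding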
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-03-05 09:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rules', '0057_auto_20180302_1312'),
]
operations = [
migrations.AddField(
model_name='source',
name='public_source',
field=models.CharField(blank=True, max_length=100, null=True),
),
]
|
StamusNetworks/scirius
|
rules/migrations/0058_source_public_source.py
|
Python
|
gpl-3.0
| 440 | 0 |
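For context, the AddField operation above corresponds to a model field of roughly the following shape on rules.Source; this is a sketch only, since the actual Source model is defined elsewhere in the scirius code base and may carry more options:

# Hedged sketch of the field the migration above adds. The surrounding Source
# model is only outlined; the real definition lives in the scirius project.
from django.db import models

class Source(models.Model):
    # ... existing Source fields ...
    public_source = models.CharField(max_length=100, blank=True, null=True)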
import pyfits as pyf
import pylab as pyl
# Full photometric catalog and the pre-selected sample of objects.
full = pyf.getdata('./data/gs_all_tf_h_130511b_multi.fits')
sample = pyf.getdata('../samples/sample_1.5_3.5_gs_all.fits')
f = pyl.figure(1)
f1 = f.add_subplot(111)
# Plot the WFC3/F160W (H-band) signal-to-noise ratio (flux / flux error)
# against the H-band magnitude for each object in the sample.
for i in range(len(sample)):
    ID = sample['ID'][i]
    H_flux = full['WFC3_F160W_FLUX'][i-1]
    H_flux_err = full['WFC3_F160W_FLUXERR'][i-1]
    H_flux_weight = full['WFC3_F160W_WEIGHT'][i-1]
    H_mag = sample['Hmag'][i]
    f1.scatter(H_mag, H_flux / H_flux_err)
pyl.show()
|
boada/ICD
|
sandbox/legacy_plot_code/ston_test.py
|
Python
|
mit
| 488 | 0.004098 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of GNUWiNetwork,
# Copyright (C) 2014 by
# Pablo Belzarena, Gabriel Gomez Sena, Victor Gonzalez Barbone,
# Facultad de Ingenieria, Universidad de la Republica, Uruguay.
#
# GNUWiNetwork is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GNUWiNetwork is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNUWiNetwork. If not, see <http://www.gnu.org/licenses/>.
#
'''Functions to create events of different types.
To create an event object use function C{mkevent()}. This function creates events of different types, according to the event modules imported by this module.
'''
import sys
import types
import evtimer
import utils.framers.ieee80211.evframes80211 as evframes80211
import evrequest
#sys.path = sys.path + ['..']
def mkevent(nickname, **kwargs):
'''Returns an event of the given event nickname.
@param nickname: a valid event nickname, i.e. one that is a key in dictionary of valid nicknames.
@param kwargs: a dictionary of variables depending on the type of event. Field C{ev_dc} is a dictionary of fields and values for the corresponding event type; field C{frmpkt} is a binary packed frame.
@return: an Event object.
'''
from evtimer import dc_nicknames as ev_dc_nicknames
import utils.framers.ieee80211.evframes80211
import evrequest
frmpkt, ev_dc = '', {}
if kwargs.has_key('ev_dc'):
ev_dc = kwargs['ev_dc']
if kwargs.has_key('frmpkt'):
frmpkt = kwargs['frmpkt']
ev_dc['frame_length'] = len(frmpkt)
else:
ev_dc['frame_length'] = 0
frmpkt = ''
if kwargs.has_key('payload'):
payload = kwargs['payload']
else:
payload = ''
if evtimer.dc_nicknames.has_key(nickname):
ptype, psubtype, eventclass = evtimer.dc_nicknames[nickname]
return eventclass(nickname, ptype, psubtype, ev_dc)
elif evframes80211.dc_nicknames.has_key(nickname):
ev_type, ev_subtype, eventclass = evframes80211.dc_nicknames[nickname]
ev = eventclass(nickname, ev_type, ev_subtype, frmpkt, ev_dc)
ev.payload = payload
return ev
elif evrequest.dc_nicknames.has_key(nickname):
ptype, psubtype, eventclass = evrequest.dc_nicknames[nickname]
return eventclass(nickname, ptype, psubtype, ev_dc)
else:
raise EventNameException(nickname + ' is not a valid nickname.')
if __name__ == '__main__':
import doctest
doctest.testmod()
|
git-artes/GNUWiNetwork
|
gwn/gwnevents/api_events.py
|
Python
|
gpl-3.0
| 2,996 | 0.00534 |
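The mkevent() docstring above describes a factory keyed on event nicknames: the nickname selects an event class from one of the three nickname dictionaries (timer, 802.11 frame, request), and frmpkt, ev_dc and payload are passed through to it. A hedged usage sketch; the import path, the nickname 'DataData' and the keyword values are assumptions for illustration, not values confirmed by the module:

# Hedged usage sketch for mkevent(); nickname and argument values are
# assumptions for illustration, not values confirmed by this module.
from gwnevents import api_events as events   # import path is an assumption

frmpkt = b'\x00' * 24                  # some packed 802.11 frame
ev = events.mkevent('DataData',        # must be a key in one of the three
                    frmpkt=frmpkt,     # nickname dictionaries, otherwise
                    ev_dc={},          # EventNameException is raised
                    payload='hello')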
#!/usr/bin/env python
# Copyright (C) 2015 Wayne Warren
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import io
import os
import logging
import platform
import sys
from stevedore import extension
import yaml
from jenkins_jobs.cli.parser import create_parser
from jenkins_jobs.config import JJBConfig
from jenkins_jobs import utils
from jenkins_jobs import version
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()
reload(sys)
sys.setdefaultencoding('utf8')
def __version__():
return "Jenkins Job Builder version: %s" % \
version.version_info.version_string()
class JenkinsJobs(object):
""" This is the entry point class for the `jenkins-jobs` command line tool.
While this class can be used programmatically by external users of the JJB
API, the main goal here is to abstract the `jenkins_jobs` tool in a way
that prevents test suites from caring overly much about various
implementation details--for example, tests of subcommands must not have
access to directly modify configuration objects, instead they must provide
a fixture in the form of an .ini file that provides the configuration
necessary for testing.
External users of the JJB API may be interested in this class as an
alternative to wrapping `jenkins_jobs` with a subprocess that execs it as a
system command; instead, python scripts may be written that pass
`jenkins_jobs` args directly to this class to allow programmatic setting of
various command line parameters.
"""
def __init__(self, args=None, **kwargs):
if args is None:
args = []
self.parser = create_parser()
self.options = self.parser.parse_args(args)
self.jjb_config = JJBConfig(self.options.conf, **kwargs)
if not self.options.command:
self.parser.error("Must specify a 'command' to be performed")
if (self.options.log_level is not None):
self.options.log_level = getattr(logging,
self.options.log_level.upper(),
logger.getEffectiveLevel())
logger.setLevel(self.options.log_level)
self._parse_additional()
self.jjb_config.validate()
def _set_config(self, target, option):
"""
Sets the option in target only if the given option was explicitly set
"""
opt_val = getattr(self.options, option, None)
if opt_val is not None:
target[option] = opt_val
def _parse_additional(self):
self._set_config(self.jjb_config.builder, 'ignore_cache')
self._set_config(self.jjb_config.builder, 'flush_cache')
self._set_config(self.jjb_config.yamlparser, 'allow_empty_variables')
self._set_config(self.jjb_config.jenkins, 'user')
self._set_config(self.jjb_config.jenkins, 'password')
if getattr(self.options, 'plugins_info_path', None) is not None:
with io.open(self.options.plugins_info_path, 'r',
encoding='utf-8') as yaml_file:
plugins_info = yaml.load(yaml_file)
if not isinstance(plugins_info, list):
self.parser.error("{0} must contain a Yaml list!".format(
self.options.plugins_info_path))
self.jjb_config.builder['plugins_info'] = plugins_info
if getattr(self.options, 'path', None):
if hasattr(self.options.path, 'read'):
logger.debug("Input file is stdin")
if self.options.path.isatty():
if platform.system() == 'Windows':
key = 'CTRL+Z'
else:
key = 'CTRL+D'
logger.warn("Reading configuration from STDIN. "
"Press %s to end input.", key)
else:
# take list of paths
self.options.path = self.options.path.split(os.pathsep)
do_recurse = (getattr(self.options, 'recursive', False) or
self.jjb_config.recursive)
excludes = ([e for elist in self.options.exclude
for e in elist.split(os.pathsep)] or
self.jjb_config.excludes)
paths = []
for path in self.options.path:
if do_recurse and os.path.isdir(path):
paths.extend(utils.recurse_path(path, excludes))
else:
paths.append(path)
self.options.path = paths
def execute(self):
extension_manager = extension.ExtensionManager(
namespace='jjb.cli.subcommands',
invoke_on_load=True,)
ext = extension_manager[self.options.command]
ext.obj.execute(self.options, self.jjb_config)
def main():
argv = sys.argv[1:]
jjb = JenkinsJobs(argv)
jjb.execute()
|
tamac-io/jenkins-job-builder
|
jenkins_jobs/cli/entry.py
|
Python
|
apache-2.0
| 5,508 | 0.000182 |
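As the JenkinsJobs docstring notes, the class can be driven programmatically instead of shelling out to the jenkins-jobs command. A minimal sketch, assuming a jenkins_jobs.ini configuration file and a jobs/ directory of YAML job definitions (both paths, and the choice of the 'test' subcommand, are illustrative):

# Minimal programmatic-use sketch for the JenkinsJobs class above.
# The config path, subcommand and jobs directory are illustrative assumptions.
from jenkins_jobs.cli.entry import JenkinsJobs

argv = ['--conf', 'jenkins_jobs.ini',  # global options precede the subcommand
        'test',                        # subcommand resolved via stevedore
        'jobs/']                       # path(s) with YAML job definitions
jjb = JenkinsJobs(argv)
jjb.execute()                          # loads the 'test' extension and runs it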