| text (string, lengths 6-947k) | repo_name (string, lengths 5-100) | path (string, lengths 4-231) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) |
|---|---|---|---|---|---|---|
import math
from pyb import DAC, micros, elapsed_micros
def tone1(freq):
t0 = micros()
dac = DAC(1)
while True:
theta = 2*math.pi*float(elapsed_micros(t0))*freq/1e6
fv = math.sin(theta)
v = int(126.0 * fv) + 127
#print("Theta %f, sin %f, scaled %d" % (theta, fv, v))
#delay(100)
dac.write(v)
def tone2(freq):
t0 = micros()
dac = DAC(1)
omega = 2 * math.pi * freq / 1e6
while True:
theta = omega*float(elapsed_micros(t0))
fv = math.sin(theta)
v = int(126.0 * fv) + 127
#print("Theta %f, sin %f, scaled %d" % (theta, fv, v))
#delay(100)
dac.write(v)
def tone3(freq, l_buf=256):
dac = DAC(1)
dtheta = 2 * math.pi / l_buf
scale = lambda fv: int(126.0 * fv) + 127
buf = bytearray(scale(math.sin(dtheta*t)) for t in range(l_buf))
dac.write_timed(buf, freq * l_buf, mode=DAC.CIRCULAR)
def tone4(freq, l_buf=256):
dac = DAC(1)
dtheta = 2 * math.pi / l_buf
scale = lambda fv: int(123 * fv) + 127
buf = bytearray(scale(math.sin(dtheta*t)) for t in range(l_buf))
dac.write_timed(buf, freq * l_buf, mode=DAC.CIRCULAR)
def tone5(freq, wavefun=lambda x: math.sin(2.0*math.pi*x), l_buf=256):
dac = DAC(1)
dt = 1.0 / l_buf
scale = lambda fv: int(123 * fv) + 127
buf = bytearray(scale(wavefun(t*dt)) for t in range(l_buf))
dac.write_timed(buf, freq * l_buf, mode=DAC.CIRCULAR)
def tone6(freq, wavefun=lambda x: math.sin(2.0*math.pi*x), l_buf=256, dacnum=1):
dac = DAC(dacnum)
dt = 1.0 / l_buf
scale = lambda fv: int(123 * fv) + 127
buf = bytearray(scale(wavefun(t*dt)) for t in range(l_buf))
dac.write_timed(buf, freq * l_buf, mode=DAC.CIRCULAR)
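# Usage sketch (added for illustration; assumes a pyboard with the DAC 1 output on pin X5 driving a speaker/amp):
if __name__ == "__main__":
    # Play a 440 Hz sine on DAC 1 via the circular DMA buffer; reset the board to stop.
    tone3(440)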
| pramasoul/pyboard-fun | tone.py | Python | mit | 1,739 | 0.008626 |
#!/usr/bin/env python2
import sys, os
import pwd, grp
from gi.repository import Gtk, GObject, Gio, GdkPixbuf, AccountsService
import gettext
import shutil
import PIL
from PIL import Image
from random import randint
import re
import subprocess
gettext.install("cinnamon", "/usr/share/locale")
(INDEX_USER_OBJECT, INDEX_USER_PICTURE, INDEX_USER_DESCRIPTION) = range(3)
(INDEX_GID, INDEX_GROUPNAME) = range(2)
class GroupDialog (Gtk.Dialog):
def __init__ (self, label, value):
super(GroupDialog, self).__init__()
try:
self.set_modal(True)
self.set_skip_taskbar_hint(True)
self.set_skip_pager_hint(True)
self.set_title("")
table = DimmedTable()
table.add_labels([label])
self.entry = Gtk.Entry()
self.entry.set_text(value)
self.entry.connect("changed", self._on_entry_changed)
table.add_controls([self.entry])
self.set_border_width(6)
box = self.get_content_area()
box.add(table)
self.show_all()
self.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.ResponseType.OK, )
self.set_response_sensitive(Gtk.ResponseType.OK, False)
except Exception, detail:
print detail
def _on_entry_changed(self, entry):
name = entry.get_text()
if " " in name or name.lower() != name:
entry.set_icon_from_stock(Gtk.EntryIconPosition.SECONDARY, Gtk.STOCK_DIALOG_WARNING)
entry.set_icon_tooltip_text(Gtk.EntryIconPosition.SECONDARY, _("The group name cannot contain upper-case or space characters"))
self.set_response_sensitive(Gtk.ResponseType.OK, False)
else:
entry.set_icon_from_stock(Gtk.EntryIconPosition.SECONDARY, None)
self.set_response_sensitive(Gtk.ResponseType.OK, True)
if entry.get_text() == "":
self.set_response_sensitive(Gtk.ResponseType.OK, False)
class DimmedTable (Gtk.Table):
def __init__ (self):
super(DimmedTable, self).__init__()
self.set_border_width(6)
self.set_row_spacings(8)
self.set_col_spacings(15)
def add_labels(self, texts):
row = 0
for text in texts:
if text != None:
label = Gtk.Label(text)
label.set_alignment(1, 0.5)
label.get_style_context().add_class("dim-label")
self.attach(label, 0, 1, row, row+1, xoptions=Gtk.AttachOptions.EXPAND|Gtk.AttachOptions.FILL)
row = row + 1
def add_controls(self, controls):
row = 0
for control in controls:
self.attach(control, 1, 2, row, row+1)
row = row + 1
class EditableEntry (Gtk.Notebook):
__gsignals__ = {
'changed': (GObject.SIGNAL_RUN_FIRST, None,
(str,))
}
PAGE_BUTTON = 0
PAGE_ENTRY = 1
def __init__ (self):
super(EditableEntry, self).__init__()
self.label = Gtk.Label()
self.entry = Gtk.Entry()
self.button = Gtk.Button()
self.button.set_alignment(0.0, 0.5)
self.button.set_relief(Gtk.ReliefStyle.NONE)
self.append_page(self.button, None);
self.append_page(self.entry, None);
self.set_current_page(0)
self.set_show_tabs(False)
self.set_show_border(False)
self.editable = False
self.show_all()
self.button.connect("released", self._on_button_clicked)
self.button.connect("activate", self._on_button_clicked)
self.entry.connect("activate", self._on_entry_validated)
self.entry.connect("changed", self._on_entry_changed)
def set_text(self, text):
self.button.set_label(text)
self.entry.set_text(text)
def _on_button_clicked(self, button):
self.set_editable(True)
def _on_entry_validated(self, entry):
self.set_editable(False)
self.emit("changed", entry.get_text())
def _on_entry_changed(self, entry):
self.button.set_label(entry.get_text())
def set_editable(self, editable):
if (editable):
self.set_current_page(EditableEntry.PAGE_ENTRY)
else:
self.set_current_page(EditableEntry.PAGE_BUTTON)
self.editable = editable
def set_tooltip_text(self, tooltip):
self.button.set_tooltip_text(tooltip)
def get_editable(self):
return self.editable
def get_text(self):
return self.entry.get_text()
class PasswordDialog(Gtk.Dialog):
def __init__ (self, user, password_mask, group_mask):
super(PasswordDialog, self).__init__()
self.user = user
self.password_mask = password_mask
self.group_mask = group_mask
self.set_modal(True)
self.set_skip_taskbar_hint(True)
self.set_skip_pager_hint(True)
self.set_title(_("Change Password"))
table = DimmedTable()
table.add_labels([_("New password"), None, _("Confirm password")])
self.new_password = Gtk.Entry()
self.new_password.set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, "reload")
self.new_password.set_icon_tooltip_text(Gtk.EntryIconPosition.SECONDARY, _("Generate a password"))
self.new_password.connect("icon-release", self._on_new_password_icon_released)
self.new_password.connect("changed", self._on_passwords_changed)
table.attach(self.new_password, 1, 3, 0, 1)
self.strengh_indicator = Gtk.ProgressBar()
self.strengh_indicator.set_tooltip_text(_("Your new password needs to be at least 8 characters long"))
self.strengh_indicator.set_fraction(0.0)
table.attach(self.strengh_indicator, 1, 2, 1, 2, xoptions=Gtk.AttachOptions.EXPAND|Gtk.AttachOptions.FILL)
self.strengh_indicator.set_size_request(-1, 1)
self.strengh_label = Gtk.Label()
self.strengh_label.set_tooltip_text(_("Your new password needs to be at least 8 characters long"))
self.strengh_label.set_alignment(1, 0.5)
table.attach(self.strengh_label, 2, 3, 1, 2)
self.confirm_password = Gtk.Entry()
self.confirm_password.connect("changed", self._on_passwords_changed)
table.attach(self.confirm_password, 1, 3, 2, 3)
self.show_password = Gtk.CheckButton(_("Show password"))
self.show_password.connect('toggled', self._on_show_password_toggled)
table.attach(self.show_password, 1, 3, 3, 4)
self.set_border_width(6)
box = self.get_content_area()
box.add(table)
self.show_all()
self.infobar = Gtk.InfoBar()
self.infobar.set_message_type(Gtk.MessageType.ERROR)
label = Gtk.Label(_("An error occured. Your password was not changed."))
content = self.infobar.get_content_area()
content.add(label)
table.attach(self.infobar, 0, 3, 4, 5)
self.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, _("Change"), Gtk.ResponseType.OK, )
self.set_passwords_visibility()
self.set_response_sensitive(Gtk.ResponseType.OK, False)
self.infobar.hide()
self.connect("response", self._on_response)
def _on_response(self, dialog, response_id):
if response_id == Gtk.ResponseType.OK:
self.change_password()
else:
self.destroy()
def change_password(self):
newpass = self.new_password.get_text()
self.user.set_password(newpass, "")
mask = self.group_mask.get_text()
if "nopasswdlogin" in mask:
subprocess.call(["gpasswd", "-d", self.user.get_user_name(), "nopasswdlogin"])
mask = mask.split(", ")
mask.remove("nopasswdlogin")
mask = ", ".join(mask)
self.group_mask.set_text(mask)
self.password_mask.set_text(u'\u2022\u2022\u2022\u2022\u2022\u2022')
self.destroy()
def set_passwords_visibility(self):
visible = self.show_password.get_active()
self.new_password.set_visibility(visible)
self.confirm_password.set_visibility(visible)
def _on_new_password_icon_released(self, widget, icon_pos, event):
self.infobar.hide()
self.show_password.set_active(True)
characters = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-"
newpass = ""
for i in range (8):
index = randint(0, len(characters) -1)
newpass = newpass + characters[index]
self.new_password.set_text(newpass)
self.confirm_password.set_text(newpass)
self.check_passwords()
def _on_show_password_toggled(self, widget):
self.set_passwords_visibility()
# Based on setPasswordStrength() in Mozilla Seamonkey, which is tri-licensed under MPL 1.1, GPL 2.0, and LGPL 2.1.
# Forked from Ubiquity validation.py
def password_strength(self, password):
upper = lower = digit = symbol = 0
for char in password:
if char.isdigit():
digit += 1
elif char.islower():
lower += 1
elif char.isupper():
upper += 1
else:
symbol += 1
length = len(password)
length = min(length,4)
digit = min(digit,3)
upper = min(upper,3)
symbol = min(symbol,3)
strength = (
((length * 0.1) - 0.2) +
(digit * 0.1) +
(symbol * 0.15) +
(upper * 0.1))
if strength > 1:
strength = 1
if strength < 0:
strength = 0
return strength
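# Worked example (illustrative): "Abc123!x" has length 8, 3 digits, 1 upper-case character and 1 symbol,
# so strength = (4*0.1 - 0.2) + (3*0.1) + (1*0.15) + (1*0.1) = 0.75, which is rated "Good" below.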
def _on_passwords_changed(self, widget):
self.infobar.hide()
new_password = self.new_password.get_text()
confirm_password = self.confirm_password.get_text()
strength = self.password_strength(new_password)
if new_password != confirm_password:
self.confirm_password.set_icon_from_stock(Gtk.EntryIconPosition.SECONDARY, Gtk.STOCK_DIALOG_WARNING)
self.confirm_password.set_icon_tooltip_text(Gtk.EntryIconPosition.SECONDARY, _("Passwords do not match"))
else:
self.confirm_password.set_icon_from_stock(Gtk.EntryIconPosition.SECONDARY, None)
if len(new_password) < 8:
self.strengh_label.set_text(_("Too short"))
self.strengh_indicator.set_fraction(0.0)
elif strength < 0.5:
self.strengh_label.set_text(_("Weak"))
self.strengh_indicator.set_fraction(0.2)
elif strength < 0.75:
self.strengh_label.set_text(_("Fair"))
self.strengh_indicator.set_fraction(0.4)
elif strength < 0.9:
self.strengh_label.set_text(_("Good"))
self.strengh_indicator.set_fraction(0.6)
else:
self.strengh_label.set_text(_("Strong"))
self.strengh_indicator.set_fraction(1.0)
self.check_passwords()
def check_passwords(self):
new_password = self.new_password.get_text()
confirm_password = self.confirm_password.get_text()
if len(new_password) >= 8 and new_password == confirm_password:
self.set_response_sensitive(Gtk.ResponseType.OK, True)
else:
self.set_response_sensitive(Gtk.ResponseType.OK, False)
class NewUserDialog(Gtk.Dialog):
def __init__ (self):
super(NewUserDialog, self).__init__()
try:
self.set_modal(True)
self.set_skip_taskbar_hint(True)
self.set_skip_pager_hint(True)
self.set_title("")
self.account_type_combo = Gtk.ComboBoxText()
self.account_type_combo.append_text(_("Standard"))
self.account_type_combo.append_text(_("Administrator"))
self.account_type_combo.set_active(0)
self.realname_entry = Gtk.Entry()
self.realname_entry.connect("changed", self._on_info_changed)
self.username_entry = Gtk.Entry()
self.username_entry.connect("changed", self._on_info_changed)
label = Gtk.Label()
label.set_markup(_("The username must consist of only:\n - lower case letters (a-z)\n - numerals (0-9)\n - '.', '-', and '_' characters"))
table = DimmedTable()
table.add_labels([_("Account Type"), _("Full Name"), _("Username")])
table.add_controls([self.account_type_combo, self.realname_entry, self.username_entry])
self.set_border_width(6)
box = self.get_content_area()
box.add(table)
box.add(label)
self.show_all()
self.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_ADD, Gtk.ResponseType.OK, )
self.set_response_sensitive(Gtk.ResponseType.OK, False)
except Exception, detail:
print detail
def _on_info_changed(self, widget):
fullname = self.realname_entry.get_text()
username = self.username_entry.get_text()
valid = True
if re.search('[^a-z0-9_.-]', username):
self.username_entry.set_icon_from_stock(Gtk.EntryIconPosition.SECONDARY, Gtk.STOCK_DIALOG_WARNING)
self.username_entry.set_icon_tooltip_text(Gtk.EntryIconPosition.SECONDARY, _("Invalid username"))
valid = False
else:
self.username_entry.set_icon_from_stock(Gtk.EntryIconPosition.SECONDARY, None)
if username == "" or fullname == "":
valid = False
self.set_response_sensitive(Gtk.ResponseType.OK, valid)
class GroupsDialog(Gtk.Dialog):
def __init__ (self, username):
super(GroupsDialog, self).__init__()
try:
self.set_modal(True)
self.set_skip_taskbar_hint(True)
self.set_skip_pager_hint(True)
self.set_title("")
self.set_default_size(200, 480)
scrolled = Gtk.ScrolledWindow()
viewport = Gtk.Viewport()
vbox = Gtk.VBox()
self.checkboxes = []
groups = sorted(grp.getgrall(), key=lambda x: x[0], reverse=False)
for group in groups:
checkbox = Gtk.CheckButton(group[0])
self.checkboxes.append(checkbox)
vbox.add(checkbox)
if username in group[3]:
checkbox.set_active(True)
viewport.add(vbox)
scrolled.add(viewport)
self.set_border_width(6)
box = self.get_content_area()
box.pack_start(scrolled, True, True, 0)
self.show_all()
self.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.ResponseType.OK, )
except Exception, detail:
print detail
def get_selected_groups(self):
groups = []
for checkbox in self.checkboxes:
if checkbox.get_active():
groups.append(checkbox.get_label())
return groups
class Module:
def __init__(self):
try:
self.builder = Gtk.Builder()
self.builder.add_from_file("/usr/share/cinnamon/cinnamon-settings-users/cinnamon-settings-users.ui")
self.window = self.builder.get_object("main_window")
self.window.connect("destroy", Gtk.main_quit)
self.window.set_title(_("Users and Groups"))
self.builder.get_object("label_users").set_label(_("Users"))
self.builder.get_object("label_groups").set_label(_("Groups"))
self.builder.get_object("button_add_user").connect("clicked", self.on_user_addition)
self.builder.get_object("button_delete_user").connect("clicked", self.on_user_deletion)
self.builder.get_object("button_add_group").connect("clicked", self.on_group_addition)
self.builder.get_object("button_edit_group").connect("clicked", self.on_group_edition)
self.builder.get_object("button_delete_group").connect("clicked", self.on_group_deletion)
self.users = Gtk.TreeStore(object, GdkPixbuf.Pixbuf, str)
self.users.set_sort_column_id(2, Gtk.SortType.ASCENDING)
self.groups = Gtk.TreeStore(int, str)
self.groups.set_sort_column_id(1, Gtk.SortType.ASCENDING)
self.users_treeview = self.builder.get_object("treeview_users")
self.users_treeview.set_rules_hint(True)
self.groups_treeview = self.builder.get_object("treeview_groups")
self.users_treeview.get_selection().connect("changed", self.on_user_selection)
self.groups_treeview.get_selection().connect("changed", self.on_group_selection)
column = Gtk.TreeViewColumn()
cell = Gtk.CellRendererPixbuf()
column.pack_start(cell, True)
column.add_attribute(cell, 'pixbuf', INDEX_USER_PICTURE)
cell.set_property('ypad', 1)
self.users_treeview.append_column(column)
column = Gtk.TreeViewColumn()
cell = Gtk.CellRendererText()
column.pack_start(cell, True)
column.add_attribute(cell, 'markup', INDEX_USER_DESCRIPTION)
self.users_treeview.append_column(column)
column = Gtk.TreeViewColumn()
cell = Gtk.CellRendererText()
column.pack_start(cell, True)
column.add_attribute(cell, 'text', INDEX_GROUPNAME)
column.set_sort_column_id(1)
self.groups_treeview.append_column(column)
self.builder.get_object("button_delete_user").set_sensitive(False)
self.builder.get_object("button_edit_group").set_sensitive(False)
self.builder.get_object("button_delete_group").set_sensitive(False)
self.face_button = Gtk.Button()
self.face_image = Gtk.Image()
self.face_button.set_image(self.face_image)
self.face_image.set_from_file("/usr/share/cinnamon/faces/user-generic.png")
self.face_button.set_alignment(0.0, 0.5)
self.face_button.set_tooltip_text(_("Click to change the picture"))
self.menu = Gtk.Menu()
separator = Gtk.SeparatorMenuItem()
face_browse_menuitem = Gtk.MenuItem(_("Browse for more pictures..."))
face_browse_menuitem.connect('activate', self._on_face_browse_menuitem_activated)
self.face_button.connect("button-release-event", self.menu_display)
row = 0
col = 0
num_cols = 4
face_dirs = ["/usr/share/cinnamon/faces"]
for face_dir in face_dirs:
if os.path.exists(face_dir):
pictures = sorted(os.listdir(face_dir))
for picture in pictures:
path = os.path.join(face_dir, picture)
file = Gio.File.new_for_path(path)
file_icon = Gio.FileIcon.new(file)
image = Gtk.Image.new_from_gicon (file_icon, Gtk.IconSize.DIALOG)
menuitem = Gtk.MenuItem()
menuitem.add(image)
menuitem.connect('activate', self._on_face_menuitem_activated, path)
self.menu.attach(menuitem, col, col+1, row, row+1)
col = (col+1) % num_cols
if (col == 0):
row = row + 1
row = row + 1
self.menu.attach(separator, 0, 4, row, row+1)
self.menu.attach(face_browse_menuitem, 0, 4, row+2, row+3)
self.account_type_combo = Gtk.ComboBoxText()
self.account_type_combo.append_text(_("Standard"))
self.account_type_combo.append_text(_("Administrator"))
self.account_type_combo.connect("changed", self._on_accounttype_changed)
self.realname_entry = EditableEntry()
self.realname_entry.connect("changed", self._on_realname_changed)
self.realname_entry.set_tooltip_text(_("Click to change the name"))
self.password_mask = Gtk.Label()
self.password_mask.set_alignment(0.0, 0.5)
self.password_button = Gtk.Button()
self.password_button.add(self.password_mask)
self.password_button.set_relief(Gtk.ReliefStyle.NONE)
self.password_button.set_tooltip_text(_("Click to change the password"))
self.password_button.connect('activate', self._on_password_button_clicked)
self.password_button.connect('released', self._on_password_button_clicked)
self.groups_label = Gtk.Label()
self.groups_label.set_line_wrap(True)
self.groups_label.set_alignment(0, 0.5)
self.groups_button = Gtk.Button()
self.groups_button.add(self.groups_label)
self.groups_button.set_relief(Gtk.ReliefStyle.NONE)
self.groups_button.set_tooltip_text(_("Click to change the groups"))
self.groups_button.connect("clicked", self._on_groups_button_clicked)
box = Gtk.Box()
box.pack_start(self.face_button, False, False, 0)
table = DimmedTable()
table.add_labels([_("Picture"), _("Account Type"), _("Name"), _("Password"), _("Groups")])
table.add_controls([box, self.account_type_combo, self.realname_entry, self.password_button, self.groups_button])
self.builder.get_object("box_users").add(table)
self.accountService = AccountsService.UserManager.get_default()
self.accountService.connect('notify::is-loaded', self.on_accounts_service_loaded)
self.load_groups()
self.window.show_all()
self.builder.get_object("box_users").hide()
except Exception, detail:
print detail
def _on_password_button_clicked(self, widget):
model, treeiter = self.users_treeview.get_selection().get_selected()
if treeiter != None:
user = model[treeiter][INDEX_USER_OBJECT]
dialog = PasswordDialog(user, self.password_mask, self.groups_label)
response = dialog.run()
def _on_groups_button_clicked(self, widget):
model, treeiter = self.users_treeview.get_selection().get_selected()
if treeiter != None:
user = model[treeiter][INDEX_USER_OBJECT]
dialog = GroupsDialog(user.get_user_name())
response = dialog.run()
if response == Gtk.ResponseType.OK:
groups = dialog.get_selected_groups()
subprocess.call(["usermod", user.get_user_name(), "-G", ",".join(groups)])
groups.sort()
self.groups_label.set_text(", ".join(groups))
dialog.destroy()
def _on_accounttype_changed(self, combobox):
model, treeiter = self.users_treeview.get_selection().get_selected()
if treeiter != None:
user = model[treeiter][INDEX_USER_OBJECT]
if self.account_type_combo.get_active() == 1:
user.set_account_type(AccountsService.UserAccountType.ADMINISTRATOR)
else:
user.set_account_type(AccountsService.UserAccountType.STANDARD)
groups = []
for group in grp.getgrall():
if user.get_user_name() in group[3]:
groups.append(group[0])
groups.sort()
self.groups_label.set_text(", ".join(groups))
def _on_realname_changed(self, widget, text):
model, treeiter = self.users_treeview.get_selection().get_selected()
if treeiter != None:
user = model[treeiter][INDEX_USER_OBJECT]
user.set_real_name(text)
description = "<b>%s</b>\n%s" % (text, user.get_user_name())
model.set_value(treeiter, INDEX_USER_DESCRIPTION, description)
def _on_face_browse_menuitem_activated(self, menuitem):
model, treeiter = self.users_treeview.get_selection().get_selected()
if treeiter != None:
user = model[treeiter][INDEX_USER_OBJECT]
dialog = Gtk.FileChooserDialog(None, None, Gtk.FileChooserAction.OPEN, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN, Gtk.ResponseType.OK))
filter = Gtk.FileFilter()
filter.set_name(_("Images"))
filter.add_mime_type("image/*")
dialog.add_filter(filter)
preview = Gtk.Image()
dialog.set_preview_widget(preview);
dialog.connect("update-preview", self.update_preview_cb, preview)
dialog.set_use_preview_label(False)
response = dialog.run()
if response == Gtk.ResponseType.OK:
path = dialog.get_filename()
image = PIL.Image.open(path)
width, height = image.size
if width > height:
new_width = height
new_height = height
elif height > width:
new_width = width
new_height = width
else:
new_width = width
new_height = height
left = (width - new_width)/2
top = (height - new_height)/2
right = (width + new_width)/2
bottom = (height + new_height)/2
image = image.crop((left, top, right, bottom))
image.thumbnail((96, 96), Image.ANTIALIAS)
face_path = os.path.join(user.get_home_dir(), ".face")
image.save(face_path, "png")
user.set_icon_file(face_path)
self.face_image.set_from_file(face_path)
model.set_value(treeiter, INDEX_USER_PICTURE, GdkPixbuf.Pixbuf.new_from_file_at_size(face_path, 48, 48))
model.row_changed(model.get_path(treeiter), treeiter)
dialog.destroy()
def update_preview_cb (self, dialog, preview):
filename = dialog.get_preview_filename()
if filename is None:
return
dialog.set_preview_widget_active(False)
if os.path.isfile(filename):
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(filename, 128, 128)
if pixbuf is not None:
preview.set_from_pixbuf (pixbuf)
dialog.set_preview_widget_active(True)
def _on_face_menuitem_activated(self, menuitem, path):
if os.path.exists(path):
model, treeiter = self.users_treeview.get_selection().get_selected()
if treeiter != None:
user = model[treeiter][INDEX_USER_OBJECT]
user.set_icon_file(path)
self.face_image.set_from_file(path)
shutil.copy(path, os.path.join(user.get_home_dir(), ".face"))
model.set_value(treeiter, INDEX_USER_PICTURE, GdkPixbuf.Pixbuf.new_from_file_at_size(path, 48, 48))
model.row_changed(model.get_path(treeiter), treeiter)
def menu_display(self, widget, event):
if event.button == 1:
self.menu.popup(None, None, self.popup_menu_below_button, self.face_button, event.button, event.time)
self.menu.show_all()
def popup_menu_below_button (self, *args):
# the introspection for GtkMenuPositionFunc seems to change with each Gtk version,
# this is a workaround to make sure we get the menu and the widget
menu = args[0]
widget = args[-1]
# here I get the coordinates of the button relative to
# window (self.window)
button_x, button_y = widget.get_allocation().x, widget.get_allocation().y
# now convert them to X11-relative
unused_var, window_x, window_y = widget.get_window().get_origin()
x = window_x + button_x
y = window_y + button_y
# now move the menu below the button
y += widget.get_allocation().height
push_in = True # push_in is True so all menu is always inside screen
return (x, y, push_in)
def on_accounts_service_loaded(self, user, param):
self.load_users()
def load_users(self):
self.users.clear()
users = self.accountService.list_users()
for user in users:
if os.path.exists(user.get_icon_file()):
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(user.get_icon_file(), 48, 48)
else:
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size("/usr/share/cinnamon/faces/user-generic.png", 48, 48)
description = "<b>%s</b>\n%s" % (user.get_real_name(), user.get_user_name())
piter = self.users.append(None, [user, pixbuf, description])
self.users_treeview.set_model(self.users)
def load_groups(self):
self.groups.clear()
groups = sorted(grp.getgrall(), key=lambda x: x[0], reverse=False)
for group in groups:
(gr_name, gr_passwd, gr_gid, gr_mem) = group
piter = self.groups.append(None, [gr_gid, gr_name])
self.groups_treeview.set_model(self.groups)
#USER CALLBACKS
def on_user_selection(self, selection):
self.password_button.set_sensitive(True)
self.password_button.set_tooltip_text("")
model, treeiter = selection.get_selected()
if treeiter != None:
user = model[treeiter][INDEX_USER_OBJECT]
self.builder.get_object("button_delete_user").set_sensitive(True)
self.realname_entry.set_text(user.get_real_name())
if user.get_password_mode() == AccountsService.UserPasswordMode.REGULAR:
self.password_mask.set_text(u'\u2022\u2022\u2022\u2022\u2022\u2022')
elif user.get_password_mode() == AccountsService.UserPasswordMode.NONE:
self.password_mask.set_markup("<b>%s</b>" % _("No password set"))
else:
self.password_mask.set_text(_("Set at login"))
if user.get_account_type() == AccountsService.UserAccountType.ADMINISTRATOR:
self.account_type_combo.set_active(1)
else:
self.account_type_combo.set_active(0)
if os.path.exists(user.get_icon_file()):
self.face_image.set_from_file(user.get_icon_file())
else:
self.face_image.set_from_file("/usr/share/cinnamon/faces/user-generic.png")
groups = []
for group in grp.getgrall():
if user.get_user_name() in group[3]:
groups.append(group[0])
groups.sort()
self.groups_label.set_text(", ".join(groups))
self.builder.get_object("box_users").show()
# Count the number of connections for the currently logged-in user
connections = int(subprocess.check_output(["w", "-hs", user.get_user_name()]).count("\n"))
if connections > 0:
self.builder.get_object("button_delete_user").set_sensitive(False)
self.builder.get_object("button_delete_user").set_tooltip_text(_("This user is currently logged in"))
else:
self.builder.get_object("button_delete_user").set_sensitive(True)
self.builder.get_object("button_delete_user").set_tooltip_text("")
if os.path.exists("/home/.ecryptfs/%s" % user.get_user_name()):
self.password_button.set_sensitive(False)
self.password_button.set_tooltip_text(_("The user's home directory is encrypted. To preserve access to the encrypted directory, only the user should change this password."))
else:
self.builder.get_object("button_delete_user").set_sensitive(False)
self.builder.get_object("box_users").hide()
def on_user_deletion(self, event):
model, treeiter = self.users_treeview.get_selection().get_selected()
if treeiter != None:
user = model[treeiter][INDEX_USER_OBJECT]
message = _("Are you sure you want to permanently delete %s and all the files associated with this user?") % user.get_user_name()
d = Gtk.MessageDialog(self.window,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.QUESTION,
Gtk.ButtonsType.YES_NO,
message)
d.set_markup(message)
d.set_default_response(Gtk.ResponseType.NO)
r = d.run()
d.destroy()
if r == Gtk.ResponseType.YES:
result = self.accountService.delete_user(user, True)
if result:
model.remove(treeiter)
self.load_groups()
def on_user_addition(self, event):
dialog = NewUserDialog()
response = dialog.run()
if response == Gtk.ResponseType.OK:
if dialog.account_type_combo.get_active() == 1:
account_type = AccountsService.UserAccountType.ADMINISTRATOR
else:
account_type = AccountsService.UserAccountType.STANDARD
fullname = dialog.realname_entry.get_text()
username = dialog.username_entry.get_text()
new_user = self.accountService.create_user(username, fullname, account_type)
new_user.set_password_mode(AccountsService.UserPasswordMode.NONE)
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size("/usr/share/cinnamon/faces/user-generic.png", 48, 48)
description = "<b>%s</b>\n%s" % (fullname, username)
piter = self.users.append(None, [new_user, pixbuf, description])
# Add the user to his/her own group and sudo if Administrator was selected
if dialog.account_type_combo.get_active() == 1:
subprocess.call(["usermod", username, "-G", "%s,sudo,nopasswdlogin" % username])
else:
subprocess.call(["usermod", username, "-G", "%s,nopasswdlogin" % username])
self.load_groups()
dialog.destroy()
def on_user_edition(self, event):
model, treeiter = self.users_treeview.get_selection().get_selected()
if treeiter != None:
print "Editing user %s" % model[treeiter][INDEX_USER_OBJECT].get_user_name()
# GROUPS CALLBACKS
def on_group_selection(self, selection):
model, treeiter = selection.get_selected()
if treeiter != None:
self.builder.get_object("button_edit_group").set_sensitive(True)
self.builder.get_object("button_delete_group").set_sensitive(True)
self.builder.get_object("button_delete_group").set_tooltip_text("")
group = model[treeiter][INDEX_GROUPNAME]
for p in pwd.getpwall():
username = p[0]
primary_group = grp.getgrgid(p[3])[0]
if primary_group == group:
self.builder.get_object("button_delete_group").set_sensitive(False)
self.builder.get_object("button_delete_group").set_tooltip_text(_("This group is set as %s's primary group") % username)
break
else:
self.builder.get_object("button_edit_group").set_sensitive(False)
self.builder.get_object("button_delete_group").set_sensitive(False)
self.builder.get_object("button_delete_group").set_tooltip_text("")
def on_group_deletion(self, event):
model, treeiter = self.groups_treeview.get_selection().get_selected()
if treeiter != None:
group = model[treeiter][INDEX_GROUPNAME]
message = _("Are you sure you want to permanently delete %s?") % group
d = Gtk.MessageDialog(self.window,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.QUESTION,
Gtk.ButtonsType.YES_NO,
message)
d.set_markup(message)
d.set_default_response(Gtk.ResponseType.NO)
r = d.run()
if r == Gtk.ResponseType.YES:
subprocess.call(["groupdel", group])
self.load_groups()
d.destroy()
def on_group_addition(self, event):
dialog = GroupDialog(_("Group Name"), "")
response = dialog.run()
if response == Gtk.ResponseType.OK:
subprocess.call(["groupadd", dialog.entry.get_text().lower()])
self.load_groups()
dialog.destroy()
def on_group_edition(self, event):
model, treeiter = self.groups_treeview.get_selection().get_selected()
if treeiter != None:
group = model[treeiter][INDEX_GROUPNAME]
dialog = GroupDialog(_("Group Name"), group)
response = dialog.run()
if response == Gtk.ResponseType.OK:
subprocess.call(["groupmod", group, "-n", dialog.entry.get_text().lower()])
self.load_groups()
dialog.destroy()
if __name__ == "__main__":
module = Module()
Gtk.main()
| Kulmerov/Cinnamon | files/usr/share/cinnamon/cinnamon-settings-users/cinnamon-settings-users.py | Python | gpl-2.0 | 37,177 | 0.00382 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# [START documentai_process_splitter_document]
# TODO(developer): Uncomment these variables before running the sample.
# project_id= 'YOUR_PROJECT_ID'
# location = 'YOUR_PROJECT_LOCATION' # Format is 'us' or 'eu'
# processor_id = 'YOUR_PROCESSOR_ID' # Create processor in Cloud Console
# file_path = '/path/to/local/pdf'
def process_document_splitter_sample(
project_id: str, location: str, processor_id: str, file_path: str
):
from google.cloud import documentai_v1beta3 as documentai
# You must set the api_endpoint if you use a location other than 'us', e.g.:
opts = {}
if location == "eu":
opts = {"api_endpoint": "eu-documentai.googleapis.com"}
client = documentai.DocumentProcessorServiceClient(client_options=opts)
# The full resource name of the processor, e.g.:
# projects/project-id/locations/location/processor/processor-id
# You must create new processors in the Cloud Console first
name = f"projects/{project_id}/locations/{location}/processors/{processor_id}"
with open(file_path, "rb") as image:
image_content = image.read()
# Read the file into memory
document = {"content": image_content, "mime_type": "application/pdf"}
# Configure the process request
request = {"name": name, "raw_document": document}
# Recognizes text entities in the PDF document
result = client.process_document(request=request)
print("Document processing complete.\n")
# Read the splitter output from the document splitter processor:
# https://cloud.google.com/document-ai/docs/processors-list#processor_doc-splitter
# This processor only provides text for the document and information on how
# to split the document on logical boundaries. To identify and extract text,
# form elements, and entities, please see other processors like the OCR, form,
# and specialized processors.
document = result.document
print(f"Found {len(document.entities)} subdocuments:")
for entity in document.entities:
conf_percent = "{:.1%}".format(entity.confidence)
pages_range = page_refs_to_string(entity.page_anchor.page_refs)
# Print subdocument type information, if available
try:
doctype = entity.type
print(
f'{conf_percent} confident that {pages_range} a "{doctype}" subdocument.'
)
except AttributeError:
print(f"{conf_percent} confident that {pages_range} a subdocument.")
def page_refs_to_string(page_refs: dict) -> str:
""" Converts a page ref to a string describing the page or page range."""
if len(page_refs) == 1:
num = str(int(page_refs[0].page) + 1)
return f"page {num} is"
else:
start = str(int(page_refs[0].page) + 1)
end = str(int(page_refs[1].page) + 1)
return f"pages {start} to {end} are"
# [END documentai_process_splitter_document]
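# Example invocation (a sketch; the project, processor ID and file path below are placeholders, not real resources):
# process_document_splitter_sample(
#     project_id="my-project", location="us",
#     processor_id="1234567890abcdef", file_path="combined_invoices.pdf")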
| googleapis/python-documentai | samples/snippets/process_document_splitter_sample.py | Python | apache-2.0 | 3,497 | 0.001716 |
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('glance')
version_string = version_info.version_string
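# Usage sketch: callers import `version_string` and call it to obtain the installed
# package version as a string, e.g. glance.version.version_string() might return "12.0.0".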
| openstack/glance | glance/version.py | Python | apache-2.0 | 731 | 0 |
'''
Imports the hooks dynamically while keeping the package API clean,
abstracting the underlying modules
'''
from airflow.utils import import_module_attrs as _import_module_attrs
_hooks = {
'ftp_hook': ['FTPHook'],
}
_import_module_attrs(globals(), _hooks)
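# After the dynamic import above, the hook class is exposed at package level, so (sketch):
# from airflow.contrib.hooks import FTPHook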
| cswaroop/airflow | airflow/contrib/hooks/__init__.py | Python | apache-2.0 | 264 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
r"""Application template.
"""
# Import standard packages.
import inspect
import logging
# Import installed packages.
import matplotlib.pyplot as plt
import seaborn as sns
# Import local packages.
from .. import utils
# Define module exports:
__all__ = ['prepend_this']
# Define state settings and globals.
# Note: For non-root-level loggers, use `getLogger(__name__)`
# http://stackoverflow.com/questions/17336680/python-logging-with-multiple-modules-does-not-work
logger = logging.getLogger(__name__)
# Set the matplotlib backend to the Anti-Grain Geometry C++ library.
# Note: Use plt.switch_backend since matplotlib.use('agg') before importing pyplot fails.
plt.switch_backend('agg')
# Set matplotlib styles with seaborn
sns.set()
def prepend_this(app_arg:str):
r"""Prepend the application argument with 'Prepended '
Args:
app_arg (str): `str` to prepend.
Returns:
app_ret (str): Prepended `str`.
Raises:
ValueError: Raised if not `isinstance(app_arg, str)`
"""
# Check arguments.
if not isinstance(app_arg, str):
raise ValueError(
"`app_arg` must be type `str`. " +
"Required: type(app_arg) == str. " +
"Given: type(app_arg) == {typ}".format(
typ=type(app_arg)))
# Define 'here' for logger and log arguments passed.
here = inspect.stack()[0].function
frame = inspect.currentframe()
(args, *_, values) = inspect.getargvalues(frame)
logger.info(here+": Argument values: {args_values}".format(
args_values=[(arg, values[arg]) for arg in sorted(args)]))
# Log the code version from util.__version__.
logger.info(here+": Version = {version}".format(version=utils.__version__))
# Prepend the argument and return.
app_ret = 'Prepended '+app_arg
return app_ret
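# Usage sketch: prepend_this('demo') returns 'Prepended demo', while a non-str argument
# such as prepend_this(42) raises ValueError via the type check above.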
| stharrold/demo | demo/app_template/template.py | Python | mit | 1,935 | 0.001034 |
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
import datetime # for checking renewal date range
class RenewBookForm(forms.Form):
renewal_date = forms.DateField(help_text="Enter a date between now and 4 weeks (default 3). ")
def clean_renewal_date(self):
data = self.cleaned_data['renewal_date']
# check date is not in past
if data < datetime.date.today():
raise ValidationError(_('Invalid date - renewal in past'))
# check date is in range librarian allowed to change(+4 weeks)
if data > datetime.date.today() + datetime.timedelta(weeks=4):
raise ValidationError(_('Invalid date - renewal more than 4 weeks ahead'))
# Returning the cleaned data
return data
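# Usage sketch (hypothetical view code): bind the form to a date two weeks out and validate it.
# form = RenewBookForm({'renewal_date': datetime.date.today() + datetime.timedelta(weeks=2)})
# form.is_valid()  # True; a past date or one more than 4 weeks ahead fails clean_renewal_date()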
| PatrickCmd/django_local_library | catalog/forms.py | Python | apache-2.0 | 776 | 0.016753 |
'''
*******************************************************************************
* ButtonEvent.py is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ButtonEvent.py is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ButtonEvent.py. If not, see <http://www.gnu.org/licenses/>.
********************************************************************************
Created on Jan 5, 2010
@author: iocanto
'''
BUTTON_SELECT = 257
BUTTON_HOTKEY_1 = 258;
BUTTON_HOTKEY_2 = 259;
BUTTON_HOTKEY_3 = 260;
BUTTON_HOTKEY_4 = 261;
BUTTON_RIGHT = 262;
BUTTON_LEFT = 263;
BUTTON_UP = 264;
BUTTON_DOWN = 265;
KEY_UP = 0
KEY_DOWN = 1
class ButtonEvent():
# Constructor
def __init__(self, button = BUTTON_HOTKEY_1, action = KEY_UP ):
self.__button = button
self.__action = action
def __str__ (self):
return "ButtonEvent [__button %i]" % self.__button
def getAction(self):
return self.__action
def getButton(self):
return self.__button
def getButtonName(self):
return { 257 : "BUTTON_SELECT" ,
258 : "BUTTON_HOTKEY_1",
259 : "BUTTON_HOTKEY_2",
260 : "BUTTON_HOTKEY_3",
261 : "BUTTON_HOTKEY_4",
262 : "BUTTON_RIGHT" ,
263 : "BUTTON_LEFT" ,
264 : "BUTTON_UP" ,
265 : "BUTTON_DOWN" ,
}[self.__button]
def setAction(self, action):
self.__action = action
def setButton(self, button):
self.__button = button
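# Usage sketch:
# event = ButtonEvent(BUTTON_UP, KEY_DOWN)
# print(event.getButtonName())   # -> "BUTTON_UP"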
| iocanto/bug-python-libraries | ButtonEvent.py | Python | gpl-3.0 | 2,246 | 0.026269 |
lookup = {}
lookup = dict()
lookup = {'age': 42, 'loc': 'Italy'}
lookup = dict(age=42, loc='Italy')
print(lookup)
print(lookup['loc'])
lookup['cat'] = 'cat'
if 'cat' in lookup:
print(lookup['cat'])
class Wizard:
# This actually creates a key value dictionary
def __init__(self, name, level):
self.level = level
self.name = name
# There is an implicit dictionary that stores this data
gandolf = Wizard('Gladolf', 42)
print(gandolf.__dict__)
# The takeaway is that all objects are built around the concept of dictionary data structures
# Here is another example
import collections
User = collections.namedtuple('User', 'id, name, email')
users = [
User(1, 'user1', 'user1@test.com'),
User(2, 'user2', 'user2@test.com'),
User(3, 'user3', 'user3@test.com'),
]
lookup = dict()
for u in users:
lookup[u.email] = u
print(lookup['user2@test.com'])
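# The same email-keyed lookup can be built more concisely with a dict comprehension:
lookup = {u.email: u for u in users}
print(lookup['user3@test.com'])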
| derrickyoo/python-jumpstart | apps/09_real_estate_data_miner/concept_dicts.py | Python | mit | 933 | 0.005359 |
import logging
import json
import textwrap
from json.encoder import JSONEncoder
from logging import StreamHandler, Formatter, FileHandler
from ethereum.utils import bcolors, is_numeric
DEFAULT_LOGLEVEL = 'INFO'
JSON_FORMAT = '%(message)s'
PRINT_FORMAT = '%(levelname)s:%(name)s\t%(message)s'
FILE_PREFIX = '%(asctime)s'
TRACE = 5
known_loggers = set()
log_listeners = []
def _inject_into_logger(name, code, namespace=None):
# This is a hack to fool the logging module into reporting correct source files.
# It determines the actual source of a logging call by inspecting the stack frame's
# source file. So we use this `eval(compile())` construct to "inject" our additional
# methods into the logging module.
if namespace is None:
namespace = {}
eval(
compile(
code,
logging._srcfile,
'exec'
),
namespace
)
setattr(logging.Logger, name, namespace[name])
# Add `trace()` level to Logger
_inject_into_logger(
'trace',
textwrap.dedent(
"""\
def trace(self, msg, *args, **kwargs):
if self.isEnabledFor(TRACE):
self._log(TRACE, msg, args, **kwargs)
"""
),
{'TRACE': TRACE}
)
logging.TRACE = TRACE
logging.addLevelName(TRACE, "TRACE")
# Add `DEV()` shortcut to loggers
_inject_into_logger(
'DEV',
textwrap.dedent(
"""\
def DEV(self, msg, *args, **kwargs):
'''Shortcut to output highlighted log text'''
kwargs['highlight'] = True
self.critical(msg, *args, **kwargs)
"""
)
)
class LogRecorder(object):
"""
temporarily records all logs, w/o level filtering
use only once!
"""
max_capacity = 1000 * 1000 # check we are not forgotten or abused
def __init__(self, disable_other_handlers=False, log_config=None):
self._records = []
log_listeners.append(self._add_log_record)
self._saved_config = None
if log_config:
self._saved_config = get_configuration()
configure(log_config)
self._saved_handlers = []
if disable_other_handlers:
self._saved_handlers = rootLogger.handlers[:]
rootLogger.handlers = []
def pop_records(self):
# only returns records on the first call
r = self._records[:]
self._records = []
try:
log_listeners.remove(self._add_log_record)
except ValueError:
pass
if self._saved_config:
configure(**self._saved_config)
self._saved_config = None
if self._saved_handlers:
rootLogger.handlers = self._saved_handlers[:]
self._saved_handlers = []
return r
def _add_log_record(self, msg):
self._records.append(msg)
assert len(self._records) < self.max_capacity
def get_configuration():
"""
get a configuration (snapshot) that can be used to call configure
snapshot = get_configuration()
configure(**snapshot)
"""
root = getLogger()
name_levels = [('', logging.getLevelName(root.level))]
name_levels.extend(
(name, logging.getLevelName(logger.level))
for name, logger
in root.manager.loggerDict.items()
if hasattr(logger, 'level')
)
config_string = ','.join('%s:%s' % x for x in name_levels)
return dict(config_string=config_string, log_json=SLogger.manager.log_json)
def get_logger_names():
return sorted(known_loggers, key=lambda x: '' if not x else x)
class BoundLogger(object):
def __init__(self, logger, context):
self.logger = logger
self.context = context
def bind(self, **kwargs):
return BoundLogger(self, kwargs)
def _proxy(self, method_name, *args, **kwargs):
context = self.context.copy()
context.update(kwargs)
return getattr(self.logger, method_name)(*args, **context)
trace = lambda self, *args, **kwargs: self._proxy('trace', *args, **kwargs)
debug = lambda self, *args, **kwargs: self._proxy('debug', *args, **kwargs)
info = lambda self, *args, **kwargs: self._proxy('info', *args, **kwargs)
warn = warning = lambda self, *args, **kwargs: self._proxy('warning', *args, **kwargs)
error = lambda self, *args, **kwargs: self._proxy('error', *args, **kwargs)
exception = lambda self, *args, **kwargs: self._proxy('exception', *args, **kwargs)
fatal = critical = lambda self, *args, **kwargs: self._proxy('critical', *args, **kwargs)
class _LogJSONEncoder(JSONEncoder):
def default(self, o):
return repr(o)
class SLogger(logging.Logger):
def __init__(self, name, level=DEFAULT_LOGLEVEL):
self.warn = self.warning
super(SLogger, self).__init__(name, level=level)
@property
def log_json(self):
return SLogger.manager.log_json
def is_active(self, level_name='trace'):
return self.isEnabledFor(logging._checkLevel(level_name.upper()))
def format_message(self, msg, kwargs, highlight, level):
if getattr(self, 'log_json', False):
message = dict()
message['event'] = '{}.{}'.format(self.name, msg.lower().replace(' ', '_'))
message['level'] = logging.getLevelName(level)
try:
message.update(kwargs)
try:
msg = json.dumps(message, cls=_LogJSONEncoder)
except TypeError:
# Invalid value. With our custom encoder this can only happen with non-string
# dict keys (see: https://bugs.python.org/issue18820).
message = _stringify_dict_keys(message)
msg = json.dumps(message, cls=_LogJSONEncoder)
except UnicodeDecodeError:
message.update({
k: v if is_numeric(v) or isinstance(v, (float, complex)) else repr(v)
for k, v in kwargs.items()
})
msg = json.dumps(message, cls=_LogJSONEncoder)
else:
msg = "{}{} {}{}".format(
bcolors.WARNING if highlight else "",
msg,
" ".join("{}={!s}".format(k, v) for k, v in kwargs.items()),
bcolors.ENDC if highlight else ""
)
return msg
def bind(self, **kwargs):
return BoundLogger(self, kwargs)
def _log(self, level, msg, args, **kwargs):
exc_info = kwargs.pop('exc_info', None)
extra = kwargs.pop('extra', {})
highlight = kwargs.pop('highlight', False)
extra['kwargs'] = kwargs
extra['original_msg'] = msg
msg = self.format_message(msg, kwargs, highlight, level)
super(SLogger, self)._log(level, msg, args, exc_info, extra)
class RootLogger(SLogger):
"""
A root logger is not that different to any other logger, except that
it must have a logging level and there is only one instance of it in
the hierarchy.
"""
def __init__(self, level):
"""
Initialize the logger with the name "root".
"""
super(RootLogger, self).__init__("root", level)
def handle(self, record):
if log_listeners:
rec_dict = getattr(record, 'kwargs', {}).copy()
rec_dict['event'] = getattr(record, 'original_msg', "")
for listener in log_listeners:
listener(rec_dict)
super(RootLogger, self).handle(record)
class SManager(logging.Manager):
def __init__(self, rootnode):
self.loggerClass = SLogger
self.log_json = False
super(SManager, self).__init__(rootnode)
def getLogger(self, name):
logging.setLoggerClass(SLogger)
return super(SManager, self).getLogger(name)
rootLogger = RootLogger(DEFAULT_LOGLEVEL)
SLogger.root = rootLogger
SLogger.manager = SManager(SLogger.root)
def _stringify_dict_keys(input_):
if isinstance(input_, dict):
res = {}
for k, v in input_.items():
v = _stringify_dict_keys(v)
if not isinstance(k, (int, long, bool, None.__class__)):
k = str(k)
res[k] = v
elif isinstance(input_, (list, tuple)):
res = input_.__class__([_stringify_dict_keys(i) for i in input_])
else:
res = input_
return res
def getLogger(name=None):
"""
Return a logger with the specified name, creating it if necessary.
If no name is specified, return the root logger.
"""
if name:
logger = SLogger.manager.getLogger(name)
return logger
else:
return rootLogger
def configure(config_string=None, log_json=False, log_file=None):
if not config_string:
config_string = ":{}".format(DEFAULT_LOGLEVEL)
if log_json:
SLogger.manager.log_json = True
log_format = JSON_FORMAT
else:
SLogger.manager.log_json = False
log_format = PRINT_FORMAT
if len(rootLogger.handlers) == 0:
handler = StreamHandler()
formatter = Formatter(log_format)
handler.setFormatter(formatter)
rootLogger.addHandler(handler)
if log_file:
if not any(isinstance(hndlr, FileHandler) for hndlr in rootLogger.handlers):
handler = FileHandler(log_file)
formatter = Formatter("{} {}".format(FILE_PREFIX, log_format))
handler.setFormatter(formatter)
rootLogger.addHandler(handler)
# Reset logging levels before applying new config below
for name, logger in SLogger.manager.loggerDict.items():
if hasattr(logger, 'setLevel'):
# Guard against `logging.PlaceHolder` instances
logger.setLevel(logging.NOTSET)
logger.propagate = True
for name_levels in config_string.split(','):
name, _, level = name_levels.partition(':')
logger = getLogger(name)
logger.setLevel(level.upper())
configure_logging = configure
def set_level(name, level):
assert not isinstance(level, int)
logger = getLogger(name)
logger.setLevel(getattr(logging, level.upper()))
def get_logger(name=None):
known_loggers.add(name)
return getLogger(name)
def DEBUG(msg, *args, **kwargs):
"""temporary logger during development that is always on"""
logger = getLogger("DEBUG")
if len(logger.handlers) == 0:
logger.addHandler(StreamHandler())
logger.propagate = False
logger.setLevel(logging.DEBUG)
logger.DEV(msg, *args, **kwargs)
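# Usage sketch (logger names are illustrative): raise one module to TRACE level and
# emit a structured log line with keyword context.
# configure('eth.vm:TRACE')
# log = get_logger('eth.vm')
# log.trace('executing', op='ADD', stack_size=2)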
| nirenzang/Serpent-Pyethereum-Tutorial | pyethereum/ethereum/slogging.py | Python | gpl-3.0 | 10,541 | 0.001613 |
import re
import os
import sys
from jcompiler.token import tokenize
from jcompiler.parse import Parser
import jcompiler.xmlutil as xmlutil
def remove_comments(s):
return re.sub(r'(\s*//.*)|(\s*/\*(.|\n)*?\*/\s*)', '', s)
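# Quick check (sketch): both line and block comments are stripped before tokenizing, e.g.
# remove_comments('let x = 1; // note')  ->  'let x = 1;'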
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'an input file is needed'
sys.exit(1)
fname = sys.argv[1]
if not os.path.isfile(fname):
print 'not a valid file path: %s' % fname
sys.exit(1)
with open(fname, 'r') as f:
source = remove_comments(f.read())
parser = Parser(tokenize(source))
tree = parser.parse_tree()
# print tree
print xmlutil.dump_parse_tree(tree)
| my-zhang/nand2tetris | ch10-frontend/jcompiler/cli.py | Python | mit | 709 | 0.026798 |
"""add timezone to each station
Revision ID: 4d0be367f095
Revises: 6722b0ef4e1
Create Date: 2014-03-19 16:43:00.326820
"""
# revision identifiers, used by Alembic.
revision = '4d0be367f095'
down_revision = '6722b0ef4e1'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('radio_station', sa.Column('timezone', sa.String(length=32), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('radio_station', 'timezone')
### end Alembic commands ###
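# Applied with the standard Alembic CLI, e.g. `alembic upgrade 4d0be367f095`;
# `alembic downgrade 6722b0ef4e1` reverses it by dropping the column again.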
| rootio/rootio_web | alembic/versions/4d0be367f095_station_timezone.py | Python | agpl-3.0 | 644 | 0.01087 |
from setuptools import setup, find_packages
setup(
name='pulp_ostree_common',
version='1.0.0a2',
packages=find_packages(),
url='http://www.pulpproject.org',
license='GPLv2+',
author='Pulp Team',
author_email='pulp-list@redhat.com',
description='common code for pulp\'s ostree support',
)
| ipanova/pulp_ostree | common/setup.py | Python | gpl-2.0 | 321 | 0 |
import os
import sys
import datetime as dt
import json
from itertools import groupby
from kivy.properties import (StringProperty,
DictProperty,
ListProperty,
BooleanProperty)
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.screenmanager import Screen
from kivy.uix.gridlayout import GridLayout
from kivy.uix.label import Label
from core.bglabel import BGLabel
from MythTV import MythBE
EPOCH = dt.datetime(1970, 1, 1)
class MythRecording(BoxLayout):
"""Widget class for displaying information about upcoming recordings."""
rec = DictProperty({})
bg = ListProperty([0.1, 0.15, 0.15, 1])
def __init__(self, **kwargs):
super(MythRecording, self).__init__(**kwargs)
self.rec = kwargs["rec"]
class MythRecordingHeader(BGLabel):
"""Widget class for grouping recordings by day."""
rec_date = StringProperty("")
def __init__(self, **kwargs):
super(MythRecordingHeader, self).__init__(**kwargs)
self.bgcolour = [0.1, 0.1, 0.4, 1]
self.rec_date = kwargs["rec_date"]
class MythTVScreen(Screen):
"""Main screen class for MythTV schedule.
Screen attempts to connect to MythTV backend and retrieve list of
upcoming recordings and display this.
Data is cached so that information can still be viewed even if backend
is offline (e.g. for power saving purposes).
"""
backendonline = BooleanProperty(False)
isrecording = BooleanProperty(False)
def __init__(self, **kwargs):
super(MythTVScreen, self).__init__(**kwargs)
# Get the path for the folder
scr = sys.modules[self.__class__.__module__].__file__
# Create variable to retain path to our cache fie
self.screendir = os.path.dirname(scr)
self.cacheFile = os.path.join(self.screendir, "cache", "cache.json")
# Some other useful variable
self.running = False
self.rec_timer = None
self.status_timer = None
self.be = None
self.recs = None
def on_enter(self):
# We only update when we enter the screen. No need for regular updates.
self.getRecordings()
self.drawScreen()
self.checkRecordingStatus()
def on_leave(self):
pass
def cacheRecs(self, recs):
"""Method to save local copy of recordings. Backend may not be online
all the time so a cache enables us to display recordings even if we
can't poll the server for an update.
"""
with open(self.cacheFile, 'w') as outfile:
json.dump(recs, outfile)
def loadCache(self):
"""Retrieves cached recorings and returns as a python list object."""
try:
raw = open(self.cacheFile, 'r')
recs = json.load(raw)
except:
recs = []
return recs
def recs_to_dict(self, uprecs):
"""Converts the MythTV upcoming recording iterator into a list of
dict objects.
"""
raw_recs = []
recs = []
# Turn the response into a dict object and add to our list of recordings
for r in uprecs:
rec = {}
st = r.starttime
et = r.endtime
rec["title"] = r.title
rec["subtitle"] = r.subtitle if r.subtitle else ""
day = dt.datetime(st.year, st.month, st.day)
rec["day"] = (day - EPOCH).total_seconds()
rec["time"] = "{} - {}".format(st.strftime("%H:%M"),
et.strftime("%H:%M"))
rec["timestamp"] = (st - EPOCH).total_seconds()
rec["desc"] = r.description
raw_recs.append(rec)
# Group the recordings by day (so we can print a header)
for k, g in groupby(raw_recs, lambda x: x["day"]):
recs.append((k, list(g)))
return recs
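# Resulting shape (sketch): a list of (day_epoch_seconds, [rec_dict, ...]) tuples, e.g.
# [(1395187200.0, [{'title': 'News', 'time': '18:00 - 18:30', ...}, ...]), ...]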
def getRecordings(self):
"""Attempts to connect to MythTV backend and retrieve recordings."""
try:
# If we can connect then get recordings and save a local cache.
self.be = MythBE()
uprecs = self.be.getUpcomingRecordings()
self.recs = self.recs_to_dict(uprecs)
self.cacheRecs(self.recs)
self.backendonline = True
except:
# Can't connect so we need to set variables accordinly and try
# to load data from the cache.
self.be = None
self.recs = self.loadCache()
self.backendonline = False
def checkRecordingStatus(self):
"""Checks whether the backend is currently recording."""
try:
recbe = MythBE()
for recorder in recbe.getRecorderList():
if recbe.isRecording(recorder):
self.isrecording = True
break
except:
# If we can't connect to it then it can't be recording.
self.isrecording = False
def drawScreen(self):
"""Main method for rendering screen.
If there is recording data (live or cached) then it is laid out in a
scroll view.
If not, the user is notified that the backend is unreachable.
"""
sv = self.ids.myth_scroll
sv.clear_widgets()
if self.recs:
# Create a child widget to hold the recordings.
self.sl = GridLayout(cols=1, size_hint=(1, None), spacing=2)
self.sl.bind(minimum_height=self.sl.setter('height'))
# Loop over the list of recordings.
for rec in self.recs:
# These are grouped by day so we need a header
day = dt.timedelta(0, rec[0]) + EPOCH
mrh = MythRecordingHeader(rec_date=day.strftime("%A %d %B"))
self.sl.add_widget(mrh)
# Then we loop over the recordings scheduled for that day
for r in rec[1]:
# and add them to the display.
mr = MythRecording(rec=r)
self.sl.add_widget(mr)
sv.add_widget(self.sl)
else:
lb = Label(text="Backend is unreachable and there is no cached"
" information")
sv.add_widget(lb)
| 9and3r/RPi-InfoScreen-Kivy | screens/mythtv/screen.py | Python | gpl-3.0 | 6,297 | 0.000476 |
import sublime_plugin
from cmakehelpers.compilerflags import clang, gcc
from cmakehelpers.compilerflags import find_completions
COMPLETION_DATABASES = dict(
clang=dict(loader=clang, database=None),
gcc=dict(loader=gcc, database=None))
def log_message(s):
print("CMakeSnippets: {0}".format(s))
def load_completion_databases():
global COMPLETION_DATABASES
for compiler_name, database_info in COMPLETION_DATABASES.iteritems():
loader = database_info['loader']
completion_database = loader.load_compiler_options_database()
log_message("Loading {0} options database: {1} entries".format(compiler_name, len(completion_database)))
database_info['database'] = completion_database
load_completion_databases()
class CompilerFlagAutocomplete(sublime_plugin.EventListener):
def on_query_completions(self, view, prefix, locations):
if not view.match_selector(locations[0], "source.cmake"):
return []
cursor = locations[0]
# print('*** prefix: ' + str(prefix))
# print('*** cursor pos: ' + str(cursor))
line_region = view.line(cursor)
line_start, line_end = line_region.a, line_region.b
cursor_offset = cursor - line_start
current_line = view.substr(line_region)[:]
        # print '*** line before cursor: ', [current_line]
all_completions = list()
for compiler_name, database_info in COMPLETION_DATABASES.iteritems():
compiler_options_db = database_info['database']
all_completions.extend(find_completions(compiler_name, compiler_options_db, current_line, cursor_offset))
return all_completions
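# Added sketch for illustration; this is not the real cmakehelpers.compilerflags
# API, which is not shown in this file. It mimics what a find_completions-style
# prefix lookup could look like over a flag database shaped like
# {flag: description}, returning (trigger, contents) pairs in the form that
# on_query_completions() is expected to return.
def _demo_find_completions(compiler_name, options_db, line, cursor_offset):
    words = line[:cursor_offset].split()
    prefix = words[-1] if words else ''
    return [("{0}\t{1}".format(flag, compiler_name), flag)
            for flag, description in sorted(options_db.items())
            if flag.startswith(prefix)]
# _demo_find_completions('gcc', {'-Wall': 'all warnings'}, 'set(FLAGS -Wa', 14)
# returns [('-Wall\tgcc', '-Wall')].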
|
sevas/sublime_cmake_snippets
|
compiler_completions.py
|
Python
|
mit
| 1,676 | 0.001193 |
from __future__ import absolute_import
import operator
from django.db import models
from django.db.models import Q
from django.utils import timezone
from sentry.db.models import Model, sane_repr
from sentry.db.models.fields import FlexibleForeignKey, JSONField
from sentry.ownership.grammar import load_schema
from functools import reduce
class ProjectOwnership(Model):
__core__ = True
project = FlexibleForeignKey("sentry.Project", unique=True)
raw = models.TextField(null=True)
schema = JSONField(null=True)
fallthrough = models.BooleanField(default=True)
auto_assignment = models.BooleanField(default=False)
date_created = models.DateTimeField(default=timezone.now)
last_updated = models.DateTimeField(default=timezone.now)
is_active = models.BooleanField(default=True)
# An object to indicate ownership is implicitly everyone
Everyone = object()
class Meta:
app_label = "sentry"
db_table = "sentry_projectownership"
__repr__ = sane_repr("project_id", "is_active")
@classmethod
def get_owners(cls, project_id, data):
"""
        Find the owners for a given project_id and event data blob.
If Everyone is returned, this means we implicitly are
falling through our rules and everyone is responsible.
If an empty list is returned, this means there are explicitly
no owners.
"""
try:
ownership = cls.objects.get(project_id=project_id)
except cls.DoesNotExist:
ownership = cls(project_id=project_id)
rules = cls._matching_ownership_rules(ownership, project_id, data)
if not rules:
return cls.Everyone if ownership.fallthrough else [], None
owners = {o for rule in rules for o in rule.owners}
return filter(None, resolve_actors(owners, project_id).values()), rules
@classmethod
def get_autoassign_owner(cls, project_id, data):
"""
        Get the auto-assign owner for a project, if one is configured.
        Returns None if no rule matches, otherwise the single resolved owner
        whose rule has the most specific (longest) matching pattern.
"""
try:
ownership = cls.objects.get(project_id=project_id)
except cls.DoesNotExist:
return None
if not ownership.auto_assignment:
return None
rules = cls._matching_ownership_rules(ownership, project_id, data)
if not rules:
return None
score = 0
owners = None
# Automatic assignment prefers the owner with the longest
# matching pattern as the match is more specific.
for rule in rules:
candidate = len(rule.matcher.pattern)
if candidate > score:
score = candidate
owners = rule.owners
actors = filter(None, resolve_actors(owners, project_id).values())
# Can happen if the ownership rule references a user/team that no longer
# is assigned to the project or has been removed from the org.
if not actors:
return None
return actors[0].resolve()
@classmethod
def _matching_ownership_rules(cls, ownership, project_id, data):
rules = []
if ownership.schema is not None:
for rule in load_schema(ownership.schema):
if rule.test(data):
rules.append(rule)
return rules
def resolve_actors(owners, project_id):
""" Convert a list of Owner objects into a dictionary
of {Owner: Actor} pairs. Actors not identified are returned
as None. """
from sentry.api.fields.actor import Actor
from sentry.models import User, Team
if not owners:
return {}
users, teams = [], []
owners_lookup = {}
for owner in owners:
        # teams aren't technically case insensitive, but team slugs also
        # aren't allowed to contain upper-case characters, so this works
        # itself out correctly since a mixed-case identifier won't match
owners_lookup[(owner.type, owner.identifier.lower())] = owner
if owner.type == "user":
users.append(owner)
elif owner.type == "team":
teams.append(owner)
actors = {}
if users:
actors.update(
{
("user", email.lower()): Actor(u_id, User)
for u_id, email in User.objects.filter(
reduce(operator.or_, [Q(emails__email__iexact=o.identifier) for o in users]),
# We don't require verified emails
# emails__is_verified=True,
is_active=True,
sentry_orgmember_set__organizationmemberteam__team__projectteam__project_id=project_id,
)
.distinct()
.values_list("id", "emails__email")
}
)
if teams:
actors.update(
{
("team", slug): Actor(t_id, Team)
for t_id, slug in Team.objects.filter(
slug__in=[o.identifier for o in teams], projectteam__project_id=project_id
).values_list("id", "slug")
}
)
return {o: actors.get((o.type, o.identifier.lower())) for o in owners}
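# Standalone illustration added for clarity (not Sentry code): the auto-assignment
# tie-break used in get_autoassign_owner() above. When several ownership rules
# match an event, the rule with the longest matcher pattern is treated as the most
# specific one and its owners win.
def _demo_most_specific_owners(matched_rules):
    best_owners, best_score = None, 0
    for pattern, owners in matched_rules:
        if len(pattern) > best_score:
            best_owners, best_score = owners, len(pattern)
    return best_owners
# The more specific path pattern takes precedence, e.g.
# _demo_most_specific_owners([("src/*", ["#team-a"]), ("src/sentry/api/*", ["#team-b"])])
# returns ["#team-b"].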
|
mvaled/sentry
|
src/sentry/models/projectownership.py
|
Python
|
bsd-3-clause
| 5,206 | 0.000768 |
#
# The Python Imaging Library.
# $Id$
#
# image enhancement classes
#
# For a background, see "Image Processing By Interpolation and
# Extrapolation", Paul Haeberli and Douglas Voorhies. Available
# at http://www.sgi.com/grafica/interp/index.html
#
# History:
# 1996-03-23 fl Created
# 2009-06-16 fl Fixed mean calculation
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#
from PIL import Image, ImageFilter, ImageStat
class _Enhance:
def enhance(self, factor):
"""
Returns an enhanced image.
:param factor: A floating point value controlling the enhancement.
Factor 1.0 always returns a copy of the original image,
lower factors mean less color (brightness, contrast,
etc), and higher values more. There are no restrictions
on this value.
:rtype: :py:class:`~PIL.Image.Image`
"""
return Image.blend(self.degenerate, self.image, factor)
class Color(_Enhance):
"""Adjust image color balance.
This class can be used to adjust the colour balance of an image, in
a manner similar to the controls on a colour TV set. An enhancement
factor of 0.0 gives a black and white image. A factor of 1.0 gives
the original image.
"""
def __init__(self, image):
self.image = image
self.degenerate = image.convert("L").convert(image.mode)
class Contrast(_Enhance):
"""Adjust image contrast.
This class can be used to control the contrast of an image, similar
to the contrast control on a TV set. An enhancement factor of 0.0
gives a solid grey image. A factor of 1.0 gives the original image.
"""
def __init__(self, image):
self.image = image
mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5)
self.degenerate = Image.new("L", image.size, mean).convert(image.mode)
class Brightness(_Enhance):
"""Adjust image brightness.
    This class can be used to control the brightness of an image. An
enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the
original image.
"""
def __init__(self, image):
self.image = image
self.degenerate = Image.new(image.mode, image.size, 0)
class Sharpness(_Enhance):
"""Adjust image sharpness.
This class can be used to adjust the sharpness of an image. An
enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the
original image, and a factor of 2.0 gives a sharpened image.
"""
def __init__(self, image):
self.image = image
self.degenerate = image.filter(ImageFilter.SMOOTH)
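# Appended usage sketch, not part of the PIL module itself; the image is generated
# in place so the example stays self-contained. enhance(1.0) returns a copy of the
# original, values below 1.0 move towards the degenerate image and values above
# 1.0 extrapolate away from it.
if __name__ == "__main__":
    im = Image.new("RGB", (64, 64))
    im.putdata([(x * 4, y * 4, 128) for y in range(64) for x in range(64)])
    brighter = Brightness(im).enhance(1.4)
    punchier = Contrast(brighter).enhance(1.2)
    punchier.save("enhanced_demo.png")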
|
Amechi101/concepteur-market-app
|
venv/lib/python2.7/site-packages/PIL/ImageEnhance.py
|
Python
|
mit
| 2,760 | 0 |
from __future__ import print_function
from sklearn.datasets import fetch_20newsgroups
from sklearn.decomposition import TruncatedSVD
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import Normalizer
from sklearn import metrics
import numpy as np
from tabulate import tabulate
import logging
from sklearn.cluster import KMeans
from spherecluster import SphericalKMeans
from spherecluster import VonMisesFisherMixture
# modified from
# http://scikit-learn.org/stable/auto_examples/text/document_clustering.html
# Display progress logs on stdout
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
###############################################################################
# Optional params
use_LSA = False
n_components = 500
###############################################################################
# Load some categories from the training set
categories = [
'alt.atheism',
'talk.religion.misc',
'comp.graphics',
'sci.space',
]
# Uncomment the following to do the analysis on all the categories
#categories = None
print("Loading 20 newsgroups dataset for categories:")
print(categories)
dataset = fetch_20newsgroups(subset='all', categories=categories,
shuffle=True, random_state=42)
print("%d documents" % len(dataset.data))
print("%d categories" % len(dataset.target_names))
print()
labels = dataset.target
true_k = np.unique(labels).shape[0]
print("Extracting features from the training dataset using a sparse vectorizer")
vectorizer = TfidfVectorizer(stop_words='english', use_idf=True)
X = vectorizer.fit_transform(dataset.data)
print("n_samples: %d, n_features: %d" % X.shape)
print()
# table for results display
table = []
###############################################################################
# LSA for dimensionality reduction (and finding dense vectors)
if use_LSA:
print("Performing dimensionality reduction using LSA")
svd = TruncatedSVD(n_components)
normalizer = Normalizer(copy=False)
lsa = make_pipeline(svd, normalizer)
X = lsa.fit_transform(X)
explained_variance = svd.explained_variance_ratio_.sum()
print("Explained variance of the SVD step: {}%".format(
int(explained_variance * 100)))
print()
###############################################################################
# K-Means clustering
km = KMeans(n_clusters=true_k, init='k-means++', n_init=20)
print("Clustering with %s" % km)
km.fit(X)
print()
table.append([
'k-means',
metrics.homogeneity_score(labels, km.labels_),
metrics.completeness_score(labels, km.labels_),
metrics.v_measure_score(labels, km.labels_),
metrics.adjusted_rand_score(labels, km.labels_),
metrics.adjusted_mutual_info_score(labels, km.labels_),
metrics.silhouette_score(X, km.labels_, metric='cosine')])
print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels, km.labels_))
print("Completeness: %0.3f" % metrics.completeness_score(labels, km.labels_))
print("V-measure: %0.3f" % metrics.v_measure_score(labels, km.labels_))
print("Adjusted Rand-Index: %.3f"
% metrics.adjusted_rand_score(labels, km.labels_))
print("Adjusted Mututal Information: %.3f"
% metrics.adjusted_mutual_info_score(labels, km.labels_))
print("Silhouette Coefficient (euclidean): %0.3f"
% metrics.silhouette_score(X, km.labels_, metric='euclidean'))
print("Silhouette Coefficient (cosine): %0.3f"
% metrics.silhouette_score(X, km.labels_, metric='cosine'))
print()
###############################################################################
# Spherical K-Means clustering
skm = SphericalKMeans(n_clusters=true_k, init='k-means++', n_init=20)
print("Clustering with %s" % skm)
skm.fit(X)
print()
print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels, skm.labels_))
print("Completeness: %0.3f" % metrics.completeness_score(labels, skm.labels_))
print("V-measure: %0.3f" % metrics.v_measure_score(labels, skm.labels_))
print("Adjusted Rand-Index: %.3f"
% metrics.adjusted_rand_score(labels, skm.labels_))
print("Adjusted Mututal Information: %.3f"
% metrics.adjusted_mutual_info_score(labels, skm.labels_))
print("Silhouette Coefficient (euclidean): %0.3f"
% metrics.silhouette_score(X, skm.labels_, metric='euclidean'))
print("Silhouette Coefficient (cosine): %0.3f"
% metrics.silhouette_score(X, skm.labels_, metric='cosine'))
print()
table.append([
'spherical k-means',
metrics.homogeneity_score(labels, skm.labels_),
metrics.completeness_score(labels, skm.labels_),
metrics.v_measure_score(labels, skm.labels_),
metrics.adjusted_rand_score(labels, skm.labels_),
metrics.adjusted_mutual_info_score(labels, skm.labels_),
metrics.silhouette_score(X, skm.labels_, metric='cosine')])
###############################################################################
# Mixture of von Mises Fisher clustering (soft)
vmf_soft = VonMisesFisherMixture(n_clusters=true_k, posterior_type='soft',
init='random-class', n_init=20, force_weights=np.ones((true_k,))/true_k)
print("Clustering with %s" % vmf_soft)
vmf_soft.fit(X)
print()
print('weights: {}'.format(vmf_soft.weights_))
print('concentrations: {}'.format(vmf_soft.concentrations_))
print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels, vmf_soft.labels_))
print("Completeness: %0.3f" % metrics.completeness_score(labels, vmf_soft.labels_))
print("V-measure: %0.3f" % metrics.v_measure_score(labels, vmf_soft.labels_))
print("Adjusted Rand-Index: %.3f"
% metrics.adjusted_rand_score(labels, vmf_soft.labels_))
print("Adjusted Mututal Information: %.3f"
% metrics.adjusted_mutual_info_score(labels, vmf_soft.labels_))
print("Silhouette Coefficient (euclidean): %0.3f"
% metrics.silhouette_score(X, vmf_soft.labels_, metric='euclidean'))
print("Silhouette Coefficient (cosine): %0.3f"
% metrics.silhouette_score(X, vmf_soft.labels_, metric='cosine'))
print()
table.append([
'movMF-soft',
metrics.homogeneity_score(labels, vmf_soft.labels_),
metrics.completeness_score(labels, vmf_soft.labels_),
metrics.v_measure_score(labels, vmf_soft.labels_),
metrics.adjusted_rand_score(labels, vmf_soft.labels_),
metrics.adjusted_mutual_info_score(labels, vmf_soft.labels_),
metrics.silhouette_score(X, vmf_soft.labels_, metric='cosine')])
###############################################################################
# Mixture of von Mises Fisher clustering (hard)
vmf_hard = VonMisesFisherMixture(n_clusters=true_k, posterior_type='hard',
init='spherical-k-means', n_init=20, force_weights=np.ones((true_k,))/true_k)
print("Clustering with %s" % vmf_hard)
vmf_hard.fit(X)
print()
print('weights: {}'.format(vmf_hard.weights_))
print('concentrations: {}'.format(vmf_hard.concentrations_))
print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels, vmf_hard.labels_))
print("Completeness: %0.3f" % metrics.completeness_score(labels, vmf_hard.labels_))
print("V-measure: %0.3f" % metrics.v_measure_score(labels, vmf_hard.labels_))
print("Adjusted Rand-Index: %.3f"
% metrics.adjusted_rand_score(labels, vmf_hard.labels_))
print("Adjusted Mututal Information: %.3f"
% metrics.adjusted_mutual_info_score(labels, vmf_hard.labels_))
print("Silhouette Coefficient (euclidean): %0.3f"
% metrics.silhouette_score(X, vmf_hard.labels_, metric='euclidean'))
print("Silhouette Coefficient (cosine): %0.3f"
% metrics.silhouette_score(X, vmf_hard.labels_, metric='cosine'))
print()
table.append([
'movMF-hard',
metrics.homogeneity_score(labels, vmf_hard.labels_),
metrics.completeness_score(labels, vmf_hard.labels_),
metrics.v_measure_score(labels, vmf_hard.labels_),
metrics.adjusted_rand_score(labels, vmf_hard.labels_),
metrics.adjusted_mutual_info_score(labels, vmf_hard.labels_),
metrics.silhouette_score(X, vmf_hard.labels_, metric='cosine')])
###############################################################################
# Print all results in table
headers = [
'Homogeneity',
'Completeness',
'V-Measure',
'Adj Rand',
'Adj MI',
'Silhouette (cos)']
print(tabulate(table, headers, tablefmt="fancy_grid"))
|
clara-labs/spherecluster
|
examples/document_clustering.py
|
Python
|
mit
| 8,298 | 0.00229 |
import msgpackrpc
import time
class SumServer(object):
def sum(self, x, y):
return x + y
def sleepy_sum(self, x, y):
time.sleep(1)
return x + y
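# Hypothetical client-side counterpart, assuming the msgpack-rpc-python client
# API; run it from a separate process while the server below is listening.
# call() blocks until the result arrives, call_async() would return a future.
def demo_client():
    client = msgpackrpc.Client(msgpackrpc.Address("localhost", 18800))
    print(client.call("sum", 1, 2))         # -> 3
    print(client.call("sleepy_sum", 1, 2))  # -> 3, after roughly one second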
server = msgpackrpc.Server(SumServer())
server.listen(msgpackrpc.Address("localhost", 18800))
server.start()
|
jpfairbanks/streaming
|
server.py
|
Python
|
bsd-3-clause
| 286 | 0.01049 |
import unittest
from palindromes import is_palindrome
cases = (
('lsdkjfskf', False),
('radar', True),
('racecar', True),
)
class TestCorrectness(unittest.TestCase):
def test_identifies_palindromes(self):
for word, expectation in cases:
self.assertEqual(is_palindrome(word), expectation)
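# For reference, a deque-based palindrome check in the spirit of the chapter this
# test belongs to. This is an illustrative sketch only; the implementation under
# test lives in palindromes.py, which is not shown here.
def _reference_is_palindrome(word):
    from collections import deque
    chars = deque(word)
    while len(chars) > 1:
        if chars.popleft() != chars.pop():
            return False
    return True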
|
Bradfield/algorithms-and-data-structures
|
book/deques/palindromes_test.py
|
Python
|
cc0-1.0
| 328 | 0 |
import ply.lex as lex
import re
tokens = (
'LANGLE', # <
'LANGLESLASH', # </
'RANGLE', # >
'EQUAL', # =
'STRING', # "hello"
'WORD') # Welcome!
states = (
("htmlcomment", "exclusive"),
)
t_ignore = ' '
def t_htmlcomment(token):
r'<!--'
token.lexer.begin('htmlcomment')
def t_htmlcomment_end(token):
r'-->'
token.lexer.lineno += token.value.count('\n')
    token.lexer.begin('INITIAL')
def t_htmlcomment_error(token):
    # Skip over any character inside the comment that isn't part of the
    # closing --> marker; without this rule the lexer errors out mid-comment.
    token.lexer.skip(1)
def t_newline(token):
r'\n'
token.lexer.lineno += 1
pass
def t_LANGLESLASH(token):
r'</'
return token
def t_LANGLE(token):
r'<'
return token
def t_RANGLE(token):
r'>'
return token
def t_EQUAL(token):
r'='
return token
def t_STRING(token):
r'"[^"]*"'
token.value = token.value[1:-1] # dropping off the double quotes
return token
def t_WORD(token):
r'[^ <>\n]+'
return token
webpage = "This is <!-- <b>my --> woag</b> webpage"
htmllexer = lex.lex()
htmllexer.input(webpage)
while True:
tok = htmllexer.token()
if not tok: break
print(tok)
|
melvin0008/pythoncodestrial
|
first.py
|
Python
|
apache-2.0
| 1,107 | 0.01897 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from typing import List, Dict
import google.api_core.exceptions
from google.cloud.bigtable.column_family import MaxVersionsGCRule
from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable.table import ClusterState
from parameterized import parameterized
from airflow import AirflowException
from airflow.contrib.operators.gcp_bigtable_operator import \
BigtableInstanceDeleteOperator, \
BigtableTableDeleteOperator, \
BigtableTableCreateOperator, \
BigtableTableWaitForReplicationSensor, \
BigtableClusterUpdateOperator, \
BigtableInstanceCreateOperator
from tests.compat import mock
PROJECT_ID = 'test_project_id'
INSTANCE_ID = 'test-instance-id'
CLUSTER_ID = 'test-cluster-id'
CLUSTER_ZONE = 'us-central1-f'
GCP_CONN_ID = 'test-gcp-conn-id'
NODES = 5
TABLE_ID = 'test-table-id'
INITIAL_SPLIT_KEYS = [] # type: List
EMPTY_COLUMN_FAMILIES = {} # type: Dict
class BigtableInstanceCreateTest(unittest.TestCase):
@parameterized.expand([
('instance_id', PROJECT_ID, '', CLUSTER_ID, CLUSTER_ZONE),
('main_cluster_id', PROJECT_ID, INSTANCE_ID, '', CLUSTER_ZONE),
('main_cluster_zone', PROJECT_ID, INSTANCE_ID, CLUSTER_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id,
main_cluster_id,
main_cluster_zone, mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableInstanceCreateOperator(
project_id=project_id,
instance_id=instance_id,
main_cluster_id=main_cluster_id,
main_cluster_zone=main_cluster_zone,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_create_instance_that_exists(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
op = BigtableInstanceCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_instance.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_create_instance_that_exists_empty_project_id(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
op = BigtableInstanceCreateOperator(
instance_id=INSTANCE_ID,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_instance.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_different_error_reraised(self, mock_hook):
mock_hook.return_value.get_instance.return_value = None
op = BigtableInstanceCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.create_instance.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.GoogleAPICallError('error'))
with self.assertRaises(google.api_core.exceptions.GoogleAPICallError):
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_instance.assert_called_once_with(
cluster_nodes=None,
cluster_storage_type=None,
instance_display_name=None,
instance_id=INSTANCE_ID,
instance_labels=None,
instance_type=None,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
project_id=PROJECT_ID,
replica_cluster_id=None,
replica_cluster_zone=None,
timeout=None
)
class BigtableClusterUpdateTest(unittest.TestCase):
@parameterized.expand([
('instance_id', PROJECT_ID, '', CLUSTER_ID, NODES),
('cluster_id', PROJECT_ID, INSTANCE_ID, '', NODES),
('nodes', PROJECT_ID, INSTANCE_ID, CLUSTER_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id,
cluster_id, nodes, mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableClusterUpdateOperator(
project_id=project_id,
instance_id=instance_id,
cluster_id=cluster_id,
nodes=nodes,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_but_instance_does_not_exists(self, mock_hook):
mock_hook.return_value.get_instance.return_value = None
with self.assertRaises(AirflowException) as e:
op = BigtableClusterUpdateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
err = e.exception
self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(
INSTANCE_ID))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.update_cluster.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_but_instance_does_not_exists_empty_project_id(self,
mock_hook):
mock_hook.return_value.get_instance.return_value = None
with self.assertRaises(AirflowException) as e:
op = BigtableClusterUpdateOperator(
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
err = e.exception
self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(
INSTANCE_ID))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.update_cluster.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_that_does_not_exists(self, mock_hook):
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.update_cluster.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Cluster not found."))
with self.assertRaises(AirflowException) as e:
op = BigtableClusterUpdateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
err = e.exception
self.assertEqual(
str(err),
"Dependency: cluster '{}' does not exist for instance '{}'.".format(
CLUSTER_ID, INSTANCE_ID)
)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.update_cluster.assert_called_once_with(
instance=instance, cluster_id=CLUSTER_ID, nodes=NODES)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_that_does_not_exists_empty_project_id(self, mock_hook):
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.update_cluster.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Cluster not found."))
with self.assertRaises(AirflowException) as e:
op = BigtableClusterUpdateOperator(
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
err = e.exception
self.assertEqual(
str(err),
"Dependency: cluster '{}' does not exist for instance '{}'.".format(
CLUSTER_ID, INSTANCE_ID)
)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.update_cluster.assert_called_once_with(
instance=instance, cluster_id=CLUSTER_ID, nodes=NODES)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_different_error_reraised(self, mock_hook):
op = BigtableClusterUpdateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.update_cluster.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.GoogleAPICallError('error'))
with self.assertRaises(google.api_core.exceptions.GoogleAPICallError):
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.update_cluster.assert_called_once_with(
instance=instance, cluster_id=CLUSTER_ID, nodes=NODES)
class BigtableInstanceDeleteTest(unittest.TestCase):
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_delete_execute(self, mock_hook):
op = BigtableInstanceDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_delete_execute_empty_project_id(self, mock_hook):
op = BigtableInstanceDeleteOperator(
instance_id=INSTANCE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=None,
instance_id=INSTANCE_ID)
@parameterized.expand([
('instance_id', PROJECT_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id, mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableInstanceDeleteOperator(
project_id=project_id,
instance_id=instance_id,
task_id="id"
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_deleting_instance_that_doesnt_exists(self, mock_hook):
op = BigtableInstanceDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_instance.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Instance not found."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_deleting_instance_that_doesnt_exists_empty_project_id(self, mock_hook):
op = BigtableInstanceDeleteOperator(
instance_id=INSTANCE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_instance.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Instance not found."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=None,
instance_id=INSTANCE_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_different_error_reraised(self, mock_hook):
op = BigtableInstanceDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_instance.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.GoogleAPICallError('error'))
with self.assertRaises(google.api_core.exceptions.GoogleAPICallError):
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID)
class BigtableTableDeleteTest(unittest.TestCase):
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_delete_execute(self, mock_hook):
op = BigtableTableDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_table.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID)
@parameterized.expand([
('instance_id', PROJECT_ID, '', TABLE_ID),
('table_id', PROJECT_ID, INSTANCE_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id, table_id,
mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableTableDeleteOperator(
project_id=project_id,
instance_id=instance_id,
table_id=table_id,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_deleting_table_that_doesnt_exists(self, mock_hook):
op = BigtableTableDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Table not found."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_table.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_deleting_table_that_doesnt_exists_empty_project_id(self, mock_hook):
op = BigtableTableDeleteOperator(
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Table not found."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_table.assert_called_once_with(
project_id=None,
instance_id=INSTANCE_ID,
table_id=TABLE_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_deleting_table_when_instance_doesnt_exists(self, mock_hook):
op = BigtableTableDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.get_instance.return_value = None
with self.assertRaises(AirflowException) as e:
op.execute(None)
err = e.exception
self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(
INSTANCE_ID))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_table.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_different_error_reraised(self, mock_hook):
op = BigtableTableDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.GoogleAPICallError('error'))
with self.assertRaises(google.api_core.exceptions.GoogleAPICallError):
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_table.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID)
class BigtableTableCreateTest(unittest.TestCase):
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_create_execute(self, mock_hook):
op = BigtableTableCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_table.assert_called_once_with(
instance=instance,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES)
@parameterized.expand([
('instance_id', PROJECT_ID, '', TABLE_ID),
('table_id', PROJECT_ID, INSTANCE_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id, table_id,
mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableTableCreateOperator(
project_id=project_id,
instance_id=instance_id,
table_id=table_id,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_instance_not_exists(self, mock_hook):
op = BigtableTableCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.get_instance.return_value = None
with self.assertRaises(AirflowException) as e:
op.execute(None)
err = e.exception
self.assertEqual(
str(err),
"Dependency: instance '{}' does not exist in project '{}'.".format(
INSTANCE_ID, PROJECT_ID)
)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_creating_table_that_exists(self, mock_hook):
op = BigtableTableCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.get_column_families_for_table.return_value = \
EMPTY_COLUMN_FAMILIES
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.create_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.AlreadyExists("Table already exists."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_table.assert_called_once_with(
instance=instance,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_creating_table_that_exists_empty_project_id(self, mock_hook):
op = BigtableTableCreateOperator(
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.get_column_families_for_table.return_value = \
EMPTY_COLUMN_FAMILIES
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.create_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.AlreadyExists("Table already exists."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_table.assert_called_once_with(
instance=instance,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_creating_table_that_exists_with_different_column_families_ids_in_the_table(
self, mock_hook):
op = BigtableTableCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.get_column_families_for_table.return_value = {
"existing_family": None}
mock_hook.return_value.create_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.AlreadyExists("Table already exists."))
with self.assertRaises(AirflowException) as e:
op.execute(None)
err = e.exception
self.assertEqual(
str(err),
"Table '{}' already exists with different Column Families.".format(TABLE_ID)
)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_creating_table_that_exists_with_different_column_families_gc_rule_in__table(
self, mock_hook):
op = BigtableTableCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families={"cf-id": MaxVersionsGCRule(1)},
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
cf_mock = mock.Mock()
cf_mock.gc_rule = mock.Mock(return_value=MaxVersionsGCRule(2))
mock_hook.return_value.get_column_families_for_table.return_value = {
"cf-id": cf_mock
}
mock_hook.return_value.create_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.AlreadyExists("Table already exists."))
with self.assertRaises(AirflowException) as e:
op.execute(None)
err = e.exception
self.assertEqual(
str(err),
"Table '{}' already exists with different Column Families.".format(TABLE_ID)
)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
class BigtableWaitForTableReplicationTest(unittest.TestCase):
@parameterized.expand([
('instance_id', PROJECT_ID, '', TABLE_ID),
('table_id', PROJECT_ID, INSTANCE_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id, table_id,
mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableTableWaitForReplicationSensor(
project_id=project_id,
instance_id=instance_id,
table_id=table_id,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_wait_no_instance(self, mock_hook):
mock_hook.return_value.get_instance.return_value = None
op = BigtableTableWaitForReplicationSensor(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
self.assertFalse(op.poke(None))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_wait_no_table(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.get_cluster_states_for_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Table not found."))
op = BigtableTableWaitForReplicationSensor(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
self.assertFalse(op.poke(None))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_wait_not_ready(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.get_cluster_states_for_table.return_value = {
"cl-id": ClusterState(0)
}
op = BigtableTableWaitForReplicationSensor(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
self.assertFalse(op.poke(None))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_wait_ready(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.get_cluster_states_for_table.return_value = {
"cl-id": ClusterState(4)
}
op = BigtableTableWaitForReplicationSensor(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
self.assertTrue(op.poke(None))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
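# Illustrative only (an added sketch, not part of the test suite): how the operator
# exercised above is typically wired into a DAG. The DAG arguments here are
# placeholder assumptions rather than values taken from these tests.
def _example_dag_usage():
    import datetime
    from airflow import DAG
    dag = DAG('bigtable_example', schedule_interval=None,
              start_date=datetime.datetime(2019, 1, 1))
    return BigtableInstanceCreateOperator(
        project_id=PROJECT_ID,
        instance_id=INSTANCE_ID,
        main_cluster_id=CLUSTER_ID,
        main_cluster_zone=CLUSTER_ZONE,
        task_id='create_bigtable_instance',
        dag=dag)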
|
owlabs/incubator-airflow
|
tests/contrib/operators/test_gcp_bigtable_operator.py
|
Python
|
apache-2.0
| 31,128 | 0.001542 |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Removing M2M table for field references on 'Message'
db.delete_table('django_mailbox_message_references')
def backwards(self, orm):
# Adding M2M table for field references on 'Message'
db.create_table('django_mailbox_message_references', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('from_message', models.ForeignKey(orm['django_mailbox.message'], null=False)),
('to_message', models.ForeignKey(orm['django_mailbox.message'], null=False))
))
db.create_unique('django_mailbox_message_references', ['from_message_id', 'to_message_id'])
models = {
'django_mailbox.mailbox': {
'Meta': {'object_name': 'Mailbox'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uri': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'django_mailbox.message': {
'Meta': {'object_name': 'Message'},
'body': ('django.db.models.fields.TextField', [], {}),
'from_header': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_reply_to': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'replies'", 'null': 'True', 'to': "orm['django_mailbox.Message']"}),
'mailbox': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'messages'", 'to': "orm['django_mailbox.Mailbox']"}),
'message_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'outgoing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'processed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'to_header': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['django_mailbox']
|
coddingtonbear/django-mailbox
|
django_mailbox/south_migrations/0009_remove_references_table.py
|
Python
|
mit
| 2,520 | 0.007143 |
import urllib
from cyclone.web import asynchronous
from twisted.python import log
from sockjs.cyclone import proto
from sockjs.cyclone.transports import pollingbase
class JSONPTransport(pollingbase.PollingTransportBase):
name = 'jsonp'
@asynchronous
def get(self, session_id):
# Start response
self.handle_session_cookie()
self.disable_cache()
# Grab callback parameter
self.callback = self.get_argument('c', None)
if not self.callback:
self.write('"callback" parameter required')
self.set_status(500)
self.finish()
return
# Get or create session without starting heartbeat
if not self._attach_session(session_id):
return
# Might get already detached because connection was closed in
# connectionMade
if not self.session:
return
if self.session.send_queue.is_empty():
self.session.start_heartbeat()
else:
self.session.flush()
def connectionLost(self, reason):
self.session.delayed_close()
def send_pack(self, message):
# TODO: Just escape
msg = '%s(%s);\r\n' % (self.callback, proto.json_encode(message))
self.set_header('Content-Type',
'application/javascript; charset=UTF-8')
self.set_header('Content-Length', len(msg))
# FIXME
self.set_header('Etag', 'dummy')
self.write(msg)
self._detach()
self.safe_finish()
class JSONPSendHandler(pollingbase.PollingTransportBase):
def post(self, session_id):
self.preflight()
self.handle_session_cookie()
self.disable_cache()
session = self._get_session(session_id)
if session is None:
self.set_status(404)
return
#data = self.request.body.decode('utf-8')
data = self.request.body
ctype = self.request.headers.get('Content-Type', '').lower()
if ctype == 'application/x-www-form-urlencoded':
if not data.startswith('d='):
log.msg('jsonp_send: Invalid payload.')
self.write("Payload expected.")
self.set_status(500)
return
data = urllib.unquote_plus(data[2:])
if not data:
log.msg('jsonp_send: Payload expected.')
self.write("Payload expected.")
self.set_status(500)
return
try:
messages = proto.json_decode(data)
except:
# TODO: Proper error handling
log.msg('jsonp_send: Invalid json encoding')
self.write("Broken JSON encoding.")
self.set_status(500)
return
try:
session.messagesReceived(messages)
except Exception:
log.msg('jsonp_send: messagesReceived() failed')
session.close()
self.write('Message handler failed.')
self.set_status(500)
return
self.write('ok')
self.set_header('Content-Type', 'text/plain; charset=UTF-8')
self.set_status(200)
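# Standalone illustration added for clarity (not part of sockjs-cyclone): the
# framing produced by send_pack() above. The JSON-encoded payload is wrapped in
# the client-supplied callback so that the polled response is executed by the
# browser as a script, which is what makes JSONP work across origins.
def _demo_jsonp_frame(callback, message):
    import json
    return '%s(%s);\r\n' % (callback, json.dumps(message))
# _demo_jsonp_frame('cb0', 'a["hello"]') returns 'cb0("a[\"hello\"]");\r\n',
# which the browser evaluates as a call to the cb0 callback.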
|
flaviogrossi/sockjs-cyclone
|
sockjs/cyclone/transports/jsonp.py
|
Python
|
mit
| 3,786 | 0.003698 |
import sys
import numpy as np
from scipy import stats
import subprocess as sp
import datetime
import socket
import os
exec_name = sys.argv[1]
max_t = int(sys.argv[2])
ntries = 5
tot_timings = []
for t_idx in range(1,max_t + 1):
cur_timings = []
for _ in range(ntries):
# Run the process.
p = sp.Popen([exec_name,str(t_idx)],stdout=sp.PIPE,stderr=sp.STDOUT)
# Wait for it to finish and get stdout.
out = p.communicate()[0]
        # Parse the combined output in order to find the time.
out = out.split(bytes('\n','ascii'))[1].split()[0][0:-1]
cur_timings.append(float(out))
tot_timings.append(cur_timings)
tot_timings = np.array(tot_timings)
retval = np.array([np.mean(tot_timings,axis=1),stats.sem(tot_timings,axis=1)])
fmt='{fname}_%Y%m%d%H%M%S'
filename = datetime.datetime.now().strftime(fmt).format(fname=socket.gethostname() + '_' + os.path.basename(exec_name)) + '.txt'
np.savetxt(filename,retval)
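# Added illustration (not part of the benchmark script): the saved file holds two
# rows, the mean wall-clock time and its standard error, indexed by thread count
# starting at 1. A hypothetical reader of the results might consume it like this:
def _demo_read_benchmark(filename):
    data = np.loadtxt(filename)
    means, errors = data[0], data[1]
    for nthreads, (m, e) in enumerate(zip(means, errors), start=1):
        print("{} thread(s): {:.3f}s +/- {:.3f}s".format(nthreads, m, e))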
|
darioizzo/piranha
|
tools/benchmark.py
|
Python
|
gpl-3.0
| 911 | 0.023052 |
# Copyright (c) 2011 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_serialization import jsonutils
import requests
import six
from six.moves.urllib import parse
LOG = logging.getLogger(__name__)
class APIResponse(object):
"""Decoded API Response
This provides a decoded version of the Requests response which
    includes a json decoded body, far more convenient for testing that
returned structures are correct, or using parts of returned
structures in tests.
This class is a simple wrapper around dictionaries for API
responses in tests. It includes extra attributes so that they can
be inspected in addition to the attributes.
All json responses from Nova APIs are dictionary compatible, or
blank, so other possible base classes are not needed.
"""
status = 200
"""The HTTP status code as an int"""
content = ""
"""The Raw HTTP response body as a string"""
body = {}
"""The decoded json body as a dictionary"""
headers = {}
"""Response headers as a dictionary"""
def __init__(self, response):
"""Construct an API response from a Requests response
:param response: a ``requests`` library response
"""
super(APIResponse, self).__init__()
self.status = response.status_code
self.content = response.content
if self.content:
self.body = jsonutils.loads(self.content)
self.headers = response.headers
def __str__(self):
# because __str__ falls back to __repr__ we can still use repr
# on self but add in the other attributes.
return "<Response body:%r, status_code:%s>" % (self.body, self.status)
class OpenStackApiException(Exception):
def __init__(self, message=None, response=None):
self.response = response
if not message:
message = 'Unspecified error'
if response:
_status = response.status_code
_body = response.content
message = ('%(message)s\nStatus Code: %(_status)s\n'
'Body: %(_body)s' %
{'message': message, '_status': _status,
'_body': _body})
super(OpenStackApiException, self).__init__(message)
class OpenStackApiAuthenticationException(OpenStackApiException):
def __init__(self, response=None, message=None):
if not message:
message = "Authentication error"
super(OpenStackApiAuthenticationException, self).__init__(message,
response)
class OpenStackApiAuthorizationException(OpenStackApiException):
def __init__(self, response=None, message=None):
if not message:
message = "Authorization error"
super(OpenStackApiAuthorizationException, self).__init__(message,
response)
class OpenStackApiNotFoundException(OpenStackApiException):
def __init__(self, response=None, message=None):
if not message:
message = "Item not found"
super(OpenStackApiNotFoundException, self).__init__(message, response)
class TestOpenStackClient(object):
"""Simple OpenStack API Client.
This is a really basic OpenStack API client that is under our control,
so we can make changes / insert hooks for testing
"""
def __init__(self, auth_user, auth_key, auth_uri,
project_id=None):
super(TestOpenStackClient, self).__init__()
self.auth_result = None
self.auth_user = auth_user
self.auth_key = auth_key
self.auth_uri = auth_uri
if project_id is None:
self.project_id = "6f70656e737461636b20342065766572"
else:
self.project_id = project_id
self.microversion = None
def request(self, url, method='GET', body=None, headers=None):
_headers = {'Content-Type': 'application/json'}
_headers.update(headers or {})
response = requests.request(method, url, data=body, headers=_headers)
return response
def _authenticate(self):
if self.auth_result:
return self.auth_result
auth_uri = self.auth_uri
headers = {'X-Auth-User': self.auth_user,
'X-Auth-Key': self.auth_key,
'X-Auth-Project-Id': self.project_id}
response = self.request(auth_uri,
headers=headers)
http_status = response.status_code
LOG.debug("%(auth_uri)s => code %(http_status)s",
{'auth_uri': auth_uri, 'http_status': http_status})
if http_status == 401:
raise OpenStackApiAuthenticationException(response=response)
self.auth_result = response.headers
return self.auth_result
def api_request(self, relative_uri, check_response_status=None,
strip_version=False, **kwargs):
auth_result = self._authenticate()
# NOTE(justinsb): httplib 'helpfully' converts headers to lower case
base_uri = auth_result['x-server-management-url']
if strip_version:
# NOTE(vish): cut out version number and tenant_id
base_uri = '/'.join(base_uri.split('/', 3)[:-1])
full_uri = '%s/%s' % (base_uri, relative_uri)
headers = kwargs.setdefault('headers', {})
headers['X-Auth-Token'] = auth_result['x-auth-token']
if ('X-OpenStack-Nova-API-Version' in headers or
'OpenStack-API-Version' in headers):
raise Exception('Microversion should be set via '
'microversion attribute in API client.')
elif self.microversion:
headers['X-OpenStack-Nova-API-Version'] = self.microversion
headers['OpenStack-API-Version'] = 'compute %s' % self.microversion
response = self.request(full_uri, **kwargs)
http_status = response.status_code
LOG.debug("%(relative_uri)s => code %(http_status)s",
{'relative_uri': relative_uri, 'http_status': http_status})
if check_response_status:
if http_status not in check_response_status:
if http_status == 404:
raise OpenStackApiNotFoundException(response=response)
elif http_status == 401:
raise OpenStackApiAuthorizationException(response=response)
else:
raise OpenStackApiException(
message="Unexpected status code",
response=response)
return response
def _decode_json(self, response):
        # APIResponse decodes the json body from the raw requests response
        # itself, so wrap the response directly.
        return APIResponse(response)
def api_get(self, relative_uri, **kwargs):
kwargs.setdefault('check_response_status', [200])
return APIResponse(self.api_request(relative_uri, **kwargs))
def api_post(self, relative_uri, body, **kwargs):
kwargs['method'] = 'POST'
if body:
headers = kwargs.setdefault('headers', {})
headers['Content-Type'] = 'application/json'
kwargs['body'] = jsonutils.dumps(body)
kwargs.setdefault('check_response_status', [200, 202])
return APIResponse(self.api_request(relative_uri, **kwargs))
def api_put(self, relative_uri, body, **kwargs):
kwargs['method'] = 'PUT'
if body:
headers = kwargs.setdefault('headers', {})
headers['Content-Type'] = 'application/json'
kwargs['body'] = jsonutils.dumps(body)
kwargs.setdefault('check_response_status', [200, 202, 204])
return APIResponse(self.api_request(relative_uri, **kwargs))
def api_delete(self, relative_uri, **kwargs):
kwargs['method'] = 'DELETE'
kwargs.setdefault('check_response_status', [200, 202, 204])
return APIResponse(self.api_request(relative_uri, **kwargs))
#####################################
#
# Convenience methods
#
# The following are a set of convenience methods to get well known
# resources, they can be helpful in setting up resources in
# tests. All of these convenience methods throw exceptions if they
# get a non 20x status code, so will appropriately abort tests if
# they fail.
#
# They all return the most relevant part of their response body as
# decoded data structure.
#
#####################################
def get_server(self, server_id):
return self.api_get('/servers/%s' % server_id).body['server']
def get_servers(self, detail=True, search_opts=None):
rel_url = '/servers/detail' if detail else '/servers'
if search_opts is not None:
qparams = {}
for opt, val in six.iteritems(search_opts):
qparams[opt] = val
if qparams:
query_string = "?%s" % parse.urlencode(qparams)
rel_url += query_string
return self.api_get(rel_url).body['servers']
def post_server(self, server):
response = self.api_post('/servers', server).body
if 'reservation_id' in response:
return response
else:
return response['server']
def put_server(self, server_id, server):
return self.api_put('/servers/%s' % server_id, server).body
def post_server_action(self, server_id, data):
return self.api_post('/servers/%s/action' % server_id, data).body
def delete_server(self, server_id):
return self.api_delete('/servers/%s' % server_id)
def get_image(self, image_id):
return self.api_get('/images/%s' % image_id).body['image']
def get_images(self, detail=True):
rel_url = '/images/detail' if detail else '/images'
return self.api_get(rel_url).body['images']
def post_image(self, image):
return self.api_post('/images', image).body['image']
def delete_image(self, image_id):
return self.api_delete('/images/%s' % image_id)
def get_flavor(self, flavor_id):
return self.api_get('/flavors/%s' % flavor_id).body['flavor']
def get_flavors(self, detail=True):
rel_url = '/flavors/detail' if detail else '/flavors'
return self.api_get(rel_url).body['flavors']
def post_flavor(self, flavor):
return self.api_post('/flavors', flavor).body['flavor']
def delete_flavor(self, flavor_id):
return self.api_delete('/flavors/%s' % flavor_id)
def post_extra_spec(self, flavor_id, spec):
return self.api_post('/flavors/%s/os-extra_specs' %
flavor_id, spec)
def get_volume(self, volume_id):
return self.api_get('/os-volumes/%s' % volume_id).body['volume']
def get_volumes(self, detail=True):
rel_url = '/os-volumes/detail' if detail else '/os-volumes'
return self.api_get(rel_url).body['volumes']
def post_volume(self, volume):
return self.api_post('/os-volumes', volume).body['volume']
def delete_volume(self, volume_id):
return self.api_delete('/os-volumes/%s' % volume_id)
def get_snapshot(self, snap_id):
return self.api_get('/os-snapshots/%s' % snap_id).body['snapshot']
def get_snapshots(self, detail=True):
rel_url = '/os-snapshots/detail' if detail else '/os-snapshots'
return self.api_get(rel_url).body['snapshots']
def post_snapshot(self, snapshot):
return self.api_post('/os-snapshots', snapshot).body['snapshot']
def delete_snapshot(self, snap_id):
return self.api_delete('/os-snapshots/%s' % snap_id)
def get_server_volume(self, server_id, attachment_id):
return self.api_get('/servers/%s/os-volume_attachments/%s' %
(server_id, attachment_id)
).body['volumeAttachment']
def get_server_volumes(self, server_id):
return self.api_get('/servers/%s/os-volume_attachments' %
(server_id)).body['volumeAttachments']
def post_server_volume(self, server_id, volume_attachment):
return self.api_post('/servers/%s/os-volume_attachments' %
(server_id), volume_attachment
).body['volumeAttachment']
def put_server_volume(self, server_id, attachment_id, volume_id):
return self.api_put('/servers/%s/os-volume_attachments/%s' %
(server_id, attachment_id),
{"volumeAttachment": {"volumeId": volume_id}})
def delete_server_volume(self, server_id, attachment_id):
return self.api_delete('/servers/%s/os-volume_attachments/%s' %
(server_id, attachment_id))
def post_server_metadata(self, server_id, metadata):
post_body = {'metadata': {}}
post_body['metadata'].update(metadata)
return self.api_post('/servers/%s/metadata' % server_id,
post_body).body['metadata']
def delete_server_metadata(self, server_id, key):
return self.api_delete('/servers/%s/metadata/%s' %
(server_id, key))
def get_server_groups(self, all_projects=None):
if all_projects:
return self.api_get(
'/os-server-groups?all_projects').body['server_groups']
else:
return self.api_get('/os-server-groups').body['server_groups']
def get_server_group(self, group_id):
return self.api_get('/os-server-groups/%s' %
group_id).body['server_group']
def post_server_groups(self, group):
response = self.api_post('/os-server-groups', {"server_group": group})
return response.body['server_group']
def delete_server_group(self, group_id):
self.api_delete('/os-server-groups/%s' % group_id)
def get_instance_actions(self, server_id):
return self.api_get('/servers/%s/os-instance-actions' %
(server_id)).body['instanceActions']
def post_aggregate(self, aggregate):
return self.api_post('/os-aggregates', aggregate).body['aggregate']
def delete_aggregate(self, aggregate_id):
self.api_delete('/os-aggregates/%s' % aggregate_id)
|
hanlind/nova
|
nova/tests/functional/api/client.py
|
Python
|
apache-2.0
| 15,004 | 0.000267 |
from number_theory import int_pow, prime_sieve, prime, mod_exp
from itertools import count
from math import ceil, sqrt
def find_n(p1, p2):
"""
Finds n such that for consecutive primes p1 and p2 (p2 > p1), n is
divisible by p2 and the last digits of n are formed by p1.
"""
len_p1 = len(str(p1))
n = int_pow(10, len_p1)
totient_n = int_pow(2, len_p1 - 1) * 4 * int_pow(5, len_p1 - 1)
#now solve p_2.x == p_1 (mod n) i.e. x == p_2^(-1).p_1 (mod n)
x = mod_exp(p2, totient_n - 1, n)
x *= p1
x %= n
return x * p2
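# Sanity-check sketch (an addition, not part of the original solution): the
# Project Euler 134 statement gives S = 1219 for the consecutive primes
# p1 = 19 and p2 = 23, so find_n should reproduce exactly that value.
assert find_n(19, 23) == 1219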
#get primes up to 1000000 plus the next one
primes = prime_sieve(1000000)
p = primes[-1] + 2
while not prime(p):
p += 2
primes += [p]
primes = primes[2:]
summation = 0
for p_i in range(len(primes) - 1):
n = find_n(primes[p_i], primes[p_i + 1])
summation += n
print(summation)
|
peterstace/project-euler
|
OLD_PY_CODE/project_euler_old_old/134/134.py
|
Python
|
unlicense
| 858 | 0.006993 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2016 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the webelement utils."""
from unittest import mock
import collections.abc
import operator
import itertools
import binascii
import os.path
import hypothesis
import hypothesis.strategies
from PyQt5.QtCore import PYQT_VERSION, QRect, QPoint
from PyQt5.QtWebKit import QWebElement
import pytest
from qutebrowser.browser import webelem
def get_webelem(geometry=None, frame=None, null=False, style=None,
display='', attributes=None, tagname=None, classes=None):
"""Factory for WebElementWrapper objects based on a mock.
Args:
geometry: The geometry of the QWebElement as QRect.
frame: The QWebFrame the element is in.
null: Whether the element is null or not.
style: A dict with the styleAttributes of the element.
attributes: Boolean HTML attributes to be added.
tagname: The tag name.
classes: HTML classes to be added.
"""
elem = mock.Mock()
elem.isNull.return_value = null
elem.geometry.return_value = geometry
elem.webFrame.return_value = frame
elem.tagName.return_value = tagname
elem.toOuterXml.return_value = '<fakeelem/>'
elem.toPlainText.return_value = 'text'
attribute_dict = {}
if attributes is None:
pass
elif not isinstance(attributes, collections.abc.Mapping):
attribute_dict.update({e: None for e in attributes})
else:
attribute_dict.update(attributes)
elem.hasAttribute.side_effect = lambda k: k in attribute_dict
elem.attribute.side_effect = lambda k: attribute_dict.get(k, '')
elem.setAttribute.side_effect = (lambda k, v:
operator.setitem(attribute_dict, k, v))
elem.removeAttribute.side_effect = attribute_dict.pop
elem.attributeNames.return_value = list(attribute_dict)
if classes is not None:
elem.classes.return_value = classes.split(' ')
else:
elem.classes.return_value = []
style_dict = {'visibility': '', 'display': ''}
if style is not None:
style_dict.update(style)
def _style_property(name, strategy):
"""Helper function to act as styleProperty method."""
if strategy != QWebElement.ComputedStyle:
raise ValueError("styleProperty called with strategy != "
"ComputedStyle ({})!".format(strategy))
return style_dict[name]
elem.styleProperty.side_effect = _style_property
wrapped = webelem.WebElementWrapper(elem)
return wrapped
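# Illustrative sketch (an addition, not part of the original tests): building a
# fake contenteditable element with the factory above, mirroring one of the
# test_is_content_editable cases further down in this module.
def _example_get_webelem_usage():
    elem = get_webelem(attributes={'contenteditable': 'true'})
    assert elem.is_content_editable()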
class SelectionAndFilterTests:
"""Generator for tests for TestSelectionsAndFilters."""
# A mapping of a HTML element to a list of groups where the selectors
# (after filtering) should match.
#
# Based on this, test cases are generated to make sure it matches those
# groups and not the others.
TESTS = [
('<foo />', []),
('<foo bar="baz"/>', []),
('<foo href="baz"/>', [webelem.Group.url]),
('<foo src="baz"/>', [webelem.Group.url]),
('<a />', [webelem.Group.all]),
('<a href="foo" />', [webelem.Group.all, webelem.Group.links,
webelem.Group.prevnext, webelem.Group.url]),
('<a href="javascript://foo" />', [webelem.Group.all,
webelem.Group.url]),
('<area />', [webelem.Group.all]),
('<area href="foo" />', [webelem.Group.all, webelem.Group.links,
webelem.Group.prevnext, webelem.Group.url]),
('<area href="javascript://foo" />', [webelem.Group.all,
webelem.Group.url]),
('<link />', [webelem.Group.all]),
('<link href="foo" />', [webelem.Group.all, webelem.Group.links,
webelem.Group.prevnext, webelem.Group.url]),
('<link href="javascript://foo" />', [webelem.Group.all,
webelem.Group.url]),
('<textarea />', [webelem.Group.all]),
('<select />', [webelem.Group.all]),
('<input />', [webelem.Group.all]),
('<input type="hidden" />', []),
('<button />', [webelem.Group.all]),
('<button href="foo" />', [webelem.Group.all, webelem.Group.prevnext,
webelem.Group.url]),
('<button href="javascript://foo" />', [webelem.Group.all,
webelem.Group.url]),
# We can't easily test <frame>/<iframe> as they vanish when setting
# them via QWebFrame::setHtml...
('<p onclick="foo" foo="bar"/>', [webelem.Group.all]),
('<p onmousedown="foo" foo="bar"/>', [webelem.Group.all]),
('<p role="option" foo="bar"/>', [webelem.Group.all]),
('<p role="button" foo="bar"/>', [webelem.Group.all]),
('<p role="button" href="bar"/>', [webelem.Group.all,
webelem.Group.prevnext,
webelem.Group.url]),
]
GROUPS = [e for e in webelem.Group if e != webelem.Group.focus]
COMBINATIONS = list(itertools.product(TESTS, GROUPS))
def __init__(self):
self.tests = list(self._generate_tests())
def _generate_tests(self):
for (val, matching_groups), group in self.COMBINATIONS:
if group in matching_groups:
yield group, val, True
else:
yield group, val, False
class TestSelectorsAndFilters:
TESTS = SelectionAndFilterTests().tests
def test_test_generator(self):
assert self.TESTS
@pytest.mark.parametrize('group, val, matching', TESTS)
def test_selectors(self, webframe, group, val, matching):
webframe.setHtml('<html><body>{}</body></html>'.format(val))
# Make sure setting HTML succeeded and there's a new element
assert len(webframe.findAllElements('*')) == 3
elems = webframe.findAllElements(webelem.SELECTORS[group])
elems = [webelem.WebElementWrapper(e) for e in elems]
filterfunc = webelem.FILTERS.get(group, lambda e: True)
elems = [e for e in elems if filterfunc(e)]
assert bool(elems) == matching
class TestWebElementWrapper:
"""Generic tests for WebElementWrapper.
Note: For some methods, there's a dedicated test class with more involved
tests.
"""
@pytest.fixture
def elem(self):
return get_webelem()
def test_nullelem(self):
"""Test __init__ with a null element."""
with pytest.raises(webelem.IsNullError):
get_webelem(null=True)
def test_double_wrap(self, elem):
"""Test wrapping a WebElementWrapper."""
with pytest.raises(TypeError) as excinfo:
webelem.WebElementWrapper(elem)
assert str(excinfo.value) == "Trying to wrap a wrapper!"
@pytest.mark.parametrize('code', [
str,
lambda e: e[None],
lambda e: operator.setitem(e, None, None),
lambda e: operator.delitem(e, None),
lambda e: None in e,
len,
lambda e: e.is_visible(None),
lambda e: e.rect_on_view(),
lambda e: e.is_writable(),
lambda e: e.is_content_editable(),
lambda e: e.is_editable(),
lambda e: e.is_text_input(),
lambda e: e.debug_text(),
list, # __iter__
])
def test_vanished(self, elem, code):
"""Make sure methods check if the element is vanished."""
elem._elem.isNull.return_value = True
with pytest.raises(webelem.IsNullError):
code(elem)
def test_str(self, elem):
assert str(elem) == 'text'
@pytest.mark.parametrize('is_null, expected', [
(False, "<qutebrowser.browser.webelem.WebElementWrapper "
"html='<fakeelem/>'>"),
(True, '<qutebrowser.browser.webelem.WebElementWrapper html=None>'),
])
def test_repr(self, elem, is_null, expected):
elem._elem.isNull.return_value = is_null
assert repr(elem) == expected
def test_getitem(self):
elem = get_webelem(attributes={'foo': 'bar'})
assert elem['foo'] == 'bar'
def test_getitem_keyerror(self, elem):
with pytest.raises(KeyError):
elem['foo'] # pylint: disable=pointless-statement
def test_setitem(self, elem):
elem['foo'] = 'bar'
assert elem._elem.attribute('foo') == 'bar'
def test_delitem(self):
elem = get_webelem(attributes={'foo': 'bar'})
del elem['foo']
assert not elem._elem.hasAttribute('foo')
def test_setitem_keyerror(self, elem):
with pytest.raises(KeyError):
del elem['foo']
def test_contains(self):
elem = get_webelem(attributes={'foo': 'bar'})
assert 'foo' in elem
assert 'bar' not in elem
@pytest.mark.parametrize('attributes, expected', [
({'one': '1', 'two': '2'}, {'one', 'two'}),
({}, set()),
])
def test_iter(self, attributes, expected):
elem = get_webelem(attributes=attributes)
assert set(elem) == expected
@pytest.mark.parametrize('attributes, length', [
({'one': '1', 'two': '2'}, 2),
({}, 0),
])
def test_len(self, attributes, length):
elem = get_webelem(attributes=attributes)
assert len(elem) == length
@pytest.mark.parametrize('attributes, writable', [
([], True),
(['disabled'], False),
(['readonly'], False),
(['disabled', 'readonly'], False),
])
def test_is_writable(self, attributes, writable):
elem = get_webelem(attributes=attributes)
assert elem.is_writable() == writable
@pytest.mark.parametrize('attributes, expected', [
({}, False),
({'contenteditable': 'false'}, False),
({'contenteditable': 'inherit'}, False),
({'contenteditable': 'true'}, True),
])
def test_is_content_editable(self, attributes, expected):
elem = get_webelem(attributes=attributes)
assert elem.is_content_editable() == expected
@pytest.mark.parametrize('tagname, attributes, expected', [
('input', {}, True),
('textarea', {}, True),
('select', {}, False),
('foo', {'role': 'combobox'}, True),
('foo', {'role': 'textbox'}, True),
('foo', {'role': 'bar'}, False),
('input', {'role': 'bar'}, True),
])
def test_is_text_input(self, tagname, attributes, expected):
elem = get_webelem(tagname=tagname, attributes=attributes)
assert elem.is_text_input() == expected
@pytest.mark.parametrize('xml, expected', [
('<fakeelem/>', '<fakeelem/>'),
('<foo>\n<bar/>\n</foo>', '<foo><bar/></foo>'),
('<foo>{}</foo>'.format('x' * 500), '<foo>{}…'.format('x' * 494)),
], ids=['fakeelem', 'newlines', 'long'])
def test_debug_text(self, elem, xml, expected):
elem._elem.toOuterXml.return_value = xml
assert elem.debug_text() == expected
class TestIsVisible:
@pytest.fixture
def frame(self, stubs):
return stubs.FakeWebFrame(QRect(0, 0, 100, 100))
def test_invalid_frame_geometry(self, stubs):
"""Test with an invalid frame geometry."""
rect = QRect(0, 0, 0, 0)
assert not rect.isValid()
frame = stubs.FakeWebFrame(rect)
elem = get_webelem(QRect(0, 0, 10, 10), frame)
assert not elem.is_visible(frame)
def test_invalid_invisible(self, frame):
"""Test elements with an invalid geometry which are invisible."""
elem = get_webelem(QRect(0, 0, 0, 0), frame)
assert not elem.geometry().isValid()
assert elem.geometry().x() == 0
assert not elem.is_visible(frame)
def test_invalid_visible(self, frame):
"""Test elements with an invalid geometry which are visible.
This seems to happen sometimes in the real world, with real elements
which *are* visible, but don't have a valid geometry.
"""
elem = get_webelem(QRect(10, 10, 0, 0), frame)
assert not elem.geometry().isValid()
assert elem.is_visible(frame)
@pytest.mark.parametrize('geometry, visible', [
(QRect(5, 5, 4, 4), False),
(QRect(10, 10, 1, 1), True),
])
def test_scrolled(self, geometry, visible, stubs):
scrolled_frame = stubs.FakeWebFrame(QRect(0, 0, 100, 100),
scroll=QPoint(10, 10))
elem = get_webelem(geometry, scrolled_frame)
assert elem.is_visible(scrolled_frame) == visible
@pytest.mark.parametrize('style, visible', [
({'visibility': 'visible'}, True),
({'visibility': 'hidden'}, False),
({'display': 'inline'}, True),
({'display': 'none'}, False),
({'visibility': 'visible', 'display': 'none'}, False),
({'visibility': 'hidden', 'display': 'inline'}, False),
])
def test_css_attributes(self, frame, style, visible):
elem = get_webelem(QRect(0, 0, 10, 10), frame, style=style)
assert elem.is_visible(frame) == visible
class TestIsVisibleIframe:
"""Tests for is_visible with a child frame.
Attributes:
frame: The FakeWebFrame we're using to test.
iframe: The iframe inside frame.
elem1-elem4: FakeWebElements to test.
"""
Objects = collections.namedtuple('Objects', ['frame', 'iframe', 'elems'])
@pytest.fixture
def objects(self, stubs):
"""Set up the following base situation.
0, 0 300, 0
##############################
# #
0,10 # iframe 100,10 #
#********** #
#*e * elems[0]: 0, 0 in iframe (visible)
#* * #
#* e * elems[1]: 20,90 in iframe (visible)
#********** #
0,110 #. .100,110 #
#. . #
#. e . elems[2]: 20,150 in iframe (not visible)
#.......... #
# e elems[3]: 30, 180 in main frame (visible)
# #
# frame #
##############################
300, 0 300, 300
Returns an Objects namedtuple with frame/iframe/elems attributes.
"""
frame = stubs.FakeWebFrame(QRect(0, 0, 300, 300))
iframe = stubs.FakeWebFrame(QRect(0, 10, 100, 100), parent=frame)
assert frame.geometry().contains(iframe.geometry())
elems = [
get_webelem(QRect(0, 0, 10, 10), iframe),
get_webelem(QRect(20, 90, 10, 10), iframe),
get_webelem(QRect(20, 150, 10, 10), iframe),
get_webelem(QRect(30, 180, 10, 10), frame),
]
assert elems[0].is_visible(frame)
assert elems[1].is_visible(frame)
assert not elems[2].is_visible(frame)
assert elems[3].is_visible(frame)
return self.Objects(frame=frame, iframe=iframe, elems=elems)
def test_iframe_scrolled(self, objects):
"""Scroll iframe down so elem3 gets visible and elem1/elem2 not."""
objects.iframe.scrollPosition.return_value = QPoint(0, 100)
assert not objects.elems[0].is_visible(objects.frame)
assert not objects.elems[1].is_visible(objects.frame)
assert objects.elems[2].is_visible(objects.frame)
assert objects.elems[3].is_visible(objects.frame)
def test_mainframe_scrolled_iframe_visible(self, objects):
"""Scroll mainframe down so iframe is partly visible but elem1 not."""
objects.frame.scrollPosition.return_value = QPoint(0, 50)
geom = objects.frame.geometry().translated(
objects.frame.scrollPosition())
assert not geom.contains(objects.iframe.geometry())
assert geom.intersects(objects.iframe.geometry())
assert not objects.elems[0].is_visible(objects.frame)
assert objects.elems[1].is_visible(objects.frame)
assert not objects.elems[2].is_visible(objects.frame)
assert objects.elems[3].is_visible(objects.frame)
def test_mainframe_scrolled_iframe_invisible(self, objects):
"""Scroll mainframe down so iframe is invisible."""
objects.frame.scrollPosition.return_value = QPoint(0, 110)
geom = objects.frame.geometry().translated(
objects.frame.scrollPosition())
assert not geom.contains(objects.iframe.geometry())
assert not geom.intersects(objects.iframe.geometry())
assert not objects.elems[0].is_visible(objects.frame)
assert not objects.elems[1].is_visible(objects.frame)
assert not objects.elems[2].is_visible(objects.frame)
assert objects.elems[3].is_visible(objects.frame)
@pytest.fixture
def invalid_objects(self, stubs):
"""Set up the following base situation.
0, 0 300, 0
##############################
# #
0,10 # iframe 100,10 #
#********** #
#* e * elems[0]: 10, 10 in iframe (visible)
#* * #
#* * #
#********** #
0,110 #. .100,110 #
#. . #
        #.    e     .               elems[1]: 20,150 in iframe (not visible)
#.......... #
##############################
300, 0 300, 300
Returns an Objects namedtuple with frame/iframe/elems attributes.
"""
frame = stubs.FakeWebFrame(QRect(0, 0, 300, 300))
iframe = stubs.FakeWebFrame(QRect(0, 10, 100, 100), parent=frame)
assert frame.geometry().contains(iframe.geometry())
elems = [
get_webelem(QRect(10, 10, 0, 0), iframe),
get_webelem(QRect(20, 150, 0, 0), iframe),
]
for e in elems:
assert not e.geometry().isValid()
return self.Objects(frame=frame, iframe=iframe, elems=elems)
def test_invalid_visible(self, invalid_objects):
"""Test elements with an invalid geometry which are visible.
This seems to happen sometimes in the real world, with real elements
which *are* visible, but don't have a valid geometry.
"""
elem = invalid_objects.elems[0]
assert elem.is_visible(invalid_objects.frame)
def test_invalid_invisible(self, invalid_objects):
"""Test elements with an invalid geometry which are invisible."""
assert not invalid_objects.elems[1].is_visible(invalid_objects.frame)
def test_focus_element(stubs):
"""Test getting focus element with a fake frame/element.
Testing this with a real webpage is almost impossible because the window
and the element would have focus, which is hard to achieve consistently in
a test.
"""
frame = stubs.FakeWebFrame(QRect(0, 0, 100, 100))
elem = get_webelem()
frame.focus_elem = elem._elem
assert webelem.focus_elem(frame)._elem is elem._elem
class TestRectOnView:
def test_simple(self, stubs):
geometry = QRect(5, 5, 4, 4)
frame = stubs.FakeWebFrame(QRect(0, 0, 100, 100))
elem = get_webelem(geometry, frame)
assert elem.rect_on_view() == QRect(5, 5, 4, 4)
def test_scrolled(self, stubs):
geometry = QRect(20, 20, 4, 4)
frame = stubs.FakeWebFrame(QRect(0, 0, 100, 100),
scroll=QPoint(10, 10))
elem = get_webelem(geometry, frame)
assert elem.rect_on_view() == QRect(20 - 10, 20 - 10, 4, 4)
def test_iframe(self, stubs):
"""Test an element in an iframe.
0, 0 200, 0
##############################
# #
0,10 # iframe 100,10 #
#********** #
#* * #
#* * #
#* e * elem: 20,90 in iframe
#********** #
0,100 # #
##############################
200, 0 200, 200
"""
frame = stubs.FakeWebFrame(QRect(0, 0, 200, 200))
iframe = stubs.FakeWebFrame(QRect(0, 10, 100, 100), parent=frame)
assert frame.geometry().contains(iframe.geometry())
elem = get_webelem(QRect(20, 90, 10, 10), iframe)
assert elem.rect_on_view() == QRect(20, 10 + 90, 10, 10)
def test_passed_geometry(self, stubs):
"""Make sure geometry isn't called when a geometry is passed."""
raw_elem = get_webelem()._elem
rect = QRect(10, 20, 30, 40)
assert webelem.rect_on_view(raw_elem, rect) == rect
assert not raw_elem.geometry.called
class TestJavascriptEscape:
TESTS = {
'foo\\bar': r'foo\\bar',
'foo\nbar': r'foo\nbar',
'foo\rbar': r'foo\rbar',
"foo'bar": r"foo\'bar",
'foo"bar': r'foo\"bar',
'one\\two\rthree\nfour\'five"six': r'one\\two\rthree\nfour\'five\"six',
'\x00': r'\x00',
'hellö': 'hellö',
'☃': '☃',
'\x80Ā': '\x80Ā',
'𐀀\x00𐀀\x00': r'𐀀\x00𐀀\x00',
'𐀀\ufeff': r'𐀀\ufeff',
'\ufeff': r'\ufeff',
# http://stackoverflow.com/questions/2965293/
'\u2028': r'\u2028',
'\u2029': r'\u2029',
}
# Once there was this warning here:
# load glyph failed err=6 face=0x2680ba0, glyph=1912
# http://qutebrowser.org:8010/builders/debian-jessie/builds/765/steps/unittests/
# Should that be ignored?
@pytest.mark.parametrize('before, after', sorted(TESTS.items()), ids=repr)
def test_fake_escape(self, before, after):
"""Test javascript escaping with some expected outcomes."""
assert webelem.javascript_escape(before) == after
def _test_escape(self, text, qtbot, webframe):
"""Helper function for test_real_escape*."""
try:
self._test_escape_simple(text, webframe)
except AssertionError:
# Try another method if the simple method failed.
#
# See _test_escape_hexlified documentation on why this is
# necessary.
self._test_escape_hexlified(text, qtbot, webframe)
def _test_escape_hexlified(self, text, qtbot, webframe):
"""Test conversion by hexlifying in javascript.
Since the conversion of QStrings to Python strings is broken in some
older PyQt versions in some corner cases, we load a HTML file which
generates an MD5 of the escaped text and use that for comparisons.
"""
escaped = webelem.javascript_escape(text)
path = os.path.join(os.path.dirname(__file__),
'test_webelem_jsescape.html')
with open(path, encoding='utf-8') as f:
html_source = f.read().replace('%INPUT%', escaped)
with qtbot.waitSignal(webframe.loadFinished) as blocker:
webframe.setHtml(html_source)
assert blocker.args == [True]
result = webframe.evaluateJavaScript('window.qute_test_result')
assert result is not None
assert '|' in result
result_md5, result_text = result.split('|', maxsplit=1)
text_md5 = binascii.hexlify(text.encode('utf-8')).decode('ascii')
assert result_md5 == text_md5, result_text
def _test_escape_simple(self, text, webframe):
"""Test conversion by using evaluateJavaScript."""
escaped = webelem.javascript_escape(text)
result = webframe.evaluateJavaScript('"{}";'.format(escaped))
assert result == text
@pytest.mark.parametrize('text', sorted(TESTS), ids=repr)
def test_real_escape(self, webframe, qtbot, text):
"""Test javascript escaping with a real QWebPage."""
self._test_escape(text, qtbot, webframe)
@pytest.mark.qt_log_ignore('^load glyph failed',
'^OpenType support missing for script',
extend=True)
@hypothesis.given(hypothesis.strategies.text())
def test_real_escape_hypothesis(self, webframe, qtbot, text):
"""Test javascript escaping with a real QWebPage and hypothesis."""
# We can't simply use self._test_escape because of this:
# https://github.com/pytest-dev/pytest-qt/issues/69
# self._test_escape(text, qtbot, webframe)
try:
self._test_escape_simple(text, webframe)
except AssertionError:
if PYQT_VERSION >= 0x050300:
self._test_escape_hexlified(text, qtbot, webframe)
class TestGetChildFrames:
"""Check get_child_frames."""
def test_single_frame(self, stubs):
"""Test get_child_frames with a single frame without children."""
frame = stubs.FakeChildrenFrame()
children = webelem.get_child_frames(frame)
assert len(children) == 1
assert children[0] is frame
frame.childFrames.assert_called_once_with()
def test_one_level(self, stubs):
r"""Test get_child_frames with one level of children.
o parent
/ \
child1 o o child2
"""
child1 = stubs.FakeChildrenFrame()
child2 = stubs.FakeChildrenFrame()
parent = stubs.FakeChildrenFrame([child1, child2])
children = webelem.get_child_frames(parent)
assert len(children) == 3
assert children[0] is parent
assert children[1] is child1
assert children[2] is child2
parent.childFrames.assert_called_once_with()
child1.childFrames.assert_called_once_with()
child2.childFrames.assert_called_once_with()
def test_multiple_levels(self, stubs):
r"""Test get_child_frames with multiple levels of children.
o root
/ \
o o first
/\ /\
o o o o second
"""
second = [stubs.FakeChildrenFrame() for _ in range(4)]
first = [stubs.FakeChildrenFrame(second[0:2]),
stubs.FakeChildrenFrame(second[2:4])]
root = stubs.FakeChildrenFrame(first)
children = webelem.get_child_frames(root)
assert len(children) == 7
assert children[0] is root
for frame in [root] + first + second:
frame.childFrames.assert_called_once_with()
class TestIsEditable:
"""Tests for is_editable."""
@pytest.fixture
def stubbed_config(self, config_stub, monkeypatch):
"""Fixture to create a config stub with an input section."""
config_stub.data = {'input': {}}
monkeypatch.setattr('qutebrowser.browser.webelem.config', config_stub)
return config_stub
@pytest.mark.parametrize('tagname, attributes, editable', [
('input', {}, True),
('input', {'type': 'text'}, True),
('INPUT', {'TYPE': 'TEXT'}, True), # caps attributes/name
('input', {'type': 'email'}, True),
('input', {'type': 'url'}, True),
('input', {'type': 'tel'}, True),
('input', {'type': 'number'}, True),
('input', {'type': 'password'}, True),
('input', {'type': 'search'}, True),
('textarea', {}, True),
('input', {'type': 'button'}, False),
('input', {'type': 'checkbox'}, False),
('select', {}, False),
('input', {'disabled': None}, False),
('input', {'readonly': None}, False),
('textarea', {'disabled': None}, False),
('textarea', {'readonly': None}, False),
('foobar', {}, False),
('foobar', {'contenteditable': 'true'}, True),
('foobar', {'contenteditable': 'false'}, False),
('foobar', {'contenteditable': 'true', 'disabled': None}, False),
('foobar', {'contenteditable': 'true', 'readonly': None}, False),
('foobar', {'role': 'foobar'}, False),
('foobar', {'role': 'combobox'}, True),
('foobar', {'role': 'textbox'}, True),
('foobar', {'role': 'combobox', 'disabled': None}, False),
('foobar', {'role': 'combobox', 'readonly': None}, False),
])
def test_is_editable(self, tagname, attributes, editable):
elem = get_webelem(tagname=tagname, attributes=attributes)
assert elem.is_editable() == editable
@pytest.mark.parametrize('classes, editable', [
(None, False),
('foo-kix-bar', False),
('foo kix-foo', True),
('KIX-FOO', False),
('foo CodeMirror-foo', True),
])
def test_is_editable_div(self, classes, editable):
elem = get_webelem(tagname='div', classes=classes)
assert elem.is_editable() == editable
@pytest.mark.parametrize('setting, tagname, attributes, editable', [
(True, 'embed', {}, True),
(True, 'embed', {}, True),
(False, 'applet', {}, False),
(False, 'applet', {}, False),
(True, 'object', {'type': 'application/foo'}, True),
(False, 'object', {'type': 'application/foo'}, False),
(True, 'object', {'type': 'foo', 'classid': 'foo'}, True),
(False, 'object', {'type': 'foo', 'classid': 'foo'}, False),
(True, 'object', {}, False),
(True, 'object', {'type': 'image/gif'}, False),
])
def test_is_editable_plugin(self, stubbed_config, setting, tagname,
attributes, editable):
stubbed_config.data['input']['insert-mode-on-plugins'] = setting
elem = get_webelem(tagname=tagname, attributes=attributes)
assert elem.is_editable() == editable
|
haxwithaxe/qutebrowser
|
tests/unit/browser/test_webelem.py
|
Python
|
gpl-3.0
| 30,783 | 0 |
from __future__ import print_function, division
import matplotlib
import logging
from sys import stdout
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import (Net, RealApplianceSource,
BLSTMLayer, DimshuffleLayer,
BidirectionalRecurrentLayer)
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import MixtureDensityLayer
from neuralnilm.objectives import (scaled_cost, mdn_nll,
scaled_cost_ignore_inactive, ignore_inactive,
scaled_cost3)
from neuralnilm.plot import MDNPlotter, CentralOutputPlotter
from lasagne.nonlinearities import sigmoid, rectify, tanh
from lasagne.objectives import mse, binary_crossentropy
from lasagne.init import Uniform, Normal
from lasagne.layers import (LSTMLayer, DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer, RecurrentLayer)
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
#PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
PATH = "/data/dk3810/figures"
SAVE_PLOT_INTERVAL = 500
GRADIENT_STEPS = 10
source_dict = dict(
filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television'
# 'dish washer',
# ['washer dryer', 'washing machine']
],
on_power_thresholds=[5] * 5,
min_on_durations=[60, 60, 60, 1800, 1800],
min_off_durations=[12, 12, 12, 1800, 600],
window=("2013-06-01", "2014-07-01"),
seq_length=1024,
# random_window=64,
output_one_appliance=False,
boolean_targets=False,
train_buildings=[1],
validation_buildings=[1],
# skip_probability=0.8,
one_target_per_seq=False,
n_seq_per_batch=16,
subsample_target=4,
include_diff=False,
include_power=True,
# clip_appliance_power=True,
target_is_prediction=False,
# independently_center_inputs = True,
standardise_input=True,
unit_variance_targets=True,
input_padding=2,
lag=0
# classification=True
# reshape_target_to_2D=True
# input_stats={'mean': np.array([ 0.05526326], dtype=np.float32),
# 'std': np.array([ 0.12636775], dtype=np.float32)},
# target_stats={
# 'mean': np.array([ 0.04066789, 0.01881946,
# 0.24639061, 0.17608672, 0.10273963],
# dtype=np.float32),
# 'std': np.array([ 0.11449792, 0.07338708,
# 0.26608968, 0.33463112, 0.21250485],
# dtype=np.float32)}
)
N = 50
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
# loss_function=partial(ignore_inactive, loss_func=mdn_nll, seq_length=SEQ_LENGTH),
# loss_function=lambda x, t: mdn_nll(x, t).mean(),
loss_function=lambda x, t: mse(x, t).mean(),
# loss_function=lambda x, t: binary_crossentropy(x, t).mean(),
# loss_function=partial(scaled_cost, loss_func=mse),
# loss_function=ignore_inactive,
# loss_function=partial(scaled_cost3, ignore_inactive=False),
updates_func=momentum,
learning_rate=1e-1,
learning_rate_changes_by_iteration={
1000: 1e-2,
# 400: 1e-3,
# 800: 1e-4
# 500: 1e-3
# 4000: 1e-03,
# 6000: 5e-06,
# 7000: 1e-06
# 2000: 5e-06
# 3000: 1e-05
# 7000: 5e-06,
# 10000: 1e-06,
# 15000: 5e-07,
# 50000: 1e-07
},
do_save_activations=True
# auto_reshape=False,
# plotter=CentralOutputPlotter
# plotter=MDNPlotter
)
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
output_shape = source.output_shape_after_processing()
net_dict_copy['layers_config'] = [
{
'type': BLSTMLayer,
'num_units': 40,
'gradient_steps': GRADIENT_STEPS,
'peepholes': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': sigmoid
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BLSTMLayer,
'num_units': 80,
'gradient_steps': GRADIENT_STEPS,
'peepholes': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def main():
# EXPERIMENTS = list('abcdefghijklmnopqrstuvwxyz')
EXPERIMENTS = list('a')
for experiment in EXPERIMENTS:
full_exp_name = NAME + experiment
func_call = init_experiment(PATH, experiment, full_exp_name)
logger = logging.getLogger(full_exp_name)
try:
net = eval(func_call)
run_experiment(net, epochs=100000)
except KeyboardInterrupt:
logger.info("KeyboardInterrupt")
break
except Exception as exception:
logger.exception("Exception")
# raise
finally:
logging.shutdown()
if __name__ == "__main__":
main()
|
mmottahedi/neuralnilm_prototype
|
scripts/e362.py
|
Python
|
mit
| 5,901 | 0.009659 |
# position/views_admin.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from .controllers import generate_position_sorting_dates_for_election, positions_import_from_master_server, \
refresh_cached_position_info_for_election, \
refresh_positions_with_candidate_details_for_election, \
refresh_positions_with_contest_office_details_for_election, \
refresh_positions_with_contest_measure_details_for_election
from .models import ANY_STANCE, PositionEntered, PositionForFriends, PositionListManager, PERCENT_RATING
from admin_tools.views import redirect_to_sign_in_page
from candidate.models import CandidateCampaign, CandidateListManager, CandidateManager
from config.base import get_environment_variable
from django.urls import reverse
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.messages import get_messages
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.db.models import Q
from election.controllers import retrieve_election_id_list_by_year_list
from election.models import ElectionManager
from exception.models import handle_record_found_more_than_one_exception,\
handle_record_not_found_exception, handle_record_not_saved_exception
from measure.controllers import push_contest_measure_data_to_other_table_caches
from office.controllers import push_contest_office_data_to_other_table_caches
from office.models import ContestOfficeManager
from organization.models import OrganizationManager
from politician.models import PoliticianManager
from voter.models import voter_has_authority
import wevote_functions.admin
from wevote_functions.functions import convert_to_int, convert_integer_to_string_with_comma_for_thousands_separator, \
positive_value_exists, STATE_CODE_MAP
from django.http import HttpResponse
import json
UNKNOWN = 'U'
POSITIONS_SYNC_URL = get_environment_variable("POSITIONS_SYNC_URL") # positionsSyncOut
WE_VOTE_SERVER_ROOT_URL = get_environment_variable("WE_VOTE_SERVER_ROOT_URL")
logger = wevote_functions.admin.get_logger(__name__)
# This page does not need to be protected.
def positions_sync_out_view(request): # positionsSyncOut
google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
if not positive_value_exists(google_civic_election_id):
json_data = {
'success': False,
'status': 'POSITION_LIST_CANNOT_BE_RETURNED-ELECTION_ID_REQUIRED'
}
return HttpResponse(json.dumps(json_data), content_type='application/json')
stance_we_are_looking_for = ANY_STANCE
try:
# Only return public positions
position_list_query = PositionEntered.objects.order_by('date_entered')
# As of Aug 2018 we are no longer using PERCENT_RATING
position_list_query = position_list_query.exclude(stance__iexact=PERCENT_RATING)
position_list_query = position_list_query.filter(google_civic_election_id=google_civic_election_id)
# SUPPORT, STILL_DECIDING, INFORMATION_ONLY, NO_STANCE, OPPOSE, PERCENT_RATING
if stance_we_are_looking_for != ANY_STANCE:
# If we passed in the stance "ANY" it means we want to not filter down the list
position_list_query = position_list_query.filter(stance__iexact=stance_we_are_looking_for)
# convert datetime to str for date_entered and date_last_changed columns
position_list_query = position_list_query.extra(
select={'date_entered': "to_char(date_entered, 'YYYY-MM-DD HH24:MI:SS')"})
position_list_query = position_list_query.extra(
select={'date_last_changed': "to_char(date_last_changed, 'YYYY-MM-DD HH24:MI:SS')"})
position_list_dict = position_list_query.values(
'we_vote_id', 'ballot_item_display_name', 'ballot_item_image_url_https',
'ballot_item_twitter_handle', 'speaker_display_name',
'speaker_image_url_https', 'speaker_twitter_handle', 'date_entered',
'date_last_changed', 'organization_we_vote_id', 'voter_we_vote_id',
'public_figure_we_vote_id', 'google_civic_election_id', 'state_code',
'vote_smart_rating_id', 'vote_smart_time_span', 'vote_smart_rating',
'vote_smart_rating_name', 'contest_office_we_vote_id', 'race_office_level',
'candidate_campaign_we_vote_id', 'google_civic_candidate_name',
'politician_we_vote_id', 'contest_measure_we_vote_id', 'speaker_type', 'stance',
'position_ultimate_election_date', 'position_year',
'statement_text', 'statement_html', 'twitter_followers_count', 'more_info_url', 'from_scraper',
'organization_certified', 'volunteer_certified', 'voter_entering_position',
'tweet_source_id', 'twitter_user_entered_position', 'is_private_citizen')
if position_list_dict:
position_list_json = list(position_list_dict)
return HttpResponse(json.dumps(position_list_json), content_type='application/json')
except Exception as e:
handle_record_not_found_exception(e, logger=logger)
json_data = {
'success': False,
'status': 'POSITION_LIST_MISSING'
}
return HttpResponse(json.dumps(json_data), content_type='application/json')
@login_required
def positions_import_from_master_server_view(request):
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'admin'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
if WE_VOTE_SERVER_ROOT_URL in POSITIONS_SYNC_URL:
messages.add_message(request, messages.ERROR, "Cannot sync with Master We Vote Server -- "
"this is the Master We Vote Server.")
return HttpResponseRedirect(reverse('admin_tools:admin_home', args=()))
google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
state_code = request.GET.get('state_code', '')
if not positive_value_exists(google_civic_election_id):
messages.add_message(request, messages.INFO, 'Google civic election id is required for Positions import.')
return HttpResponseRedirect(reverse('admin_tools:sync_dashboard', args=()) + "?google_civic_election_id=" +
str(google_civic_election_id) + "&state_code=" + str(state_code))
results = positions_import_from_master_server(request, google_civic_election_id)
if not results['success']:
messages.add_message(request, messages.ERROR, results['status'])
else:
messages.add_message(request, messages.INFO, 'Positions import completed. '
'Saved: {saved}, Updated: {updated}, '
'Duplicates skipped: '
'{duplicates_removed}, '
'Not processed: {not_processed}'
''.format(saved=results['saved'],
updated=results['updated'],
duplicates_removed=results['duplicates_removed'],
not_processed=results['not_processed']))
return HttpResponseRedirect(reverse('admin_tools:sync_dashboard', args=()) + "?google_civic_election_id=" +
str(google_civic_election_id) + "&state_code=" + str(state_code))
def update_position_list_with_speaker_type(position_list):
organization_manager = OrganizationManager()
organization_dict = {}
for one_position in position_list:
position_change = False
speaker_type = UNKNOWN
twitter_followers_count = 0
if one_position.organization_we_vote_id in organization_dict:
organization = organization_dict[one_position.organization_we_vote_id]
speaker_type = organization.organization_type
twitter_followers_count = organization.twitter_followers_count
else:
organization_results = organization_manager.retrieve_organization_from_we_vote_id(
one_position.organization_we_vote_id)
if organization_results['organization_found']:
organization = organization_results['organization']
organization_dict[one_position.organization_we_vote_id] = organization
speaker_type = organization.organization_type
twitter_followers_count = organization.twitter_followers_count
if speaker_type != UNKNOWN:
one_position.speaker_type = speaker_type
position_change = True
if positive_value_exists(twitter_followers_count):
one_position.twitter_followers_count = twitter_followers_count
position_change = True
if position_change:
one_position.save()
return True
def update_position_list_with_contest_office_info(position_list):
candidate_manager = CandidateManager()
candidate_dict = {}
politician_manager = PoliticianManager()
politician_dict = {}
for one_position in position_list:
candidate_id = 0
contest_office_we_vote_id = ''
contest_office_id = 0
politician_we_vote_id = ''
politician_id = 0
position_change = False
if one_position.candidate_campaign_we_vote_id in candidate_dict:
candidate = candidate_dict[one_position.candidate_campaign_we_vote_id]
candidate_id = candidate.id
contest_office_we_vote_id = candidate.contest_office_we_vote_id
contest_office_id = candidate.contest_office_id
politician_we_vote_id = candidate.politician_we_vote_id
politician_id = candidate.politician_id
else:
results = candidate_manager.retrieve_candidate_from_we_vote_id(
one_position.candidate_campaign_we_vote_id)
if results['candidate_found']:
candidate = results['candidate']
candidate_dict[one_position.candidate_campaign_we_vote_id] = candidate
candidate_id = candidate.id
contest_office_we_vote_id = candidate.contest_office_we_vote_id
contest_office_id = candidate.contest_office_id
politician_we_vote_id = candidate.politician_we_vote_id
politician_id = candidate.politician_id
if positive_value_exists(candidate_id):
one_position.candidate_campaign_id = candidate_id
position_change = True
if positive_value_exists(contest_office_we_vote_id):
one_position.contest_office_we_vote_id = contest_office_we_vote_id
position_change = True
if positive_value_exists(contest_office_id):
one_position.contest_office_id = contest_office_id
position_change = True
if positive_value_exists(politician_we_vote_id):
one_position.politician_we_vote_id = politician_we_vote_id
position_change = True
if positive_value_exists(politician_id):
one_position.politician_id = politician_id
position_change = True
elif positive_value_exists(politician_we_vote_id):
# Look up the politician_id
if politician_we_vote_id in politician_dict:
politician = politician_dict[politician_we_vote_id]
one_position.politician_id = politician.id
position_change = True
else:
results = politician_manager.retrieve_politician(0, politician_we_vote_id)
if results['politician_found']:
politician = results['politician']
politician_dict[politician_we_vote_id] = politician
one_position.politician_id = politician.id
position_change = True
if position_change:
one_position.save()
return True
@login_required
def position_list_view(request):
"""
We actually don't want to see PositionForFriends entries in this view
:param request:
:return:
"""
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'partner_organization', 'political_data_viewer', 'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
messages_on_stage = get_messages(request)
google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
show_all_elections = positive_value_exists(request.GET.get('show_all_elections', False))
show_statistics = positive_value_exists(request.GET.get('show_statistics', False))
show_this_year_of_elections = convert_to_int(request.GET.get('show_this_year_of_elections', 0))
state_code = request.GET.get('state_code', '')
state_list = STATE_CODE_MAP
state_list_modified = {}
election_years_available = [2022, 2021, 2020, 2019, 2018, 2017, 2016]
position_search = request.GET.get('position_search', '')
candidate_list_manager = CandidateListManager()
election_manager = ElectionManager()
google_civic_election_id_list_for_dropdown = []
if positive_value_exists(show_this_year_of_elections):
election_year_list_to_show = [show_this_year_of_elections]
google_civic_election_id_list_for_dropdown = \
retrieve_election_id_list_by_year_list(election_year_list_to_show=election_year_list_to_show)
elif positive_value_exists(show_all_elections):
results = election_manager.retrieve_elections()
temp_election_list = results['election_list']
for one_election in temp_election_list:
google_civic_election_id_list_for_dropdown.append(one_election.google_civic_election_id)
else:
results = election_manager.retrieve_upcoming_elections()
temp_election_list = results['election_list']
# Make sure we always include the current election in the election_list, even if it is older
if positive_value_exists(google_civic_election_id):
this_election_found = False
for one_election in temp_election_list:
if convert_to_int(one_election.google_civic_election_id) == convert_to_int(google_civic_election_id):
this_election_found = True
break
if not this_election_found:
results = election_manager.retrieve_election(google_civic_election_id)
if results['election_found']:
one_election = results['election']
temp_election_list.append(one_election)
for one_election in temp_election_list:
google_civic_election_id_list_for_dropdown.append(one_election.google_civic_election_id)
if positive_value_exists(google_civic_election_id):
google_civic_election_id_list_for_display = [google_civic_election_id]
elif positive_value_exists(show_this_year_of_elections):
google_civic_election_id_list_for_display = google_civic_election_id_list_for_dropdown
elif positive_value_exists(show_all_elections):
google_civic_election_id_list_for_display = google_civic_election_id_list_for_dropdown
else:
google_civic_election_id_list_for_display = google_civic_election_id_list_for_dropdown
if len(google_civic_election_id_list_for_display) > 0:
results = candidate_list_manager.retrieve_candidate_we_vote_id_list_from_election_list(
google_civic_election_id_list=google_civic_election_id_list_for_display,
limit_to_this_state_code=state_code)
if not positive_value_exists(results['success']):
success = False
candidate_we_vote_id_list = results['candidate_we_vote_id_list']
else:
candidate_we_vote_id_list = []
public_position_list_clean_count = 0
friend_position_list_clean_count = 0
if positive_value_exists(show_statistics):
# Make sure all positions in this election have a speaker_type
if positive_value_exists(google_civic_election_id):
public_position_list_clean_query = PositionEntered.objects.all()
public_position_list_clean_query = public_position_list_clean_query.filter(
Q(google_civic_election_id__in=google_civic_election_id_list_for_display) |
Q(candidate_campaign_we_vote_id__in=candidate_we_vote_id_list))
public_position_list_clean_query = public_position_list_clean_query.filter(
speaker_type=UNKNOWN,
)
public_position_list_clean_count_query = public_position_list_clean_query
public_position_list_clean_count = public_position_list_clean_count_query.count()
public_position_list_clean = list(public_position_list_clean_count_query)
update_position_list_with_speaker_type(public_position_list_clean)
friend_position_list_clean_query = PositionForFriends.objects.all()
friend_position_list_clean_query = friend_position_list_clean_query.filter(
Q(google_civic_election_id__in=google_civic_election_id_list_for_display) |
Q(candidate_campaign_we_vote_id__in=candidate_we_vote_id_list))
friend_position_list_clean_query = friend_position_list_clean_query.filter(
speaker_type=UNKNOWN,
)
friend_position_list_clean_count_query = friend_position_list_clean_query
friend_position_list_clean_count = friend_position_list_clean_count_query.count()
friend_position_list_clean = list(friend_position_list_clean_count_query)
update_position_list_with_speaker_type(friend_position_list_clean)
public_position_list_candidate_clean_count = 0
friend_position_list_candidate_clean_count = 0
if positive_value_exists(show_statistics):
# Make sure all candidate-related positions in this election have contest_office information and politician info
if positive_value_exists(google_civic_election_id):
public_position_list_candidate_clean_query = PositionEntered.objects.all()
public_position_list_candidate_clean_query = public_position_list_candidate_clean_query.filter(
Q(google_civic_election_id__in=google_civic_election_id_list_for_display) |
Q(candidate_campaign_we_vote_id__in=candidate_we_vote_id_list))
public_position_list_candidate_clean_query = public_position_list_candidate_clean_query.exclude(
Q(candidate_campaign_we_vote_id__isnull=True) | Q(candidate_campaign_we_vote_id=""))
public_position_list_candidate_clean_count_query = public_position_list_candidate_clean_query
public_position_list_candidate_clean_count = public_position_list_candidate_clean_count_query.count()
public_position_list_candidate_clean = list(public_position_list_candidate_clean_count_query)
update_position_list_with_contest_office_info(public_position_list_candidate_clean)
friend_position_list_candidate_clean_query = PositionForFriends.objects.all()
friend_position_list_candidate_clean_query = friend_position_list_candidate_clean_query.filter(
Q(google_civic_election_id__in=google_civic_election_id_list_for_display) |
Q(candidate_campaign_we_vote_id__in=candidate_we_vote_id_list))
friend_position_list_candidate_clean_query = friend_position_list_candidate_clean_query.exclude(
Q(candidate_campaign_we_vote_id__isnull=True) | Q(candidate_campaign_we_vote_id=""))
friend_position_list_candidate_clean_count_query = friend_position_list_candidate_clean_query
friend_position_list_candidate_clean_count = friend_position_list_candidate_clean_count_query.count()
friend_position_list_candidate_clean = list(friend_position_list_candidate_clean_count_query)
update_position_list_with_contest_office_info(friend_position_list_candidate_clean)
# Publicly visible positions
public_position_list_query = PositionEntered.objects.order_by('-id') # This order_by is temp
public_position_list_query = public_position_list_query.exclude(stance__iexact=PERCENT_RATING)
public_position_list_query = public_position_list_query.filter(
Q(google_civic_election_id__in=google_civic_election_id_list_for_display) |
Q(candidate_campaign_we_vote_id__in=candidate_we_vote_id_list))
if positive_value_exists(state_code):
public_position_list_query = public_position_list_query.filter(state_code__iexact=state_code)
if positive_value_exists(position_search):
search_words = position_search.split()
for one_word in search_words:
filters = []
new_filter = Q(state_code__icontains=one_word)
filters.append(new_filter)
new_filter = Q(we_vote_id__iexact=one_word)
filters.append(new_filter)
new_filter = Q(candidate_campaign_we_vote_id__iexact=one_word)
filters.append(new_filter)
new_filter = Q(contest_measure_we_vote_id__iexact=one_word)
filters.append(new_filter)
new_filter = Q(contest_office_we_vote_id__iexact=one_word)
filters.append(new_filter)
new_filter = Q(organization_we_vote_id__iexact=one_word)
filters.append(new_filter)
new_filter = Q(voter_we_vote_id__iexact=one_word)
filters.append(new_filter)
new_filter = Q(google_civic_measure_title__icontains=one_word)
filters.append(new_filter)
new_filter = Q(speaker_display_name__icontains=one_word)
filters.append(new_filter)
new_filter = Q(ballot_item_display_name__icontains=one_word)
filters.append(new_filter)
if len(filters):
final_filters = filters.pop()
                # ...and "OR" the remaining items in the list (see the sketch after this view)
for item in filters:
final_filters |= item
public_position_list_query = public_position_list_query.filter(final_filters)
public_position_list_count = 0
public_position_list_comments_count = 0
if positive_value_exists(show_statistics):
public_position_list_count_query = public_position_list_query
public_position_list_count = public_position_list_count_query.count()
public_position_list_comments_count_query = public_position_list_query
public_position_list_comments_count_query = public_position_list_comments_count_query.exclude(
(Q(statement_text__isnull=True) | Q(statement_text__exact='')))
public_position_list_comments_count = public_position_list_comments_count_query.count()
public_position_list_query = public_position_list_query[:10]
public_position_list = list(public_position_list_query)
# Friends-only visible positions
friends_only_position_list_query = PositionForFriends.objects.order_by('-id') # This order_by is temp
# As of Aug 2018 we are no longer using PERCENT_RATING
friends_only_position_list_query = friends_only_position_list_query.exclude(stance__iexact=PERCENT_RATING)
friends_only_position_list_query = friends_only_position_list_query.filter(
Q(google_civic_election_id__in=google_civic_election_id_list_for_display) |
Q(candidate_campaign_we_vote_id__in=candidate_we_vote_id_list))
if positive_value_exists(state_code):
friends_only_position_list_query = friends_only_position_list_query.filter(state_code__iexact=state_code)
if positive_value_exists(position_search):
search_words = position_search.split()
for one_word in search_words:
filters = []
new_filter = Q(state_code__icontains=one_word)
filters.append(new_filter)
new_filter = Q(we_vote_id__iexact=one_word)
filters.append(new_filter)
new_filter = Q(candidate_campaign_we_vote_id__iexact=one_word)
filters.append(new_filter)
new_filter = Q(contest_measure_we_vote_id__iexact=one_word)
filters.append(new_filter)
# new_filter = Q(contest_office_name__icontains=one_word)
# filters.append(new_filter)
#
# new_filter = Q(contest_office_we_vote_id__iexact=one_word)
# filters.append(new_filter)
new_filter = Q(organization_we_vote_id__iexact=one_word)
filters.append(new_filter)
new_filter = Q(voter_we_vote_id__iexact=one_word)
filters.append(new_filter)
new_filter = Q(google_civic_measure_title__icontains=one_word)
filters.append(new_filter)
new_filter = Q(speaker_display_name__icontains=one_word)
filters.append(new_filter)
new_filter = Q(ballot_item_display_name__icontains=one_word)
filters.append(new_filter)
if len(filters):
final_filters = filters.pop()
# ...and "OR" the remaining items in the list
for item in filters:
final_filters |= item
friends_only_position_list_query = friends_only_position_list_query.filter(final_filters)
friends_only_position_list_count = 0
friends_only_position_list_comments_count = 0
if positive_value_exists(show_statistics):
friends_only_position_list_count_query = friends_only_position_list_query
friends_only_position_list_comments_count_query = friends_only_position_list_query
friends_only_position_list_count = friends_only_position_list_count_query.count()
friends_only_position_list_comments_count_query = friends_only_position_list_comments_count_query.exclude(
(Q(statement_text__isnull=True) | Q(statement_text__exact='')))
friends_only_position_list_comments_count = friends_only_position_list_comments_count_query.count()
friends_only_position_list_query = friends_only_position_list_query[:10]
friends_only_position_list = list(friends_only_position_list_query)
position_list = public_position_list + friends_only_position_list
if positive_value_exists(show_statistics):
public_position_list_count_string = \
convert_integer_to_string_with_comma_for_thousands_separator(public_position_list_count)
public_position_list_comments_count_string = \
convert_integer_to_string_with_comma_for_thousands_separator(public_position_list_comments_count)
friends_only_position_list_count_string = \
convert_integer_to_string_with_comma_for_thousands_separator(friends_only_position_list_count)
friends_only_position_list_comments_count_string = \
convert_integer_to_string_with_comma_for_thousands_separator(friends_only_position_list_comments_count)
messages.add_message(
request, messages.INFO,
public_position_list_count_string + ' public positions found ' +
'(' + public_position_list_comments_count_string + ' with commentary). ' +
friends_only_position_list_count_string + ' friends-only positions found ' +
'(' + friends_only_position_list_comments_count_string + ' with commentary). '
)
if public_position_list_clean_count or friend_position_list_clean_count:
public_position_list_clean_count_string = \
convert_integer_to_string_with_comma_for_thousands_separator(public_position_list_clean_count)
friend_position_list_clean_count_string = \
convert_integer_to_string_with_comma_for_thousands_separator(friend_position_list_clean_count)
messages.add_message(
request, messages.INFO,
public_position_list_clean_count_string + ' public positions updated with speaker_type. ' +
friend_position_list_clean_count_string + ' friends-only positions updated with speaker_type. '
)
if public_position_list_candidate_clean_count or friend_position_list_candidate_clean_count:
public_position_list_candidate_clean_count_string = \
convert_integer_to_string_with_comma_for_thousands_separator(public_position_list_candidate_clean_count)
friend_position_list_candidate_clean_count_string = \
convert_integer_to_string_with_comma_for_thousands_separator(friend_position_list_candidate_clean_count)
messages.add_message(
request, messages.INFO,
public_position_list_candidate_clean_count_string + ' public positions updated with office info. ' +
friend_position_list_candidate_clean_count_string + ' friends-only positions updated with office info. '
)
position_list_manager = PositionListManager()
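    # When statistics are requested, annotate each state name in the state filter
    # drop-down with its public/friends-only position counts for these elections.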
if len(google_civic_election_id_list_for_display) > 0:
for one_state_code, one_state_name in state_list.items():
state_name_modified = one_state_name
if positive_value_exists(show_statistics):
count_result = position_list_manager.retrieve_position_counts_for_election_and_state(
google_civic_election_id_list_for_display, one_state_code)
if positive_value_exists(count_result['public_count']) \
or positive_value_exists(count_result['friends_only_count']):
state_name_modified += " - " + str(count_result['public_count']) + \
'/' + str(count_result['friends_only_count'])
else:
state_name_modified += ""
state_list_modified[one_state_code] = state_name_modified
sorted_state_list = sorted(state_list_modified.items())
results = election_manager.retrieve_elections_by_google_civic_election_id_list(
google_civic_election_id_list_for_dropdown, read_only=True)
election_list = results['election_list']
template_values = {
'messages_on_stage': messages_on_stage,
'position_list': position_list,
'position_search': position_search,
'election_list': election_list,
'election_years_available': election_years_available,
'google_civic_election_id': google_civic_election_id,
'show_all_elections': show_all_elections,
'show_statistics': show_statistics,
'show_this_year_of_elections': show_this_year_of_elections,
'state_code': state_code,
'state_list': sorted_state_list,
}
return render(request, 'position/position_list.html', template_values)
@login_required
def position_new_view(request):
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
messages_on_stage = get_messages(request)
template_values = {
'messages_on_stage': messages_on_stage,
}
return render(request, 'position/position_edit.html', template_values)
@login_required
def position_edit_view(request, position_we_vote_id):
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
messages_on_stage = get_messages(request)
position_on_stage_found = False
try:
position_on_stage = PositionEntered.objects.get(we_vote_id=position_we_vote_id)
position_on_stage_found = True
except PositionEntered.MultipleObjectsReturned as e:
pass
except PositionEntered.DoesNotExist:
# This is fine, create new
pass
if position_on_stage_found:
template_values = {
'messages_on_stage': messages_on_stage,
'position': position_on_stage,
}
else:
template_values = {
'messages_on_stage': messages_on_stage,
}
return render(request, 'position/position_edit.html', template_values)
@login_required
def position_edit_process_view(request): # TODO DALE I don't think this is in use, but needs to be updated
"""
Process the new or edit position forms
:param request:
:return:
"""
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
position_we_vote_id = request.POST.get('position_we_vote_id')
position_name = request.POST['position_name']
twitter_handle = request.POST['twitter_handle']
position_website = request.POST['position_website']
# Check to see if this position is already being used anywhere
position_on_stage_found = False
try:
position_query = PositionEntered.objects.filter(we_vote_id=position_we_vote_id)
if len(position_query):
position_on_stage = position_query[0]
position_on_stage_found = True
except Exception as e:
handle_record_not_found_exception(e, logger=logger)
try:
if position_on_stage_found:
# Update
position_on_stage.position_name = position_name
position_on_stage.twitter_handle = twitter_handle
position_on_stage.position_website = position_website
position_on_stage.save()
messages.add_message(request, messages.INFO, 'PositionEntered updated.')
else:
# Create new
position_on_stage = CandidateCampaign(
position_name=position_name,
twitter_handle=twitter_handle,
position_website=position_website,
)
position_on_stage.save()
messages.add_message(request, messages.INFO, 'New position saved.')
except Exception as e:
handle_record_not_saved_exception(e, logger=logger)
messages.add_message(request, messages.ERROR, 'Could not save position.')
return HttpResponseRedirect(reverse('position:position_list', args=()))
@login_required
def position_summary_view(request, position_we_vote_id):
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'partner_organization', 'political_data_viewer', 'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
messages_on_stage = get_messages(request)
position_on_stage_found = False
position_on_stage = PositionEntered()
try:
position_on_stage = PositionEntered.objects.get(we_vote_id=position_we_vote_id)
position_on_stage_found = True
except PositionEntered.MultipleObjectsReturned as e:
handle_record_found_more_than_one_exception(e, logger=logger)
except PositionEntered.DoesNotExist:
# This is fine, create new
pass
if position_on_stage_found:
template_values = {
'messages_on_stage': messages_on_stage,
'position': position_on_stage,
}
else:
template_values = {
'messages_on_stage': messages_on_stage,
}
return render(request, 'position/position_summary.html', template_values)
@login_required
def refresh_cached_position_info_for_election_view(request):
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
state_code = request.GET.get('state_code', '')
results = generate_position_sorting_dates_for_election(
google_civic_election_id=google_civic_election_id)
messages.add_message(
request, messages.INFO,
'candidate_to_office_link_update_count: {candidate_to_office_link_update_count}, '
'candidate_ultimate_update_count: {candidate_ultimate_update_count}, '
'candidate_year_update_count: {candidate_year_update_count}, '
'contest_measure_update_count: {contest_measure_update_count}, '
'friends_position_year_candidate_update_count: {friends_position_year_candidate_update_count}, '
'friends_position_year_measure_update_count: {friends_position_year_measure_update_count}, '
'friends_ultimate_candidate_update_count: {friends_ultimate_candidate_update_count}, '
'friends_ultimate_measure_update_count: {friends_ultimate_measure_update_count}, '
'measure_ultimate_update_count: {measure_ultimate_update_count}, '
'measure_year_update_count: {measure_year_update_count}, '
'public_position_year_candidate_update_count: {public_position_year_candidate_update_count}, '
'public_position_year_measure_update_count: {public_position_year_measure_update_count}, '
'public_ultimate_candidate_update_count: {public_ultimate_candidate_update_count}, '
'public_ultimate_measure_update_count: {public_ultimate_measure_update_count}, '
'status: {status}'
''.format(
candidate_to_office_link_update_count=results['candidate_to_office_link_update_count'],
candidate_ultimate_update_count=results['candidate_ultimate_update_count'],
candidate_year_update_count=results['candidate_year_update_count'],
contest_measure_update_count=results['contest_measure_update_count'],
friends_position_year_candidate_update_count=results['friends_position_year_candidate_update_count'],
friends_position_year_measure_update_count=results['friends_position_year_measure_update_count'],
friends_ultimate_candidate_update_count=results['friends_ultimate_candidate_update_count'],
friends_ultimate_measure_update_count=results['friends_ultimate_measure_update_count'],
measure_ultimate_update_count=results['measure_ultimate_update_count'],
measure_year_update_count=results['measure_year_update_count'],
public_position_year_candidate_update_count=results['public_position_year_candidate_update_count'],
public_position_year_measure_update_count=results['public_position_year_measure_update_count'],
public_ultimate_candidate_update_count=results['public_ultimate_candidate_update_count'],
public_ultimate_measure_update_count=results['public_ultimate_measure_update_count'],
status=results['status']))
# September 2020: Dale commenting this out temporarily. It needs a testing run through, specifically around
# how we are treating google_civic_election_id for positions about candidates.
# results = refresh_cached_position_info_for_election(
# google_civic_election_id=google_civic_election_id,
# state_code=state_code)
# public_positions_updated = results['public_positions_updated']
# friends_only_positions_updated = results['friends_only_positions_updated']
#
# messages.add_message(request, messages.INFO,
# 'public_positions_updated: {public_positions_updated}, '
# 'friends_only_positions_updated: {friends_only_positions_updated}'
# ''.format(public_positions_updated=public_positions_updated,
# friends_only_positions_updated=friends_only_positions_updated))
return HttpResponseRedirect(reverse('position:position_list', args=()) +
'?google_civic_election_id=' + str(google_civic_election_id) +
'&state_code=' + str(state_code))
@login_required
def refresh_positions_with_candidate_details_for_election_view(request):
"""
Refresh Positions with candidate details
:param request:
:return:
"""
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
state_code = request.GET.get('state_code', '')
results = refresh_positions_with_candidate_details_for_election(google_civic_election_id=google_civic_election_id,
state_code=state_code)
if not results['success']:
messages.add_message(request, messages.INFO, results['status'])
else:
positions_updated_count = results['positions_updated_count']
messages.add_message(request, messages.INFO,
"Social media retrieved. Positions refreshed: {update_all_positions_results_count},"
.format(update_all_positions_results_count=positions_updated_count))
return HttpResponseRedirect(reverse('candidate:candidate_list', args=()) +
'?google_civic_election_id=' + str(google_civic_election_id) +
'&state_code=' + str(state_code))
@login_required
def refresh_positions_with_contest_office_details_for_election_view(request):
"""
Refresh positions with contest office details
:param request:
:return:
"""
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
state_code = request.GET.get('state_code', '')
contest_office_id = request.GET.get('office_id', 0)
contest_office_we_vote_id = request.GET.get('office_we_vote_id', '')
if positive_value_exists(contest_office_id):
results = push_contest_office_data_to_other_table_caches(contest_office_id)
elif positive_value_exists(contest_office_we_vote_id):
results = push_contest_office_data_to_other_table_caches(contest_office_we_vote_id)
elif positive_value_exists(google_civic_election_id):
results = refresh_positions_with_contest_office_details_for_election(
google_civic_election_id=google_civic_election_id, state_code=state_code)
else:
results = refresh_positions_with_contest_office_details_for_election(
google_civic_election_id=google_civic_election_id, state_code=state_code)
if not results['success']:
messages.add_message(request, messages.INFO, results['status'])
else:
positions_updated_count = results['positions_updated_count']
messages.add_message(request, messages.INFO,
"Social media retrieved. Positions refreshed: {update_all_positions_results_count},"
.format(update_all_positions_results_count=positions_updated_count))
if positive_value_exists(google_civic_election_id):
return HttpResponseRedirect(reverse('office:office_list', args=()) +
'?google_civic_election_id=' + str(google_civic_election_id) +
'&state_code=' + str(state_code))
elif positive_value_exists(contest_office_id):
return HttpResponseRedirect(reverse('office:office_summary', args=(contest_office_id,)))
else:
        return HttpResponseRedirect(reverse('office:office_list', args=()) +
                                    '?google_civic_election_id=' + str(google_civic_election_id) +
                                    '&state_code=' + str(state_code))
@login_required
def refresh_positions_with_contest_measure_details_for_election_view(request):
"""
Refresh positions with contest measure details
:param request:
:return:
"""
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
state_code = request.GET.get('state_code', '')
contest_measure_id = request.GET.get('measure_id', 0)
contest_measure_we_vote_id = request.GET.get('measure_we_vote_id', '')
if positive_value_exists(contest_measure_id):
results = push_contest_measure_data_to_other_table_caches(contest_measure_id)
elif positive_value_exists(contest_measure_we_vote_id):
results = push_contest_measure_data_to_other_table_caches(contest_measure_we_vote_id)
elif positive_value_exists(google_civic_election_id):
results = refresh_positions_with_contest_measure_details_for_election(
google_civic_election_id=google_civic_election_id, state_code=state_code)
else:
results = refresh_positions_with_contest_measure_details_for_election(
google_civic_election_id=google_civic_election_id, state_code=state_code)
if not results['success']:
messages.add_message(request, messages.INFO, results['status'])
else:
positions_updated_count = results['positions_updated_count']
messages.add_message(request, messages.INFO,
"Social media retrieved. Positions refreshed: {update_all_positions_results_count},"
.format(update_all_positions_results_count=positions_updated_count))
if positive_value_exists(google_civic_election_id):
return HttpResponseRedirect(reverse('measure:measure_list', args=()) +
'?google_civic_election_id=' + str(google_civic_election_id) +
'&state_code=' + str(state_code))
elif positive_value_exists(contest_measure_id):
return HttpResponseRedirect(reverse('measure:measure_summary', args=(contest_measure_id,)))
else:
        return HttpResponseRedirect(reverse('measure:measure_list', args=()) +
                                    '?google_civic_election_id=' + str(google_civic_election_id) +
                                    '&state_code=' + str(state_code))
@login_required
def relink_candidates_measures_view(request):
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
messages.add_message(request, messages.INFO, 'TO BE BUILT: relink_candidates_measures_view')
return HttpResponseRedirect(reverse('position:position_list', args=()))
@login_required
def position_delete_process_view(request):
"""
Delete a position
:param request:
:return:
"""
# admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
authority_required = {'verified_volunteer'}
if not voter_has_authority(request, authority_required):
return redirect_to_sign_in_page(request, authority_required)
position_we_vote_id = request.GET.get('position_we_vote_id', '')
google_civic_election_id = request.GET.get('google_civic_election_id', 0)
# Retrieve this position
position_on_stage_found = False
position_on_stage = PositionEntered()
organization_id = 0
try:
position_query = PositionEntered.objects.filter(we_vote_id=position_we_vote_id)
if len(position_query):
position_on_stage = position_query[0]
organization_id = position_on_stage.organization_id
position_on_stage_found = True
except Exception as e:
messages.add_message(request, messages.ERROR, 'Could not find position -- exception.')
if not position_on_stage_found:
messages.add_message(request, messages.ERROR, 'Could not find position.')
return HttpResponseRedirect(reverse('position:position_list', args=()) +
"?google_civic_election_id=" + str(google_civic_election_id))
try:
if position_on_stage_found:
# Delete
position_on_stage.delete()
messages.add_message(request, messages.INFO, 'Position deleted.')
if positive_value_exists(organization_id):
return HttpResponseRedirect(reverse('organization:organization_position_list',
args=([organization_id])) +
"?google_civic_election_id=" + str(google_civic_election_id))
else:
messages.add_message(request, messages.ERROR, 'Could not find position.')
except Exception as e:
handle_record_not_saved_exception(e, logger=logger)
messages.add_message(request, messages.ERROR, 'Could not save position.')
return HttpResponseRedirect(reverse('position:position_list', args=()) +
"?google_civic_election_id=" + str(google_civic_election_id))
|
wevote/WeVoteServer
|
position/views_admin.py
|
Python
|
mit
| 50,531 | 0.004552 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
operations = [
migrations.CreateModel(
name='Entry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
],
),
]
|
tbeadle/django
|
tests/migrations/test_auto_now_add/0001_initial.py
|
Python
|
bsd-3-clause
| 474 | 0.00211 |
import shutil
import json
from rest_framework import routers, serializers, viewsets, parsers, filters
from rest_framework.views import APIView
from rest_framework.exceptions import APIException
from rest_framework.response import Response
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile, InMemoryUploadedFile
from django.core.validators import URLValidator
from base.models import Project, SeedsList
from apps.crawl_space.models import Crawl, CrawlModel
from elasticsearch import Elasticsearch
from elasticsearch.exceptions import ConnectionError, NotFoundError
class DataWakeIndexUnavailable(APIException):
status_code = 404
default_detail = "The server failed to find the DataWake index in elasticsearch."
class SlugModelSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, read_only=True)
class ProjectSerializer(SlugModelSerializer):
url = serializers.CharField(read_only=True)
class Meta:
model = Project
class CrawlSerializer(SlugModelSerializer):
# Expose these fields, but only as read only.
id = serializers.ReadOnlyField()
seeds_list = serializers.FileField(read_only=True, use_url=False)
status = serializers.CharField(read_only=True)
config = serializers.CharField(read_only=True)
index_name = serializers.CharField(read_only=True)
url = serializers.CharField(read_only=True)
pages_crawled = serializers.IntegerField(read_only=True)
harvest_rate = serializers.FloatField(read_only=True)
location = serializers.CharField(read_only=True)
def validate_crawler(self, value):
if value == "ache" and not self.initial_data.get("crawl_model"):
raise serializers.ValidationError("Ache crawls require a Crawl Model.")
return value
class Meta:
model = Crawl
class CrawlModelSerializer(SlugModelSerializer):
model = serializers.FileField(use_url=False)
features = serializers.FileField(use_url=False)
url = serializers.CharField(read_only=True)
def validate_model(self, value):
if value.name != "pageclassifier.model":
raise serializers.ValidationError("File must be named pageclassifier.model")
return value
def validate_features(self, value):
if value.name != "pageclassifier.features":
raise serializers.ValidationError("File must be named pageclassifier.features")
return value
class Meta:
model = CrawlModel
class SeedsListSerializer(SlugModelSerializer):
url = serializers.CharField(read_only=True)
file_string = serializers.CharField(read_only=True)
def validate_seeds(self, value):
try:
seeds = json.loads(value)
except ValueError:
raise serializers.ValidationError("Seeds must be a JSON encoded string.")
if type(seeds) != list:
raise serializers.ValidationError("Seeds must be an array of URLs.")
validator = URLValidator()
errors = []
for index, x in enumerate(seeds):
try:
validator(x)
except ValidationError:
# Add index to make it easier for CodeMirror to select the right
# line.
errors.append({index: x})
if errors:
errors.insert(0, "The seeds list contains invalid urls.")
errors.append({"list": "\n".join(seeds)})
raise serializers.ValidationError(errors)
return value
class Meta:
model = SeedsList
"""
Viewset Classes.
Filtering is provided by django-filter.
Backend settings are in common_settings.py under REST_FRAMEWORK. Setting is:
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',)
This backend is supplied to every viewset by default. Alter query fields by adding
or removing items from filter_fields
"""
class ProjectViewSet(viewsets.ModelViewSet):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
filter_fields = ('id', 'slug', 'name',)
class CrawlViewSet(viewsets.ModelViewSet):
queryset = Crawl.objects.all()
serializer_class = CrawlSerializer
filter_fields = ('id', 'slug', 'name', 'description', 'status', 'project',
'crawl_model', 'crawler', 'seeds_object')
class CrawlModelViewSet(viewsets.ModelViewSet):
queryset = CrawlModel.objects.all()
serializer_class = CrawlModelSerializer
filter_fields = ('id', 'slug', 'name', 'project',)
def destroy(self, request, pk=None):
model = CrawlModel.objects.get(pk=pk)
crawls = Crawl.objects.all().filter(crawl_model=pk)
if crawls:
message = "The Crawl Model is being used by the following Crawls and cannot be deleted: "
raise serializers.ValidationError({
"message": message,
"errors": [x.name for x in crawls],
})
else:
shutil.rmtree(model.get_model_path())
return super(CrawlModelViewSet, self).destroy(request)
class SeedsListViewSet(viewsets.ModelViewSet):
queryset = SeedsList.objects.all()
serializer_class = SeedsListSerializer
filter_fields = ('id', 'name', 'seeds', 'slug',)
def create(self, request):
# If a seeds file or a textseeds exists, then use those. Otherwise, look
# for a string in request.data["seeds"]
seeds_list = request.FILES.get("seeds", False)
textseeds = request.data.get("textseeds", False)
if seeds_list:
request.data["seeds"] = json.dumps(map(str.strip, seeds_list.readlines()))
elif textseeds:
if type(textseeds) == unicode:
request.data["seeds"] = json.dumps(map(unicode.strip, textseeds.split("\n")))
# Get rid of carriage return character.
elif type(textseeds) == str:
request.data["seeds"] = json.dumps(map(str.strip, textseeds.split("\n")))
return super(SeedsListViewSet, self).create(request)
def destroy(self, request, pk=None):
seeds = SeedsList.objects.get(pk=pk)
crawls = Crawl.objects.all().filter(seeds_object=pk)
if crawls:
message = "The Seeds List is being used by the following Crawls and cannot be deleted: "
raise serializers.ValidationError({
"message": message,
"errors": [x.name for x in crawls],
})
else:
return super(SeedsListViewSet, self).destroy(request)
class DataWakeView(APIView):
index = "datawake"
es = Elasticsearch()
def create_trails(self, trail_ids):
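        # For each trail id, fetch up to 1000 matching documents from the datawake
        # index, collect their URLs, and use the first hit's ES document type as
        # the trail's domain name.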
trails = []
for x in trail_ids:
url_search = self.es.search(index=self.index, q="trail_id:%d" % x,
fields="url", size=1000)["hits"]["hits"]
new_trail = {"trail_id": x, "urls": [], "domain_name":url_search[0]["_type"]}
for y in url_search:
new_trail["urls"].append(y["fields"]["url"][0])
new_trail.update({"urls_string": "\n".join(new_trail["urls"])})
trails.append(new_trail)
return trails
def get(self, request, format=None):
# TODO: catch all exception. At the very least, deal with 404 not found and
# connection refused exceptions.
# Temporarily remove exceptions for debugging.
try:
trail_ids = [x["key"] for x in self.es.search(index=self.index, body={
"aggs" : {
"trail_id" : {
"terms" : { "field" : "trail_id" }
}
}
})["aggregations"]["trail_id"]["buckets"]]
response = self.create_trails(trail_ids)
except ConnectionError as e:
raise OSError("Failed to connect to local elasticsearch instance.")
except NotFoundError:
raise DataWakeIndexUnavailable
return Response(response)
router = routers.DefaultRouter()
router.register(r"projects", ProjectViewSet)
router.register(r"crawls", CrawlViewSet)
router.register(r"crawl_models", CrawlModelViewSet)
router.register(r"seeds_list", SeedsListViewSet)
|
memex-explorer/memex-explorer
|
source/memex/rest.py
|
Python
|
bsd-2-clause
| 8,218 | 0.003407 |
from buildbot.plugins import worker
infosun = {
"polyjit-ci": {
"host": "polyjit-ci",
"password": None,
"properties": {
"uchroot_image_path": "/data/polyjit/xenial-image/",
"uchroot_binary": "/data/polyjit/erlent/build/uchroot",
"can_build_llvm_debug": False
},
},
"debussy": {
"host": "debussy",
"password": None,
"properties": {
"llvm_prefix": "/scratch/pjtest/llvm-03-11-2017_5.0",
"llvm_libs": "/scratch/pjtest/llvm-03-11-2017_5.0/lib",
"cc": "/scratch/pjtest/llvm-03-11-2017_5.0/bin/clang",
"cxx": "/scratch/pjtest/llvm-03-11-2017_5.0/bin/clang++",
"uchroot_image_path": "/local/hdd/buildbot-polyjit/disco-image/",
"uchroot_binary": "/scratch/pjtest/erlent/build/uchroot",
"testinputs": "/scratch/pjtest/pprof-test-data",
"cmake_prefix": "/scratch/pjtest/opt/cmake",
"has_munged": True,
"can_build_llvm_debug": True
}
},
"ligeti": {
"host": "ligeti",
"password": None,
"properties": {
"llvm_prefix": "/scratch/pjtest/llvm-03-11-2017_5.0",
"llvm_libs": "/scratch/pjtest/llvm-03-11-2017_5.0/lib",
"cc": "/scratch/pjtest/llvm-03-11-2017_5.0/bin/clang",
"cxx": "/scratch/pjtest/llvm-03-11-2017_5.0/bin/clang++",
"uchroot_image_path": "/local/hdd/buildbot-polyjit/disco-image/",
"uchroot_binary": "/scratch/pjtest/erlent/build/uchroot",
"testinputs": "/scratch/pjtest/pprof-test-data",
"cmake_prefix": "/scratch/pjtest/opt/cmake",
"has_munged": True,
"can_build_llvm_debug": True
}
}
}
def get_hostlist(slave_dict, predicate = None):
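    # Return the "host" entries of all workers in slave_dict whose config satisfies
    # the optional predicate (every worker when no predicate is given).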
if not predicate:
predicate = lambda x : True
hosts = []
for k in slave_dict:
if predicate(slave_dict[k]):
hosts.append(slave_dict[k]["host"])
return hosts
def configure(c):
for k in infosun:
slave = infosun[k]
props = {}
if "properties" in slave:
props = slave["properties"]
c['workers'].append(worker.Worker(slave["host"], slave[
"password"], properties = props))
|
PolyJIT/buildbot
|
polyjit/buildbot/slaves.py
|
Python
|
mit
| 2,304 | 0.003038 |
# This file is part of MyPaint.
# Copyright (C) 2014 by Andrew Chadwick <a.t.chadwick@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Modes for manipulating the view"""
## Imports
import gui.mode
import math
from gettext import gettext as _
## Class defs
class PanViewMode (gui.mode.OneshotDragMode):
"""A oneshot mode for translating the viewport by dragging."""
ACTION_NAME = 'PanViewMode'
pointer_behavior = gui.mode.Behavior.CHANGE_VIEW
scroll_behavior = gui.mode.Behavior.NONE # XXX grabs ptr, so no CHANGE_VIEW
supports_button_switching = False
@classmethod
def get_name(cls):
return _(u"Scroll View")
def get_usage(self):
return _(u"Drag the canvas view")
@property
def inactive_cursor(self):
return self.doc.app.cursors.get_action_cursor(
self.ACTION_NAME)
@property
def active_cursor(self):
return self.doc.app.cursors.get_action_cursor(
self.ACTION_NAME)
def drag_update_cb(self, tdw, event, dx, dy):
tdw.scroll(-dx, -dy)
self.doc.notify_view_changed()
super(PanViewMode, self).drag_update_cb(tdw, event, dx, dy)
class ZoomViewMode (gui.mode.OneshotDragMode):
"""A oneshot mode for zooming the viewport by dragging."""
ACTION_NAME = 'ZoomViewMode'
pointer_behavior = gui.mode.Behavior.CHANGE_VIEW
scroll_behavior = gui.mode.Behavior.NONE # XXX grabs ptr, so no CHANGE_VIEW
supports_button_switching = False
@classmethod
def get_name(cls):
return _(u"Zoom View")
def get_usage(self):
return _(u"Zoom the canvas view")
@property
def active_cursor(self):
return self.doc.app.cursors.get_action_cursor(
self.ACTION_NAME)
@property
def inactive_cursor(self):
return self.doc.app.cursors.get_action_cursor(
self.ACTION_NAME)
def drag_update_cb(self, tdw, event, dx, dy):
tdw.scroll(-dx, -dy)
tdw.zoom(math.exp(dy/100.0), center=(event.x, event.y))
# TODO: Let modifiers constrain the zoom amount to
# the defined steps.
self.doc.notify_view_changed()
super(ZoomViewMode, self).drag_update_cb(tdw, event, dx, dy)
class RotateViewMode (gui.mode.OneshotDragMode):
"""A oneshot mode for rotating the viewport by dragging."""
ACTION_NAME = 'RotateViewMode'
pointer_behavior = gui.mode.Behavior.CHANGE_VIEW
scroll_behavior = gui.mode.Behavior.NONE # XXX grabs ptr, so no CHANGE_VIEW
supports_button_switching = False
@classmethod
def get_name(cls):
return _(u"Rotate View")
    def get_usage(self):
return _(u"Rotate the canvas view")
@property
def active_cursor(self):
return self.doc.app.cursors.get_action_cursor(
self.ACTION_NAME)
@property
def inactive_cursor(self):
return self.doc.app.cursors.get_action_cursor(
self.ACTION_NAME)
def drag_update_cb(self, tdw, event, dx, dy):
# calculate angular velocity from the rotation center
x, y = event.x, event.y
cx, cy = tdw.get_center()
x, y = x-cx, y-cy
phi2 = math.atan2(y, x)
x, y = x-dx, y-dy
phi1 = math.atan2(y, x)
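        # phi2 is the pointer's current angle about the canvas center, phi1 its angle
        # one drag step earlier; rotating by their difference keeps the canvas
        # following the pointer around the center.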
tdw.rotate(phi2-phi1, center=(cx, cy))
self.doc.notify_view_changed()
# TODO: Allow modifiers to constrain the transformation angle
# to 22.5 degree steps.
super(RotateViewMode, self).drag_update_cb(tdw, event, dx, dy)
|
glenux/contrib-mypaint
|
gui/viewmanip.py
|
Python
|
gpl-2.0
| 3,703 | 0.00135 |
import app_info
import loggers
import plist_editor
__version__ = '1.9.1'
__all__ = ['app_info', 'fs_analysis', 'loggers', 'plist_editor', 'slack']
# This provides the ability to get the version from the command line.
# Do something like:
# $ python -m management_tools.__init__
if __name__ == "__main__":
print("Management Tools, version: {}".format(__version__))
|
univ-of-utah-marriott-library-apple/management_tools
|
management_tools/__init__.py
|
Python
|
mit
| 376 | 0.00266 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
d = {'Michael':95, 'Henry':96, 'Emily':97}
d['Lucy'] = 94
d['Lucy'] = 91
key = (1, 2, 3)
d[key] = 98
print(d['Michael'])
d.pop('Michael')
print(d)
print('Tom' in d)
print(d.get('Tom'))
print(d.get('Tom', -1))
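# get() returns None for a missing key unless a default value (here -1) is supplied.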
s1 = set([1, 2, 2, 3, 3])
s2 = set([2, 3, 4])
s3 = set((1, 2))
s1.add(4)
s1.add(4)
s1.remove(4)
print(s1)
print(s2)
print(s1 & s2)
print(s1 | s2)
print(s3)
|
henryneu/Python
|
sample/dict.py
|
Python
|
apache-2.0
| 415 | 0.007229 |
import heapq
import sys
filename = "Median.txt"
lst = [int(l) for l in open(filename)]
H_low = []
H_high = []
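# H_low is a max-heap (values stored negated) holding the smaller half of the numbers
# seen so far; H_high is a min-heap holding the larger half. After rebalancing,
# -H_low[0] is the running (lower) median of each prefix.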
sum = 0
for num in lst:
if len(H_low) > 0:
if num > -H_low[0]:
heapq.heappush(H_high, num)
else:
heapq.heappush(H_low, -num)
else:
heapq.heappush(H_low, -num)
if len(H_low) > len(H_high) + 1:
heapq.heappush(H_high, -(heapq.heappop(H_low)))
elif len(H_high) > len(H_low):
heapq.heappush(H_low, -(heapq.heappop(H_high)))
sum += -H_low[0]
print sum % 10000
|
xala3pa/my-way-to-algorithms
|
graphs/hash/python/median.py
|
Python
|
mit
| 489 | 0.05317 |
#!/usr/bin/env python
#
# Copyright 2010 Andrei <vish@gravitysoft.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
from __future__ import with_statement
import sys, threading, datetime
#
# Data base module to centralize working with data base connections,
# support connection pools and have a possibility to track connection
# leakage.
#
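# Example usage (illustrative sketch; the connection keyword arguments are whatever
# the loaded DB-API module's connect() expects, and the values below are assumptions):
#   db = create_db('main', {'host': 'localhost', 'user': 'app', 'passwd': 'secret',
#                           'db': 'appdb', 'db.poolsize': 5, 'db.poollatency': 1})
#   con = get_dbconnection('main')
#   cur = con.cursor()
#   cur.execute("SELECT 1")
#   cur.close()
#   con.close()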
def create_db(id, parameters):
return Database(id, parameters)
def remove_db(id):
pass
def get_db(id):
return Database.getDatabase(id)
def get_dbconnection(id):
return Database(id).getConnection()
class Database(object):
DS_LOCK = threading.RLock()
SOURCES = {}
class CursorWrapper(object):
def __init__(self, cursor, owner):
assert cursor and owner
self._cursor = cursor
self._owner = owner
@property
def description(self):
return self._cursor.description
@property
def rowcount(self):
return self._cursor.rowcount
def close(self): self._owner._closeMe(self)
def callproc(self, procname, parameters=None):
return self._cursor.callproc(procname, parameters)
def execute(self, operation, parameters=None):
return self._cursor.execute(operation, parameters)
def executemany(self, operation, seq_of_parameters):
return self._cursor.executemany(operation, seq_of_parameters)
def fetchone(self):
return self._cursor.fetchone()
def fetchmany(self, size=None):
s = self.arraysize
if size : s = size
return self._cursor.fetchmany(s)
def fetchall(self):
return self._cursor.fetchall()
def nextset(self):
return self._cursor.nextset()
@property
def arraysize(self):
return self._cursor.arraysize
def setinputsizes(self, sizes):
return self._cursor.setinputsizes(sizes)
def setoutputsize(self, size, column=None):
return self._cursor.setoutputsize(size, column)
def _orig_cursor(self):
return self._cursor
class ConnectionWrapper(object):
def __init__(self, connection, owner):
assert connection and owner
self._connection = connection
self._creation_time = datetime.datetime.now()
self._owner = owner
self._cursors = []
def creationTime(self): return self._creation_time
def close(self):
with Database.DS_LOCK:
self._owner._closeMe(self)
def commit(self):
return self._connection.commit()
def rollback(self):
return self._connection.rollback()
def cursor(self):
with Database.DS_LOCK:
c = Database.CursorWrapper(self._connection.cursor(), self)
self._cursors.append(c)
return c
def _closeMe(self, cur):
with Database.DS_LOCK:
try: i = self._cursors.index(cur)
except ValueError: i = -1
if i >= 0:
self._cursors.pop(i)
cur._orig_cursor().close()
def cleanup(self):
with Database.DS_LOCK:
for cur in self._cursors:
try: cur._orig_cursor().close()
except: pass
self._cursors = []
def __str__(self):
return "'%s' connection wrapper, created: " % self._owner._id + str(self._creation_time)
def __enter__(self): pass
def __exit__(self, type, value, traceback):
self.close()
def _orig_conn(self):
return self._connection
@classmethod
def createDatabase(cls, id, parameters):
assert id and parameters
with cls.DS_LOCK:
if id in cls.SOURCES:
raise BaseException("Data base '%s' already exists." % id)
return Database(id, parameters)
@classmethod
def getDatabase(cls, id):
assert id
with cls.DS_LOCK: return cls.SOURCES[id]
@classmethod
def hasDatabase(cls, id):
assert id
with cls.DS_LOCK: return id in cls.SOURCES
def init(self, id, parameters):
global DBMODULE
self._poolSize = 1
key = 'db.poolsize'
if key in parameters:
self._poolSize = int(parameters[key])
del parameters[key]
self._poolLatency = self._poolSize / 5
key = 'db.poollatency'
if key in parameters:
self._poolLatency = int(parameters[key])
del parameters[key]
assert self._poolLatency >= 0 and self._poolSize >= 0
if self._poolLatency > self._poolSize:
raise BaseException("DB '%s' pool latency cannot be less than max pool size." % id)
self._parameters = parameters
self._id = id
self._pool = []
self._module = DBMODULE
self._firstfree = 0
def __new__(cls, id, parameters = None):
assert id and len(id.strip()) > 0
id = id.strip()
with cls.DS_LOCK:
if id in cls.SOURCES:
ds = cls.SOURCES[id]
if parameters and parameters != ds._parameters:
raise BaseException("Data base '%s' have been defined with another db parameters.")
return ds
else:
if parameters == None:
raise BaseException("DB parameters have not been specified for '%s' data base." % id)
ds = object.__new__(cls)
ds.init(id, parameters)
ds.ping()
cls.SOURCES[id] = ds
return ds
def ping(self):
con = None
try: con = self._module.connect(**self._parameters)
finally:
if con : con.close()
def getConnection(self):
with self.DS_LOCK:
# connection pool should not be used
if self._poolSize == 0:
return Database.ConnectionWrapper(self._module.connect(**self._parameters), owner = self)
else:
# found free connection in pool
if self._firstfree < len(self._pool):
self._firstfree += 1
return self._pool[self._firstfree - 1]
else:
# pool is not full
if self._poolSize > len(self._pool):
c = Database.ConnectionWrapper(self._module.connect(**self._parameters), owner = self)
self._pool.append(c)
self._firstfree = len(self._pool)
return c
else:
# pool is full
raise BaseException("'%s' connection pool is full (%d connections opened)." % (self._id, len(self._pool)))
def cleanup(self):
with self.DS_LOCK:
for c in self._pool:
                try: c._orig_conn().close()
except: pass
self._pool = []
self._firstfree = 0
def _closeMe(self, con):
with self.DS_LOCK:
# pool is not supported
if self._poolSize == 0:
con._orig_conn().close()
else:
try: i = self._pool.index(con)
except ValueError: i = -1
if i == -1 or i >= self._firstfree:
raise BaseException("DB '%s' connection has been already closed." % self._id)
# check if have already enough opened free connection
# and really close connection if it is true
if self._poolLatency == (len(self._pool) - self._firstfree):
c = self._pool.pop(i)
c.cleanup()
c._orig_conn().close()
else:
c = self._pool.pop(i)
c.cleanup()
self._pool.append(c)
self._firstfree -= 1
def __str__(self):
s = "Data base '%s' {" % self._id + "\n Parameters:" + str(self._parameters)
s += "\n pool size = %d" % self._poolSize
s += "\n pool latency = %d" % self._poolLatency
s += "\n free connections = %d" % (len(self._pool) - self._firstfree)
s += "\n first free pos = %d" % self._firstfree
s += "\n " + str(self._pool) + "\n}"
return s
DBMODULE = None
def __LOAD_DB_MODULE__(module='MySQLdb'):
global DBMODULE
if DBMODULE and DBMODULE.__name__ != module:
raise BaseException("Only one db specific module can be loaded at the same time.")
elif DBMODULE == None:
DBMODULE = __import__(module)
for k in DBMODULE.__dict__:
o = DBMODULE.__dict__[k]
if k.find("Error") >=0 or k.find("Warning") >= 0:
setattr(sys.modules[__name__], k, o)
if __name__ != '__main__':
__LOAD_DB_MODULE__()
|
barmalei/scalpel
|
lib/gravity/common/db.py
|
Python
|
lgpl-3.0
| 10,270 | 0.013048 |
from __future__ import print_function
import unittest
import sys
import os
import re
import tempfile
import shutil
import glob
import warnings
warnings.simplefilter("default")
# Only use coverage if it's new enough and is requested
try:
import coverage
if not hasattr(coverage.coverage, 'combine'):
coverage = None
except ImportError:
coverage = None
if 'SALIWEB_COVERAGE' not in os.environ:
coverage = None
class RunAllTests(unittest.TestProgram):
"""Custom main program that also displays a final coverage report"""
def __init__(self, *args, **keys):
if coverage:
# Start coverage testing now before we import any modules
self.topdir = 'python'
self.mods = (glob.glob("%s/saliweb/*.py" % self.topdir)
+ glob.glob("%s/saliweb/backend/*.py" % self.topdir))
self.cov = coverage.coverage(branch=True, include=self.mods,
data_file='.coverage.backend')
self.cov.start()
self.make_site_customize()
# Run the tests
unittest.TestProgram.__init__(self, *args, **keys)
def make_site_customize(self):
"""Get coverage information on Python subprocesses"""
self.tmpdir = tempfile.mkdtemp()
with open(os.path.join(self.tmpdir, 'sitecustomize.py'), 'w') as fh:
fh.write("""
import coverage
import atexit
_cov = coverage.coverage(branch=True, data_suffix=True, auto_data=True,
data_file='%s')
_cov.start()
def _coverage_cleanup(c):
c.stop()
atexit.register(_coverage_cleanup, _cov)
""" % os.path.abspath('.coverage.backend'))
os.environ['PYTHONPATH'] = self.tmpdir + ':' + os.environ['PYTHONPATH']
def runTests(self):
self.testRunner = unittest.TextTestRunner(verbosity=self.verbosity)
result = self.testRunner.run(self.test)
if coverage:
shutil.rmtree(self.tmpdir)
self.cov.stop()
self.cov.combine()
print("\nPython coverage report\n", file=sys.stderr)
if hasattr(coverage.files, 'RELATIVE_DIR'):
coverage.files.RELATIVE_DIR = self.topdir + '/'
else:
self.cov.file_locator.relative_dir = self.topdir + '/'
self.cov.report(self.mods, file=sys.stderr)
self.cov.save()
sys.exit(not result.wasSuccessful())
def regressionTest():
try:
os.unlink('state_file')
except OSError:
pass
path = os.path.abspath(os.path.dirname(sys.argv[0]))
files = os.listdir(path)
test = re.compile(r"^test_.*\.py$", re.IGNORECASE)
files = filter(test.search, files)
modnames = [os.path.splitext(f)[0] for f in files]
modobjs = [__import__(m) for m in modnames]
tests = [unittest.defaultTestLoader.loadTestsFromModule(o)
for o in modobjs]
return unittest.TestSuite(tests)
if __name__ == "__main__":
RunAllTests(defaultTest="regressionTest")
|
salilab/saliweb
|
test/backend/run-all-tests.py
|
Python
|
lgpl-2.1
| 3,023 | 0 |
""":mod:`ShopWizardResult` -- Provides an interface for shop wizard results
.. module:: ShopWizardResult
:synopsis: Provides an interface for shop wizard results
.. moduleauthor:: Joshua Gilman <joshuagilman@gmail.com>
"""
from neolib.exceptions import parseException
from neolib.inventory.Inventory import Inventory
from neolib.shop.UserShopFront import UserShopFront
from neolib.item.Item import Item
import logging
class ShopWizardResult(Inventory):
"""Represents a shop wizard search result
Sub-classes the Inventory class to provide an interface for the results
from a Shop Wizard search. Automatically populates itself with the results
upon initialization.
Attributes
usr (User) - The user associated with the results
Initialization
ShopWizardResult(pg, usr)
Loads results from a shop wizard search
Parameters
pg (Page) - The page containing the results
usr (User) - The user to load the SDB for
Raises
parseException
Example
>>> res = ShopWizard.search(usr, "Mau Codestone")
>>> for item in res:
... print item.price
3,000
3,001
...
"""
usr = None
def __init__(self, pg, usr):
self.usr = usr
try:
items = pg.find("td", "contentModuleHeaderAlt").parent.parent.find_all("tr")
items.pop(0)
self.items = []
for item in items:
tmpItem = Item(item.find_all("td")[1].text)
tmpItem.owner = item.td.a.text
tmpItem.location = item.td.a['href']
tmpItem.stock = item.find_all("td")[2].text
tmpItem.price = item.find_all("td")[3].text.replace(" NP", "").replace(",", "")
tmpItem.id = tmpItem.location.split("buy_obj_info_id=")[1].split("&")[0]
self.items.append(tmpItem)
except Exception:
logging.getLogger("neolib.shop").exception("Unable to parse shop wizard results.", {'pg': pg})
raise parseException
def shop(self, index):
""" Return's the user shop the indexed item is in
Parameters:
index (int) -- The item index
Returns
UserShopFront - User shop item is in
"""
        item = self.items[index]
        return UserShopFront(self.usr, item.owner, item.id, str(item.price))
def buy(self, index):
""" Attempts to buy indexed item, returns result
Parameters:
index (int) -- The item index
Returns
bool - True if item was bought, false otherwise
"""
item = self.items[index]
us = UserShopFront(self.usr, item.owner, item.id, str(item.price))
us.load()
if not item.name in us.inventory:
return False
if not us.inventory[item.name].buy():
return False
return True
|
jmgilman/Neolib
|
neolib/inventory/ShopWizardResult.py
|
Python
|
mit
| 3,097 | 0.009041 |
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import markdown
from sklearn import metrics
from sklearn.externals import joblib
import re
def plot_precision_recall_n(y_true, y_prob, model_name=None):
# thanks rayid
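    # Plots precision (left axis) and recall (right axis) as a function of the
    # fraction of the population scored at or above each threshold.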
from sklearn.metrics import precision_recall_curve
y_score = y_prob
precision_curve, recall_curve, pr_thresholds = precision_recall_curve(y_true, y_score)
precision_curve = precision_curve[:-1]
recall_curve = recall_curve[:-1]
pct_above_per_thresh = []
number_scored = len(y_score)
for value in pr_thresholds:
num_above_thresh = len(y_score[y_score>=value])
pct_above_thresh = num_above_thresh / float(number_scored)
pct_above_per_thresh.append(pct_above_thresh)
pct_above_per_thresh = np.array(pct_above_per_thresh)
plt.clf()
fig, ax1 = plt.subplots()
ax1.plot(pct_above_per_thresh, precision_curve, 'b')
ax1.set_xlabel('percent of population')
ax1.set_ylabel('precision', color='b')
ax2 = ax1.twinx()
ax2.plot(pct_above_per_thresh, recall_curve, 'r')
ax2.set_ylabel('recall', color='r')
if model_name is not None:
name = model_name
plt.title(name)
#plt.savefig(name)
plt.show()
def current_strat(outcome, conn):
sql_outcome = '"' + outcome + '_OTC"'
qts707g2 = range(35, 52)
col_names = ['707G_'+str(float(i))+'_Q' for i in qts707g2]
qts707g2 = ['"'+i+'"' for i in col_names]
query = 'SELECT "UNI_PART_ID_I", ' + ','.join(qts707g2) + ',' + \
sql_outcome + ',"BBO_F" FROM core_birth_info_rc WHERE \
"707G_LT_D" >= \'2014-07-01\' AND ' + sql_outcome + ' IS NOT NULL'
df707g2 = pd.read_sql(query, conn)
points = [1,1,1,1,1,1,1,1,2,1,2,1,1,1,1,1,1]
scores = np.dot(df707g2[col_names], points)
df707g2['elig'] = [1 if i >= 2 else 0 for i in scores]
results_dct = {}
results_dct['precision'] = metrics.precision_score(df707g2[outcome+'_OTC'],
df707g2['elig'])
results_dct['recall'] = metrics.recall_score(df707g2[outcome+'_OTC'],
df707g2['elig'])
results_dct['prior'] = df707g2[outcome+'_OTC'].mean()
results_dct['bbo_crosstab'] = pd.crosstab(df707g2['BBO_F'],
df707g2['elig'], margins=True)
results_dct['graph'] = plot_precision_recall_n(df707g2[outcome+'_OTC'],
scores,
'Precision, Recall vs % Eligible')
return results_dct
def dict_to_dataframe(eval_dct, pkl_dct):
df = pd.DataFrame(columns=eval_dct[eval_dct.keys()[0]].columns.values)
for key in eval_dct.keys():
eval_dct[key].index = [key]
df = df.append(eval_dct[key])
pkl_df = pd.DataFrame({'index': pkl_dct.keys(),
'pickle_file': pkl_dct.values()}).set_index('index')
return df.join(pkl_df)
def markdown_to_html(md_file, out_file_name=None):
input_file = open(md_file, 'r')
text = input_file.read()
html_file = markdown.markdown(text)
if out_file_name is None:
out_file_name = md_file.split('.')[0]+'.html'
out_file = open(out_file_name, 'w')
out_file.write(html_file)
input_file.close()
out_file.close()
return 'Your converted HTML file is saved as ' + out_file_name
def weight_mapper(data_dct, eval_df, sort_list, mapping, assmnt):
if type(assmnt) is not str:
assmnt = str(assmnt)
eval_df.sort(sort_list, inplace=True, ascending=False)
model = joblib.load(eval_df['pickle_file'][0])
if 'weights' in model.__dict__:
wts = model.weights
else:
wts = model.coef_[0]
mapping = pd.read_csv(mapping)
config = pd.read_csv(data_dct['config_file'])
questions = [q for q in data_dct['features']
if bool(re.search(r'(?i)_Q$', q))]
mapping.loc[:,'QUESTION_N'] = [assmnt+'_'+str(float(i))+'_Q'
for i in mapping['QUESTION_N']]
mapping_sub = mapping[[True if i in questions else False for i in mapping['QUESTION_N']]]
mapping_sub.loc[:,'weights'] = wts
return mapping_sub
def weight_html(df):
df.columns = ['QID', 'Question', 'Model Score', 'Expanded Weights', 'Simple Weights']
df = df.set_index('QID')
df.index.name = None
return df.to_html()
def metrics_getter(data_dct, eval_df, sort_list, mapping, k, scores,
rnd=True, scale=False):
eval_df.sort(sort_list, inplace=True, ascending=False)
reg_ex = r'precision at.*mean|test_percent at.*mean'
metric_cols = eval_df.columns.str.contains(reg_ex)
metric_df = pd.DataFrame(eval_df.iloc[0, metric_cols])
prec_index = metric_df.index.str.contains(r'precision')
test_index = metric_df.index.str.contains(r'test_percent')
prec = metric_df.iloc[prec_index,:].reset_index()
test = metric_df.iloc[test_index,:].reset_index()
mdf = pd.DataFrame({'Precision': prec.iloc[:,1],
'Predicted % Eligible': test.iloc[:,1]
})
mdf.index = ['Top '+str(int(each_k*100))+'%' for each_k in k]
mdf = mdf.astype(float)
if rnd:
fmat = lambda x: str(np.round(x,1))+'%'
mdf = (mdf*100).applymap(fmat)
scores = sorted(scores)[::-1]
mes = 'Minimum Eligibility Score'
if scale:
mdf[mes] = [np.round(scores[int(each_k*len(scores))]*100,2)
for each_k in k]
else: mdf[mes] = [scores[int(each_k*len(scores))] for each_k in k]
return mdf
|
dssg/babies-public
|
babysaver/evaluation.py
|
Python
|
mit
| 5,646 | 0.007793 |
"""Unit tests for the ``organizations`` paths.
Each ``APITestCase`` subclass tests a single URL. A full list of URLs to be
tested can be found here:
http://theforeman.org/api/apidoc/v2/organizations.html
:Requirement: Organization
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: API
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_alphanumeric, gen_string
from nailgun import client, entities
from random import randint
from requests.exceptions import HTTPError
from robottelo.config import settings
from robottelo.datafactory import filtered_datapoint, invalid_values_list
from robottelo.decorators import skip_if_bug_open, tier1, tier2
from robottelo.helpers import get_nailgun_config
from robottelo.test import APITestCase
from six.moves import http_client
@filtered_datapoint
def valid_org_data_list():
"""List of valid data for input testing.
Note: The maximum allowed length of org name is 242 only. This is an
intended behavior (Also note that 255 is the standard across other
entities.)
"""
return [
gen_string('alphanumeric', randint(1, 242)),
gen_string('alpha', randint(1, 242)),
gen_string('cjk', randint(1, 85)),
gen_string('latin1', randint(1, 242)),
gen_string('numeric', randint(1, 242)),
gen_string('utf8', randint(1, 85)),
gen_string('html', randint(1, 85)),
]
class OrganizationTestCase(APITestCase):
"""Tests for the ``organizations`` path."""
@tier1
def test_positive_create_text_plain(self):
"""Create an organization using a 'text/plain' content-type.
:id: 6f67a3f0-0c1d-498c-9a35-28207b0faec2
:expectedresults: HTTP 415 is returned.
:CaseImportance: Critical
"""
organization = entities.Organization()
organization.create_missing()
response = client.post(
organization.path(),
organization.create_payload(),
auth=settings.server.get_credentials(),
headers={'content-type': 'text/plain'},
verify=False,
)
self.assertEqual(
http_client.UNSUPPORTED_MEDIA_TYPE, response.status_code)
@tier1
def test_positive_create_with_auto_label(self):
"""Create an organization and provide a name.
:id: c9f69ee5-c6dd-4821-bb05-0d93ffa22460
:expectedresults: The organization has the provided attributes and an
auto-generated label.
:CaseImportance: Critical
"""
org = entities.Organization().create()
self.assertTrue(hasattr(org, 'label'))
self.assertIsInstance(org.label, type(u''))
@tier1
def test_positive_create_with_custom_label(self):
"""Create an org and provide a name and identical label.
:id: f0deab6a-b09b-4110-8575-d4bea945a545
:expectedresults: The organization has the provided attributes.
:CaseImportance: Critical
"""
# A label has a more restrictive allowable charset than a name, so we
# use it for populating both name and label.
org = entities.Organization()
name_label = org.get_fields()['label'].gen_value()
org.name = org.label = name_label
org = org.create()
self.assertEqual(name_label, org.name)
self.assertEqual(name_label, org.label)
@tier1
def test_positive_create_with_name_and_label(self):
"""Create an organization and provide a name and label.
:id: 2bdd9aa8-a36a-4009-ac29-5c3d6416a2b7
:expectedresults: The organization has the provided attributes.
:CaseImportance: Critical
"""
org = entities.Organization()
org.name = name = org.get_fields()['name'].gen_value()
org.label = label = org.get_fields()['label'].gen_value()
org = org.create()
self.assertEqual(name, org.name)
self.assertEqual(label, org.label)
@tier1
def test_positive_create_with_name_and_description(self):
"""Create an organization and provide a name and description.
:id: afeea84b-61ca-40bf-bb16-476432919115
:expectedresults: The organization has the provided attributes and an
auto-generated label.
:CaseImportance: Critical
"""
for name in valid_org_data_list():
with self.subTest(name):
org = entities.Organization(
name=name,
description=name,
).create()
self.assertEqual(org.name, name)
self.assertEqual(org.description, name)
# Was a label auto-generated?
self.assertTrue(hasattr(org, 'label'))
self.assertIsInstance(org.label, type(u''))
self.assertGreater(len(org.label), 0)
@tier1
def test_positive_create_with_name_label_description(self):
"""Create an org and provide a name, label and description.
:id: f7d92392-751e-45de-91da-5ed2a47afc3f
:expectedresults: The organization has the provided name, label and
description.
:CaseImportance: Critical
"""
org = entities.Organization()
org.name = name = org.get_fields()['name'].gen_value()
org.label = label = org.get_fields()['label'].gen_value()
org.description = desc = org.get_fields()['description'].gen_value()
org = org.create()
self.assertEqual(org.name, name)
self.assertEqual(org.label, label)
self.assertEqual(org.description, desc)
@tier1
def test_negative_create_with_invalid_name(self):
"""Create an org with an incorrect name.
:id: 9c6a4b45-a98a-4d76-9865-92d992fa1a22
:expectedresults: The organization cannot be created.
:CaseImportance: Critical
"""
for name in invalid_values_list():
with self.subTest(name):
with self.assertRaises(HTTPError):
entities.Organization(name=name).create()
@tier1
def test_negative_create_with_same_name(self):
"""Create two organizations with identical names.
:id: a0f5333c-cc83-403c-9bf7-08fb372909dc
:expectedresults: The second organization cannot be created.
:CaseImportance: Critical
"""
name = entities.Organization().create().name
with self.assertRaises(HTTPError):
entities.Organization(name=name).create()
@tier1
def test_positive_search(self):
"""Create an organization, then search for it by name.
:id: f6f1d839-21f2-4676-8683-9f899cbdec4c
:expectedresults: Searching returns at least one result.
:CaseImportance: Critical
"""
org = entities.Organization().create()
orgs = entities.Organization().search(
query={u'search': u'name="{0}"'.format(org.name)}
)
self.assertEqual(len(orgs), 1)
self.assertEqual(orgs[0].id, org.id)
self.assertEqual(orgs[0].name, org.name)
class OrganizationUpdateTestCase(APITestCase):
"""Tests for the ``organizations`` path."""
@classmethod
def setUpClass(cls): # noqa
"""Create an organization."""
super(OrganizationUpdateTestCase, cls).setUpClass()
cls.organization = entities.Organization().create()
@tier1
def test_positive_update_name(self):
"""Update an organization's name with valid values.
:id: 68f2ba13-2538-407c-9f33-2447fca28cd5
:expectedresults: The organization's name is updated.
:CaseImportance: Critical
"""
for name in valid_org_data_list():
with self.subTest(name):
setattr(self.organization, 'name', name)
self.organization = self.organization.update(['name'])
self.assertEqual(self.organization.name, name)
@tier1
def test_positive_update_description(self):
"""Update an organization's description with valid values.
:id: bd223197-1021-467e-8714-c1a767ae89af
:expectedresults: The organization's description is updated.
:CaseImportance: Critical
"""
for desc in valid_org_data_list():
with self.subTest(desc):
setattr(self.organization, 'description', desc)
self.organization = self.organization.update(['description'])
self.assertEqual(self.organization.description, desc)
@tier1
def test_positive_update_name_and_description(self):
"""Update an organization with new name and description.
:id: 30036e70-b8fc-4c24-9494-b201bbd1c28d
:expectedresults: The organization's name and description are updated.
:CaseImportance: Critical
"""
name = gen_string('alpha')
desc = gen_string('alpha')
self.organization.name = name
self.organization.description = desc
self.organization = self.organization.update(['name', 'description'])
self.assertEqual(self.organization.name, name)
self.assertEqual(self.organization.description, desc)
@tier2
def test_positive_update_user(self):
"""Update an organization, associate user with it.
:id: 2c0c0061-5b4e-4007-9f54-b61d6e65ef58
:expectedresults: User is associated with organization.
:CaseLevel: Integration
"""
user = entities.User().create()
self.organization.user = [user]
self.organization = self.organization.update(['user'])
self.assertEqual(len(self.organization.user), 1)
self.assertEqual(self.organization.user[0].id, user.id)
@tier2
def test_positive_update_subnet(self):
"""Update an organization, associate subnet with it.
:id: 3aa0b9cb-37f7-4e7e-a6ec-c1b407225e54
:expectedresults: Subnet is associated with organization.
:CaseLevel: Integration
"""
subnet = entities.Subnet().create()
self.organization.subnet = [subnet]
self.organization = self.organization.update(['subnet'])
self.assertEqual(len(self.organization.subnet), 1)
self.assertEqual(self.organization.subnet[0].id, subnet.id)
@tier2
@skip_if_bug_open('bugzilla', 1230865)
def test_positive_add_media(self):
"""Update an organization and associate it with a media.
:id: 83f085d9-94c0-4462-9780-d29ea4cb5aac
:expectedresults: An organization is associated with a media.
:CaseLevel: Integration
"""
media = entities.Media().create()
self.organization.media = [media]
self.organization = self.organization.update(['media'])
self.assertEqual(len(self.organization.media), 1)
self.assertEqual(self.organization.media[0].id, media.id)
@tier2
def test_positive_add_hostgroup(self):
"""Add a hostgroup to an organization
:id: e8c2ccfd-9ae8-4a39-b459-bc5818f54e63
:expectedresults: Hostgroup is added to organization
:CaseLevel: Integration
"""
org = entities.Organization().create()
hostgroup = entities.HostGroup().create()
org.hostgroup = [hostgroup]
org = org.update(['hostgroup'])
self.assertEqual(len(org.hostgroup), 1)
self.assertEqual(org.hostgroup[0].id, hostgroup.id)
@skip_if_bug_open('bugzilla', 1395229)
@tier2
def test_positive_remove_hostgroup(self):
"""Add a hostgroup to an organization and then remove it
:id: 7eb1aca7-fd7b-404f-ab18-21be5052a11f
:expectedresults: Hostgroup is added to organization and then removed
:CaseLevel: Integration
"""
org = entities.Organization().create()
hostgroup = entities.HostGroup().create()
org.hostgroup = [hostgroup]
org = org.update(['hostgroup'])
self.assertEqual(len(org.hostgroup), 1)
org.hostgroup = []
org = org.update(['hostgroup'])
self.assertEqual(len(org.hostgroup), 0)
@tier2
@skip_if_bug_open('bugzilla', 1395229)
def test_positive_add_smart_proxy(self):
"""Add a smart proxy to an organization
:id: e21de720-3fa2-429b-bd8e-b6a48a13146d
:expectedresults: Smart proxy is successfully added to organization
:CaseLevel: Integration
"""
# Every Satellite has a built-in smart proxy, so let's find it
smart_proxy = entities.SmartProxy().search(query={
'search': 'url = https://{0}:9090'.format(settings.server.hostname)
})
# Check that proxy is found and unpack it from the list
self.assertGreater(len(smart_proxy), 0)
smart_proxy = smart_proxy[0]
# By default, newly created organization uses built-in smart proxy,
# so we need to remove it first
org = entities.Organization().create()
org.smart_proxy = []
org = org.update(['smart_proxy'])
# Verify smart proxy was actually removed
self.assertEqual(len(org.smart_proxy), 0)
# Add smart proxy to organization
org.smart_proxy = [smart_proxy]
org = org.update(['smart_proxy'])
# Verify smart proxy was actually added
self.assertEqual(len(org.smart_proxy), 1)
self.assertEqual(org.smart_proxy[0].id, smart_proxy.id)
@skip_if_bug_open('bugzilla', 1395229)
@tier2
def test_positive_remove_smart_proxy(self):
"""Remove a smart proxy from an organization
:id: 8045910e-d85c-47ee-9aed-ac0a6bbb646b
:expectedresults: Smart proxy is removed from organization
:CaseLevel: Integration
"""
        # By default, a newly created organization uses the built-in smart
        # proxy, so we can remove it instead of adding and then removing
        # another one
org = entities.Organization().create()
self.assertGreater(len(org.smart_proxy), 0)
org.smart_proxy = []
org = org.update(['smart_proxy'])
# Verify smart proxy was actually removed
self.assertEqual(len(org.smart_proxy), 0)
@tier1
def test_negative_update(self):
"""Update an organization's attributes with invalid values.
:id: b7152d0b-5ab0-4d68-bfdf-f3eabcb5fbc6
:expectedresults: The organization's attributes are not updated.
:CaseImportance: Critical
"""
dataset = (
{'name': gen_string(str_type='utf8', length=256)},
# Immutable. See BZ 1089996.
{'label': gen_string(str_type='utf8')},
)
for attrs in dataset:
with self.subTest(attrs):
with self.assertRaises(HTTPError):
entities.Organization(
id=self.organization.id,
**attrs
).update(attrs.keys())
@tier2
@skip_if_bug_open('bugzilla', 1103157)
def test_verify_bugzilla_1103157(self):
"""Create organization and add two compute resources one by one
using different transactions and different users to see that they
actually added, but not overwrite each other
:id: 5f4fd2b7-d998-4980-b5e7-9822bd54156b
:Steps:
1. Use the admin user to create an organization and two compute
resources. Make one compute resource point at / belong to the
organization.
2. Create a user and give them the ability to update compute
resources and organizations. Have this user make the second
compute resource point at / belong to the organization.
3. Use the admin user to read information about the organization.
Verify that both compute resources are pointing at / belong to
the organization.
:expectedresults: Organization contains both compute resources
:CaseLevel: Integration
"""
        # setUpClass() creates an organization w/admin user. Here, we use admin
        # to make two compute resources and make the first belong to the
        # organization.
compute_resources = [
entities.LibvirtComputeResource(
name=gen_string('alpha'),
url='qemu://host.example.com/system'
).create()
for _ in range(2)
]
self.organization.compute_resource = compute_resources[:1] # list
self.organization = self.organization.update(['compute_resource'])
self.assertEqual(len(self.organization.compute_resource), 1)
# Create a new user and give them minimal permissions.
login = gen_alphanumeric()
password = gen_alphanumeric()
user = entities.User(login=login, password=password).create()
role = entities.Role().create()
for perm in ['edit_compute_resources', 'edit_organizations']:
permissions = [
entities.Permission(id=permission['id'])
for permission
in entities.Permission(name=perm).search()
]
entities.Filter(permission=permissions, role=role).create()
user.role = [role]
user = user.update(['role'])
# Make new user assign second compute resource to org.
cfg = get_nailgun_config()
cfg.auth = (login, password)
entities.Organization(
cfg,
id=self.organization.id,
compute_resource=compute_resources[1:], # slice returns list
).update(['compute_resource'])
# Use admin to verify both compute resources belong to organization.
self.assertEqual(len(self.organization.read().compute_resource), 2)
|
elyezer/robottelo
|
tests/foreman/api/test_organization.py
|
Python
|
gpl-3.0
| 17,745 | 0 |
#!/usr/bin/env python
#
# Copyright (c) 2016 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
import logging
from string import Template
from builtins_generator import BuiltinsGenerator, WK_lcfirst, WK_ucfirst
from builtins_templates import BuiltinsGeneratorTemplates as Templates
log = logging.getLogger('global')
class BuiltinsInternalsWrapperImplementationGenerator(BuiltinsGenerator):
def __init__(self, model):
BuiltinsGenerator.__init__(self, model)
self.internals = filter(lambda object: 'internal' in object.annotations, model.objects)
def output_filename(self):
return "%sJSBuiltinInternals.cpp" % self.model().framework.setting('namespace')
def generate_output(self):
args = {
'namespace': self.model().framework.setting('namespace'),
}
sections = []
sections.append(self.generate_license())
sections.append(Template(Templates.DoNotEditWarning).substitute(args))
sections.append(self.generate_primary_header_includes())
sections.append(self.generate_secondary_header_includes())
sections.append(Template(Templates.NamespaceTop).substitute(args))
sections.append(self.generate_section_for_object())
sections.append(Template(Templates.NamespaceBottom).substitute(args))
return "\n\n".join(sections)
def generate_secondary_header_includes(self):
header_includes = [
(["WebCore"],
("WebCore", "JSDOMGlobalObject.h"),
),
(["WebCore"],
("WebCore", "WebCoreJSClientData.h"),
),
(["WebCore"],
("JavaScriptCore", "heap/HeapInlines.h"),
),
(["WebCore"],
("JavaScriptCore", "heap/SlotVisitorInlines.h"),
),
(["WebCore"],
("JavaScriptCore", "runtime/JSCJSValueInlines.h"),
),
(["WebCore"],
("JavaScriptCore", "runtime/StructureInlines.h"),
),
]
return '\n'.join(self.generate_includes_from_entries(header_includes))
def generate_section_for_object(self):
lines = []
lines.append(self.generate_constructor())
lines.append(self.generate_visit_method())
lines.append(self.generate_initialize_method())
return '\n'.join(lines)
def accessor_name(self, object):
return WK_lcfirst(object.object_name)
def member_name(self, object):
return "m_" + self.accessor_name(object)
def member_type(self, object):
return WK_ucfirst(object.object_name) + "BuiltinFunctions"
def generate_constructor(self):
guards = set([object.annotations.get('conditional') for object in self.internals if 'conditional' in object.annotations])
lines = ["JSBuiltinInternalFunctions::JSBuiltinInternalFunctions(JSC::VM& vm)",
" : m_vm(vm)"]
for object in self.internals:
initializer = " , %s(m_vm)" % self.member_name(object)
lines.append(BuiltinsGenerator.wrap_with_guard(object.annotations.get('conditional'), initializer))
lines.append("{")
lines.append(" UNUSED_PARAM(vm);")
lines.append("}\n")
return '\n'.join(lines)
def property_macro(self, object):
lines = []
lines.append("#define DECLARE_GLOBAL_STATIC(name) \\")
lines.append(" JSDOMGlobalObject::GlobalPropertyInfo( \\")
lines.append(" clientData.builtinFunctions().%sBuiltins().name##PrivateName(), %s().m_##name##Function.get() , JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly)," % (self.accessor_name(object), self.accessor_name(object)))
lines.append(" WEBCORE_FOREACH_%s_BUILTIN_FUNCTION_NAME(DECLARE_GLOBAL_STATIC)" % object.object_name.upper())
lines.append("#undef DECLARE_GLOBAL_STATIC")
return '\n'.join(lines)
def generate_visit_method(self):
lines = ["void JSBuiltinInternalFunctions::visit(JSC::SlotVisitor& visitor)",
"{"]
for object in self.internals:
visit = " %s.visit(visitor);" % self.member_name(object)
lines.append(BuiltinsGenerator.wrap_with_guard(object.annotations.get('conditional'), visit))
lines.append(" UNUSED_PARAM(visitor);")
lines.append("}\n")
return '\n'.join(lines)
def _generate_initialize_static_globals(self):
lines = [" JSVMClientData& clientData = *static_cast<JSVMClientData*>(m_vm.clientData);",
" JSDOMGlobalObject::GlobalPropertyInfo staticGlobals[] = {"]
for object in self.internals:
lines.append(BuiltinsGenerator.wrap_with_guard(object.annotations.get('conditional'), self.property_macro(object)))
lines.append(" };")
lines.append(" globalObject.addStaticGlobals(staticGlobals, WTF_ARRAY_LENGTH(staticGlobals));")
lines.append(" UNUSED_PARAM(clientData);")
return '\n'.join(lines)
def generate_initialize_method(self):
lines = ["void JSBuiltinInternalFunctions::initialize(JSDOMGlobalObject& globalObject)",
"{",
" UNUSED_PARAM(globalObject);"]
for object in self.internals:
init = " %s.init(globalObject);" % self.member_name(object)
lines.append(BuiltinsGenerator.wrap_with_guard(object.annotations.get('conditional'), init))
lines.append("")
guards = set([object.annotations.get('conditional') for object in self.internals if 'conditional' in object.annotations])
lines.append(BuiltinsGenerator.wrap_with_guard(" || ".join(guards), self._generate_initialize_static_globals()))
lines.append("}")
return '\n'.join(lines)
|
teamfx/openjfx-8u-dev-rt
|
modules/web/src/main/native/Source/JavaScriptCore/Scripts/builtins/builtins_generate_internals_wrapper_implementation.py
|
Python
|
gpl-2.0
| 7,074 | 0.003534 |
# generated from genmsg/cmake/pkg-genmsg.context.in
messages_str = ""
services_str = ""
pkg_name = "quad"
dependencies_str = "std_msgs;geometry_msgs;kobuki_msgs;hector_uav_msgs;nav_msgs;sensor_msgs;gazebo_msgs;tf"
langs = "gencpp;genlisp;genpy"
dep_include_paths_str = "std_msgs;/opt/ros/hydro/share/std_msgs/cmake/../msg;geometry_msgs;/opt/ros/hydro/share/geometry_msgs/cmake/../msg;kobuki_msgs;/opt/ros/hydro/share/kobuki_msgs/cmake/../msg;hector_uav_msgs;/opt/ros/hydro/share/hector_uav_msgs/cmake/../msg;nav_msgs;/opt/ros/hydro/share/nav_msgs/cmake/../msg;sensor_msgs;/opt/ros/hydro/share/sensor_msgs/cmake/../msg;gazebo_msgs;/opt/ros/hydro/share/gazebo_msgs/cmake/../msg;tf;/opt/ros/hydro/share/tf/cmake/../msg;actionlib_msgs;/opt/ros/hydro/share/actionlib_msgs/cmake/../msg;trajectory_msgs;/opt/ros/hydro/share/trajectory_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python"
package_has_static_sources = '' == 'TRUE'
|
rafafigueroa/cws
|
build/quad/cmake/quad-genmsg-context.py
|
Python
|
apache-2.0
| 928 | 0.002155 |
#!/usr/bin/env python
################################################################################
#
# Project Euler - Problem 6
#
# The sum of the squares of the first ten natural numbers is,
#
# 1^2 + 2^2 + ... + 10^2 = 385
# The square of the sum of the first ten natural numbers is,
#
# (1 + 2 + ... + 10)^2 = 552 = 3025
# Hence the difference between the sum of the squares of the first ten natural
# numbers and the square of the sum is 3025 - 385 = 2640
#
# Find the difference between the sum of the squares of the first one hundred
# natural numbers and the square of the sum.
#
# Joaquin Derrac - carrdelling@gmail.com
#
################################################################################
if __name__ == "__main__":
sum_one_hundred = sum([x for x in range(1, 101)])
sum_one_hundred_squared = sum_one_hundred * sum_one_hundred
sum_squared = sum([x ** 2 for x in range(1, 101)])
solution = sum_one_hundred_squared - sum_squared
print(solution)
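    # A hedged aside, not part of the original brute-force solution: the same
    # answer can also be computed in closed form from the standard identities
    #   1 + 2 + ... + n = n*(n+1)/2   and   1^2 + ... + n^2 = n*(n+1)*(2n+1)/6.
    n = 100
    closed_form = (n * (n + 1) // 2) ** 2 - n * (n + 1) * (2 * n + 1) // 6
    assert closed_form == solution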
|
carrdelling/project_euler
|
problem6.py
|
Python
|
gpl-2.0
| 996 | 0.002008 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from neutron.db import db_base_plugin_v2
from neutron.db import subnet_service_type_db_models
from neutron.extensions import subnet_service_types
from neutron.tests.unit.db import test_db_base_plugin_v2
class SubnetServiceTypesExtensionManager(object):
def get_resources(self):
return []
def get_actions(self):
return []
def get_request_extensions(self):
return []
def get_extended_resources(self, version):
extension = subnet_service_types.Subnet_service_types()
return extension.get_extended_resources(version)
class SubnetServiceTypesExtensionTestPlugin(
db_base_plugin_v2.NeutronDbPluginV2,
subnet_service_type_db_models.SubnetServiceTypeMixin):
"""Test plugin to mixin the subnet service_types extension.
"""
supported_extension_aliases = ["subnet-service-types"]
class SubnetServiceTypesExtensionTestCase(
test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
"""Test API extension subnet_service_types attributes.
"""
CIDRS = ['10.0.0.0/8', '20.0.0.0/8', '30.0.0.0/8']
IP_VERSION = 4
def setUp(self):
plugin = ('neutron.tests.unit.extensions.test_subnet_service_types.' +
'SubnetServiceTypesExtensionTestPlugin')
ext_mgr = SubnetServiceTypesExtensionManager()
super(SubnetServiceTypesExtensionTestCase,
self).setUp(plugin=plugin, ext_mgr=ext_mgr)
def _create_service_subnet(self, service_types=None, cidr=None,
network=None, enable_dhcp=False):
if not network:
with self.network() as network:
pass
network = network['network']
if not cidr:
cidr = self.CIDRS[0]
args = {'net_id': network['id'],
'tenant_id': network['tenant_id'],
'cidr': cidr,
'ip_version': self.IP_VERSION,
'enable_dhcp': enable_dhcp}
if service_types:
args['service_types'] = service_types
return self._create_subnet(self.fmt, **args)
def _test_create_subnet(self, service_types, expect_fail=False):
res = self._create_service_subnet(service_types)
if expect_fail:
self.assertEqual(webob.exc.HTTPClientError.code,
res.status_int)
else:
subnet = self.deserialize('json', res)
subnet = subnet['subnet']
self.assertEqual(len(service_types),
len(subnet['service_types']))
for service in service_types:
self.assertIn(service, subnet['service_types'])
def test_create_subnet_blank_type(self):
self._test_create_subnet([])
def test_create_subnet_bar_type(self):
self._test_create_subnet(['network:bar'])
def test_create_subnet_foo_type(self):
self._test_create_subnet(['compute:foo'])
def test_create_subnet_bar_and_foo_type(self):
self._test_create_subnet(['network:bar', 'compute:foo'])
def test_create_subnet_invalid_type(self):
self._test_create_subnet(['foo'], expect_fail=True)
self._test_create_subnet([1], expect_fail=True)
def test_create_subnet_no_type(self):
res = self._create_service_subnet()
subnet = self.deserialize('json', res)
subnet = subnet['subnet']
self.assertFalse(subnet['service_types'])
def _test_update_subnet(self, subnet, service_types, fail_code=None):
data = {'subnet': {'service_types': service_types}}
req = self.new_update_request('subnets', data, subnet['id'])
res = self.deserialize(self.fmt, req.get_response(self.api))
if fail_code is not None:
self.assertEqual(fail_code,
res['NeutronError']['type'])
else:
subnet = res['subnet']
self.assertEqual(len(service_types),
len(subnet['service_types']))
for service in service_types:
self.assertIn(service, subnet['service_types'])
def test_update_subnet_zero_to_one(self):
service_types = ['network:foo']
# Create a subnet with no service type
res = self._create_service_subnet()
subnet = self.deserialize('json', res)['subnet']
# Update it with a single service type
self._test_update_subnet(subnet, service_types)
def test_update_subnet_one_to_two(self):
service_types = ['network:foo']
# Create a subnet with one service type
res = self._create_service_subnet(service_types)
subnet = self.deserialize('json', res)['subnet']
# Update it with two service types
service_types.append('compute:bar')
self._test_update_subnet(subnet, service_types)
def test_update_subnet_two_to_one(self):
service_types = ['network:foo', 'compute:bar']
# Create a subnet with two service types
res = self._create_service_subnet(service_types)
subnet = self.deserialize('json', res)['subnet']
# Update it with one service type
service_types = ['network:foo']
self._test_update_subnet(subnet, service_types)
def test_update_subnet_one_to_zero(self):
service_types = ['network:foo']
# Create a subnet with one service type
res = self._create_service_subnet(service_types)
subnet = self.deserialize('json', res)['subnet']
# Update it with zero service types
service_types = []
self._test_update_subnet(subnet, service_types)
def test_update_subnet_invalid_type(self):
# Create a subnet with no service type
res = self._create_service_subnet()
subnet = self.deserialize('json', res)['subnet']
# Update it with invalid service type(s)
self._test_update_subnet(subnet, ['foo'],
fail_code='InvalidSubnetServiceType')
self._test_update_subnet(subnet, [2],
fail_code='InvalidInputSubnetServiceType')
def _assert_port_res(self, port, service_type, subnet, fallback,
error='IpAddressGenerationFailureNoMatchingSubnet'):
res = self.deserialize('json', port)
if fallback:
port = res['port']
self.assertEqual(1, len(port['fixed_ips']))
self.assertEqual(service_type, port['device_owner'])
self.assertEqual(subnet['id'], port['fixed_ips'][0]['subnet_id'])
else:
self.assertEqual(error, res['NeutronError']['type'])
def test_create_port_with_matching_service_type(self):
with self.network() as network:
pass
matching_type = 'network:foo'
non_matching_type = 'network:bar'
# Create a subnet with no service types
self._create_service_subnet(network=network)
# Create a subnet with a non-matching service type
self._create_service_subnet([non_matching_type],
cidr=self.CIDRS[2],
network=network)
# Create a subnet with a service type to match the port device owner
res = self._create_service_subnet([matching_type],
cidr=self.CIDRS[1],
network=network)
service_subnet = self.deserialize('json', res)['subnet']
# Create a port with device owner matching the correct service subnet
network = network['network']
port = self._create_port(self.fmt,
net_id=network['id'],
tenant_id=network['tenant_id'],
device_owner=matching_type)
self._assert_port_res(port, matching_type, service_subnet, True)
def test_create_port_without_matching_service_type(self, fallback=True):
with self.network() as network:
pass
subnet = ''
matching_type = 'compute:foo'
non_matching_type = 'network:foo'
if fallback:
# Create a subnet with no service types
res = self._create_service_subnet(network=network)
subnet = self.deserialize('json', res)['subnet']
# Create a subnet with a non-matching service type
self._create_service_subnet([non_matching_type],
cidr=self.CIDRS[1],
network=network)
# Create a port with device owner not matching the service subnet
network = network['network']
port = self._create_port(self.fmt,
net_id=network['id'],
tenant_id=network['tenant_id'],
device_owner=matching_type)
self._assert_port_res(port, matching_type, subnet, fallback)
def test_create_port_without_matching_service_type_no_fallback(self):
self.test_create_port_without_matching_service_type(fallback=False)
def test_create_port_no_device_owner(self, fallback=True):
with self.network() as network:
pass
subnet = ''
service_type = 'compute:foo'
if fallback:
# Create a subnet with no service types
res = self._create_service_subnet(network=network)
subnet = self.deserialize('json', res)['subnet']
# Create a subnet with a service_type
self._create_service_subnet([service_type],
cidr=self.CIDRS[1],
network=network)
# Create a port without a device owner
network = network['network']
port = self._create_port(self.fmt,
net_id=network['id'],
tenant_id=network['tenant_id'])
self._assert_port_res(port, '', subnet, fallback)
def test_create_port_no_device_owner_no_fallback(self):
self.test_create_port_no_device_owner(fallback=False)
def test_create_port_exhausted_subnet(self, fallback=True):
with self.network() as network:
pass
subnet = ''
service_type = 'compute:foo'
if fallback:
# Create a subnet with no service types
res = self._create_service_subnet(network=network)
subnet = self.deserialize('json', res)['subnet']
# Create a subnet with a service_type
res = self._create_service_subnet([service_type],
cidr=self.CIDRS[1],
network=network)
service_subnet = self.deserialize('json', res)['subnet']
# Update the service subnet with empty allocation pools
data = {'subnet': {'allocation_pools': []}}
req = self.new_update_request('subnets', data, service_subnet['id'])
res = self.deserialize(self.fmt, req.get_response(self.api))
# Create a port with a matching device owner
network = network['network']
port = self._create_port(self.fmt,
net_id=network['id'],
tenant_id=network['tenant_id'],
device_owner=service_type)
self._assert_port_res(port, service_type, subnet, fallback,
error='IpAddressGenerationFailure')
def test_create_port_exhausted_subnet_no_fallback(self):
self.test_create_port_exhausted_subnet(fallback=False)
def test_create_dhcp_port_compute_subnet(self, enable_dhcp=True):
with self.network() as network:
pass
res = self._create_service_subnet(['compute:nova'],
network=network,
enable_dhcp=enable_dhcp)
subnet = self.deserialize('json', res)['subnet']
network = network['network']
port = self._create_port(self.fmt,
net_id=network['id'],
tenant_id=network['tenant_id'],
fixed_ips=[{'subnet_id': subnet['id']}],
device_owner='network:dhcp')
self._assert_port_res(port, 'network:dhcp', subnet, enable_dhcp)
def test_create_dhcp_port_compute_subnet_no_dhcp(self):
self.test_create_dhcp_port_compute_subnet(enable_dhcp=False)
def test_update_port_fixed_ips(self):
with self.network() as network:
pass
service_type = 'compute:foo'
# Create a subnet with a service_type
res = self._create_service_subnet([service_type],
cidr=self.CIDRS[1],
network=network)
service_subnet = self.deserialize('json', res)['subnet']
# Create a port with a matching device owner
network = network['network']
port = self._create_port(self.fmt,
net_id=network['id'],
tenant_id=network['tenant_id'],
device_owner=service_type)
port = self.deserialize('json', port)['port']
# Update the port's fixed_ips. It's ok to reuse the same IP it already
# has.
ip_address = port['fixed_ips'][0]['ip_address']
data = {'port': {'fixed_ips': [{'subnet_id': service_subnet['id'],
'ip_address': ip_address}]}}
# self._update will fail with a MismatchError if the update cannot be
# applied
port = self._update('ports', port['id'], data)
class SubnetServiceTypesExtensionTestCasev6(
SubnetServiceTypesExtensionTestCase):
CIDRS = ['2001:db8:2::/64', '2001:db8:3::/64', '2001:db8:4::/64']
IP_VERSION = 6
|
eayunstack/neutron
|
neutron/tests/unit/extensions/test_subnet_service_types.py
|
Python
|
apache-2.0
| 14,519 | 0 |
from ..utils import *
##
# Minions
class AT_019:
"Dreadsteed"
deathrattle = Summon(CONTROLLER, "AT_019")
class AT_021:
"Tiny Knight of Evil"
events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e"))
AT_021e = buff(+1, +1)
class AT_023:
"Void Crusher"
inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION)
class AT_026:
"Wrathguard"
events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.AMOUNT))
class AT_027:
"Wilfred Fizzlebang"
events = Draw(CONTROLLER, None, FRIENDLY_HERO_POWER).on(Buff(Draw.CARD, "AT_027e"))
class AT_027e:
cost = SET(0)
##
# Spells
class AT_022:
"Fist of Jaraxxus"
play = Hit(RANDOM_ENEMY_CHARACTER, 4)
class Hand:
events = Discard(SELF).on(Hit(RANDOM_ENEMY_CHARACTER, 4))
class AT_024:
"Demonfuse"
play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1)
AT_024e = buff(+3, +3)
class AT_025:
"Dark Bargain"
play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(FRIENDLY_HAND) * 2)
|
beheh/fireplace
|
fireplace/cards/tgt/warlock.py
|
Python
|
agpl-3.0
| 950 | 0.024211 |
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import multiprocessing
import optparse
import os
import sys
from pylib import android_commands
from pylib import test_options_parser
from pylib import constants
def InstallApk(args):
options, device = args
apk_path = os.path.join(os.environ['CHROME_SRC'],
'out', options.build_type,
'apks', options.apk)
result = android_commands.AndroidCommands(device=device).ManagedInstall(
apk_path, False, options.apk_package)
print '----- Installed on %s -----' % device
print result
def main(argv):
parser = optparse.OptionParser()
test_options_parser.AddBuildTypeOption(parser)
test_options_parser.AddInstallAPKOption(parser)
options, args = parser.parse_args(argv)
if len(args) > 1:
raise Exception('Error: Unknown argument:', args[1:])
devices = android_commands.GetAttachedDevices()
if not devices:
raise Exception('Error: no connected devices')
pool = multiprocessing.Pool(len(devices))
# Send a tuple (options, device) per instance of DeploySingleDevice.
pool.map(InstallApk, zip([options] * len(devices), devices))
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
junmin-zhu/chromium-rivertrail
|
build/android/adb_install_apk.py
|
Python
|
bsd-3-clause
| 1,365 | 0.011722 |
# -*- coding: utf-8 -*-
"""configuration module for MPContribs Flask API"""
import os
import datetime
import json
import gzip
formulae_path = os.path.join(
os.path.dirname(__file__), "contributions", "formulae.json.gz"
)
with gzip.open(formulae_path) as f:
FORMULAE = json.load(f)
VERSION = datetime.datetime.today().strftime("%Y.%m.%d")
JSON_SORT_KEYS = False
JSON_ADD_STATUS = False
SECRET_KEY = "super-secret" # TODO in local prod config
USTS_MAX_AGE = 2.628e6 # 1 month
MAIL_DEFAULT_SENDER = os.environ.get("MAIL_DEFAULT_SENDER")
MAIL_TOPIC = os.environ.get("AWS_SNS_TOPIC_ARN")
MPCONTRIBS_DB = os.environ.get("MPCONTRIBS_DB_NAME", "mpcontribs")
MPCONTRIBS_MONGO_HOST = os.environ.get("MPCONTRIBS_MONGO_HOST")
MONGODB_SETTINGS = {
# Changed in version 3.9: retryWrites now defaults to True.
"host": f"mongodb+srv://{MPCONTRIBS_MONGO_HOST}/{MPCONTRIBS_DB}",
"connect": False,
"db": MPCONTRIBS_DB,
"compressors": ["snappy", "zstd", "zlib"],
}
REDIS_ADDRESS = os.environ.get("REDIS_ADDRESS", "redis")
REDIS_URL = RQ_REDIS_URL = RQ_DASHBOARD_REDIS_URL = f"redis://{REDIS_ADDRESS}"
DOC_DIR = os.path.join(os.path.dirname(__file__), f"swagger-{MPCONTRIBS_DB}")
SWAGGER = {
"swagger_ui_bundle_js": "//unpkg.com/swagger-ui-dist@3/swagger-ui-bundle.js",
"swagger_ui_standalone_preset_js": "//unpkg.com/swagger-ui-dist@3/swagger-ui-standalone-preset.js",
"jquery_js": "//unpkg.com/jquery@2.2.4/dist/jquery.min.js",
"swagger_ui_css": "//unpkg.com/swagger-ui-dist@3/swagger-ui.css",
"uiversion": 3,
"hide_top_bar": True,
"doc_expansion": "none",
"doc_dir": DOC_DIR,
"specs": [
{
"endpoint": "apispec",
"route": "/apispec.json",
"rule_filter": lambda rule: True, # all in
"model_filter": lambda tag: True, # all in
}
],
"specs_route": "/",
}
TEMPLATE = {
"swagger": "2.0",
"info": {
"title": "MPContribs API",
"description": "Operations to contribute, update and retrieve materials data on Materials Project",
"termsOfService": "https://materialsproject.org/terms",
"version": VERSION,
"contact": {
"name": "MPContribs",
"email": "contribs@materialsproject.org",
"url": "https://mpcontribs.org",
},
"license": {
"name": "Creative Commons Attribution 4.0 International License",
"url": "https://creativecommons.org/licenses/by/4.0/",
},
},
"tags": [
{
"name": "projects",
"description": f'contain provenance information about contributed datasets. \
Deleting projects will also delete all contributions including tables, structures, attachments, notebooks \
and cards for the project. Only users who have been added to a project can update its contents. While \
unpublished, only users on the project can retrieve its data or view it on the \
Portal. Making a project public does not automatically publish all \
its contributions, tables, attachments, and structures. These are separately set to public individually or in bulk.'
"",
},
{
"name": "contributions",
"description": f'contain simple hierarchical data which will show up as cards on the MP details \
page for MP material(s). Tables (rows and columns), structures, and attachments can be added to a \
contribution. Each contribution uses `mp-id` or composition as identifier to associate its data with the \
according entries on MP. Only admins or users on the project can create, update or delete contributions, and \
while unpublished, retrieve its data or view it on the Portal. \
Contribution components (tables, structures, and attachments) are deleted along with a contribution.',
},
{
"name": "structures",
"description": 'are \
<a href="https://pymatgen.org/_modules/pymatgen/core/structure.html#Structure">pymatgen structures</a> which \
can be added to a contribution.',
},
{
"name": "tables",
"description": 'are simple spreadsheet-type tables with columns and rows saved as Pandas \
<a href="https://pandas.pydata.org/pandas-docs/stable/getting_started/dsintro.html#dataframe">DataFrames</a> \
which can be added to a contribution.',
},
{
"name": "attachments",
"description": 'are files saved as objects in AWS S3 and not accessible for querying (only retrieval) \
which can be added to a contribution.',
},
{
"name": "notebooks",
"description": f'are Jupyter \
<a href="https://jupyter-notebook.readthedocs.io/en/stable/notebook.html#notebook-documents">notebook</a> \
documents generated and saved when a contribution is saved. They form the basis for Contribution \
Details Pages on the Portal.',
},
],
"securityDefinitions": {
"ApiKeyAuth": {
"description": "MP API key to authorize requests",
"name": "X-API-KEY",
"in": "header",
"type": "apiKey",
}
},
"security": [{"ApiKeyAuth": []}],
}
|
materialsproject/MPContribs
|
mpcontribs-api/mpcontribs/api/config.py
|
Python
|
mit
| 5,326 | 0.003567 |
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# these are system modules
import math
import numpy
import random
import sys
import urllib
# these are my local modules
import miscIO
import path
import tsvIO
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getLineInfo(aLine):
lInfo = {}
aTokens = aLine.split('\t')
if (len(aTokens) < 1 or len(aTokens) > 3):
if (1):
print aLine
print aLine.strip()
print aTokens
print len(aTokens)
sys.exit(-1)
return (lInfo)
lInfo['TARG'] = {}
lInfo['FEAT'] = {}
bTarg = aTokens[0].split(',')
bFeat = aTokens[1].split(',')
if (len(aTokens) == 3):
bFeat += aTokens[2].split(',')
if (0):
print bTarg
print bFeat
sys.exit(-1)
for ii in range(len(bTarg)):
cTmp = bTarg[ii].split('=')
try:
zVal = float(cTmp[1].strip())
lInfo['TARG'][cTmp[0].strip()] = zVal
except:
try:
lInfo['TARG'][cTmp[0].strip()] = cTmp[1].strip().upper()
except:
return ({})
for ii in range(len(bFeat)):
cTmp = bFeat[ii].split('=')
try:
zVal = float(cTmp[1].strip())
lInfo['FEAT'][cTmp[0].strip()] = zVal
except:
try:
lInfo['FEAT'][cTmp[0].strip()] = cTmp[1].strip().upper()
except:
return ({})
if (0):
print lInfo
sys.exit(-1)
return (lInfo)
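# Illustrative example (assumed input, not from the original source): a line
# such as "AGE=61,SEX=MALE\tGRADE=G2" would be parsed by getLineInfo() into
# {'TARG': {'AGE': 61.0, 'SEX': 'MALE'}, 'FEAT': {'GRADE': 'G2'}} -- values
# that look numeric become floats, all other values are upper-cased strings.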
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getCoordinates(aName):
tokenList = aName.split(':')
chrName = tokenList[3]
startPos = -1
endPos = -1
try:
startPos = int(tokenList[4])
endPos = int(tokenList[5])
except:
doNothing = 1
return (chrName, startPos, endPos)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def filterPWPV(pwpvOutFilename):
print " "
print " reading PWPV outputs from <%s> " % pwpvOutFilename
fh = file(pwpvOutFilename, 'r')
out0 = pwpvOutFilename + ".unmapd"
out1 = pwpvOutFilename + ".mapped"
fh0 = file(out0, 'w')
fh1 = file(out1, 'w')
n0 = 0
n1 = 0
# why didn't I put GNAB in this list ???
# --> adding it to the list on 06sep12
typeList = ["CNVR", "GEXP", "GNAB", "METH", "MIRN", "RPPA"]
# --> taking it back out on 20sep12 ;-)
typeList = ["CNVR", "GEXP", "METH", "MIRN", "RPPA"]
typeCounts = {}
for aLine in fh:
# by default, we assume we will keep this line from the file
keepLine = 1
aLine = aLine.strip()
tokenList = aLine.split('\t')
# expected list of tokens for a PWPV pair :
## ['C:SAMP:miRNA_k5:::::', 'C:SAMP:miRNA_k7:::::', '0.398', '694', '-300.0', '1.7', '-300.0', '0', '0.0', '0', '0.0\n']
if (len(tokenList) < 3):
continue
aType = tokenList[0][2:6]
bType = tokenList[1][2:6]
if (aType <= bType):
aKey = (aType, bType)
else:
aKey = (bType, aType)
if (aType in typeList):
aTokens = tokenList[0].split(':')
if (aTokens[3] == ""):
keepLine = 0
if (keepLine):
if (bType in typeList):
bTokens = tokenList[1].split(':')
if (bTokens[3] == ""):
keepLine = 0
if (keepLine):
fh1.write("%s\n" % aLine)
n1 += 1
else:
fh0.write("%s\n" % aLine)
n0 += 1
if (aKey not in typeCounts.keys()):
typeCounts[aKey] = 0
typeCounts[aKey] += 1
fh.close()
fh0.close()
fh1.close()
print " "
print " n1 = %9d n0 = %9d " % (n1, n0)
if ( (n1+n0) > 0 ):
f1 = float(n1) / float(n1 + n0)
f0 = float(n0) / float(n1 + n0)
print " f1 = %9.6f f0 = %9.6f " % (f1, f0)
print " "
print typeCounts
print " "
print " "
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
if __name__ == "__main__":
if (len(sys.argv) != 2):
print ' Usage : %s <pwpv results file> ' % sys.argv[0]
print " ERROR -- bad command line arguments "
sys.exit(-1)
pwpvOutFilename = sys.argv[1]
filterPWPV(pwpvOutFilename)
sys.exit(-1)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
|
cancerregulome/gidget
|
commands/feature_matrix_construction/main/filterPWPV.py
|
Python
|
mit
| 4,776 | 0.002094 |
import os, pygame
#create window of correct size (320x200, with some multiple)
x = 320
y = 200
size_mult = 4
bright_mult = 4
pygame.init()
os.environ['SDL_VIDEO_WINDOW_POS'] = str(0) + "," + str(40) #put window in consistent location
os.environ['SDL_VIDEO_WINDOW_POS'] = str(0) + "," + str(40) #put window in consistent location
screen2 = pygame.Surface((x,y))
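# A possible continuation (an assumption, not in the original file): the
# 320x200 surface would typically be drawn at low resolution and then scaled
# up onto the real window each frame, e.g.:
#   pygame.transform.scale(screen2, (x * size_mult, y * size_mult), screen)
#   pygame.display.flip()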
|
delMar43/wcmodtoolsources
|
WC1_clone/room_engine/win_init.py
|
Python
|
mit
| 424 | 0.023585 |
from builtins import object
from nose.tools import assert_equal, assert_not_equal, raises
from nose.plugins.skip import Skip, SkipTest
from openpathsampling.range_logic import *
class TestRangeLogic(object):
def test_range_and(self):
assert_equal(range_and(1, 3, 2, 4), [(2, 3)])
assert_equal(range_and(2, 4, 1, 3), [(2, 3)])
assert_equal(range_and(1, 2, 3, 4), None)
assert_equal(range_and(3, 4, 1, 2), None)
assert_equal(range_and(1, 4, 2, 3), [(2, 3)])
assert_equal(range_and(2, 3, 1, 4), [(2, 3)])
assert_equal(range_and(1, 2, 1, 2), 1)
def test_range_or(self):
assert_equal(range_or(1, 3, 2, 4), [(1, 4)])
assert_equal(range_or(2, 4, 1, 3), [(1, 4)])
assert_equal(range_or(1, 2, 3, 4), [(1, 2), (3, 4)])
assert_equal(range_or(3, 4, 1, 2), [(3, 4), (1, 2)])
assert_equal(range_or(1, 4, 2, 3), [(1, 4)])
assert_equal(range_or(2, 3, 1, 4), [(1, 4)])
assert_equal(range_or(1, 2, 1, 2), 1)
def test_range_sub(self):
assert_equal(range_sub(1, 3, 2, 4), [(1, 2)])
assert_equal(range_sub(2, 4, 1, 3), [(3, 4)])
assert_equal(range_sub(1, 2, 3, 4), 1)
assert_equal(range_sub(3, 4, 1, 2), 1)
assert_equal(range_sub(1, 4, 2, 3), [(1, 2), (3, 4)])
assert_equal(range_sub(2, 3, 1, 4), None)
assert_equal(range_sub(1, 2, 1, 2), None)
assert_equal(range_sub(0.1, 0.4, 0.1, 0.3), [(0.3, 0.4)])
class TestPeriodicRangeLogic(object):
def test_periodic_order(self):
# orders without wrapping
assert_equal(periodic_ordering(1, 2, 3, 4), [0, 1, 2, 3])
assert_equal(periodic_ordering(1, 3, 2, 4), [0, 2, 1, 3])
assert_equal(periodic_ordering(4, 3, 2, 1), [0, 3, 2, 1])
assert_equal(periodic_ordering(1, 2, 1, 2), [0, 2, 1, 3])
assert_equal(periodic_ordering(2, 4, 1, 3), [1, 3, 0, 2])
assert_equal(periodic_ordering(1, 2, 4, 3), [1, 2, 0, 3])
def test_periodic_and(self):
assert_equal(periodic_range_and(0.1, 0.3, 0.2, 0.4), [(0.2, 0.3)])
assert_equal(periodic_range_and(0.2, 0.4, 0.1, 0.3), [(0.2, 0.3)])
assert_equal(periodic_range_and(1, 2, 3, 4), None)
assert_equal(periodic_range_and(3, 4, 1, 2), None)
assert_equal(periodic_range_and(1, 4, 2, 3), [(2, 3)])
assert_equal(periodic_range_and(2, 3, 1, 4), [(2, 3)])
assert_equal(periodic_range_and(1, 2, 1, 2), 1)
assert_equal(periodic_range_and(1, 2, 2, 1), None)
assert_equal(periodic_range_and(2, 1, 1, 4), [(2, 4)])
assert_equal(periodic_range_and(0.1, 0.4, 0.3, 0.2),
[(0.1, 0.2), (0.3, 0.4)])
def test_periodic_or(self):
assert_equal(periodic_range_or(0.1, 0.3, 0.2, 0.4), [(0.1, 0.4)])
assert_equal(periodic_range_or(0.2, 0.4, 0.1, 0.3), [(0.1, 0.4)])
assert_equal(periodic_range_or(1, 2, 3, 4), [(1, 2), (3, 4)])
assert_equal(periodic_range_or(3, 4, 1, 2), [(3, 4), (1, 2)])
assert_equal(periodic_range_or(1, 4, 2, 3), [(1, 4)])
assert_equal(periodic_range_or(2, 3, 1, 4), [(1, 4)])
assert_equal(periodic_range_or(1, 2, 1, 2), 1)
assert_equal(periodic_range_or(1, 2, 2, 1), -1)
assert_equal(periodic_range_or(0.1, 0.4, 0.3, 0.2), -1)
assert_equal(periodic_range_or(2, 1, 1, 4), -1)
def test_periodic_sub(self):
assert_equal(periodic_range_sub(0.1, 0.3, 0.2, 0.4), [(0.1, 0.2)])
assert_equal(periodic_range_sub(0.2, 0.4, 0.1, 0.3), [(0.3, 0.4)])
assert_equal(periodic_range_sub(1, 2, 3, 4), 1)
assert_equal(periodic_range_sub(3, 4, 1, 2), 1)
assert_equal(periodic_range_sub(1, 4, 2, 3), [(1, 2), (3, 4)])
assert_equal(periodic_range_sub(2, 3, 1, 4), None)
assert_equal(periodic_range_sub(1, 2, 1, 2), None)
assert_equal(periodic_range_sub(1, 2, 2, 1), 1)
assert_equal(periodic_range_sub(2, 1, 1, 4), [(4, 1)])
assert_equal(periodic_range_sub(0.1, 0.4, 0.3, 0.2), [(0.2, 0.3)])
assert_equal(periodic_range_sub(0.1, 0.4, 0.1, 0.3), [(0.3, 0.4)])
|
choderalab/openpathsampling
|
openpathsampling/tests/test_range_logic.py
|
Python
|
lgpl-2.1
| 4,127 | 0.000969 |
# -----------------------------------------------------------------------------
# Copyright (c) 2014, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
# -----------------------------------------------------------------------------
"""
Hook for cryptography module from the Python Cryptography Authority.
"""
import os.path
import glob
from PyInstaller.hooks.hookutils import (collect_submodules as cs,
get_module_file_attribute)
from PyInstaller.hooks.hookutils import PY_EXTENSION_SUFFIXES
# add the OpenSSL FFI binding modules as hidden imports
hiddenimports = cs('cryptography.hazmat.bindings.openssl') # pragma: no cover
def hook(mod):
"""
Include the cffi extensions as binaries in a subfolder named like the
package. The cffi verifier expects to find them inside the package
directory for the main module. We cannot use hiddenimports because that
would add the modules outside the package.
"""
crypto_dir = os.path.dirname(get_module_file_attribute('cryptography'))
for ext in PY_EXTENSION_SUFFIXES:
ffimods = glob.glob(os.path.join(crypto_dir,
'*_cffi_*%s*' % ext))
for f in ffimods:
name = os.path.join('cryptography', os.path.basename(f))
# TODO fix this hook to use attribute 'binaries'.
mod.pyinstaller_binaries.append((name, f, 'BINARY'))
return mod
|
timeyyy/PyUpdater
|
pyupdater/hooks/hook-cryptography.py
|
Python
|
bsd-2-clause
| 1,600 | 0 |
# Copyright 2016-2021 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for _activation_profile module.
"""
from __future__ import absolute_import, print_function
import copy
import re
import pytest
from zhmcclient import Client, ActivationProfile
from zhmcclient_mock import FakedSession
from tests.common.utils import assert_resources
class TestActivationProfile(object):
"""
All tests for the ActivationProfile and ActivationProfileManager classes.
"""
def setup_method(self):
"""
Setup that is called by pytest before each test method.
        Set up a faked session, add a faked CPC in classic mode, and add two
        faked activation profiles of each type.
"""
# pylint: disable=attribute-defined-outside-init
self.session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8')
self.client = Client(self.session)
self.faked_cpc = self.session.hmc.cpcs.add({
'object-id': 'fake-cpc1-oid',
# object-uri is set up automatically
'parent': None,
'class': 'cpc',
'name': 'fake-cpc1-name',
'description': 'CPC #1 (classic mode)',
'status': 'active',
'dpm-enabled': False,
'is-ensemble-member': False,
'iml-mode': 'lpar',
})
self.cpc = self.client.cpcs.find(name='fake-cpc1-name')
self.faked_reset_ap_1 = self.faked_cpc.reset_activation_profiles.add({
# element-uri is set up automatically
'name': 'rap_1',
'parent': self.faked_cpc.uri,
'class': 'reset-activation-profile',
'description': 'RAP #1',
})
self.faked_reset_ap_2 = self.faked_cpc.reset_activation_profiles.add({
# element-uri is set up automatically
'name': 'rap_2',
'parent': self.faked_cpc.uri,
'class': 'reset-activation-profile',
'description': 'RAP #2',
})
self.faked_image_ap_1 = self.faked_cpc.image_activation_profiles.add({
# element-uri is set up automatically
'name': 'iap_1',
'parent': self.faked_cpc.uri,
'class': 'image-activation-profile',
'description': 'IAP #1',
})
self.faked_image_ap_2 = self.faked_cpc.image_activation_profiles.add({
# element-uri is set up automatically
'name': 'iap_2',
'parent': self.faked_cpc.uri,
'class': 'image-activation-profile',
'description': 'IAP #2',
})
self.faked_load_ap_1 = self.faked_cpc.load_activation_profiles.add({
# element-uri is set up automatically
'name': 'lap_1',
'parent': self.faked_cpc.uri,
'class': 'load-activation-profile',
'description': 'LAP #1',
})
self.faked_load_ap_2 = self.faked_cpc.load_activation_profiles.add({
# element-uri is set up automatically
'name': 'lap_2',
'parent': self.faked_cpc.uri,
'class': 'load-activation-profile',
'description': 'LAP #2',
})
@pytest.mark.parametrize(
"profile_type", ['reset', 'image', 'load']
)
def test_profilemanager_initial_attrs(self, profile_type):
"""Test initial attributes of ActivationProfileManager."""
mgr_attr = profile_type + '_activation_profiles'
profile_mgr = getattr(self.cpc, mgr_attr)
# Verify all public properties of the manager object
assert profile_mgr.resource_class == ActivationProfile
assert profile_mgr.session == self.session
assert profile_mgr.parent == self.cpc
assert profile_mgr.cpc == self.cpc
assert profile_mgr.profile_type == profile_type
# TODO: Test for ActivationProfileManager.__repr__()
@pytest.mark.parametrize(
"profile_type", ['reset', 'image', 'load']
)
@pytest.mark.parametrize(
"full_properties_kwargs, prop_names", [
(dict(),
['name', 'element-uri']),
(dict(full_properties=False),
['name', 'element-uri']),
(dict(full_properties=True),
None),
]
)
def test_profilemanager_list_full_properties(
self, full_properties_kwargs, prop_names, profile_type):
"""Test ActivationProfileManager.list() with full_properties."""
mgr_attr = profile_type + '_activation_profiles'
faked_profile_mgr = getattr(self.faked_cpc, mgr_attr)
exp_faked_profiles = faked_profile_mgr.list()
profile_mgr = getattr(self.cpc, mgr_attr)
# Execute the code to be tested
profiles = profile_mgr.list(**full_properties_kwargs)
assert_resources(profiles, exp_faked_profiles, prop_names)
@pytest.mark.parametrize(
"profile_type, filter_args, exp_names", [
('reset',
{'name': 'rap_2'},
['rap_2']),
('reset',
{'name': '.*rap_1'},
['rap_1']),
('reset',
{'name': 'rap_1.*'},
['rap_1']),
('reset',
{'name': 'rap_.'},
['rap_1', 'rap_2']),
('reset',
{'name': '.ap_1'},
['rap_1']),
('reset',
{'name': '.+'},
['rap_1', 'rap_2']),
('reset',
{'name': 'rap_1.+'},
[]),
('reset',
{'name': '.+rap_1'},
[]),
('image',
{'name': 'iap_1'},
['iap_1']),
('image',
{'name': '.*iap_1'},
['iap_1']),
('image',
{'name': 'iap_1.*'},
['iap_1']),
('image',
{'name': 'iap_.'},
['iap_1', 'iap_2']),
('image',
{'name': '.ap_1'},
['iap_1']),
('image',
{'name': '.+'},
['iap_1', 'iap_2']),
('image',
{'name': 'iap_1.+'},
[]),
('image',
{'name': '.+iap_1'},
[]),
('load',
{'name': 'lap_2'},
['lap_2']),
('load',
{'name': '.*lap_1'},
['lap_1']),
('load',
{'name': 'lap_1.*'},
['lap_1']),
('load',
{'name': 'lap_.'},
['lap_1', 'lap_2']),
('load',
{'name': '.ap_1'},
['lap_1']),
('load',
{'name': '.+'},
['lap_1', 'lap_2']),
('load',
{'name': 'lap_1.+'},
[]),
('load',
{'name': '.+lap_1'},
[]),
('reset',
{'class': 'reset-activation-profile'},
['rap_1', 'rap_2']),
('image',
{'class': 'image-activation-profile'},
['iap_1', 'iap_2']),
('load',
{'class': 'load-activation-profile'},
['lap_1', 'lap_2']),
('reset',
{'class': 'reset-activation-profile',
'description': 'RAP #2'},
['rap_2']),
('image',
{'class': 'image-activation-profile',
'description': 'IAP #1'},
['iap_1']),
('load',
{'class': 'load-activation-profile',
'description': 'LAP #2'},
['lap_2']),
('reset',
{'description': 'RAP #1'},
['rap_1']),
('image',
{'description': 'IAP #2'},
['iap_2']),
('load',
{'description': 'LAP #1'},
['lap_1']),
]
)
def test_profilemanager_list_filter_args(
self, profile_type, filter_args, exp_names):
"""Test ActivationProfileManager.list() with filter_args."""
mgr_attr = profile_type + '_activation_profiles'
profile_mgr = getattr(self.cpc, mgr_attr)
# Execute the code to be tested
profiles = profile_mgr.list(filter_args=filter_args)
assert len(profiles) == len(exp_names)
if exp_names:
names = [ap.properties['name'] for ap in profiles]
assert set(names) == set(exp_names)
# TODO: Test for initial ActivationProfile attributes
def test_profile_repr(self):
"""Test ActivationProfile.__repr__()."""
# We test __repr__() just for reset activation profiles, because the
# ActivationProfile class is the same for all profile types and we know
# that __repr__() does not depend on the profile type.
profile_mgr = self.cpc.reset_activation_profiles
reset_ap = profile_mgr.find(name='rap_1')
# Execute the code to be tested
repr_str = repr(reset_ap)
repr_str = repr_str.replace('\n', '\\n')
# We check just the begin of the string:
assert re.match(r'^{classname}\s+at\s+0x{id:08x}\s+\(\\n.*'.
format(classname=reset_ap.__class__.__name__,
id=id(reset_ap)),
repr_str)
@pytest.mark.parametrize(
"profile_type", ['reset', 'image', 'load']
)
@pytest.mark.parametrize(
"input_props", [
{},
{'description': 'New profile description'},
{'description': ''},
{'ssc-network-info': {
'chpid': '1a',
'port': 0,
'ipaddr-type': 'dhcp',
'vlan-id': None,
'static-ip-info': None,
}},
{'group-profile-uri': None},
{'zaware-gateway-info': None},
]
)
def test_profile_update_properties(self, input_props, profile_type):
"""Test ActivationProfile.update_properties()."""
mgr_attr = profile_type + '_activation_profiles'
profile_mgr = getattr(self.cpc, mgr_attr)
profile = profile_mgr.list()[0]
profile.pull_full_properties()
saved_properties = copy.deepcopy(profile.properties)
# Execute the code to be tested
profile.update_properties(properties=input_props)
# Verify that the resource object already reflects the property
# updates.
for prop_name in saved_properties:
if prop_name in input_props:
exp_prop_value = input_props[prop_name]
else:
exp_prop_value = saved_properties[prop_name]
assert prop_name in profile.properties
prop_value = profile.properties[prop_name]
assert prop_value == exp_prop_value
# Refresh the resource object and verify that the resource object
# still reflects the property updates.
profile.pull_full_properties()
for prop_name in saved_properties:
if prop_name in input_props:
exp_prop_value = input_props[prop_name]
else:
exp_prop_value = saved_properties[prop_name]
assert prop_name in profile.properties
prop_value = profile.properties[prop_name]
assert prop_value == exp_prop_value
|
zhmcclient/python-zhmcclient
|
tests/unit/zhmcclient/test_activation_profile.py
|
Python
|
apache-2.0
| 11,882 | 0 |
# Copyright 2013-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file contains the detection logic for miscellaneous external dependencies.
import glob
import os
import stat
import sysconfig
from .. import mlog
from .. import mesonlib
from ..environment import detect_cpu_family
from .base import DependencyException, DependencyMethods
from .base import ExternalDependency, ExtraFrameworkDependency, PkgConfigDependency
class BoostDependency(ExternalDependency):
# Some boost libraries have different names for
# their sources and libraries. This dict maps
# between the two.
name2lib = {'test': 'unit_test_framework'}
def __init__(self, environment, kwargs):
super().__init__('boost', environment, 'cpp', kwargs)
self.libdir = ''
try:
self.boost_root = os.environ['BOOST_ROOT']
if not os.path.isabs(self.boost_root):
raise DependencyException('BOOST_ROOT must be an absolute path.')
except KeyError:
self.boost_root = None
if self.boost_root is None:
if self.want_cross:
if 'BOOST_INCLUDEDIR' in os.environ:
self.incdir = os.environ['BOOST_INCLUDEDIR']
else:
raise DependencyException('BOOST_ROOT or BOOST_INCLUDEDIR is needed while cross-compiling')
if mesonlib.is_windows():
self.boost_root = self.detect_win_root()
self.incdir = self.boost_root
else:
if 'BOOST_INCLUDEDIR' in os.environ:
self.incdir = os.environ['BOOST_INCLUDEDIR']
else:
self.incdir = '/usr/include'
else:
self.incdir = os.path.join(self.boost_root, 'include')
self.boost_inc_subdir = os.path.join(self.incdir, 'boost')
mlog.debug('Boost library root dir is', self.boost_root)
self.src_modules = {}
self.lib_modules = {}
self.lib_modules_mt = {}
self.detect_version()
self.requested_modules = self.get_requested(kwargs)
module_str = ', '.join(self.requested_modules)
if self.is_found:
self.detect_src_modules()
self.detect_lib_modules()
self.validate_requested()
if self.boost_root is not None:
info = self.version + ', ' + self.boost_root
else:
info = self.version
mlog.log('Dependency Boost (%s) found:' % module_str, mlog.green('YES'), info)
else:
mlog.log("Dependency Boost (%s) found:" % module_str, mlog.red('NO'))
def detect_win_root(self):
globtext = 'c:\\local\\boost_*'
files = glob.glob(globtext)
if len(files) > 0:
return files[0]
return 'C:\\'
def get_compile_args(self):
args = []
if self.boost_root is not None:
if mesonlib.is_windows():
include_dir = self.boost_root
else:
include_dir = os.path.join(self.boost_root, 'include')
else:
include_dir = self.incdir
# Use "-isystem" when including boost headers instead of "-I"
# to avoid compiler warnings/failures when "-Werror" is used
# Careful not to use "-isystem" on default include dirs as it
# breaks some of the headers for certain gcc versions
# For example, doing g++ -isystem /usr/include on a simple
# "int main()" source results in the error:
# "/usr/include/c++/6.3.1/cstdlib:75:25: fatal error: stdlib.h: No such file or directory"
# See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70129
# and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors
# for more details
# TODO: The correct solution would probably be to ask the
        # compiler for its default include paths (i.e. "gcc -xc++ -E
# -v -") and avoid including those with -isystem
# For now, use -isystem for all includes except for some
# typical defaults (which don't need to be included at all
# since they are in the default include paths). These typical
# defaults include the usual directories at the root of the
# filesystem, but also any path that ends with those directory
# names in order to handle cases like cross-compiling where we
# might have a different sysroot.
if not include_dir.endswith(('/usr/include', '/usr/local/include')):
args.append("".join(self.compiler.get_include_args(include_dir, True)))
return args
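    # Illustrative sketch only (not part of Meson's API): assuming a GCC-like
    # compiler where get_include_args(dir, is_system=True) yields
    # ["-isystem" + dir], the logic above behaves roughly as:
    #
    #   boost_root=/opt/boost          -> ["-isystem/opt/boost/include"]
    #   incdir=/usr/include (default)  -> []   (no flag emitted for default dirs)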
def get_requested(self, kwargs):
candidates = kwargs.get('modules', [])
if not isinstance(candidates, list):
candidates = [candidates]
for c in candidates:
if not isinstance(c, str):
raise DependencyException('Boost module argument is not a string.')
return candidates
def validate_requested(self):
for m in self.requested_modules:
if m not in self.src_modules:
msg = 'Requested Boost module {!r} not found'
raise DependencyException(msg.format(m))
def detect_version(self):
try:
ifile = open(os.path.join(self.boost_inc_subdir, 'version.hpp'))
except FileNotFoundError:
return
with ifile:
for line in ifile:
if line.startswith("#define") and 'BOOST_LIB_VERSION' in line:
ver = line.split()[-1]
ver = ver[1:-1]
self.version = ver.replace('_', '.')
self.is_found = True
return
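    # For reference, the parsed line in version.hpp typically looks like
    # (the exact value depends on the installed Boost release):
    #
    #   #define BOOST_LIB_VERSION "1_65_1"
    #
    # which the loop above exposes as self.version == "1.65.1".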
def detect_src_modules(self):
for entry in os.listdir(self.boost_inc_subdir):
entry = os.path.join(self.boost_inc_subdir, entry)
if stat.S_ISDIR(os.stat(entry).st_mode):
self.src_modules[os.path.split(entry)[-1]] = True
def detect_lib_modules(self):
if mesonlib.is_windows():
return self.detect_lib_modules_win()
return self.detect_lib_modules_nix()
def detect_lib_modules_win(self):
arch = detect_cpu_family(self.env.coredata.compilers)
# Guess the libdir
if arch == 'x86':
gl = 'lib32*'
elif arch == 'x86_64':
gl = 'lib64*'
else:
# Does anyone do Boost cross-compiling to other archs on Windows?
gl = None
# See if the libdir is valid
if gl:
libdir = glob.glob(os.path.join(self.boost_root, gl))
else:
libdir = []
# Can't find libdir, bail
if not libdir:
return
libdir = libdir[0]
self.libdir = libdir
globber = 'libboost_*-gd-*.lib' if self.static else 'boost_*-gd-*.lib' # FIXME
for entry in glob.glob(os.path.join(libdir, globber)):
(_, fname) = os.path.split(entry)
base = fname.split('_', 1)[1]
modname = base.split('-', 1)[0]
self.lib_modules_mt[modname] = fname
def detect_lib_modules_nix(self):
if self.static:
libsuffix = 'a'
elif mesonlib.is_osx() and not self.want_cross:
libsuffix = 'dylib'
else:
libsuffix = 'so'
globber = 'libboost_*.{}'.format(libsuffix)
if 'BOOST_LIBRARYDIR' in os.environ:
libdirs = [os.environ['BOOST_LIBRARYDIR']]
elif self.boost_root is None:
libdirs = mesonlib.get_library_dirs()
else:
libdirs = [os.path.join(self.boost_root, 'lib')]
for libdir in libdirs:
for entry in glob.glob(os.path.join(libdir, globber)):
lib = os.path.basename(entry)
name = lib.split('.')[0].split('_', 1)[-1]
# I'm not 100% sure what to do here. Some distros
# have modules such as thread only as -mt versions.
if entry.endswith('-mt.{}'.format(libsuffix)):
self.lib_modules_mt[name] = True
else:
self.lib_modules[name] = True
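    # For orientation (derived from the parsing above): a file named
    # "libboost_thread.so" is recorded as lib_modules['thread'], while
    # "libboost_thread-mt.so" ends up as lib_modules_mt['thread-mt'], which is
    # exactly the module + '-mt' key that get_link_args() probes.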
def get_win_link_args(self):
args = []
if self.boost_root:
args.append('-L' + self.libdir)
for module in self.requested_modules:
module = BoostDependency.name2lib.get(module, module)
if module in self.lib_modules_mt:
args.append(self.lib_modules_mt[module])
return args
def get_link_args(self):
if mesonlib.is_windows():
return self.get_win_link_args()
args = []
if self.boost_root:
args.append('-L' + os.path.join(self.boost_root, 'lib'))
elif 'BOOST_LIBRARYDIR' in os.environ:
args.append('-L' + os.environ['BOOST_LIBRARYDIR'])
for module in self.requested_modules:
module = BoostDependency.name2lib.get(module, module)
libname = 'boost_' + module
# The compiler's library detector is the most reliable so use that first.
default_detect = self.compiler.find_library(libname, self.env, [])
if default_detect is not None:
if module == 'unit_testing_framework':
emon_args = self.compiler.find_library('boost_test_exec_monitor')
else:
emon_args = None
args += default_detect
if emon_args is not None:
args += emon_args
elif module in self.lib_modules or module in self.lib_modules_mt:
linkcmd = '-l' + libname
args.append(linkcmd)
# FIXME a hack, but Boost's testing framework has a lot of
# different options and it's hard to determine what to do
# without feedback from actual users. Update this
# as we get more bug reports.
if module == 'unit_testing_framework':
args.append('-lboost_test_exec_monitor')
elif module + '-mt' in self.lib_modules_mt:
linkcmd = '-lboost_' + module + '-mt'
args.append(linkcmd)
if module == 'unit_testing_framework':
args.append('-lboost_test_exec_monitor-mt')
return args
def get_sources(self):
return []
def need_threads(self):
return 'thread' in self.requested_modules
class ThreadDependency(ExternalDependency):
def __init__(self, environment, kwargs):
super().__init__('threads', environment, None, {})
self.name = 'threads'
self.is_found = True
mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES'))
def need_threads(self):
return True
def get_version(self):
return 'unknown'
class Python3Dependency(ExternalDependency):
def __init__(self, environment, kwargs):
super().__init__('python3', environment, None, kwargs)
self.name = 'python3'
# We can only be sure that it is Python 3 at this point
self.version = '3'
if DependencyMethods.PKGCONFIG in self.methods:
try:
pkgdep = PkgConfigDependency('python3', environment, kwargs)
if pkgdep.found():
self.compile_args = pkgdep.get_compile_args()
self.link_args = pkgdep.get_link_args()
self.version = pkgdep.get_version()
self.is_found = True
return
except Exception:
pass
if not self.is_found:
if mesonlib.is_windows() and DependencyMethods.SYSCONFIG in self.methods:
self._find_libpy3_windows(environment)
elif mesonlib.is_osx() and DependencyMethods.EXTRAFRAMEWORK in self.methods:
# In OSX the Python 3 framework does not have a version
# number in its name.
fw = ExtraFrameworkDependency('python', False, None, self.env,
self.language, kwargs)
if fw.found():
self.compile_args = fw.get_compile_args()
self.link_args = fw.get_link_args()
self.is_found = True
if self.is_found:
mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES'))
else:
mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.red('NO'))
def _find_libpy3_windows(self, env):
'''
Find python3 libraries on Windows and also verify that the arch matches
what we are building for.
'''
pyarch = sysconfig.get_platform()
arch = detect_cpu_family(env.coredata.compilers)
if arch == 'x86':
arch = '32'
elif arch == 'x86_64':
arch = '64'
else:
# We can't cross-compile Python 3 dependencies on Windows yet
mlog.log('Unknown architecture {!r} for'.format(arch),
mlog.bold(self.name))
self.is_found = False
return
# Pyarch ends in '32' or '64'
if arch != pyarch[-2:]:
mlog.log('Need', mlog.bold(self.name),
'for {}-bit, but found {}-bit'.format(arch, pyarch[-2:]))
self.is_found = False
return
inc = sysconfig.get_path('include')
platinc = sysconfig.get_path('platinclude')
self.compile_args = ['-I' + inc]
if inc != platinc:
self.compile_args.append('-I' + platinc)
        # Nothing exposes this directly that I could find
basedir = sysconfig.get_config_var('base')
vernum = sysconfig.get_config_var('py_version_nodot')
self.link_args = ['-L{}/libs'.format(basedir),
'-lpython{}'.format(vernum)]
self.version = sysconfig.get_config_var('py_version_short')
self.is_found = True
def get_methods(self):
if mesonlib.is_windows():
return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
elif mesonlib.is_osx():
return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK]
else:
return [DependencyMethods.PKGCONFIG]
|
wberrier/meson
|
mesonbuild/dependencies/misc.py
|
Python
|
apache-2.0
| 14,951 | 0.001271 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui4/printdialog_base.ui'
#
# Created: Mon May 4 14:30:35 2009
# by: PyQt4 UI code generator 4.4.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.setWindowModality(QtCore.Qt.ApplicationModal)
Dialog.resize(700, 500)
self.gridlayout = QtGui.QGridLayout(Dialog)
self.gridlayout.setObjectName("gridlayout")
self.StackedWidget = QtGui.QStackedWidget(Dialog)
self.StackedWidget.setObjectName("StackedWidget")
self.page = QtGui.QWidget()
self.page.setObjectName("page")
self.gridlayout1 = QtGui.QGridLayout(self.page)
self.gridlayout1.setObjectName("gridlayout1")
self.label_2 = QtGui.QLabel(self.page)
font = QtGui.QFont()
font.setPointSize(16)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.gridlayout1.addWidget(self.label_2, 0, 0, 1, 1)
self.line = QtGui.QFrame(self.page)
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName("line")
self.gridlayout1.addWidget(self.line, 1, 0, 1, 1)
self.Files = FileTable(self.page)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Files.sizePolicy().hasHeightForWidth())
self.Files.setSizePolicy(sizePolicy)
self.Files.setObjectName("Files")
self.gridlayout1.addWidget(self.Files, 2, 0, 1, 1)
self.StackedWidget.addWidget(self.page)
self.page_2 = QtGui.QWidget()
self.page_2.setObjectName("page_2")
self.gridlayout2 = QtGui.QGridLayout(self.page_2)
self.gridlayout2.setObjectName("gridlayout2")
self.label_3 = QtGui.QLabel(self.page_2)
font = QtGui.QFont()
font.setPointSize(16)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.gridlayout2.addWidget(self.label_3, 0, 0, 1, 1)
self.line_2 = QtGui.QFrame(self.page_2)
self.line_2.setFrameShape(QtGui.QFrame.HLine)
self.line_2.setFrameShadow(QtGui.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.gridlayout2.addWidget(self.line_2, 1, 0, 1, 1)
self.PrinterName = PrinterNameComboBox(self.page_2)
self.PrinterName.setObjectName("PrinterName")
self.gridlayout2.addWidget(self.PrinterName, 2, 0, 1, 1)
self.OptionsToolBox = PrintSettingsToolbox(self.page_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.OptionsToolBox.sizePolicy().hasHeightForWidth())
self.OptionsToolBox.setSizePolicy(sizePolicy)
self.OptionsToolBox.setObjectName("OptionsToolBox")
self.gridlayout2.addWidget(self.OptionsToolBox, 3, 0, 1, 1)
self.StackedWidget.addWidget(self.page_2)
self.gridlayout.addWidget(self.StackedWidget, 0, 0, 1, 5)
self.line_3 = QtGui.QFrame(Dialog)
self.line_3.setFrameShape(QtGui.QFrame.HLine)
self.line_3.setFrameShadow(QtGui.QFrame.Sunken)
self.line_3.setObjectName("line_3")
self.gridlayout.addWidget(self.line_3, 1, 0, 1, 5)
self.StepText = QtGui.QLabel(Dialog)
self.StepText.setObjectName("StepText")
self.gridlayout.addWidget(self.StepText, 2, 0, 1, 1)
spacerItem = QtGui.QSpacerItem(251, 28, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.gridlayout.addItem(spacerItem, 2, 1, 1, 1)
self.BackButton = QtGui.QPushButton(Dialog)
self.BackButton.setObjectName("BackButton")
self.gridlayout.addWidget(self.BackButton, 2, 2, 1, 1)
self.NextButton = QtGui.QPushButton(Dialog)
self.NextButton.setObjectName("NextButton")
self.gridlayout.addWidget(self.NextButton, 2, 3, 1, 1)
self.CancelButton = QtGui.QPushButton(Dialog)
self.CancelButton.setObjectName("CancelButton")
self.gridlayout.addWidget(self.CancelButton, 2, 4, 1, 1)
self.retranslateUi(Dialog)
self.StackedWidget.setCurrentIndex(1)
self.OptionsToolBox.setCurrentIndex(-1)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "HP Device Manager - Print", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("Dialog", "Select Files to Print", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("Dialog", "Select Printer and Options", None, QtGui.QApplication.UnicodeUTF8))
self.StepText.setText(QtGui.QApplication.translate("Dialog", "Step %1 of %2", None, QtGui.QApplication.UnicodeUTF8))
self.BackButton.setText(QtGui.QApplication.translate("Dialog", "< Back", None, QtGui.QApplication.UnicodeUTF8))
self.NextButton.setText(QtGui.QApplication.translate("Dialog", "Next >", None, QtGui.QApplication.UnicodeUTF8))
self.CancelButton.setText(QtGui.QApplication.translate("Dialog", "Cancel", None, QtGui.QApplication.UnicodeUTF8))
from .printsettingstoolbox import PrintSettingsToolbox
from .printernamecombobox import PrinterNameComboBox
from .filetable import FileTable
|
matrumz/RPi_Custom_Files
|
Printing/hplip-3.15.2/ui4/printdialog_base.py
|
Python
|
gpl-2.0
| 5,718 | 0.002973 |
# PyROOT / RooFit imports needed by the fit below; the original script assumed
# these names were already available (e.g. via a broader ROOT import).
from ROOT import (TFile, TCanvas, TLegend, RooWorkspace, RooRealVar,
                  RooGaussian, RooExponential, RooAddPdf, RooArgList,
                  RooFit, RooAbsData)
def MassFit(particle):
if raw_input("Do %s mass fit? [y/N] " % (particle)) not in ["y", "Y"]:
return
print "************************************"
print "* Doing mass fit *"
print "************************************"
f = TFile.Open("workspace.root")
w = f.Get("w")
assert(isinstance(w, RooWorkspace))
data = w.data("BsDsPi_data")
if (particle == "Bs"):
varName = "lab0_MM"
meanRange = [5366., 5360., 5372.]
if (particle == "Ds"):
varName = "lab2_MM"
meanRange = [1970., 1965., 1975.]
mass = w.var(varName)
mean = RooRealVar("mean", "mass (MeV)", meanRange[0], meanRange[1], meanRange[2]) ;
width = RooRealVar("width", "width (MeV)", 15., 5., 50.) ;
const = RooRealVar("const", "bg const", -0.005, -0.1, 0.1);
sigModel = RooGaussian( "sigModel", "signal PDF", mass, mean, width) ;
bkgModel = RooExponential("bkgModel", "bkgrnd PDF", mass, const) ;
Nsig = RooRealVar("Nsig", "signal Yield", 10000., 0., 10000000.);
Nbkg = RooRealVar("Nbkg", "bkgrnd Yield", 10000., 0., 10000000.);
model = RooAddPdf("model", "full PDF", RooArgList(sigModel, bkgModel), RooArgList(Nsig, Nbkg));
model.fitTo(data)
cMass = TCanvas("cMass_"+particle, "cMass"+particle)
frame = mass.frame()
frame.SetStats(False)
frame.SetTitle("Fit to the %s mass" % (particle))
data.plotOn(frame, RooFit.DataError(RooAbsData.SumW2))
model.plotOn(frame, RooFit.LineColor(4 ) ) #9
model.plotOn(frame, RooFit.LineColor(8 ), RooFit.LineStyle(2), RooFit.Components("sigModel"), RooFit.Name("sig") )
model.plotOn(frame, RooFit.LineColor(46), RooFit.LineStyle(2), RooFit.Components("bkgModel"), RooFit.Name("bkg") )
frame.Draw()
leg = TLegend(0.64, 0.77, 0.89, 0.89)
leg.AddEntry(frame.findObject("sig"), "Signal ("+particle+")", "l")
leg.AddEntry(frame.findObject("bkg"), "Background", "l")
leg.Draw("same")
cMass.Update()
cMass.SaveAs("plots/MassFit"+particle+".pdf")
print " > Showing mass fit for %s" % (particle)
print " > Signal events: %d +- %d" % (Nsig.getVal(), Nsig.getError())
print " > Background events: %d +- %d" % (Nbkg.getVal(), Nbkg.getError())
raw_input("Press enter to continue.")
f.Close()
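if __name__ == "__main__":
    # Illustrative driver only: assumes "workspace.root" (holding the
    # RooWorkspace "w" with the "BsDsPi_data" dataset used above) exists in
    # the current working directory.
    MassFit("Bs")
    MassFit("Ds")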
|
lbel/Maastricht-Masterclass-2015
|
scripts/MassFit.py
|
Python
|
mit
| 2,260 | 0.031858 |
__author__ = 'bdeutsch'
import re
import numpy as np
import pandas as pd
# List cards drawn by me and played by opponent
def get_cards(filename):
# Open the file
with open(filename) as f:
mycards = []
oppcards = []
for line in f:
# Generate my revealed card list
m = re.search('name=(.+)id.+to FRIENDLY HAND', line)
if m:
mycards.append(m.group(1))
n = re.search('name=(.+)id.+to OPPOSING PLAY(?! \(Hero)', line)
if n:
oppcards.append(n.group(1))
for item in mycards:
print item
print '\n'
for item in oppcards:
print item
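# Rough shape of the log lines the regexes above expect (illustrative only,
# reconstructed from the patterns rather than from a real Hearthstone log):
#
#   [Zone] ... name=Fireball id=33 zone=HAND ... to FRIENDLY HAND
#   [Zone] ... name=Chillwind Yeti id=57 zone=PLAY ... to OPPOSING PLAY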
# make a list of card IDs and names
def get_ids():
# Create an empty list of IDs
idlist = []
with open('test_game') as f:
# For each line
for line in f:
# Find the entity ids
m = re.search('[\[ ]id=(\d+) ', line)
# if one is found
if m:
# Check that we haven't found it yet, convert to an integer
id = int(m.group(1))
# Add it to the list
if id not in idlist:
idlist.append(id)
# Sort the ids
idlist.sort()
# Convert to dataframe
d = pd.DataFrame(index=idlist)
# Rename the index
d.index.name = "Entity ID"
# Create an empty column for names
d["Name"] = np.nan
#print d
return d
# make a list of card names only if followed by id
def get_names():
with open('test_game') as f:
for line in f:
# Find the entity ids
m = re.search('[\[ ]name=([\w ]+?) id=', line)
if m:
print m.group(1)
def get_ids_names(df):
with open('test_game') as f:
namedict = {}
for line in f:
# Find combinations of entities and names
m = re.search('[\[ ]name=([\w ]+?) id=(\d+)', line)
if m:
ent_id = int(m.group(2))
name = m.group(1)
df.ix[ent_id, 'Name'] = name
#print m.group(2), m.group(1)
return df
idlist = []
with open('test_game') as f:
# For each line
for line in f:
# Find the entity ids
m = re.search('[\[ ]id=(\d+) ', line)
# if one is found
if m:
# Check that we haven't found it yet, convert to an integer
id = int(m.group(1))
# Add it to the list
if id not in idlist:
idlist.append(id)
# Sort the ids
idlist.sort()
# Convert to dataframe
df = pd.DataFrame(index=idlist)
# Rename the index
df.index.name = "Entity ID"
# Create an empty column for names
df["Name"] = np.nan
df["CardId"] = np.nan
df["Player"] = np.nan
with open('test_game') as f:
updates = []
for line in f:
# Find lists of the innermost nested brackets
m = re.findall(r"\[([^\[]+?)]", line)
# If it's not just the command designation bracket ("zone", e.g.)
if len(m)>1:
# for each set of bracket contents
for item in m[1:]:
# add to the list of updates
updates.append(item)
for item in updates:
# find the id
m = re.search("id=(\d+)", item)
if m:
# Assign ID variable
id = int(m.group(1))
# find name and assign
n = re.search("name=(.+?) \w+?=", item)
if n:
name = n.group(1)
df.ix[id, "Name"] = name
# find cardId and assign
n = re.search("cardId=(\w.+?) ", item)
if n:
cardId = n.group(1)
df.ix[id, "CardId"] = cardId
# find player
n = re.search("player=(\d)", item)
if n:
player = n.group(1)
df.ix[id, "Player"] = player
# update the dataframe for each update
# get rid of the "zone" and "power" markers.
# collect the entries into a list
# Put card IDs into a DataFrame
#df = get_ids_names(get_ids())
pd.set_option('display.max_rows', 200)
print df
# get_cards('test_game')
|
aspera1631/hs_logreader
|
logreader.py
|
Python
|
mit
| 4,183 | 0.005738 |
#!/usr/bin/env python
#-----------------------------------------------------------------------------
# Copyright (c) 2013, The BiPy Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
__credits__ = ["Evan Bolyen"]
from pyqi.core.exception import IncompetentDeveloperError
def load_file_lines(option_value):
"""Return a list of strings, one per line in the file.
Each line will have leading and trailing whitespace stripped from it.
"""
if not hasattr(option_value, 'read'):
raise IncompetentDeveloperError("Input type must be a file object.")
return [line.strip() for line in option_value]
def load_file_contents(option_value):
"""Return the contents of a file as a single string."""
if not hasattr(option_value, 'read'):
raise IncompetentDeveloperError("Input type must be a file object.")
return option_value.read()
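# Illustrative usage only (not part of the pyqi API surface): exercises both
# handlers on an in-memory file-like object.
if __name__ == '__main__':
    from io import StringIO
    demo = StringIO(u"alpha\n  beta  \ngamma\n")
    print(load_file_lines(demo))      # ['alpha', 'beta', 'gamma']
    demo.seek(0)
    print(load_file_contents(demo))   # raw contents, unchanged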
|
biocore/pyqi
|
pyqi/core/interfaces/html/input_handler.py
|
Python
|
bsd-3-clause
| 1,074 | 0.005587 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020-2022 F4PGA Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
""" Tool to cleanup site pins JSON dumps.
This tool has two behaviors. This first is to rename site names from global
coordinates to site local coordinates. The second is remove the tile prefix
from node names.
For example CLBLM_L_X8Y149 contains two sites named SLICE_X10Y149 and
SLICE_X11Y149. SLICE_X10Y149 becomes X0Y0 and SLICE_X11Y149 becomes X1Y0.
"""
from __future__ import print_function
import json
import json5
import re
import sys
import copy
# All site names appear to follow the pattern <type>_X<abs coord>Y<abs coord>.
# Generally speaking, only the tile-relative coordinates are required to
# assemble arch defs, so we re-origin the coordinates to be relative to the tile
# (e.g. start at X0Y0) and discard the prefix from the name.
SITE_COORDINATE_PATTERN = re.compile('^(.+)_X([0-9]+)Y([0-9]+)$')
def find_origin_coordinate(sites):
""" Find the coordinates of each site within the tile, and then subtract the
smallest coordinate to re-origin them all to be relative to the tile.
"""
if len(sites) == 0:
return 0, 0
def inner_():
for site in sites:
coordinate = SITE_COORDINATE_PATTERN.match(site['name'])
assert coordinate is not None, site
x_coord = int(coordinate.group(2))
y_coord = int(coordinate.group(3))
yield x_coord, y_coord
x_coords, y_coords = zip(*inner_())
min_x_coord = min(x_coords)
min_y_coord = min(y_coords)
return min_x_coord, min_y_coord
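# Illustrative helper only (not used by the fuzzer): shows what
# find_origin_coordinate computes for the example in the module docstring.
def _example_origin_for_docstring_sites():
    sites = [{'name': 'SLICE_X10Y149'}, {'name': 'SLICE_X11Y149'}]
    # The smallest X/Y over the tile's sites is the origin; main() subtracts
    # it, so these sites are renamed X0Y0 and X1Y0.
    return find_origin_coordinate(sites) == (10, 149)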
def create_site_pin_to_wire_maps(tile_name, nodes):
""" Create a map from site_pin names to nodes.
Create a mapping from site pins to tile local wires. For each node that is
attached to a site pin, there should only be 1 tile local wire.
"""
# Remove tile prefix (e.g. CLBLM_L_X8Y149/) from node names.
# Routing resources will not have the prefix.
tile_prefix = tile_name + '/'
site_pin_to_wires = {}
for node in nodes:
if len(node['site_pins']) == 0:
continue
wire_names = [
wire for wire in node['wires'] if wire.startswith(tile_prefix)
]
assert len(wire_names) == 1, (node, tile_prefix)
for site_pin in node["site_pins"]:
assert site_pin not in site_pin_to_wires
site_pin_to_wires[site_pin] = wire_names[0]
return site_pin_to_wires
def main():
site_pins = json5.load(sys.stdin)
output_site_pins = {}
output_site_pins["tile_type"] = site_pins["tile_type"]
output_site_pins["sites"] = copy.deepcopy(site_pins["sites"])
site_pin_to_wires = create_site_pin_to_wire_maps(site_pins['tile_name'],
site_pins['nodes'])
min_x_coord, min_y_coord = find_origin_coordinate(site_pins['sites'])
for site in output_site_pins['sites']:
orig_site_name = site['name']
coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name)
x_coord = int(coordinate.group(2))
y_coord = int(coordinate.group(3))
site['name'] = 'X{}Y{}'.format(x_coord - min_x_coord,
y_coord - min_y_coord)
site['prefix'] = coordinate.group(1)
site['x_coord'] = x_coord - min_x_coord
site['y_coord'] = y_coord - min_y_coord
for site_pin in site['site_pins']:
assert site_pin['name'].startswith(orig_site_name + '/')
if site_pin['name'] in site_pin_to_wires:
site_pin['wire'] = site_pin_to_wires[site_pin['name']]
else:
print(
('***WARNING***: Site pin {} for tile type {} is not connected, '
                 'make sure all instances of this tile type have this site_pin '
'disconnected.').format(site_pin['name'],
site_pins['tile_type']),
file=sys.stderr)
site_pin['name'] = site_pin['name'][len(orig_site_name) + 1:]
    # Emit the cleaned-up tile description on stdout.
    print(json.dumps(output_site_pins, indent=2, sort_keys=True))
if __name__ == "__main__":
main()
|
SymbiFlow/prjuray
|
fuzzers/004-tileinfo/cleanup_site_pins.py
|
Python
|
isc
| 4,765 | 0.000839 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateGlossary
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-translate
# [START translate_generated_translate_v3_TranslationService_CreateGlossary_async]
from google.cloud import translate_v3
async def sample_create_glossary():
# Create a client
client = translate_v3.TranslationServiceAsyncClient()
# Initialize request argument(s)
glossary = translate_v3.Glossary()
glossary.name = "name_value"
request = translate_v3.CreateGlossaryRequest(
parent="parent_value",
glossary=glossary,
)
# Make the request
operation = client.create_glossary(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END translate_generated_translate_v3_TranslationService_CreateGlossary_async]
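# Illustrative only: the snippet above defines a coroutine, so a caller would
# typically drive it with asyncio, e.g.
#
#   import asyncio
#   asyncio.run(sample_create_glossary())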
|
googleapis/python-translate
|
samples/generated_samples/translate_generated_translate_v3_translation_service_create_glossary_async.py
|
Python
|
apache-2.0
| 1,703 | 0.001762 |
#!/usr/bin/env python
from fs.errors import ResourceNotFoundError
from fs.opener import opener
from fs.commands.runner import Command
import sys
class FSrm(Command):
usage = """fsrm [OPTION]... [PATH]
Remove a file or directory at PATH"""
def get_optparse(self):
optparse = super(FSrm, self).get_optparse()
optparse.add_option('-f', '--force', dest='force', action='store_true', default=False,
help='ignore non-existent files, never prompt')
optparse.add_option('-i', '--interactive', dest='interactive', action='store_true', default=False,
help='prompt before removing')
optparse.add_option('-r', '--recursive', dest='recursive', action='store_true', default=False,
help='remove directories and their contents recursively')
return optparse
def do_run(self, options, args):
interactive = options.interactive
verbose = options.verbose
for fs, path, is_dir in self.get_resources(args):
if interactive:
if is_dir:
msg = "remove directory '%s'?" % path
else:
msg = "remove file '%s'?" % path
if not self.ask(msg) in 'yY':
continue
try:
if is_dir:
fs.removedir(path, force=options.recursive)
else:
fs.remove(path)
except ResourceNotFoundError:
if not options.force:
raise
else:
if verbose:
self.output("removed '%s'\n" % path)
def run():
return FSrm().run()
if __name__ == "__main__":
sys.exit(run())
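# Illustrative invocations only (PATH can be a plain path or an FS opener URL):
#
#   fsrm -i notes.txt       # prompt before removing a single file
#   fsrm -r -f ./build      # remove a directory tree, ignoring missing entries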
|
PyFilesystem/pyfilesystem
|
fs/commands/fsrm.py
|
Python
|
bsd-3-clause
| 1,775 | 0.003944 |
"""
Test cases to cover Accounts-related behaviors of the User API application
"""
import datetime
import hashlib
import json
from copy import deepcopy
from unittest import mock
import ddt
import pytz
from django.conf import settings
from django.test.testcases import TransactionTestCase
from django.test.utils import override_settings
from django.urls import reverse
from edx_name_affirmation.api import create_verified_name
from edx_name_affirmation.statuses import VerifiedNameStatus
from rest_framework import status
from rest_framework.test import APIClient, APITestCase
from common.djangoapps.student.models import PendingEmailChange, UserProfile
from common.djangoapps.student.tests.factories import TEST_PASSWORD, RegistrationFactory, UserFactory
from openedx.core.djangoapps.oauth_dispatch.jwt import create_jwt_for_user
from openedx.core.djangoapps.user_api.accounts import ACCOUNT_VISIBILITY_PREF_KEY
from openedx.core.djangoapps.user_api.models import UserPreference
from openedx.core.djangoapps.user_api.preferences.api import set_user_preference
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
from .. import ALL_USERS_VISIBILITY, CUSTOM_VISIBILITY, PRIVATE_VISIBILITY
TEST_PROFILE_IMAGE_UPLOADED_AT = datetime.datetime(2002, 1, 9, 15, 43, 1, tzinfo=pytz.UTC)
# this is used in one test to check the behavior of profile image url
# generation with a relative url in the config.
TEST_PROFILE_IMAGE_BACKEND = deepcopy(settings.PROFILE_IMAGE_BACKEND)
TEST_PROFILE_IMAGE_BACKEND['options']['base_url'] = '/profile-images/'
TEST_BIO_VALUE = "Tired mother of twins"
TEST_LANGUAGE_PROFICIENCY_CODE = "hi"
class UserAPITestCase(APITestCase):
"""
The base class for all tests of the User API
"""
VERIFIED_NAME = "Verified User"
def setUp(self):
super().setUp()
self.anonymous_client = APIClient()
self.different_user = UserFactory.create(password=TEST_PASSWORD)
self.different_client = APIClient()
self.staff_user = UserFactory(is_staff=True, password=TEST_PASSWORD)
self.staff_client = APIClient()
self.user = UserFactory.create(password=TEST_PASSWORD) # will be assigned to self.client by default
def login_client(self, api_client, user):
"""Helper method for getting the client and user and logging in. Returns client. """
client = getattr(self, api_client)
user = getattr(self, user)
client.login(username=user.username, password=TEST_PASSWORD)
return client
def send_post(self, client, json_data, content_type='application/json', expected_status=201):
"""
Helper method for sending a post to the server, defaulting to application/json content_type.
Verifies the expected status and returns the response.
"""
# pylint: disable=no-member
response = client.post(self.url, data=json.dumps(json_data), content_type=content_type)
assert expected_status == response.status_code
return response
def send_patch(self, client, json_data, content_type="application/merge-patch+json", expected_status=200):
"""
Helper method for sending a patch to the server, defaulting to application/merge-patch+json content_type.
Verifies the expected status and returns the response.
"""
# pylint: disable=no-member
response = client.patch(self.url, data=json.dumps(json_data), content_type=content_type)
assert expected_status == response.status_code
return response
def post_search_api(self, client, json_data, content_type='application/json', expected_status=200):
"""
        Helper method for sending a post to the search API endpoint, defaulting to application/json content_type.
Verifies the expected status and returns the response.
"""
# pylint: disable=no-member
response = client.post(self.search_api_url, data=json.dumps(json_data), content_type=content_type)
assert expected_status == response.status_code
return response
def send_get(self, client, query_parameters=None, expected_status=200):
"""
Helper method for sending a GET to the server. Verifies the expected status and returns the response.
"""
url = self.url + '?' + query_parameters if query_parameters else self.url # pylint: disable=no-member
response = client.get(url)
assert expected_status == response.status_code
return response
# pylint: disable=no-member
def send_put(self, client, json_data, content_type="application/json", expected_status=204):
"""
Helper method for sending a PUT to the server. Verifies the expected status and returns the response.
"""
response = client.put(self.url, data=json.dumps(json_data), content_type=content_type)
assert expected_status == response.status_code
return response
# pylint: disable=no-member
def send_delete(self, client, expected_status=204):
"""
Helper method for sending a DELETE to the server. Verifies the expected status and returns the response.
"""
response = client.delete(self.url)
assert expected_status == response.status_code
return response
def create_mock_profile(self, user):
"""
Helper method that creates a mock profile for the specified user
:return:
"""
legacy_profile = UserProfile.objects.get(id=user.id)
legacy_profile.country = "US"
legacy_profile.state = "MA"
legacy_profile.level_of_education = "m"
legacy_profile.year_of_birth = 2000
legacy_profile.goals = "world peace"
legacy_profile.mailing_address = "Park Ave"
legacy_profile.gender = "f"
legacy_profile.bio = TEST_BIO_VALUE
legacy_profile.profile_image_uploaded_at = TEST_PROFILE_IMAGE_UPLOADED_AT
legacy_profile.language_proficiencies.create(code=TEST_LANGUAGE_PROFICIENCY_CODE)
legacy_profile.phone_number = "+18005555555"
legacy_profile.save()
def create_mock_verified_name(self, user):
"""
Helper method to create an approved VerifiedName entry in name affirmation.
"""
legacy_profile = UserProfile.objects.get(id=user.id)
create_verified_name(user, self.VERIFIED_NAME, legacy_profile.name, status=VerifiedNameStatus.APPROVED)
def create_user_registration(self, user):
"""
Helper method that creates a registration object for the specified user
"""
RegistrationFactory(user=user)
def _verify_profile_image_data(self, data, has_profile_image):
"""
Verify the profile image data in a GET response for self.user
corresponds to whether the user has or hasn't set a profile
image.
"""
template = '{root}/{filename}_{{size}}.{extension}'
if has_profile_image:
url_root = 'http://example-storage.com/profile-images'
filename = hashlib.md5(('secret' + self.user.username).encode('utf-8')).hexdigest()
file_extension = 'jpg'
template += '?v={}'.format(TEST_PROFILE_IMAGE_UPLOADED_AT.strftime("%s"))
else:
url_root = 'http://testserver/static'
filename = 'default'
file_extension = 'png'
template = template.format(root=url_root, filename=filename, extension=file_extension)
assert data['profile_image'] == {'has_image': has_profile_image,
'image_url_full': template.format(size=50),
'image_url_small': template.format(size=10)}
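        # For reference, the expected URLs built above look like (illustrative):
        #   http://example-storage.com/profile-images/<md5('secret' + username)>_50.jpg?v=<upload timestamp>
        # for an uploaded image, and fall back to
        #   http://testserver/static/default_50.png
        # when no image has been uploaded.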
@ddt.ddt
@skip_unless_lms
class TestOwnUsernameAPI(CacheIsolationTestCase, UserAPITestCase):
"""
Unit tests for the Accounts API.
"""
ENABLED_CACHES = ['default']
def setUp(self):
super().setUp()
self.url = reverse("own_username_api")
def _verify_get_own_username(self, queries, expected_status=200):
"""
Internal helper to perform the actual assertion
"""
with self.assertNumQueries(queries):
response = self.send_get(self.client, expected_status=expected_status)
if expected_status == 200:
data = response.data
assert 1 == len(data)
assert self.user.username == data['username']
def test_get_username(self):
"""
Test that a client (logged in) can get her own username.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self._verify_get_own_username(17)
def test_get_username_inactive(self):
"""
Test that a logged-in client can get their
username, even if inactive.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.user.is_active = False
self.user.save()
self._verify_get_own_username(17)
def test_get_username_not_logged_in(self):
"""
Test that a client (not logged in) gets a 401
when trying to retrieve their username.
"""
# verify that the endpoint is inaccessible when not logged in
self._verify_get_own_username(13, expected_status=401)
@ddt.ddt
@skip_unless_lms
@mock.patch('openedx.core.djangoapps.user_api.accounts.image_helpers._PROFILE_IMAGE_SIZES', [50, 10])
@mock.patch.dict(
'django.conf.settings.PROFILE_IMAGE_SIZES_MAP',
{'full': 50, 'small': 10},
clear=True
)
class TestAccountsAPI(CacheIsolationTestCase, UserAPITestCase):
"""
Unit tests for the Accounts API.
"""
ENABLED_CACHES = ['default']
TOTAL_QUERY_COUNT = 27
FULL_RESPONSE_FIELD_COUNT = 30
def setUp(self):
super().setUp()
self.url = reverse("accounts_api", kwargs={'username': self.user.username})
self.search_api_url = reverse("accounts_search_emails_api")
def _set_user_age_to_10_years(self, user):
"""
Sets the given user's age to 10.
Returns the calculated year of birth.
"""
legacy_profile = UserProfile.objects.get(id=user.id)
current_year = datetime.datetime.now().year
year_of_birth = current_year - 10
legacy_profile.year_of_birth = year_of_birth
legacy_profile.save()
return year_of_birth
def _verify_full_shareable_account_response(self, response, account_privacy=None, badges_enabled=False):
"""
Verify that the shareable fields from the account are returned
"""
data = response.data
assert 12 == len(data)
# public fields (3)
assert account_privacy == data['account_privacy']
self._verify_profile_image_data(data, True)
assert self.user.username == data['username']
# additional shareable fields (8)
assert TEST_BIO_VALUE == data['bio']
assert 'US' == data['country']
assert data['date_joined'] is not None
assert [{'code': TEST_LANGUAGE_PROFICIENCY_CODE}] == data['language_proficiencies']
assert 'm' == data['level_of_education']
assert data['social_links'] is not None
assert data['time_zone'] is None
assert badges_enabled == data['accomplishments_shared']
def _verify_private_account_response(self, response, requires_parental_consent=False):
"""
Verify that only the public fields are returned if a user does not want to share account fields
"""
data = response.data
assert 3 == len(data)
assert PRIVATE_VISIBILITY == data['account_privacy']
self._verify_profile_image_data(data, not requires_parental_consent)
assert self.user.username == data['username']
def _verify_full_account_response(self, response, requires_parental_consent=False, year_of_birth=2000):
"""
Verify that all account fields are returned (even those that are not shareable).
"""
data = response.data
assert self.FULL_RESPONSE_FIELD_COUNT == len(data)
# public fields (3)
expected_account_privacy = (
PRIVATE_VISIBILITY if requires_parental_consent else
UserPreference.get_value(self.user, 'account_privacy')
)
assert expected_account_privacy == data['account_privacy']
self._verify_profile_image_data(data, not requires_parental_consent)
assert self.user.username == data['username']
# additional shareable fields (8)
assert TEST_BIO_VALUE == data['bio']
assert 'US' == data['country']
assert data['date_joined'] is not None
assert data['last_login'] is not None
assert [{'code': TEST_LANGUAGE_PROFICIENCY_CODE}] == data['language_proficiencies']
assert 'm' == data['level_of_education']
assert data['social_links'] is not None
assert UserPreference.get_value(self.user, 'time_zone') == data['time_zone']
assert data['accomplishments_shared'] is not None
assert ((self.user.first_name + ' ') + self.user.last_name) == data['name']
# additional admin fields (13)
assert self.user.email == data['email']
assert self.user.id == data['id']
assert self.VERIFIED_NAME == data['verified_name']
assert data['extended_profile'] is not None
assert 'MA' == data['state']
assert 'f' == data['gender']
assert 'world peace' == data['goals']
assert data['is_active']
assert 'Park Ave' == data['mailing_address']
assert requires_parental_consent == data['requires_parental_consent']
assert data['secondary_email'] is None
assert data['secondary_email_enabled'] is None
assert year_of_birth == data['year_of_birth']
def test_anonymous_access(self):
"""
Test that an anonymous client (not logged in) cannot call GET or PATCH.
"""
self.send_get(self.anonymous_client, expected_status=401)
self.send_patch(self.anonymous_client, {}, expected_status=401)
def test_unsupported_methods(self):
"""
Test that DELETE, POST, and PUT are not supported.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
assert 405 == self.client.put(self.url).status_code
assert 405 == self.client.post(self.url).status_code
assert 405 == self.client.delete(self.url).status_code
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
)
@ddt.unpack
def test_get_account_unknown_user(self, api_client, user):
"""
Test that requesting a user who does not exist returns a 404.
"""
client = self.login_client(api_client, user)
response = client.get(reverse("accounts_api", kwargs={'username': "does_not_exist"}))
assert 404 == response.status_code
@ddt.data(
("client", "user"),
)
@ddt.unpack
    def test_registration_activation_key(self, api_client, user):
"""
Test that registration activation key has a value.
UserFactory does not auto-generate registration object for the test users.
It is created only for users that signup via email/API. Therefore, activation key has to be tested manually.
"""
self.create_user_registration(self.user)
client = self.login_client(api_client, user)
response = self.send_get(client)
assert response.data["activation_key"] is not None
def test_successful_get_account_by_email(self):
"""
Test that request using email by a staff user successfully retrieves Account Info.
"""
api_client = "staff_client"
user = "staff_user"
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
self.create_mock_verified_name(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = self.send_get(client, query_parameters=f'email={self.user.email}')
self._verify_full_account_response(response)
def test_unsuccessful_get_account_by_email(self):
"""
Test that request using email by a normal user fails to retrieve Account Info.
"""
api_client = "client"
user = "user"
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = self.send_get(
client, query_parameters=f'email={self.user.email}', expected_status=status.HTTP_403_FORBIDDEN
)
assert response.data.get('detail') == 'You do not have permission to perform this action.'
def test_successful_get_account_by_user_id(self):
"""
Test that request using lms user id by a staff user successfully retrieves Account Info.
"""
api_client = "staff_client"
user = "staff_user"
url = reverse("accounts_detail_api")
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
self.create_mock_verified_name(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = client.get(url + f'?lms_user_id={self.user.id}')
assert response.status_code == status.HTTP_200_OK
response.data = response.data[0]
self._verify_full_account_response(response)
def test_unsuccessful_get_account_by_user_id(self):
"""
        Test that a request using lms user id by a normal user fails to retrieve Account Info.
"""
api_client = "client"
user = "user"
url = reverse("accounts_detail_api")
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = client.get(url + f'?lms_user_id={self.user.id}')
assert response.status_code == status.HTTP_403_FORBIDDEN
assert response.data.get('detail') == 'You do not have permission to perform this action.'
@ddt.data('abc', '2f', '1.0', "2/8")
def test_get_account_by_user_id_non_integer(self, non_integer_id):
"""
Test that request using a non-integer lms user id by a staff user fails to retrieve Account Info.
"""
api_client = "staff_client"
user = "staff_user"
url = reverse("accounts_detail_api")
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
self.create_mock_verified_name(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = client.get(url + f'?lms_user_id={non_integer_id}')
assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_search_emails(self):
client = self.login_client('staff_client', 'staff_user')
json_data = {'emails': [self.user.email]}
response = self.post_search_api(client, json_data=json_data)
assert response.data == [{'email': self.user.email, 'id': self.user.id, 'username': self.user.username}]
def test_search_emails_with_non_staff_user(self):
client = self.login_client('client', 'user')
json_data = {'emails': [self.user.email]}
response = self.post_search_api(client, json_data=json_data, expected_status=404)
assert response.data == {
'developer_message': "not_found",
'user_message': "Not Found"
}
def test_search_emails_with_non_existing_email(self):
client = self.login_client('staff_client', 'staff_user')
json_data = {"emails": ['non_existant_email@example.com']}
response = self.post_search_api(client, json_data=json_data)
assert response.data == []
def test_search_emails_with_invalid_param(self):
client = self.login_client('staff_client', 'staff_user')
json_data = {'invalid_key': [self.user.email]}
response = self.post_search_api(client, json_data=json_data, expected_status=400)
assert response.data == {
'developer_message': "'emails' field is required",
'user_message': "'emails' field is required"
}
# Note: using getattr so that the patching works even if there is no configuration.
# This is needed when testing CMS as the patching is still executed even though the
# suite is skipped.
@mock.patch.dict(getattr(settings, "ACCOUNT_VISIBILITY_CONFIGURATION", {}), {"default_visibility": "all_users"})
def test_get_account_different_user_visible(self):
"""
Test that a client (logged in) can only get the shareable fields for a different user.
This is the case when default_visibility is set to "all_users".
"""
self.different_client.login(username=self.different_user.username, password=TEST_PASSWORD)
self.create_mock_profile(self.user)
with self.assertNumQueries(self.TOTAL_QUERY_COUNT):
response = self.send_get(self.different_client)
self._verify_full_shareable_account_response(response, account_privacy=ALL_USERS_VISIBILITY)
# Note: using getattr so that the patching works even if there is no configuration.
# This is needed when testing CMS as the patching is still executed even though the
# suite is skipped.
@mock.patch.dict(getattr(settings, "ACCOUNT_VISIBILITY_CONFIGURATION", {}), {"default_visibility": "private"})
def test_get_account_different_user_private(self):
"""
Test that a client (logged in) can only get the shareable fields for a different user.
This is the case when default_visibility is set to "private".
"""
self.different_client.login(username=self.different_user.username, password=TEST_PASSWORD)
self.create_mock_profile(self.user)
with self.assertNumQueries(self.TOTAL_QUERY_COUNT):
response = self.send_get(self.different_client)
self._verify_private_account_response(response)
@mock.patch.dict(settings.FEATURES, {'ENABLE_OPENBADGES': True})
@ddt.data(
("client", "user", PRIVATE_VISIBILITY),
("different_client", "different_user", PRIVATE_VISIBILITY),
("staff_client", "staff_user", PRIVATE_VISIBILITY),
("client", "user", ALL_USERS_VISIBILITY),
("different_client", "different_user", ALL_USERS_VISIBILITY),
("staff_client", "staff_user", ALL_USERS_VISIBILITY),
)
@ddt.unpack
def test_get_account_private_visibility(self, api_client, requesting_username, preference_visibility):
"""
Test the return from GET based on user visibility setting.
"""
def verify_fields_visible_to_all_users(response):
"""
Confirms that private fields are private, and public/shareable fields are public/shareable
"""
if preference_visibility == PRIVATE_VISIBILITY:
self._verify_private_account_response(response)
else:
self._verify_full_shareable_account_response(response, ALL_USERS_VISIBILITY, badges_enabled=True)
client = self.login_client(api_client, requesting_username)
# Update user account visibility setting.
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, preference_visibility)
self.create_mock_profile(self.user)
self.create_mock_verified_name(self.user)
response = self.send_get(client)
if requesting_username == "different_user":
verify_fields_visible_to_all_users(response)
else:
self._verify_full_account_response(response)
# Verify how the view parameter changes the fields that are returned.
response = self.send_get(client, query_parameters='view=shared')
verify_fields_visible_to_all_users(response)
response = self.send_get(client, query_parameters=f'view=shared&username={self.user.username}')
verify_fields_visible_to_all_users(response)
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
("different_client", "different_user"),
)
@ddt.unpack
def test_custom_visibility_over_age(self, api_client, requesting_username):
self.create_mock_profile(self.user)
self.create_mock_verified_name(self.user)
# set user's custom visibility preferences
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, CUSTOM_VISIBILITY)
shared_fields = ("bio", "language_proficiencies", "name")
for field_name in shared_fields:
set_user_preference(self.user, f"visibility.{field_name}", ALL_USERS_VISIBILITY)
# make API request
client = self.login_client(api_client, requesting_username)
response = self.send_get(client)
# verify response
if requesting_username == "different_user":
data = response.data
assert 6 == len(data)
# public fields
assert self.user.username == data['username']
assert UserPreference.get_value(self.user, 'account_privacy') == data['account_privacy']
self._verify_profile_image_data(data, has_profile_image=True)
# custom shared fields
assert TEST_BIO_VALUE == data['bio']
assert [{'code': TEST_LANGUAGE_PROFICIENCY_CODE}] == data['language_proficiencies']
assert ((self.user.first_name + ' ') + self.user.last_name) == data['name']
else:
self._verify_full_account_response(response)
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
("different_client", "different_user"),
)
@ddt.unpack
def test_custom_visibility_under_age(self, api_client, requesting_username):
self.create_mock_profile(self.user)
self.create_mock_verified_name(self.user)
year_of_birth = self._set_user_age_to_10_years(self.user)
# set user's custom visibility preferences
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, CUSTOM_VISIBILITY)
shared_fields = ("bio", "language_proficiencies")
for field_name in shared_fields:
set_user_preference(self.user, f"visibility.{field_name}", ALL_USERS_VISIBILITY)
# make API request
client = self.login_client(api_client, requesting_username)
response = self.send_get(client)
# verify response
if requesting_username == "different_user":
self._verify_private_account_response(response, requires_parental_consent=True)
else:
self._verify_full_account_response(
response,
requires_parental_consent=True,
year_of_birth=year_of_birth,
)
def test_get_account_default(self):
"""
Test that a client (logged in) can get her own account information (using default legacy profile information,
as created by the test UserFactory).
"""
def verify_get_own_information(queries):
"""
Internal helper to perform the actual assertions
"""
with self.assertNumQueries(queries):
response = self.send_get(self.client)
data = response.data
assert self.FULL_RESPONSE_FIELD_COUNT == len(data)
assert self.user.username == data['username']
assert ((self.user.first_name + ' ') + self.user.last_name) == data['name']
for empty_field in ("year_of_birth", "level_of_education", "mailing_address", "bio"):
assert data[empty_field] is None
assert data['country'] is None
assert data['state'] is None
assert 'm' == data['gender']
assert 'Learn a lot' == data['goals']
assert self.user.email == data['email']
assert self.user.id == data['id']
assert data['date_joined'] is not None
assert data['last_login'] is not None
assert self.user.is_active == data['is_active']
self._verify_profile_image_data(data, False)
assert data['requires_parental_consent']
assert [] == data['language_proficiencies']
assert PRIVATE_VISIBILITY == data['account_privacy']
assert data['time_zone'] is None
# Badges aren't on by default, so should not be present.
assert data['accomplishments_shared'] is False
self.client.login(username=self.user.username, password=TEST_PASSWORD)
verify_get_own_information(25)
# Now make sure that the user can get the same information, even if not active
self.user.is_active = False
self.user.save()
verify_get_own_information(17)
def test_get_account_empty_string(self):
"""
Test the conversion of empty strings to None for certain fields.
"""
legacy_profile = UserProfile.objects.get(id=self.user.id)
legacy_profile.country = ""
legacy_profile.state = ""
legacy_profile.level_of_education = ""
legacy_profile.gender = ""
legacy_profile.bio = ""
legacy_profile.save()
self.client.login(username=self.user.username, password=TEST_PASSWORD)
with self.assertNumQueries(25):
response = self.send_get(self.client)
for empty_field in ("level_of_education", "gender", "country", "state", "bio",):
assert response.data[empty_field] is None
@ddt.data(
("different_client", "different_user"),
("staff_client", "staff_user"),
)
@ddt.unpack
def test_patch_account_disallowed_user(self, api_client, user):
"""
Test that a client cannot call PATCH on a different client's user account (even with
is_staff access).
"""
client = self.login_client(api_client, user)
self.send_patch(client, {}, expected_status=403)
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
)
@ddt.unpack
def test_patch_account_unknown_user(self, api_client, user):
"""
Test that trying to update a user who does not exist returns a 403.
"""
client = self.login_client(api_client, user)
response = client.patch(
reverse("accounts_api", kwargs={'username': "does_not_exist"}),
data=json.dumps({}), content_type="application/merge-patch+json"
)
assert 403 == response.status_code
@ddt.data(
("gender", "f", "not a gender", '"not a gender" is not a valid choice.'),
("level_of_education", "none", "ȻħȺɍłɇs", '"ȻħȺɍłɇs" is not a valid choice.'),
("country", "GB", "XY", '"XY" is not a valid choice.'),
("state", "MA", "PY", '"PY" is not a valid choice.'),
("year_of_birth", 2009, "not_an_int", "A valid integer is required."),
("name", "bob", "z" * 256, "Ensure this field has no more than 255 characters."),
("name", "ȻħȺɍłɇs", " ", "The name field must be at least 1 character long."),
("goals", "Smell the roses"),
("mailing_address", "Sesame Street"),
        # Note that we store the raw data, so it is up to the client to escape the HTML.
(
"bio", "<html>Lacrosse-playing superhero 壓是進界推日不復女</html>",
"z" * 301, "The about me field must be at most 300 characters long."
),
("account_privacy", ALL_USERS_VISIBILITY),
("account_privacy", PRIVATE_VISIBILITY),
# Note that email is tested below, as it is not immediately updated.
# Note that language_proficiencies is tested below as there are multiple error and success conditions.
)
@ddt.unpack
def test_patch_account(self, field, value, fails_validation_value=None, developer_validation_message=None):
"""
Test the behavior of patch, when using the correct content_type.
"""
client = self.login_client("client", "user")
if field == 'account_privacy':
# Ensure the user has birth year set, and is over 13, so
# account_privacy behaves normally
legacy_profile = UserProfile.objects.get(id=self.user.id)
legacy_profile.year_of_birth = 2000
legacy_profile.save()
response = self.send_patch(client, {field: value})
assert value == response.data[field]
if fails_validation_value:
error_response = self.send_patch(client, {field: fails_validation_value}, expected_status=400)
expected_user_message = 'This value is invalid.'
if field == 'bio':
expected_user_message = "The about me field must be at most 300 characters long."
assert expected_user_message == error_response.data['field_errors'][field]['user_message']
assert "Value '{value}' is not valid for field '{field}': {messages}".format(
value=fails_validation_value,
field=field,
messages=[developer_validation_message]
) == error_response.data['field_errors'][field]['developer_message']
elif field != "account_privacy":
# If there are no values that would fail validation, then empty string should be supported;
# except for account_privacy, which cannot be an empty string.
response = self.send_patch(client, {field: ""})
assert '' == response.data[field]
def test_patch_inactive_user(self):
""" Verify that a user can patch her own account, even if inactive. """
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.user.is_active = False
self.user.save()
response = self.send_patch(self.client, {"goals": "to not activate account"})
assert 'to not activate account' == response.data['goals']
@ddt.unpack
def test_patch_account_noneditable(self):
"""
Tests the behavior of patch when a read-only field is attempted to be edited.
"""
client = self.login_client("client", "user")
def verify_error_response(field_name, data):
"""
Internal helper to check the error messages returned
"""
assert 'This field is not editable via this API' == data['field_errors'][field_name]['developer_message']
assert "The '{}' field cannot be edited.".format(
field_name
) == data['field_errors'][field_name]['user_message']
for field_name in ["username", "date_joined", "is_active", "profile_image", "requires_parental_consent"]:
response = self.send_patch(client, {field_name: "will_error", "gender": "o"}, expected_status=400)
verify_error_response(field_name, response.data)
# Make sure that gender did not change.
response = self.send_get(client)
assert 'm' == response.data['gender']
# Test error message with multiple read-only items
response = self.send_patch(client, {"username": "will_error", "date_joined": "xx"}, expected_status=400)
assert 2 == len(response.data['field_errors'])
verify_error_response("username", response.data)
verify_error_response("date_joined", response.data)
def test_patch_bad_content_type(self):
"""
Test the behavior of patch when an incorrect content_type is specified.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.send_patch(self.client, {}, content_type="application/json", expected_status=415)
self.send_patch(self.client, {}, content_type="application/xml", expected_status=415)
def test_patch_account_empty_string(self):
"""
Tests the behavior of patch when attempting to set fields with a select list of options to the empty string.
        Also verifies the behavior when setting to None.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
for field_name in ["gender", "level_of_education", "country", "state"]:
response = self.send_patch(self.client, {field_name: ""})
# Although throwing a 400 might be reasonable, the default DRF behavior with ModelSerializer
# is to convert to None, which also seems acceptable (and is difficult to override).
assert response.data[field_name] is None
            # Verify that the behavior is the same for sending None.
            response = self.send_patch(self.client, {field_name: None})
assert response.data[field_name] is None
def test_patch_name_metadata(self):
"""
Test the metadata stored when changing the name field.
"""
def get_name_change_info(expected_entries):
"""
Internal method to encapsulate the retrieval of old names used
"""
legacy_profile = UserProfile.objects.get(id=self.user.id)
name_change_info = legacy_profile.get_meta()["old_names"]
assert expected_entries == len(name_change_info)
return name_change_info
def verify_change_info(change_info, old_name, requester, new_name):
"""
Internal method to validate name changes
"""
assert 3 == len(change_info)
assert old_name == change_info[0]
assert f'Name change requested through account API by {requester}' == change_info[1]
assert change_info[2] is not None
# Verify the new name was also stored.
get_response = self.send_get(self.client)
assert new_name == get_response.data['name']
self.client.login(username=self.user.username, password=TEST_PASSWORD)
legacy_profile = UserProfile.objects.get(id=self.user.id)
assert {} == legacy_profile.get_meta()
old_name = legacy_profile.name
# First change the name as the user and verify meta information.
self.send_patch(self.client, {"name": "Mickey Mouse"})
name_change_info = get_name_change_info(1)
verify_change_info(name_change_info[0], old_name, self.user.username, "Mickey Mouse")
# Now change the name again and verify meta information.
self.send_patch(self.client, {"name": "Donald Duck"})
name_change_info = get_name_change_info(2)
        verify_change_info(name_change_info[0], old_name, self.user.username, "Donald Duck")
verify_change_info(name_change_info[1], "Mickey Mouse", self.user.username, "Donald Duck")
@mock.patch.dict(
'django.conf.settings.PROFILE_IMAGE_SIZES_MAP',
{'full': 50, 'medium': 30, 'small': 10},
clear=True
)
def test_patch_email(self):
"""
Test that the user can request an email change through the accounts API.
Full testing of the helper method used (do_email_change_request) exists in the package with the code.
Here just do minimal smoke testing.
"""
client = self.login_client("client", "user")
old_email = self.user.email
new_email = "newemail@example.com"
response = self.send_patch(client, {"email": new_email, "goals": "change my email"})
# Since request is multi-step, the email won't change on GET immediately (though goals will update).
assert old_email == response.data['email']
assert 'change my email' == response.data['goals']
        # Now call the method that will be invoked when the user clicks the activation key in the received email.
# First we must get the activation key that was sent.
pending_change = PendingEmailChange.objects.filter(user=self.user)
assert 1 == len(pending_change)
activation_key = pending_change[0].activation_key
confirm_change_url = reverse(
"confirm_email_change", kwargs={'key': activation_key}
)
response = self.client.post(confirm_change_url)
assert 200 == response.status_code
get_response = self.send_get(client)
assert new_email == get_response.data['email']
@ddt.data(
("not_an_email",),
("",),
(None,),
)
@ddt.unpack
def test_patch_invalid_email(self, bad_email):
"""
Test a few error cases for email validation (full test coverage lives with do_email_change_request).
"""
client = self.login_client("client", "user")
# Try changing to an invalid email to make sure error messages are appropriately returned.
error_response = self.send_patch(client, {"email": bad_email}, expected_status=400)
field_errors = error_response.data["field_errors"]
assert "Error thrown from validate_new_email: 'Valid e-mail address required.'" == \
field_errors['email']['developer_message']
assert 'Valid e-mail address required.' == field_errors['email']['user_message']
@mock.patch('common.djangoapps.student.views.management.do_email_change_request')
def test_patch_duplicate_email(self, do_email_change_request):
"""
        Test that the same success response is sent to the user even if the given email is already in use.
"""
existing_email = "same@example.com"
UserFactory.create(email=existing_email)
client = self.login_client("client", "user")
        # Try changing to an existing email to make sure no error messages are returned.
response = self.send_patch(client, {"email": existing_email})
assert 200 == response.status_code
# Verify that no actual request made for email change
assert not do_email_change_request.called
def test_patch_language_proficiencies(self):
"""
Verify that patching the language_proficiencies field of the user
profile completely overwrites the previous value.
"""
client = self.login_client("client", "user")
# Patching language_proficiencies exercises the
        # `LanguageProficiencySerializer.get_identity` method, which
        # identifies language proficiencies based on their language code rather
# than django model id.
for proficiencies in ([{"code": "en"}, {"code": "fr"}, {"code": "es"}], [{"code": "fr"}], [{"code": "aa"}], []):
response = self.send_patch(client, {"language_proficiencies": proficiencies})
self.assertCountEqual(response.data["language_proficiencies"], proficiencies)
@ddt.data(
(
"not_a_list",
{'non_field_errors': ['Expected a list of items but got type "unicode".']}
),
(
["not_a_JSON_object"],
[{'non_field_errors': ['Invalid data. Expected a dictionary, but got unicode.']}]
),
(
[{}],
[{'code': ['This field is required.']}]
),
(
[{"code": "invalid_language_code"}],
[{'code': ['"invalid_language_code" is not a valid choice.']}]
),
(
[{"code": "kw"}, {"code": "el"}, {"code": "kw"}],
['The language_proficiencies field must consist of unique languages.']
),
)
@ddt.unpack
def test_patch_invalid_language_proficiencies(self, patch_value, expected_error_message):
"""
Verify we handle error cases when patching the language_proficiencies
field.
"""
expected_error_message = str(expected_error_message).replace('unicode', 'str')
client = self.login_client("client", "user")
response = self.send_patch(client, {"language_proficiencies": patch_value}, expected_status=400)
assert response.data['field_errors']['language_proficiencies']['developer_message'] == \
f"Value '{patch_value}' is not valid for field 'language_proficiencies': {expected_error_message}"
@mock.patch('openedx.core.djangoapps.user_api.accounts.serializers.AccountUserSerializer.save')
def test_patch_serializer_save_fails(self, serializer_save):
"""
Test that AccountUpdateErrors are passed through to the response.
"""
serializer_save.side_effect = [Exception("bummer"), None]
self.client.login(username=self.user.username, password=TEST_PASSWORD)
error_response = self.send_patch(self.client, {"goals": "save an account field"}, expected_status=400)
assert "Error thrown when saving account updates: 'bummer'" == error_response.data['developer_message']
assert error_response.data['user_message'] is None
@override_settings(PROFILE_IMAGE_BACKEND=TEST_PROFILE_IMAGE_BACKEND)
def test_convert_relative_profile_url(self):
"""
Test that when TEST_PROFILE_IMAGE_BACKEND['base_url'] begins
with a '/', the API generates the full URL to profile images based on
the URL of the request.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
response = self.send_get(self.client)
assert response.data['profile_image'] == \
{'has_image': False,
'image_url_full': 'http://testserver/static/default_50.png',
'image_url_small': 'http://testserver/static/default_10.png'}
@ddt.data(
("client", "user", True),
("different_client", "different_user", False),
("staff_client", "staff_user", True),
)
@ddt.unpack
def test_parental_consent(self, api_client, requesting_username, has_full_access):
"""
        Verifies that a public profile is never returned for users under thirteen.
"""
client = self.login_client(api_client, requesting_username)
year_of_birth = self._set_user_age_to_10_years(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, ALL_USERS_VISIBILITY)
# Verify that the default view is still private (except for clients with full access)
response = self.send_get(client)
if has_full_access:
data = response.data
assert self.FULL_RESPONSE_FIELD_COUNT == len(data)
assert self.user.username == data['username']
assert ((self.user.first_name + ' ') + self.user.last_name) == data['name']
assert self.user.email == data['email']
assert self.user.id == data['id']
assert year_of_birth == data['year_of_birth']
for empty_field in ("country", "level_of_education", "mailing_address", "bio", "state",):
assert data[empty_field] is None
assert 'm' == data['gender']
assert 'Learn a lot' == data['goals']
assert data['is_active']
assert data['date_joined'] is not None
assert data['last_login'] is not None
self._verify_profile_image_data(data, False)
assert data['requires_parental_consent']
assert PRIVATE_VISIBILITY == data['account_privacy']
else:
self._verify_private_account_response(response, requires_parental_consent=True)
# Verify that the shared view is still private
response = self.send_get(client, query_parameters='view=shared')
self._verify_private_account_response(response, requires_parental_consent=True)
@skip_unless_lms
class TestAccountAPITransactions(TransactionTestCase):
"""
Tests the transactional behavior of the account API
"""
def setUp(self):
super().setUp()
self.client = APIClient()
self.user = UserFactory.create(password=TEST_PASSWORD)
self.url = reverse("accounts_api", kwargs={'username': self.user.username})
@mock.patch('common.djangoapps.student.views.do_email_change_request')
def test_update_account_settings_rollback(self, mock_email_change):
"""
Verify that updating account settings is transactional when a failure happens.
"""
# Send a PATCH request with updates to both profile information and email.
# Throw an error from the method that is used to process the email change request
# (this is the last thing done in the api method). Verify that the profile did not change.
mock_email_change.side_effect = [ValueError, "mock value error thrown"]
self.client.login(username=self.user.username, password=TEST_PASSWORD)
old_email = self.user.email
json_data = {"email": "foo@bar.com", "gender": "o"}
response = self.client.patch(self.url, data=json.dumps(json_data), content_type="application/merge-patch+json")
assert 400 == response.status_code
# Verify that GET returns the original preferences
response = self.client.get(self.url)
data = response.data
assert old_email == data['email']
assert 'm' == data['gender']
@ddt.ddt
class NameChangeViewTests(UserAPITestCase):
""" NameChangeView tests """
def setUp(self):
super().setUp()
self.url = reverse('name_change')
def test_request_succeeds(self):
"""
Test that a valid name change request succeeds.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.send_post(self.client, {'name': 'New Name'})
def test_unauthenticated(self):
"""
Test that a name change request fails for an unauthenticated user.
"""
self.send_post(self.client, {'name': 'New Name'}, expected_status=401)
def test_empty_request(self):
"""
Test that an empty request fails.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.send_post(self.client, {}, expected_status=400)
def test_blank_name(self):
"""
Test that a blank name string fails.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.send_post(self.client, {'name': ''}, expected_status=400)
@ddt.data('<html>invalid name</html>', 'https://invalid.com')
def test_fails_validation(self, invalid_name):
"""
Test that an invalid name will return an error.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.send_post(
self.client,
{'name': invalid_name},
expected_status=400
)
@ddt.ddt
@mock.patch('django.conf.settings.USERNAME_REPLACEMENT_WORKER', 'test_replace_username_service_worker')
class UsernameReplacementViewTests(APITestCase):
""" Tests UsernameReplacementView """
SERVICE_USERNAME = 'test_replace_username_service_worker'
def setUp(self):
super().setUp()
self.service_user = UserFactory(username=self.SERVICE_USERNAME)
self.url = reverse("username_replacement")
def build_jwt_headers(self, user):
"""
Helper function for creating headers for the JWT authentication.
"""
token = create_jwt_for_user(user)
headers = {'HTTP_AUTHORIZATION': f'JWT {token}'}
return headers
def call_api(self, user, data):
""" Helper function to call API with data """
data = json.dumps(data)
headers = self.build_jwt_headers(user)
return self.client.post(self.url, data, content_type='application/json', **headers)
def test_auth(self):
""" Verify the endpoint only works with the service worker """
data = {
"username_mappings": [
{"test_username_1": "test_new_username_1"},
{"test_username_2": "test_new_username_2"}
]
}
# Test unauthenticated
response = self.client.post(self.url)
assert response.status_code == 401
# Test non-service worker
random_user = UserFactory()
response = self.call_api(random_user, data)
assert response.status_code == 403
# Test service worker
response = self.call_api(self.service_user, data)
assert response.status_code == 200
@ddt.data(
[{}, {}],
{},
[{"test_key": "test_value", "test_key_2": "test_value_2"}]
)
def test_bad_schema(self, mapping_data):
""" Verify the endpoint rejects bad data schema """
data = {
"username_mappings": mapping_data
}
response = self.call_api(self.service_user, data)
assert response.status_code == 400
def test_existing_and_non_existing_users(self):
""" Tests a mix of existing and non existing users """
random_users = [UserFactory() for _ in range(5)]
fake_usernames = ["myname_" + str(x) for x in range(5)]
existing_users = [{user.username: user.username + '_new'} for user in random_users]
non_existing_users = [{username: username + '_new'} for username in fake_usernames]
data = {
"username_mappings": existing_users + non_existing_users
}
expected_response = {
'failed_replacements': [],
'successful_replacements': existing_users + non_existing_users
}
response = self.call_api(self.service_user, data)
assert response.status_code == 200
assert response.data == expected_response
|
edx/edx-platform
|
openedx/core/djangoapps/user_api/accounts/tests/test_views.py
|
Python
|
agpl-3.0
| 53,614 | 0.003117 |
DEBUG = False
BASEDIR = ''
SUBDIR = ''
PREFIX = ''
QUALITY = 85
CONVERT = '/usr/bin/convert'
WVPS = '/usr/bin/wvPS'
PROCESSORS = (
'populous.thumbnail.processors.colorspace',
'populous.thumbnail.processors.autocrop',
'populous.thumbnail.processors.scale_and_crop',
'populous.thumbnail.processors.filters',
)
|
caiges/populous
|
populous/thumbnail/defaults.py
|
Python
|
bsd-3-clause
| 324 | 0 |
'''
Author: Peter Chip (furamail001@gmail.com)
Date: 2015 03 25
Given: Positive integers n≤100 and m≤20.
Return: The total number of pairs of rabbits that will remain after the n-th month if all rabbits live for m months.
Theory: The standard Fibonacci series: 1 1 2 3 5 8 13
fn = fn-1 + fn-2
More generally fn = fn-1 + (fn-2)*k, where k is the number of new pairs each mature pair produces; in the standard Fibonacci series k is 1.
The number of pairs in the first six months:
m = 3 : 1 1 2 2 3 4
m = 4 : 1 1 2 3 4 6
m = 5 : 1 1 2 3 5 7
After the first deaths the number of pairs is:
fn = fn-2 + fn-3 + ... + fn-m
'''
def new_value(i):
value = 0
change = 2
# Repeat it m - 1 times
for y in range(1, m):
# Add the fn-2 + fn-3 .. fn-m
value += L[(i-change)]
change += 1
return value
# number of months
n = 94
# months a rabbit lives
m = 20
L = [1, 1, 2]
i = 3
while i < n:
# If the first death occurs
if i >= m:
L.append(new_value(i))
i += 1
# If before the first death
else:
L.append(L[i-1]+ (L[i-2]))
i += 1
print(L[len(L)-1])
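# A minimal illustrative sketch (not part of the original solution): the same
# recurrence wrapped in a function so it can be checked against the table in
# the docstring above. The name `mortal_rabbits` is made up here.
def mortal_rabbits(months, lifespan):
    pairs = [1, 1]
    for i in range(2, months):
        if i < lifespan:
            # Before any rabbits die: standard Fibonacci step.
            pairs.append(pairs[i - 1] + pairs[i - 2])
        else:
            # After the first deaths: fn = fn-2 + fn-3 + ... + fn-m
            pairs.append(sum(pairs[i - lifespan:i - 1]))
    return pairs[months - 1]
# Quick sanity check against the m = 3 row of the docstring table.
assert [mortal_rabbits(x, 3) for x in range(1, 7)] == [1, 1, 2, 2, 3, 4]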
|
amidoimidazol/bio_info
|
Rosalind.info Problems/Mortal Fibonacci Rabbits.py
|
Python
|
mit
| 1,178 | 0.005111 |
import sys
import os
import os.path
import glob
from optparse import OptionParser
#-------------------------------------------------------------------------------
# the main function
# cd bin_VS2010
# ctest -C Release
# cd Testing
# python ../../elastix/Testing/elx_get_checksum_list.py -l elastix_run*
# cd ..
# cmake .
# ctest -C Release -R COMPARE_CHECKSUM
# svn commit -m "ENH: updating baselines after recent change X"
def main():
# usage, parse parameters
usage = "usage: %prog [options] arg"
parser = OptionParser( usage )
# option to debug and verbose
parser.add_option( "-v", "--verbose",
action="store_true", dest="verbose" )
# options to control files
parser.add_option( "-l", "--list", type="string", dest="directoryList", help="list of elastix output directories" )
(options, args) = parser.parse_args()
# Check if option -l is given
if options.directoryList == None :
parser.error( "The option directory list (-l) should be given" )
# Use glob, this works not only on Linux
dirList = glob.glob( options.directoryList );
# Add everything not processed
dirList.extend( args );
print( "directory checksum" )
for directory in dirList:
# Equivalent to: fileName = options.directory + "/" + "elastix.log"
fileName = os.path.join( directory, "elastix.log" );
# Read elastix.log and find last line with checksum
try:
f = open( fileName )
except IOError as e:
print( directory + " No elastix.log found" )
continue
checksumFound = False;
for line in f:
if "Registration result checksum:" in line:
checksumline = line;
checksumFound = True;
# Extract checksum
if checksumFound:
checksum = checksumline.split(': ')[1].rstrip( "\n" );
# Print result
print( directory + " " + checksum );
else:
print( directory + " -" );
f.close();
return 0
#-------------------------------------------------------------------------------
if __name__ == '__main__':
sys.exit(main())
|
SuperElastix/elastix
|
Testing/elx_get_checksum_list.py
|
Python
|
apache-2.0
| 2,114 | 0.028382 |
#
# Copyright (C) 2012, 2014 UNINETT
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details. You should have received a copy of the GNU General Public License
# along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
""""moduleState event plugin"""
import datetime
from nav.eventengine.alerts import AlertGenerator
from nav.eventengine.plugins import delayedstate
from nav.models.manage import Module
class ModuleStateHandler(delayedstate.DelayedStateHandler):
"""Accepts moduleState events"""
HAS_WARNING_ALERT = True
WARNING_WAIT_TIME = 'moduleDown.warning'
ALERT_WAIT_TIME = 'moduleDown.alert'
handled_types = ('moduleState',)
__waiting_for_resolve = {}
_target = None
def get_target(self):
if not self._target:
self._target = Module.objects.get(id=self.event.subid)
assert self._target.netbox_id == self.event.netbox.id
return self._target
def _get_up_alert(self):
alert = self._get_alert()
alert.alert_type = "moduleUp"
return alert
def _set_internal_state_down(self):
module = self.get_target()
module.up = module.UP_DOWN
module.down_since = datetime.datetime.now()
module.save()
def _set_internal_state_up(self):
module = self.get_target()
module.up = module.UP_UP
module.down_since = None
module.save()
def _get_down_alert(self):
alert = self._get_alert()
alert.alert_type = "moduleDown"
return alert
def _get_alert(self):
alert = AlertGenerator(self.event)
target = self.get_target()
if target:
alert['module'] = target
return alert
def _post_down_warning(self):
"""Posts the actual warning alert"""
alert = self._get_alert()
alert.alert_type = "moduleDownWarning"
alert.state = self.event.STATE_STATELESS
self._logger.info("%s: Posting %s alert",
self.get_target(), alert.alert_type)
alert.post()
|
alexanderfefelov/nav
|
python/nav/eventengine/plugins/modulestate.py
|
Python
|
gpl-2.0
| 2,481 | 0 |
"""Conditional Event item definition."""
from gaphor.diagram.presentation import (
Classified,
ElementPresentation,
from_package_str,
)
from gaphor.diagram.shapes import Box, IconBox, Text
from gaphor.diagram.support import represents
from gaphor.diagram.text import FontStyle, FontWeight
from gaphor.RAAML import raaml
from gaphor.RAAML.fta.basicevent import draw_basic_event
from gaphor.UML.modelfactory import stereotypes_str
@represents(raaml.ConditionalEvent)
class ConditionalEventItem(ElementPresentation, Classified):
def __init__(self, diagram, id=None):
super().__init__(diagram, id, width=70, height=35)
self.watch("subject[NamedElement].name").watch(
"subject[NamedElement].namespace.name"
)
def update_shapes(self, event=None):
self.shape = IconBox(
Box(
draw=draw_basic_event,
),
Text(
text=lambda: stereotypes_str(self.subject, ["ConditionalEvent"]),
),
Text(
text=lambda: self.subject.name or "",
width=lambda: self.width - 4,
style={
"font-weight": FontWeight.BOLD,
"font-style": FontStyle.NORMAL,
},
),
Text(
text=lambda: from_package_str(self),
style={"font-size": "x-small"},
),
)
|
amolenaar/gaphor
|
gaphor/RAAML/fta/conditionalevent.py
|
Python
|
lgpl-2.1
| 1,442 | 0.000693 |
# -*- coding: utf-8 -*-
###
# (C) Copyright (2012-2016) Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
__title__ = 'logical-enclosures'
__version__ = '0.0.1'
__copyright__ = '(C) Copyright (2012-2016) Hewlett Packard Enterprise ' \
' Development LP'
__license__ = 'MIT'
__status__ = 'Development'
from hpOneView.resources.resource import ResourceClient
class LogicalEnclosures(object):
URI = '/rest/logical-enclosures'
def __init__(self, con):
self._connection = con
self._client = ResourceClient(con, self.URI)
def get_all(self, start=0, count=-1, filter='', sort=''):
"""
Returns a list of logical enclosures matching the specified filter. A maximum of 40 logical enclosures are
returned to the caller. Additional calls can be made to retrieve any other logical enclosures matching the
filter. Valid filter parameters include attributes of a Logical Enclosure resource.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all the items.
The actual number of items in the response may differ from the requested
count if the sum of start and count exceed the total number of items.
filter:
A general filter/query string to narrow the list of items returned. The
default is no filter - all resources are returned.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time, with the oldest entry first.
Returns:
list: A list of logical enclosures.
"""
return self._client.get_all(start, count, filter=filter, sort=sort)
def get_by(self, field, value):
"""
Get all logical enclosures that match the filter
The search is case insensitive
Args:
field: field name to filter
value: value to filter
Returns:
list: A list of logical enclosures.
"""
return self._client.get_by(field, value)
def get_by_name(self, name):
"""
        Retrieve a resource by its name
Args:
name: resource name
Returns: dict
"""
return self._client.get_by_name(name=name)
def get(self, id_or_uri):
"""
Returns the logical enclosure with the specified ID, if it exists.
Args:
            id_or_uri: ID or URI of logical enclosure
Returns: (dict) logical enclosure
"""
return self._client.get(id_or_uri)
def update(self, resource, timeout=-1):
"""
Updates the given logical enclosure that is passed in. The fields that can be updated on the logical enclosure
itself include its name and configuration script. When the script is updated on the logical enclosure, the
configuration script runs on all enclosures in the logical enclosure.
Args:
resource (dict): Object to update
timeout:
Timeout in seconds. Wait task completion by default. The timeout does not abort the operation
in OneView, just stops waiting for its completion.
Returns: (dict) Updated logical enclosure
"""
return self._client.update(resource, timeout=timeout)
def patch(self, id_or_uri, operation, path, value, timeout=-1):
"""
Updates the given logical enclosure's attributes that are passed in. The PATCH operation is a partial update of
the resource. The support operation in this context is the firmware update.
Args:
id_or_uri: Could be either the resource id or the resource uri
operation: Patch operation
path: Path
value: Value
timeout: Timeout in seconds. Wait task completion by default. The timeout does not abort the operation
in OneView, just stops waiting for its completion.
Returns: (dict) Updated logical enclosure
"""
return self._client.patch(id_or_uri, operation, path, value, timeout=timeout)
def update_configuration(self, id_or_uri, timeout=-1):
"""
Reapplies the appliance's configuration on enclosures for the logical enclosure by ID or uri. This includes
running the same configure steps that were performed as part of the enclosure add. A task is returned to the
caller which can be used to track the progress of the operation.
Args:
id_or_uri: Could be either the resource id or the resource uri
timeout: Timeout in seconds. Wait task completion by default. The timeout does not abort the operation
in OneView, just stops waiting for its completion.
Returns: (dict) logical enclosure
"""
uri = self._client.build_uri(id_or_uri) + "/configuration"
return self._client.update_with_zero_body(uri, timeout=timeout)
def get_script(self, id_or_uri):
"""
Gets the configuration script of the logical enclosure by id or uri
Args:
id_or_uri: Could be either the resource id or the resource uri
Return: configuration script
"""
uri = self._client.build_uri(id_or_uri) + "/script"
return self._client.get(uri)
def update_script(self, id_or_uri, information, timeout=-1):
"""
Updates the configuration script of the logical enclosure and on all enclosures in the logical enclosure with
the specified ID.
Args:
id_or_uri: Could be either the resource id or the resource uri
information: updated script
timeout: Timeout in seconds. Wait task completion by default. The timeout does not abort the operation
in OneView, just stops waiting for its completion.
Return: configuration script
"""
uri = self._client.build_uri(id_or_uri) + "/script"
return self._client.update(information, uri=uri, timeout=timeout)
def generate_support_dump(self, information, id_or_uri, timeout=-1):
"""
Generates a support dump for the logical enclosure with the specified ID. A logical enclosure support dump
includes content for logical interconnects associated with that logical enclosure. By default, it also contains
appliance support dump content.
Args:
id_or_uri: Could be either the resource id or the resource uri
information (dict): information to generate support dump
timeout: Timeout in seconds. Wait task completion by default. The timeout does not abort the operation
in OneView, just stops waiting for its completion.
Returns: (dict) support dump
"""
uri = self._client.build_uri(id_or_uri) + "/support-dumps"
return self._client.create(information, uri=uri, timeout=timeout)
def update_from_group(self, id_or_uri, timeout=-1):
"""
Use this action to make a logical enclosure consistent with the enclosure group when the logical enclosure is
in the Inconsistent state.
Args:
id_or_uri: Could be either the resource id or the resource uri
timeout: Timeout in seconds. Wait task completion by default. The timeout does not abort the operation
in OneView, just stops waiting for its completion.
Returns: (dict) logical enclosure
"""
uri = self._client.build_uri(id_or_uri) + "/updateFromGroup"
return self._client.update_with_zero_body(uri=uri, timeout=timeout)
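# Illustrative usage only (not part of the original module): a sketch of how
# the client above might be exercised. It assumes the hpOneView `connection`
# class; the appliance address and credentials below are made up, and 'name'
# and 'uri' are typical OneView resource attributes.
if __name__ == '__main__':
    from hpOneView.connection import connection
    con = connection('oneview.example.com')
    con.login({'userName': 'administrator', 'password': 'secret'})
    logical_enclosures = LogicalEnclosures(con)
    for le in logical_enclosures.get_all(start=0, count=10):
        print(le['name'], le['uri'])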
|
andreadean5/python-hpOneView
|
hpOneView/resources/servers/logical_enclosures.py
|
Python
|
mit
| 9,117 | 0.002962 |
class EventSearchPageLocators(object):
NAME_FIELD = ".form-control[name='name']"
START_DATE_FIELD = ".form-control[name='start_date']"
END_DATE_FIELD = ".form-control[name='end_date']"
CITY_FIELD = ".form-control[name='city']"
STATE_FIELD = ".form-control[name='state']"
COUNTRY_FIELD = ".form-control[name='country']"
JOB_FIELD = ".form-control[name='job']"
RESULT_BODY = '//table//tbody'
HELP_BLOCK = 'help-block'
SUBMIT_PATH = 'submit'
|
systers/vms
|
vms/pom/locators/eventSearchPageLocators.py
|
Python
|
gpl-2.0
| 481 | 0.002079 |
"""
Utilities module whose functions are designed to do the basic processing of
the data using obspy modules (which also rely on scipy and numpy).
:copyright:
EQcorrscan developers.
:license:
GNU Lesser General Public License, Version 3
(https://www.gnu.org/copyleft/lesser.html)
"""
import numpy as np
import logging
import datetime as dt
from collections import Counter
from multiprocessing import Pool, cpu_count
from obspy import Stream, Trace, UTCDateTime
from obspy.core.trace import Stats
from obspy.signal.filter import bandpass, lowpass, highpass
Logger = logging.getLogger(__name__)
def _check_daylong(tr):
"""
Check the data quality of the daylong file.
Check to see that the day isn't just zeros, with large steps, if it is
then the resampling will hate it.
:type tr: obspy.core.trace.Trace
:param tr: Trace to check if the data are daylong.
:return quality (simply good or bad)
:rtype: bool
.. rubric:: Example
>>> from obspy import read
>>> from eqcorrscan.utils.pre_processing import _check_daylong
>>> # Get the path to the test data
>>> import eqcorrscan
>>> import os
>>> TEST_PATH = os.path.dirname(eqcorrscan.__file__) + '/tests/test_data'
>>> st = read(TEST_PATH + '/WAV/TEST_/' +
... '2013-09-01-0410-35.DFDPC_024_00')
>>> _check_daylong(st[0])
True
"""
if len(np.nonzero(tr.data)[0]) < 0.5 * len(tr.data):
qual = False
else:
qual = True
return qual
def shortproc(st, lowcut, highcut, filt_order, samp_rate, parallel=False,
num_cores=False, starttime=None, endtime=None,
seisan_chan_names=False, fill_gaps=True, ignore_length=False,
ignore_bad_data=False, fft_threads=1):
"""
Basic function to bandpass and downsample.
Works in place on data. This is employed to ensure all parts of the
data are processed in the same way.
:type st: obspy.core.stream.Stream
:param st: Stream to process
:type lowcut: float
:param lowcut: Low cut for bandpass in Hz
:type highcut: float
:param highcut: High cut for bandpass in Hz
:type filt_order: int
:param filt_order: Number of corners for bandpass filter
:type samp_rate: float
:param samp_rate: Sampling rate desired in Hz
:type parallel: bool
:param parallel:
Set to True to process traces in parallel, for small numbers of traces
this is often slower than serial processing, defaults to False
:type num_cores: int
:param num_cores:
Control the number of cores for parallel processing, if set to False
then this will use all the cores available.
:type starttime: obspy.core.utcdatetime.UTCDateTime
:param starttime:
Desired data start time, will trim to this before processing
:type endtime: obspy.core.utcdatetime.UTCDateTime
:param endtime:
Desired data end time, will trim to this before processing
:type seisan_chan_names: bool
:param seisan_chan_names:
Whether channels are named like seisan channels (which are two letters
rather than SEED convention of three) - defaults to True.
:type fill_gaps: bool
:param fill_gaps: Whether to pad any gaps found with zeros or not.
:type ignore_length: bool
:param ignore_length:
Whether to allow data that are less than 80% of the requested length.
Defaults to False which will error if short data are found.
:type ignore_bad_data: bool
:param ignore_bad_data:
If False (default), errors will be raised if data are excessively
gappy or are mostly zeros. If True then no error will be raised, but
an empty trace will be returned.
:type fft_threads: int
:param fft_threads:
Number of threads to use for pyFFTW FFT in resampling. Note that it
is not recommended to use fft_threads > 1 and num_cores > 1.
:return: Processed stream
:rtype: :class:`obspy.core.stream.Stream`
.. note::
If your data contain gaps you should *NOT* fill those gaps before
using the pre-process functions. The pre-process functions will fill
the gaps internally prior to processing, process the data, then re-fill
the gaps with zeros to ensure correlations are not incorrectly
calculated within gaps. If your data have gaps you should pass a merged
stream without the `fill_value` argument (e.g.: `st = st.merge()`).
.. warning::
If you intend to use this for processing templates you should consider
how resampling will impact your cross-correlations. Minor differences
in resampling between day-long files (which you are likely to use for
continuous detection) and shorter files will reduce your
cross-correlations!
.. rubric:: Example, bandpass
>>> from obspy import read
>>> from eqcorrscan.utils.pre_processing import shortproc
>>> # Get the path to the test data
>>> import eqcorrscan
>>> import os
>>> TEST_PATH = os.path.dirname(eqcorrscan.__file__) + '/tests/test_data'
>>> st = read(TEST_PATH + '/WAV/TEST_/2013-09-01-0410-35.DFDPC_024_00')
>>> st = shortproc(st=st, lowcut=2, highcut=9, filt_order=3, samp_rate=20,
... parallel=True, num_cores=2)
>>> print(st[0])
AF.LABE..SHZ | 2013-09-01T04:10:35.700000Z - 2013-09-01T04:12:05.650000Z \
| 20.0 Hz, 1800 samples
.. rubric:: Example, low-pass
>>> from obspy import read
>>> from eqcorrscan.utils.pre_processing import shortproc
>>> # Get the path to the test data
>>> import eqcorrscan
>>> import os
>>> TEST_PATH = os.path.dirname(eqcorrscan.__file__) + '/tests/test_data'
>>> st = read(TEST_PATH + '/WAV/TEST_/2013-09-01-0410-35.DFDPC_024_00')
>>> st = shortproc(st=st, lowcut=None, highcut=9, filt_order=3,
... samp_rate=20)
>>> print(st[0])
AF.LABE..SHZ | 2013-09-01T04:10:35.700000Z - 2013-09-01T04:12:05.650000Z \
| 20.0 Hz, 1800 samples
.. rubric:: Example, high-pass
>>> from obspy import read
>>> from eqcorrscan.utils.pre_processing import shortproc
>>> # Get the path to the test data
>>> import eqcorrscan
>>> import os
>>> TEST_PATH = os.path.dirname(eqcorrscan.__file__) + '/tests/test_data'
>>> st = read(TEST_PATH + '/WAV/TEST_/2013-09-01-0410-35.DFDPC_024_00')
>>> st = shortproc(st=st, lowcut=2, highcut=None, filt_order=3,
... samp_rate=20)
>>> print(st[0])
AF.LABE..SHZ | 2013-09-01T04:10:35.700000Z - 2013-09-01T04:12:05.650000Z \
| 20.0 Hz, 1800 samples
"""
if isinstance(st, Trace):
tracein = True
st = Stream(st)
else:
tracein = False
# Add sanity check for filter
if highcut and highcut >= 0.5 * samp_rate:
raise IOError('Highcut must be lower than the nyquist')
length = None
clip = False
if starttime is not None and endtime is not None:
for tr in st:
tr.trim(starttime, endtime)
if len(tr.data) == ((endtime - starttime) *
tr.stats.sampling_rate) + 1:
tr.data = tr.data[1:len(tr.data)]
length = endtime - starttime
clip = True
elif starttime:
for tr in st:
tr.trim(starttime=starttime)
elif endtime:
for tr in st:
tr.trim(endtime=endtime)
for tr in st:
if len(tr.data) == 0:
st.remove(tr)
Logger.warning('No data for {0} after trim'.format(tr.id))
if parallel:
if not num_cores:
num_cores = cpu_count()
if num_cores > len(st):
num_cores = len(st)
pool = Pool(processes=num_cores)
results = [pool.apply_async(process, (tr,), {
'lowcut': lowcut, 'highcut': highcut, 'filt_order': filt_order,
'samp_rate': samp_rate, 'starttime': starttime,
'clip': clip, 'seisan_chan_names': seisan_chan_names,
'fill_gaps': fill_gaps, 'length': length,
'ignore_length': ignore_length, 'fft_threads': fft_threads,
'ignore_bad_data': ignore_bad_data})
for tr in st]
pool.close()
try:
stream_list = [p.get() for p in results]
except KeyboardInterrupt as e: # pragma: no cover
pool.terminate()
raise e
pool.join()
st = Stream(stream_list)
else:
for i, tr in enumerate(st):
st[i] = process(
tr=tr, lowcut=lowcut, highcut=highcut, filt_order=filt_order,
samp_rate=samp_rate, starttime=starttime,
clip=clip, seisan_chan_names=seisan_chan_names,
fill_gaps=fill_gaps, length=length,
ignore_length=ignore_length, ignore_bad_data=ignore_bad_data,
fft_threads=fft_threads)
if tracein:
st.merge()
return st[0]
return st
def dayproc(st, lowcut, highcut, filt_order, samp_rate, starttime,
parallel=True, num_cores=False, ignore_length=False,
seisan_chan_names=False, fill_gaps=True, ignore_bad_data=False,
fft_threads=1):
"""
Wrapper for dayproc to parallel multiple traces in a stream.
Works in place on data. This is employed to ensure all parts of the data \
are processed in the same way.
:type st: obspy.core.stream.Stream
:param st: Stream to process (can be trace).
:type lowcut: float
:param lowcut: Low cut in Hz for bandpass.
:type highcut: float
:param highcut: High cut in Hz for bandpass.
:type filt_order: int
:param filt_order: Corners for bandpass.
:type samp_rate: float
:param samp_rate: Desired sampling rate in Hz.
:type starttime: obspy.core.utcdatetime.UTCDateTime
:param starttime: Desired start-date of trace.
:type parallel: bool
:param parallel:
Set to True to process traces in parallel, this is often faster than
serial processing of traces: defaults to True.
:type num_cores: int
:param num_cores:
Control the number of cores for parallel processing, if set to False
then this will use all the cores.
:type ignore_length: bool
:param ignore_length: See warning below.
:type seisan_chan_names: bool
:param seisan_chan_names:
Whether channels are named like seisan channels (which are two letters
rather than SEED convention of three) - defaults to True.
:type fill_gaps: bool
:param fill_gaps: Whether to pad any gaps found with zeros or not.
:type ignore_bad_data: bool
:param ignore_bad_data:
If False (default), errors will be raised if data are excessively
gappy or are mostly zeros. If True then no error will be raised, but
an empty trace will be returned.
:type fft_threads: int
:param fft_threads:
Number of threads to use for pyFFTW FFT in resampling. Note that it
is not recommended to use fft_threads > 1 and num_cores > 1.
:return: Processed stream.
:rtype: :class:`obspy.core.stream.Stream`
.. note::
If your data contain gaps you should *NOT* fill those gaps before
using the pre-process functions. The pre-process functions will fill
the gaps internally prior to processing, process the data, then re-fill
the gaps with zeros to ensure correlations are not incorrectly
calculated within gaps. If your data have gaps you should pass a merged
stream without the `fill_value` argument (e.g.: `st = st.merge()`).
.. warning::
Will fail if data are less than 19.2 hours long - this number is
arbitrary and is chosen to alert the user to the dangers of padding
to day-long, if you don't care you can ignore this error by setting
`ignore_length=True`. Use this option at your own risk! It will also
warn any-time it has to pad data - if you see strange artifacts in your
detections, check whether the data have gaps.
.. rubric:: Example
>>> import obspy
>>> if int(obspy.__version__.split('.')[0]) >= 1:
... from obspy.clients.fdsn import Client
... else:
... from obspy.fdsn import Client
>>> from obspy import UTCDateTime
>>> from eqcorrscan.utils.pre_processing import dayproc
>>> client = Client('NCEDC')
>>> t1 = UTCDateTime(2012, 3, 26)
>>> t2 = t1 + 86400
>>> bulk_info = [('BP', 'JCNB', '40', 'SP1', t1, t2)]
>>> st = client.get_waveforms_bulk(bulk_info)
>>> st_keep = st.copy() # Copy the stream for later examples
>>> # Example of bandpass filtering
>>> st = dayproc(st=st, lowcut=2, highcut=9, filt_order=3, samp_rate=20,
... starttime=t1, parallel=True, num_cores=2)
>>> print(st[0])
BP.JCNB.40.SP1 | 2012-03-26T00:00:00.000000Z - 2012-03-26T23:59:59.\
950000Z | 20.0 Hz, 1728000 samples
>>> # Example of lowpass filtering
>>> st = dayproc(st=st, lowcut=None, highcut=9, filt_order=3, samp_rate=20,
... starttime=t1, parallel=True, num_cores=2)
>>> print(st[0])
BP.JCNB.40.SP1 | 2012-03-26T00:00:00.000000Z - 2012-03-26T23:59:59.\
950000Z | 20.0 Hz, 1728000 samples
>>> # Example of highpass filtering
>>> st = dayproc(st=st, lowcut=2, highcut=None, filt_order=3, samp_rate=20,
... starttime=t1, parallel=True, num_cores=2)
>>> print(st[0])
BP.JCNB.40.SP1 | 2012-03-26T00:00:00.000000Z - 2012-03-26T23:59:59.\
950000Z | 20.0 Hz, 1728000 samples
"""
# Add sanity check for filter
if isinstance(st, Trace):
st = Stream(st)
tracein = True
else:
tracein = False
if highcut and highcut >= 0.5 * samp_rate:
raise IOError('Highcut must be lower than the nyquist')
# Set the start-time to a day start - cope with
if starttime is None:
startdates = []
for tr in st:
if abs(tr.stats.starttime - (UTCDateTime(
tr.stats.starttime.date) + 86400)) < tr.stats.delta:
# If the trace starts within 1 sample of the next day, use the
# next day as the startdate
startdates.append((tr.stats.starttime + 86400).date)
Logger.warning(
'{0} starts within 1 sample of the next day, using this '
'time {1}'.format(
tr.id, (tr.stats.starttime + 86400).date))
else:
startdates.append(tr.stats.starttime.date)
# Check that all traces start on the same date...
if not len(set(startdates)) == 1:
raise NotImplementedError('Traces start on different days')
starttime = UTCDateTime(startdates[0])
if parallel:
if not num_cores:
num_cores = cpu_count()
if num_cores > len(st):
num_cores = len(st)
pool = Pool(processes=num_cores)
results = [pool.apply_async(process, (tr,), {
'lowcut': lowcut, 'highcut': highcut, 'filt_order': filt_order,
'samp_rate': samp_rate, 'starttime': starttime, 'clip': True,
'ignore_length': ignore_length, 'length': 86400,
'seisan_chan_names': seisan_chan_names, 'fill_gaps': fill_gaps,
'ignore_bad_data': ignore_bad_data, 'fft_threads': fft_threads})
for tr in st]
pool.close()
try:
stream_list = [p.get() for p in results]
except KeyboardInterrupt as e: # pragma: no cover
pool.terminate()
raise e
pool.join()
st = Stream(stream_list)
else:
for i, tr in enumerate(st):
st[i] = process(
tr=tr, lowcut=lowcut, highcut=highcut, filt_order=filt_order,
samp_rate=samp_rate, starttime=starttime, clip=True,
length=86400, ignore_length=ignore_length,
seisan_chan_names=seisan_chan_names, fill_gaps=fill_gaps,
ignore_bad_data=ignore_bad_data, fft_threads=fft_threads)
for tr in st:
if len(tr.data) == 0:
st.remove(tr)
if tracein:
st.merge()
return st[0]
return st
def process(tr, lowcut, highcut, filt_order, samp_rate,
starttime=False, clip=False, length=86400,
seisan_chan_names=False, ignore_length=False, fill_gaps=True,
ignore_bad_data=False, fft_threads=1):
"""
Basic function to process data, usually called by dayproc or shortproc.
Functionally, this will bandpass, downsample and check headers and length
of trace to ensure files start when they should and are the correct length.
This is a simple wrapper on obspy functions, we include it here to provide
a system to ensure all parts of the dataset are processed in the same way.
.. note:: Usually this function is called via dayproc or shortproc.
:type tr: obspy.core.trace.Trace
:param tr: Trace to process
:type lowcut: float
:param lowcut:
Low cut in Hz, if set to None and highcut is set, will use
a lowpass filter.
:type highcut: float
:param highcut:
High cut in Hz, if set to None and lowcut is set, will use
a highpass filter.
:type filt_order: int
:param filt_order: Number of corners for filter.
:type samp_rate: float
:param samp_rate: Desired sampling rate in Hz.
:type starttime: obspy.core.utcdatetime.UTCDateTime
:param starttime: Desired start of trace
:type clip: bool
:param clip: Whether to expect, and enforce a set length of data or not.
:type length: float
:param length: Use to set a fixed length for data from the given starttime.
:type seisan_chan_names: bool
:param seisan_chan_names:
Whether channels are named like seisan channels (which are two letters
rather than SEED convention of three) - defaults to True.
:type ignore_length: bool
:param ignore_length: See warning in dayproc.
:type fill_gaps: bool
:param fill_gaps: Whether to pad any gaps found with zeros or not.
:type ignore_bad_data: bool
:param ignore_bad_data:
If False (default), errors will be raised if data are excessively
gappy or are mostly zeros. If True then no error will be raised, but
an empty trace will be returned.
:type fft_threads: int
:param fft_threads: Number of threads to use for pyFFTW FFT in resampling
:return: Processed trace.
:type: :class:`obspy.core.stream.Trace`
.. note::
If your data contain gaps you should *NOT* fill those gaps before
using the pre-process functions. The pre-process functions will fill
the gaps internally prior to processing, process the data, then re-fill
the gaps with zeros to ensure correlations are not incorrectly
calculated within gaps. If your data have gaps you should pass a merged
stream without the `fill_value` argument (e.g.: `tr = tr.merge()`).
"""
# Add sanity check
if highcut and highcut >= 0.5 * samp_rate:
raise IOError('Highcut must be lower than the nyquist')
# Define the start-time
if starttime:
# Be nice and allow a datetime object.
if isinstance(starttime, dt.date) or isinstance(starttime,
dt.datetime):
starttime = UTCDateTime(starttime)
Logger.debug('Working on: {0}'.format(tr.id))
# Check if the trace is gappy and pad if it is.
gappy = False
if isinstance(tr.data, np.ma.MaskedArray):
gappy = True
gaps, tr = _fill_gaps(tr)
# Do a brute force quality check
qual = _check_daylong(tr)
if not qual:
msg = ("Data have more zeros than actual data, please check the raw",
" data set-up and manually sort it: " + tr.stats.station + "." +
tr.stats.channel)
if not ignore_bad_data:
raise ValueError(msg)
else:
Logger.warning(msg)
return Trace(data=np.array([]), header={
"station": tr.stats.station, "channel": tr.stats.channel,
"network": tr.stats.network, "location": tr.stats.location,
"starttime": tr.stats.starttime,
"sampling_rate": tr.stats.sampling_rate})
tr = tr.detrend('simple')
# Detrend data before filtering
Logger.debug('I have {0} data points for {1} before processing'.format(
tr.stats.npts, tr.id))
# Sanity check to ensure files are daylong
padded = False
if clip:
tr = tr.trim(starttime, starttime + length, nearest_sample=True)
if float(tr.stats.npts / tr.stats.sampling_rate) != length and clip:
Logger.info(
'Data for {0} are not long-enough, will zero pad'.format(
tr.id))
if tr.stats.endtime - tr.stats.starttime < 0.8 * length\
and not ignore_length:
msg = (
"Data for {0}.{1} is {2:.2f} seconds long, which is less than "
"80 percent of the desired length ({3} seconds), will not "
"pad".format(
tr.stats.station, tr.stats.channel,
tr.stats.endtime - tr.stats.starttime, length))
if not ignore_bad_data:
raise NotImplementedError(msg)
else:
Logger.warning(msg)
return Trace(data=np.array([]), header={
"station": tr.stats.station, "channel": tr.stats.channel,
"network": tr.stats.network, "location": tr.stats.location,
"starttime": tr.stats.starttime,
"sampling_rate": tr.stats.sampling_rate})
# trim, then calculate length of any pads required
pre_pad_secs = tr.stats.starttime - starttime
post_pad_secs = (starttime + length) - tr.stats.endtime
if pre_pad_secs > 0 or post_pad_secs > 0:
padded = True
pre_pad = np.zeros(int(pre_pad_secs * tr.stats.sampling_rate))
post_pad = np.zeros(int(post_pad_secs * tr.stats.sampling_rate))
Logger.debug(str(tr))
Logger.info("Padding to length with {0} s before and {1} s "
"at end".format(pre_pad_secs, post_pad_secs))
tr.data = np.concatenate([pre_pad, tr.data, post_pad])
# Use this rather than the expected pad because of rounding samples
tr.stats.starttime -= len(pre_pad) * tr.stats.delta
Logger.debug(str(tr))
# If there is one sample too many after this remove the first one
# by convention
if tr.stats.npts == (length * tr.stats.sampling_rate) + 1:
tr.data = tr.data[1:len(tr.data)]
# Cope with time precision.
if abs((tr.stats.sampling_rate * length) -
tr.stats.npts) > tr.stats.delta:
msg = ("Data sampling-rate * length ({0} * {1} = {2}) does not "
"match number of samples ({3}) for {4}".format(
tr.stats.sampling_rate, length,
tr.stats.sampling_rate * length, tr.stats.npts, tr.id))
if not ignore_bad_data:
raise ValueError(msg)
else:
Logger.warning(msg)
return Trace(data=np.array([]), header={
"station": tr.stats.station, "channel": tr.stats.channel,
"network": tr.stats.network, "location": tr.stats.location,
"starttime": tr.stats.starttime,
"sampling_rate": tr.stats.sampling_rate})
Logger.debug(
'I now have {0} data points after enforcing length'.format(
tr.stats.npts))
# Check sampling rate and resample
if tr.stats.sampling_rate != samp_rate:
Logger.debug('Resampling')
tr = _resample(tr, samp_rate, threads=fft_threads)
# Filtering section
tr = tr.detrend('simple') # Detrend data again before filtering
if highcut and lowcut:
Logger.debug('Bandpassing')
tr.data = bandpass(tr.data, lowcut, highcut,
tr.stats.sampling_rate, filt_order, True)
elif highcut:
Logger.debug('Lowpassing')
tr.data = lowpass(tr.data, highcut, tr.stats.sampling_rate,
filt_order, True)
elif lowcut:
Logger.debug('Highpassing')
tr.data = highpass(tr.data, lowcut, tr.stats.sampling_rate,
filt_order, True)
else:
Logger.warning('No filters applied')
# Account for two letter channel names in s-files and therefore templates
if seisan_chan_names:
tr.stats.channel = tr.stats.channel[0] + tr.stats.channel[-1]
if padded:
Logger.debug("Reapplying zero pads post processing")
Logger.debug(str(tr))
pre_pad = np.zeros(int(pre_pad_secs * tr.stats.sampling_rate))
post_pad = np.zeros(int(post_pad_secs * tr.stats.sampling_rate))
pre_pad_len = len(pre_pad)
post_pad_len = len(post_pad)
Logger.debug(
"Taking only valid data between {0} and {1} samples".format(
pre_pad_len, tr.stats.npts - post_pad_len))
# Re-apply the pads, taking only the data section that was valid
tr.data = np.concatenate(
[pre_pad, tr.data[pre_pad_len: len(tr.data) - post_pad_len],
post_pad])
Logger.debug(str(tr))
# Sanity check to ensure files are correct length
if float(tr.stats.npts * tr.stats.delta) != length and clip:
Logger.info(
'Data for {0} are not of required length, will zero pad'.format(
tr.id))
# Use obspy's trim function with zero padding
tr = tr.trim(starttime, starttime + length, pad=True, fill_value=0,
nearest_sample=True)
# If there is one sample too many after this remove the last one
# by convention
if len(tr.data) == (length * tr.stats.sampling_rate) + 1:
tr.data = tr.data[1:len(tr.data)]
if abs((tr.stats.sampling_rate * length) -
tr.stats.npts) > tr.stats.delta:
raise ValueError('Data are not required length for ' +
tr.stats.station + '.' + tr.stats.channel)
# Replace the gaps with zeros
if gappy:
tr = _zero_pad_gaps(tr, gaps, fill_gaps=fill_gaps)
return tr
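# Illustrative only (not part of the original module): how `process` might be
# called directly on a single trace, mirroring what `shortproc` does for each
# channel. The cut-offs and sampling rate below are arbitrary.
def _process_single_trace_example(tr):  # pragma: no cover
    """Sketch: bandpass 2-9 Hz and resample to 20 Hz, no clipping."""
    return process(tr=tr, lowcut=2.0, highcut=9.0, filt_order=3,
                   samp_rate=20.0)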
def _resample(tr, sampling_rate, threads=1):
"""
Provide a pyfftw version of obspy's trace resampling. This code is
modified from obspy's Trace.resample method.
"""
from future.utils import native_str
from scipy.signal import get_window
from pyfftw.interfaces.scipy_fftpack import rfft, irfft
factor = tr.stats.sampling_rate / float(sampling_rate)
# resample in the frequency domain. Make sure the byteorder is native.
x = rfft(tr.data.newbyteorder("="), threads=threads)
# Cast the value to be inserted to the same dtype as the array to avoid
# issues with numpy rule 'safe'.
x = np.insert(x, 1, x.dtype.type(0))
if tr.stats.npts % 2 == 0:
x = np.append(x, [0])
x_r = x[::2]
x_i = x[1::2]
large_w = np.fft.ifftshift(
get_window(native_str("hanning"), tr.stats.npts))
x_r *= large_w[:tr.stats.npts // 2 + 1]
x_i *= large_w[:tr.stats.npts // 2 + 1]
# interpolate
num = int(tr.stats.npts / factor)
df = 1.0 / (tr.stats.npts * tr.stats.delta)
d_large_f = 1.0 / num * sampling_rate
f = df * np.arange(0, tr.stats.npts // 2 + 1, dtype=np.int32)
n_large_f = num // 2 + 1
large_f = d_large_f * np.arange(0, n_large_f, dtype=np.int32)
large_y = np.zeros((2 * n_large_f))
large_y[::2] = np.interp(large_f, f, x_r)
large_y[1::2] = np.interp(large_f, f, x_i)
large_y = np.delete(large_y, 1)
if num % 2 == 0:
large_y = np.delete(large_y, -1)
tr.data = irfft(large_y, threads=threads) * (
float(num) / float(tr.stats.npts))
tr.stats.sampling_rate = sampling_rate
return tr
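# Illustrative only (not part of the original module): a quick sanity check of
# `_resample` on a synthetic trace. The numbers are arbitrary: 1000 samples at
# 100 Hz resampled to 20 Hz should leave 200 samples.
def _resample_example():  # pragma: no cover
    tr = Trace(data=np.random.randn(1000))
    tr.stats.sampling_rate = 100.0
    tr = _resample(tr, sampling_rate=20.0, threads=1)
    assert tr.stats.sampling_rate == 20.0
    assert tr.stats.npts == 200
    return tr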
def _zero_pad_gaps(tr, gaps, fill_gaps=True):
"""
Replace padded parts of trace with zeros.
Will cut around gaps, detrend, then pad the gaps with zeros.
:type tr: :class:`osbpy.core.stream.Trace`
:param tr: A trace that has had the gaps padded
:param gaps: List of dict of start-time and end-time as UTCDateTime objects
:type gaps: list
:return: :class:`obspy.core.stream.Trace`
"""
start_in, end_in = (tr.stats.starttime, tr.stats.endtime)
for gap in gaps:
stream = Stream()
if gap['starttime'] > tr.stats.starttime:
stream += tr.slice(tr.stats.starttime, gap['starttime']).copy()
if gap['endtime'] < tr.stats.endtime:
# Note this can happen when gaps are calculated for a trace that
# is longer than `length`, e.g. gaps are calculated pre-trim.
stream += tr.slice(gap['endtime'], tr.stats.endtime).copy()
tr = stream.merge()[0]
if fill_gaps:
tr = tr.split()
tr = tr.detrend()
tr = tr.merge(fill_value=0)[0]
# Need to check length - if a gap happened overlapping the end or start
# of the trace this will be lost.
if tr.stats.starttime != start_in:
# pad with zeros
tr.data = np.concatenate(
[np.zeros(int(tr.stats.starttime - start_in)), tr.data])
tr.stats.starttime = start_in
if tr.stats.endtime != end_in:
tr.data = np.concatenate(
[tr.data, np.zeros(int(end_in - tr.stats.endtime))])
return tr
def _fill_gaps(tr):
"""
Interpolate through gaps and work-out where gaps are.
:param tr: Gappy trace (e.g. tr.data is np.ma.MaskedArray)
:type tr: `obspy.core.stream.Trace`
:return: gaps, trace, where gaps is a list of dict
"""
tr = tr.split()
gaps = tr.get_gaps()
tr = tr.detrend().merge(fill_value=0)[0]
gaps = [{'starttime': gap[4], 'endtime': gap[5]} for gap in gaps]
return gaps, tr
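# Illustrative only (not part of the original module): how the two helpers
# above are combined inside `process` - interpolate through the gaps, do the
# processing, then zero the gaps again so they cannot bias correlations.
def _gap_handling_example(gappy_trace):  # pragma: no cover
    """Sketch: `gappy_trace` is assumed to be a merged trace with masked gaps."""
    gaps, tr = _fill_gaps(gappy_trace)               # remember where the gaps were
    tr.filter('bandpass', freqmin=2.0, freqmax=9.0)  # any processing step
    return _zero_pad_gaps(tr, gaps, fill_gaps=True)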
def _prep_data_for_correlation(stream, templates, template_names=None,
force_stream_epoch=True):
"""
Check that all channels are the same length and that all channels have data
for both template and stream.
Works in place on data - will cut to shortest length
:param stream: Stream to compare data to
:param templates:
List of streams that will be forced to have the same channels as stream
:param template_names:
List of strings same length as templates
:type force_stream_epoch: bool
:param force_stream_epoch:
Whether to force all channels in stream to cover the same time period
:return: stream, templates, template_names (if template_names given)
"""
n_templates = len(templates)
template_samp_rates = {
tr.stats.sampling_rate for template in templates for tr in template}
stream_samp_rates = {tr.stats.sampling_rate for tr in stream}
samp_rates = template_samp_rates.union(stream_samp_rates)
assert len(samp_rates) == 1, "Sampling rates differ"
samp_rate = samp_rates.pop()
out_stream = Stream()
named = True
if template_names is None:
named = False
template_names = range(n_templates)
# Work out shapes.
stream_start = min([tr.stats.starttime for tr in stream])
stream_end = max([tr.stats.endtime for tr in stream])
if force_stream_epoch:
stream_length = int(samp_rate * (stream_end - stream_start)) + 1
else:
stream_length = max([tr.stats.npts for tr in stream])
template_length = {
tr.stats.npts for template in templates for tr in template}
assert len(template_length) == 1, "Template traces not all the same length"
template_length = template_length.pop()
stream_ids = {tr.id for tr in stream}
# Need to ensure that a channel can be in the template multiple times.
all_template_ids = [
Counter([tr.id for tr in template]) for template in templates]
template_ids = {
stream_id: max(tid.get(stream_id, 0) for tid in all_template_ids)
for stream_id in stream_ids}
template_ids = {_id: value for _id, value in template_ids.items() if value}
seed_ids = sorted(
[key.split('.') + [i] for key, value in template_ids.items()
for i in range(value)])
seed_ids = [('.'.join(seed_id[0:-1]), seed_id[-1]) for seed_id in seed_ids]
for channel_number, seed_id in enumerate(template_ids.keys()):
stream_data = np.zeros(stream_length, dtype=np.float32)
stream_channel = stream.select(id=seed_id)
if len(stream_channel) > 1:
raise NotImplementedError(
"Multiple channels in continuous data for {0}".format(seed_id))
stream_channel = stream_channel[0]
if stream_channel.stats.npts == stream_length:
stream_data = stream_channel.data
else:
Logger.info('Data for {0} is not as long as needed, '
'padding'.format(stream_channel.id))
if force_stream_epoch:
start_pad = int(samp_rate * (
stream_channel.stats.starttime - stream_start))
end_pad = stream_length - (
start_pad + stream_channel.stats.npts)
                # In some cases one sample will be missing when sampling
                # time-stamps are not set consistently between channels; this
                # results in both start_pad and end_pad being zero
if start_pad == 0 and end_pad == 0:
Logger.debug("Start and end pad are both zero, padding "
"at one end")
if (stream_channel.stats.starttime - stream_start) > (
stream_end - stream_channel.stats.endtime):
start_pad = int(
stream_length - stream_channel.stats.npts)
else:
end_pad = int(
stream_length - stream_channel.stats.npts)
stream_channel.stats.starttime -= (start_pad / samp_rate)
else:
start_pad = 0
end_pad = stream_length - stream_channel.stats.npts
if end_pad == 0:
stream_data[start_pad:] = stream_channel.data
else:
stream_data[start_pad:-end_pad] = stream_channel.data
header = stream_channel.stats.copy()
header.npts = stream_length
out_stream += Trace(data=stream_data, header=header)
# Initialize nan template for speed.
nan_channel = np.full(template_length, np.nan, dtype=np.float32)
nan_template = Stream()
for _seed_id in seed_ids:
net, sta, loc, chan = _seed_id[0].split('.')
nan_template += Trace(header=Stats({
'network': net, 'station': sta, 'location': loc,
'channel': chan, 'starttime': UTCDateTime(),
'npts': template_length, 'sampling_rate': samp_rate}))
# Remove templates with no matching channels
filt = np.ones(len(template_names)).astype(bool)
for i, template in enumerate(templates):
trace_ids = {tr.id for tr in template}
if len(trace_ids.intersection(stream_ids)) == 0:
filt[i] = 0
_out = dict(zip(
[_tn for _tn, _filt in zip(template_names, filt) if _filt],
[_t for _t, _filt in zip(templates, filt) if _filt]))
flt_templates = list(_out.values())
if len(_out) != len(templates):
Logger.debug("Some templates not used due to no matching channels")
# Ensure that the templates' earliest traces are kept, even if there is no
# continuous data for them. If this happens, we need to add a NaN-stream to
# the continuous data to avoid inconsistent detection times.
n_template_traces = np.array([len(temp) for temp in flt_templates])
n_stream_traces = sum([n+1 for s, n in seed_ids])
# These checks are not necessary if all templates will get NaN-traces,
# because the NaN-traces will save the right starttime for the template.
nan_stream_ids = list()
if any(n_template_traces > n_stream_traces):
earliest_templ_trace_ids = set(
[template.sort(['starttime'])[0].id for template in flt_templates])
for earliest_templ_trace_id in earliest_templ_trace_ids:
if earliest_templ_trace_id not in template_ids:
nan_stream_ids.append(earliest_templ_trace_id)
net, sta, loc, chan = earliest_templ_trace_id.split('.')
nan_template += Trace(header=Stats({
'network': net, 'station': sta, 'location': loc,
'channel': chan, 'starttime': UTCDateTime(),
'npts': template_length, 'sampling_rate': samp_rate}))
stream_nan_data = np.full(
stream_length, np.nan, dtype=np.float32)
out_stream += Trace(
data=np.ma.masked_array(stream_nan_data, stream_nan_data),
header=Stats({
'network': net, 'station': sta, 'location': loc,
'channel': chan, 'starttime': stream_start,
'npts': stream_length, 'sampling_rate': samp_rate}))
seed_ids.append((earliest_templ_trace_id, 0))
incomplete_templates = {
template_name for template_name, template in _out.items() if
sorted([tr.id for tr in template]) != [tr.id for tr in nan_template]}
# Fill out the templates with nan channels
for template_name in incomplete_templates:
template = _out[template_name]
template_starttime = min(tr.stats.starttime for tr in template)
out_template = nan_template.copy()
for channel_number, _seed_id in enumerate(seed_ids):
seed_id, channel_index = _seed_id
template_channel = template.select(id=seed_id)
if len(template_channel) <= channel_index:
out_template[channel_number].data = nan_channel
out_template[channel_number].stats.starttime = \
template_starttime
else:
out_template[channel_number] = template_channel[channel_index]
        # If a template-trace matches a NaN-trace in the stream, then set the
        # template-trace to NaN so that this trace does not appear in the
        # channel-list of detections.
if len(nan_stream_ids) > 0:
for tr in out_template:
if tr.id in nan_stream_ids:
tr.data = nan_channel
_out.update({template_name: out_template})
out_templates = list(_out.values())
out_template_names = list(_out.keys())
if named:
return out_stream, out_templates, out_template_names
return out_stream, out_templates
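# Editor's note (illustrative only): after _prep_data_for_correlation every
# returned template holds one trace per (seed-id, repeat) entry in seed_ids --
# real data where the template had that channel, NaN-filled traces where it
# did not -- and every stream channel is padded to a common length, so
# downstream correlation kernels can assume rectangular, consistently ordered
# arrays. A typical call (variable names are hypothetical) looks like:
#     st, prepped, names = _prep_data_for_correlation(
#         stream=st, templates=tribe_streams, template_names=tribe_names)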
if __name__ == "__main__":
import doctest
doctest.testmod()
|
calum-chamberlain/EQcorrscan
|
eqcorrscan/utils/pre_processing.py
|
Python
|
gpl-3.0
| 39,205 | 0 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import pytest
import numpy as np
from ..convolve import convolve, convolve_fft
from ..kernels import Gaussian2DKernel
from ...nddata import NDData
def test_basic_nddata():
arr = np.zeros((11, 11))
arr[5, 5] = 1
ndd = NDData(arr)
test_kernel = Gaussian2DKernel(1)
result = convolve(ndd, test_kernel)
x, y = np.mgrid[:11, :11]
expected = result[5, 5] * np.exp(-0.5 * ((x - 5)**2 + (y - 5)**2))
np.testing.assert_allclose(result, expected, atol=1e-6)
resultf = convolve_fft(ndd, test_kernel)
np.testing.assert_allclose(resultf, expected, atol=1e-6)
@pytest.mark.parametrize('convfunc',
[lambda *args: convolve(*args, nan_treatment='interpolate', normalize_kernel=True),
lambda *args: convolve_fft(*args, nan_treatment='interpolate', normalize_kernel=True)])
def test_masked_nddata(convfunc):
arr = np.zeros((11, 11))
arr[4, 5] = arr[6, 5] = arr[5, 4] = arr[5, 6] = 0.2
arr[5, 5] = 1.5
ndd_base = NDData(arr)
mask = arr < 0 # this is all False
mask[5, 5] = True
ndd_mask = NDData(arr, mask=mask)
arrnan = arr.copy()
arrnan[5, 5] = np.nan
ndd_nan = NDData(arrnan)
test_kernel = Gaussian2DKernel(1)
result_base = convfunc(ndd_base, test_kernel)
result_nan = convfunc(ndd_nan, test_kernel)
result_mask = convfunc(ndd_mask, test_kernel)
assert np.allclose(result_nan, result_mask)
assert not np.allclose(result_base, result_mask)
assert not np.allclose(result_base, result_nan)
# check to make sure the mask run doesn't talk back to the initial array
assert np.sum(np.isnan(ndd_base.data)) != np.sum(np.isnan(ndd_nan.data))
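# Editor's sketch, not part of the original astropy test module: with the
# default normalize_kernel=True, convolving a plain array whose flux sits well
# inside the image should conserve the total flux.
def test_flux_conservation_sketch():
    arr = np.zeros((11, 11))
    arr[5, 5] = 1.0
    kernel = Gaussian2DKernel(1)
    result = convolve(arr, kernel)
    # The default 9x9 kernel footprint fits inside the 11x11 array, so the
    # summed flux is preserved to numerical precision.
    np.testing.assert_allclose(result.sum(), arr.sum(), rtol=1e-6)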
|
kelle/astropy
|
astropy/convolution/tests/test_convolve_nddata.py
|
Python
|
bsd-3-clause
| 1,827 | 0.002737 |
""" Testing DICOM wrappers
"""
from os.path import join as pjoin, dirname
import gzip
import numpy as np
try:
import dicom
except ImportError:
have_dicom = False
else:
have_dicom = True
dicom_test = np.testing.dec.skipif(not have_dicom,
'could not import pydicom')
from .. import dicomwrappers as didw
from .. import dicomreaders as didr
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
IO_DATA_PATH = pjoin(dirname(__file__), 'data')
DATA_FILE = pjoin(IO_DATA_PATH, 'siemens_dwi_1000.dcm.gz')
if have_dicom:
DATA = dicom.read_file(gzip.open(DATA_FILE))
else:
DATA = None
DATA_FILE_B0 = pjoin(IO_DATA_PATH, 'siemens_dwi_0.dcm.gz')
DATA_FILE_SLC_NORM = pjoin(IO_DATA_PATH, 'csa_slice_norm.dcm')
# This affine from our converted image was shown to match our image
# spatially with an image from SPM DICOM conversion. We checked the
# matching with SPM check reg. We have flipped the first and second
# rows to allow for rows, cols transpose in current return compared to
# original case.
EXPECTED_AFFINE = np.array(
[[ -1.796875, 0, 0, 115],
[0, -1.79684984, -0.01570896, 135.028779],
[0, -0.00940843750, 2.99995887, -78.710481],
[0, 0, 0, 1]])[:,[1,0,2,3]]
# from Guys and Matthew's SPM code, undoing SPM's Y flip, and swapping
# first two values in vector, to account for data rows, cols difference.
EXPECTED_PARAMS = [992.05050247, (0.00507649,
0.99997450,
-0.005023611)]
@dicom_test
def test_wrappers():
# test direct wrapper calls
# first with empty data
for maker, kwargs in ((didw.Wrapper,{}),
(didw.SiemensWrapper, {}),
(didw.MosaicWrapper, {'n_mosaic':10})):
dw = maker(**kwargs)
assert_equal(dw.get('InstanceNumber'), None)
assert_equal(dw.get('AcquisitionNumber'), None)
assert_raises(KeyError, dw.__getitem__, 'not an item')
assert_raises(didw.WrapperError, dw.get_data)
assert_raises(didw.WrapperError, dw.get_affine)
for klass in (didw.Wrapper, didw.SiemensWrapper):
dw = klass()
assert_false(dw.is_mosaic)
for maker in (didw.wrapper_from_data,
didw.Wrapper,
didw.SiemensWrapper,
didw.MosaicWrapper
):
dw = maker(DATA)
assert_equal(dw.get('InstanceNumber'), 2)
assert_equal(dw.get('AcquisitionNumber'), 2)
assert_raises(KeyError, dw.__getitem__, 'not an item')
for maker in (didw.MosaicWrapper, didw.wrapper_from_data):
assert_true(dw.is_mosaic)
@dicom_test
def test_wrapper_from_data():
# test wrapper from data, wrapper from file
for dw in (didw.wrapper_from_data(DATA),
didw.wrapper_from_file(DATA_FILE)):
assert_equal(dw.get('InstanceNumber'), 2)
assert_equal(dw.get('AcquisitionNumber'), 2)
assert_raises(KeyError, dw.__getitem__, 'not an item')
assert_true(dw.is_mosaic)
assert_array_almost_equal(
np.dot(didr.DPCS_TO_TAL, dw.get_affine()),
EXPECTED_AFFINE)
@dicom_test
def test_dwi_params():
dw = didw.wrapper_from_data(DATA)
b_matrix = dw.b_matrix
assert_equal(b_matrix.shape, (3,3))
q = dw.q_vector
b = np.sqrt(np.sum(q * q)) # vector norm
g = q / b
assert_array_almost_equal(b, EXPECTED_PARAMS[0])
assert_array_almost_equal(g, EXPECTED_PARAMS[1])
@dicom_test
def test_vol_matching():
# make the Siemens wrapper, check it compares True against itself
dw_siemens = didw.wrapper_from_data(DATA)
assert_true(dw_siemens.is_mosaic)
assert_true(dw_siemens.is_csa)
assert_true(dw_siemens.is_same_series(dw_siemens))
# make plain wrapper, compare against itself
dw_plain = didw.Wrapper(DATA)
assert_false(dw_plain.is_mosaic)
assert_false(dw_plain.is_csa)
assert_true(dw_plain.is_same_series(dw_plain))
# specific vs plain wrapper compares False, because the Siemens
# wrapper has more non-empty information
assert_false(dw_plain.is_same_series(dw_siemens))
# and this should be symmetric
assert_false(dw_siemens.is_same_series(dw_plain))
# we can even make an empty wrapper. This compares True against
# itself but False against the others
dw_empty = didw.Wrapper()
assert_true(dw_empty.is_same_series(dw_empty))
assert_false(dw_empty.is_same_series(dw_plain))
assert_false(dw_plain.is_same_series(dw_empty))
# Just to check the interface, make a pretend signature-providing
# object.
class C(object):
series_signature = {}
assert_true(dw_empty.is_same_series(C()))
@dicom_test
def test_slice_indicator():
dw_0 = didw.wrapper_from_file(DATA_FILE_B0)
dw_1000 = didw.wrapper_from_data(DATA)
z = dw_0.slice_indicator
assert_false(z is None)
assert_equal(z, dw_1000.slice_indicator)
dw_empty = didw.Wrapper()
assert_true(dw_empty.slice_indicator is None)
@dicom_test
def test_orthogonal():
#Test that the slice normal is sufficiently orthogonal
dw = didw.wrapper_from_file(DATA_FILE_SLC_NORM)
R = dw.rotation_matrix
assert np.allclose(np.eye(3),
np.dot(R, R.T),
atol=1e-6)
@dicom_test
def test_use_csa_sign():
#Test that we get the same slice normal, even after swapping the iop
#directions
dw = didw.wrapper_from_file(DATA_FILE_SLC_NORM)
iop = dw.image_orient_patient
dw.image_orient_patient = np.c_[iop[:,1], iop[:,0]]
dw2 = didw.wrapper_from_file(DATA_FILE_SLC_NORM)
assert np.allclose(dw.slice_normal, dw2.slice_normal)
@dicom_test
def test_assert_parallel():
#Test that we get an AssertionError if the cross product and the CSA
#slice normal are not parallel
dw = didw.wrapper_from_file(DATA_FILE_SLC_NORM)
dw.image_orient_patient = np.c_[[1., 0., 0.], [0., 1., 0.]]
assert_raises(AssertionError, dw.__getattribute__, 'slice_normal')
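# Editor's sketch (not part of the original test module): the b-value / unit
# gradient decomposition exercised in test_dwi_params, written out for a
# made-up q vector.
def _example_q_decomposition():
    q = np.array([3.0, 4.0, 0.0])
    b = np.sqrt(np.sum(q * q))  # vector norm -> 5.0
    g = q / b                   # unit gradient direction -> [0.6, 0.8, 0.0]
    assert_array_almost_equal(b, 5.0)
    assert_array_almost_equal(g, [0.6, 0.8, 0.0])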
|
ME-ICA/me-ica
|
meica.libs/nibabel/nicom/tests/test_dicomwrappers.py
|
Python
|
lgpl-2.1
| 6,163 | 0.004543 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""!가입; 봇 게임센터에 가입합니다.\n!내정보; 내 등록된 정보를 봅니다."""
import re
import json
from botlib import BotLib
from rpg import RPG
from util.util import enum
CmdType = enum(
Register = 1,
MyInfo = 2,
WeaponInfo = 3,
AddWeapon = 4,
UpgradeWeapon = 5,
)
# Determine the command type from the input text
def input_to_CmdType(text):
if u"!가입" == text: return CmdType.Register
if u"!내정보" == text: return CmdType.MyInfo
if re.findall(u"^!무기정보 ", text): return CmdType.WeaponInfo
if re.findall(u"^!무기추가 ", text): return CmdType.AddWeapon
if re.findall(u"^!무기강화 ", text): return CmdType.UpgradeWeapon
return None
# Extract the argument from the input text.
# For now every command is assumed to take exactly one argument.
# Missing-argument errors are handled by the try-except in on_message().
def get_argument(cmdType, text):
match = None
if cmdType == CmdType.WeaponInfo :
match = re.findall(ur"^!무기정보 (.*)", text)
if not match: raise ValueError
if cmdType == CmdType.AddWeapon :
match = re.findall(ur"^!무기추가 (.*)", text)
if not match: raise ValueError
if cmdType == CmdType.UpgradeWeapon :
match = re.findall(ur"^!무기강화 (.*)", text)
if not match: raise ValueError
return match[0]
# Execute the main command
def run_command(cmdType, text, msgobj, serverobj):
userId = msgobj['user']
userobj = BotLib.get_user_json_obj(userId, serverobj)
channel = msgobj['channel']
Rpg = RPG(BotLib)
result = ''
if cmdType == CmdType.Register :
result = Rpg.add_user(userobj)
if cmdType == CmdType.MyInfo :
result = Rpg.get_user_info(userobj)
if cmdType == CmdType.WeaponInfo :
weaponname = get_argument(cmdType, text)
result = Rpg.get_weapon_info(userobj, weaponname)
if cmdType == CmdType.AddWeapon :
weaponname = get_argument(cmdType, text)
result = Rpg.add_weapon(userobj, weaponname)
if cmdType == CmdType.UpgradeWeapon :
weaponname = get_argument(cmdType, text)
result = Rpg.upgrade_weapon(userobj, weaponname)
BotLib.say(channel, result)
################################################################################
# slask plugin hook
# If a string is returned, the bot says it in the channel
def on_message(msg, server):
text = msg.get("text", "")
cmdType = input_to_CmdType(text)
if not cmdType:
return
try:
run_command(cmdType, text, msg, server['client'].server)
except ValueError:
if cmdType == CmdType.WeaponInfo :
return u"사용방법: !무기정보 <무기명> \n안내: 무기명에는 찾을 무기를 입력해주십시요."
if cmdType == CmdType.AddWeapon :
return u"사용방법: !무기추가 <무기명> \n안내: 무기명에는 추가할 무기를 입력해주십시요."
if cmdType == CmdType.UpgradeWeapon :
return u"사용방법: !무기강화 <무기명> \n안내: 무기명에는 강화할 무기를 입력해주십시요."
|
storyhe/playWithBot
|
plugins/rpgbot.py
|
Python
|
mit
| 3,282 | 0.014009 |
#
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import, print_function
import functools
from itertools import izip
from types import ListType, TupleType, GeneratorType
def flatten(seq):
"""
Flatten recursively a sequence and all its sub-sequences that can be tuples,
lists or generators (generators will be consumed): all are converted to a
flat list of elements.
For example::
>>> flatten([7, (6, [5, [4, ['a'], 3]], 3), 2, 1])
[7, 6, 5, 4, 'a', 3, 3, 2, 1]
>>> def gen():
... for i in range(2):
... yield range(5)
...
>>> flatten(gen())
[0, 1, 2, 3, 4, 0, 1, 2, 3, 4]
Originally derived from http://www.andreasen.org/misc/util.py
2002-2005 by Erwin S. Andreasen -- http://www.andreasen.org/misc.shtml
This file is in the Public Domain
Version: Id: util.py,v 1.22 2005/12/16 00:08:21 erwin Exp erwin
"""
r = []
for x in seq:
if isinstance(x, (ListType, TupleType)):
r.extend(flatten(x))
elif isinstance(x, GeneratorType):
r.extend(flatten(list(x)))
else:
r.append(x)
return r
def pair_chunks(iterable):
"""
    Return an iterable of element pairs (chunks of two) from iterable. The
    iterable must contain an even number of elements, otherwise the trailing
    element is dropped.
For example::
>>> list(pair_chunks([1, 2, 3, 4, 5, 6]))
[(1, 2), (3, 4), (5, 6)]
>>> list(pair_chunks([1, 2, 3, 4, 5, 6, 7]))
[(1, 2), (3, 4), (5, 6)]
"""
return izip(*[iter(iterable)] * 2)
def memoize(fun):
"""
Decorate fun function and cache return values. Arguments must be
hashable. kwargs are not handled. Used to speed up some often executed
functions.
Usage example::
>>> @memoize
... def expensive(*args, **kwargs):
... print('Calling expensive with', args, kwargs)
... return 'value expensive to compute' + repr(args)
>>> expensive(1, 2)
Calling expensive with (1, 2) {}
'value expensive to compute(1, 2)'
>>> expensive(1, 2)
'value expensive to compute(1, 2)'
>>> expensive(1, 2, a=0)
Calling expensive with (1, 2) {'a': 0}
'value expensive to compute(1, 2)'
>>> expensive(1, 2, a=0)
Calling expensive with (1, 2) {'a': 0}
'value expensive to compute(1, 2)'
>>> expensive(1, 2)
'value expensive to compute(1, 2)'
>>> expensive(1, 2, 5)
Calling expensive with (1, 2, 5) {}
'value expensive to compute(1, 2, 5)'
    The value returned by expensive is cached for each combination of args and
    computed only once in its life. Calls with kwargs are not cached.
"""
memos = {}
@functools.wraps(fun)
def memoized(*args, **kwargs):
# calls with kwargs are not handled and not cached
if kwargs:
return fun(*args, **kwargs)
# convert any list arg to a tuple
args = tuple(tuple(arg) if isinstance(arg, ListType) else arg
for arg in args)
try:
return memos[args]
except KeyError:
memos[args] = fun(*args)
return memos[args]
return functools.update_wrapper(memoized, fun)
def memoize_to_attribute(attr_name, _test=False):
"""
Decorate a method and cache return values in attr_name of the parent object.
Used to speed up some often called methods that cache their values in
instance variables.
Usage example::
>>> class Obj(object):
... def __init__(self):
... self._expensive = None
... @property
... @memoize_to_attribute('_expensive')
... def expensive(self):
... print('Calling expensive')
... return 'value expensive to compute'
>>> o=Obj()
>>> o.expensive
Calling expensive
'value expensive to compute'
>>> o.expensive
'value expensive to compute'
>>> o.expensive
'value expensive to compute'
The Obj().expensive property value will be cached to attr_name
self._expensive and computed only once in the life of the Obj instance.
"""
def memoized_to_attr(meth):
@functools.wraps(meth)
def wrapper(self, *args, **kwargs):
if getattr(self, attr_name) is None:
res = meth(self, *args, **kwargs)
setattr(self, attr_name, res)
else:
res = getattr(self, attr_name)
return res
return wrapper
return memoized_to_attr
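# Editor's sketch (not part of the original module): the typical payoff of
# ``memoize`` is turning an exponential-time recursion into a linear-time one,
# e.g. a naive Fibonacci:
#
#     @memoize
#     def fib(n):
#         return n if n < 2 else fib(n - 1) + fib(n - 2)
#
#     fib(200)  # fast: each fib(k) is computed exactly once and then cached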
|
lach76/scancode-toolkit
|
src/commoncode/functional.py
|
Python
|
apache-2.0
| 5,818 | 0.001203 |
import unittest
import imp
import os
import errno
import sys
import glob
import re
from distutils.errors import *
def unlink(path):
try:
os.unlink(path)
except OSError, exc:
if exc.errno != errno.ENOENT:
raise
class BrokenTest(unittest.TestCase.failureException):
def __repr__(self):
name, reason = self.args
return '%s: %s: %s works now' % (
(self.__class__.__name__, name, reason))
def broken(reason, *exceptions):
    '''Indicates a failing (or erroneous) test case that should succeed.
If the test fails with an exception, list the exception type in args'''
def wrapper(test_method):
def replacement(*args, **kwargs):
try:
test_method(*args, **kwargs)
except exceptions or unittest.TestCase.failureException:
pass
else:
raise BrokenTest(test_method.__name__, reason)
replacement.__doc__ = test_method.__doc__
replacement.__name__ = 'XXX_' + test_method.__name__
replacement.todo = reason
return replacement
return wrapper
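# Editor's usage sketch (hypothetical test, not in the original file): mark a
# known-failing test so the suite stays green, but have it flagged via
# BrokenTest as soon as it unexpectedly starts passing:
#
#     class SomeTest(unittest.TestCase):
#         @broken('parser bug not fixed yet', ValueError)
#         def testStillBroken(self):
#             raise ValueError('still failing')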
dependencyCache = {}
compileErrorCache = {}
# setup java CLASSPATH
if 'CLASSPATH' not in os.environ:
cp = []
baseDir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
libDir = os.path.join(baseDir, 'lib')
jar = os.path.join(libDir, 'stringtemplate-3.0.jar')
if not os.path.isfile(jar):
raise DistutilsFileError(
"Missing file '%s'. Grap it from a distribution package."
% jar,
)
cp.append(jar)
jar = os.path.join(libDir, 'antlr-2.7.7.jar')
if not os.path.isfile(jar):
raise DistutilsFileError(
"Missing file '%s'. Grap it from a distribution package."
% jar,
)
cp.append(jar)
jar = os.path.join(libDir, 'junit-4.2.jar')
if not os.path.isfile(jar):
raise DistutilsFileError(
"Missing file '%s'. Grap it from a distribution package."
% jar,
)
cp.append(jar)
cp.append(os.path.join(baseDir, 'runtime', 'Python', 'build'))
classpath = '-cp "' + ':'.join([os.path.abspath(p) for p in cp]) + '"'
else:
classpath = ''
class ANTLRTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
self.baseName = os.path.splitext(os.path.basename(sys.modules[self.__module__].__file__))[0]
self.lexerModule = None
self.parserModule = None
def _invokeantlr(self, dir, file, options):
fp = os.popen('cd %s; java %s org.antlr.Tool %s %s 2>&1'
% (dir, classpath, options, file)
)
output = ''
failed = False
for line in fp:
output += line
if line.startswith('error('):
failed = True
rc = fp.close()
if rc is not None:
failed = True
if failed:
raise RuntimeError(
"Failed to compile grammar '%s':\n\n" % file
+ output
)
def compileGrammar(self, grammarName=None, options=''):
if grammarName is None:
grammarName = self.baseName + '.g'
# don't try to rebuild grammar, if it already failed
if grammarName in compileErrorCache:
return
try:
testDir = os.path.dirname(os.path.abspath(__file__))
# get dependencies from antlr
if grammarName in dependencyCache:
dependencies = dependencyCache[grammarName]
else:
dependencies = []
cmd = ('cd %s; java %s org.antlr.Tool -depend %s 2>&1'
% (testDir, classpath, grammarName)
)
output = ""
failed = False
fp = os.popen(cmd)
for line in fp:
output += line
if line.startswith('error('):
failed = True
elif ':' in line:
a, b = line.strip().split(':', 1)
dependencies.append(
(os.path.join(testDir, a.strip()),
[os.path.join(testDir, b.strip())])
)
rc = fp.close()
if rc is not None:
failed = True
if failed:
raise RuntimeError(
"antlr -depend failed with code %s on grammar '%s':\n\n"
% (rc, grammarName)
+ cmd
+ "\n"
+ output
)
# add dependencies to my .stg files
templateDir = os.path.abspath(os.path.join(testDir, '..', '..', '..', 'src', 'org', 'antlr', 'codegen', 'templates', 'Python'))
templates = glob.glob(os.path.join(templateDir, '*.stg'))
for dst, src in dependencies:
src.extend(templates)
dependencyCache[grammarName] = dependencies
rebuild = False
for dest, sources in dependencies:
if not os.path.isfile(dest):
rebuild = True
break
for source in sources:
if os.path.getmtime(source) > os.path.getmtime(dest):
rebuild = True
break
if rebuild:
self._invokeantlr(testDir, grammarName, options)
except:
# mark grammar as broken
compileErrorCache[grammarName] = True
raise
def lexerClass(self, base):
"""Optionally build a subclass of generated lexer class"""
return base
def parserClass(self, base):
"""Optionally build a subclass of generated parser class"""
return base
def walkerClass(self, base):
"""Optionally build a subclass of generated walker class"""
return base
def __load_module(self, name):
modFile, modPathname, modDescription \
= imp.find_module(name, [os.path.dirname(__file__)])
return imp.load_module(
name, modFile, modPathname, modDescription
)
def getLexer(self, *args, **kwargs):
"""Build lexer instance. Arguments are passed to lexer.__init__()."""
self.lexerModule = self.__load_module(self.baseName + 'Lexer')
cls = getattr(self.lexerModule, self.baseName + 'Lexer')
cls = self.lexerClass(cls)
lexer = cls(*args, **kwargs)
return lexer
def getParser(self, *args, **kwargs):
"""Build parser instance. Arguments are passed to parser.__init__()."""
self.parserModule = self.__load_module(self.baseName + 'Parser')
cls = getattr(self.parserModule, self.baseName + 'Parser')
cls = self.parserClass(cls)
parser = cls(*args, **kwargs)
return parser
def getWalker(self, *args, **kwargs):
"""Build walker instance. Arguments are passed to walker.__init__()."""
self.walkerModule = self.__load_module(self.baseName + 'Walker')
cls = getattr(self.walkerModule, self.baseName + 'Walker')
cls = self.walkerClass(cls)
walker = cls(*args, **kwargs)
return walker
def compileInlineGrammar(self, grammar, options=''):
testDir = os.path.dirname(os.path.abspath(__file__))
# get type and name from first grammar line
m = re.match(r'\s*((lexer|parser|tree)\s+|)grammar\s+(\S+);', grammar)
assert m is not None
grammarType = m.group(2)
if grammarType is None:
grammarType = 'combined'
grammarName = m.group(3)
assert grammarType in ('lexer', 'parser', 'tree', 'combined'), grammarType
# dump temp grammar file
fp = open(os.path.join(testDir, grammarName + '.g'), 'w')
fp.write(grammar)
fp.close()
# compile it
self._invokeantlr(testDir, grammarName + '.g', options)
if grammarType == 'combined':
lexerMod = self.__load_module(grammarName + 'Lexer')
lexerCls = getattr(lexerMod, grammarName + 'Lexer')
lexerCls = self.lexerClass(lexerCls)
parserMod = self.__load_module(grammarName + 'Parser')
parserCls = getattr(parserMod, grammarName + 'Parser')
parserCls = self.parserClass(parserCls)
return lexerCls, parserCls
if grammarType == 'lexer':
lexerMod = self.__load_module(grammarName + 'Lexer')
lexerCls = getattr(lexerMod, grammarName + 'Lexer')
lexerCls = self.lexerClass(lexerCls)
return lexerCls
if grammarType == 'parser':
parserMod = self.__load_module(grammarName + 'Parser')
parserCls = getattr(parserMod, grammarName + 'Parser')
parserCls = self.parserClass(parserCls)
return parserCls
if grammarType == 'tree':
walkerMod = self.__load_module(grammarName)
walkerCls = getattr(walkerMod, grammarName)
walkerCls = self.walkerClass(walkerCls)
return walkerCls
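# Editor's usage sketch (grammar name and antlr3.StringStream usage are
# assumptions, not taken from this file): a concrete test module would
# subclass ANTLRTest, compile its grammar, then instantiate the generated
# classes:
#
#     class t001lexer(ANTLRTest):
#         def setUp(self):
#             self.compileGrammar()   # expects t001lexer.g beside the test
#         def testValid(self):
#             lexer = self.getLexer(antlr3.StringStream('foo'))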
|
sshrdp/mclab
|
lib/antlr-3.0.1/runtime/Python/tests/testbase.py
|
Python
|
apache-2.0
| 9,623 | 0.004053 |
'''
.. module:: skrf.network
========================================
network (:mod:`skrf.network`)
========================================
Provides a n-port network class and associated functions.
Most of the functionality in this module is provided as methods and
properties of the :class:`Network` Class.
Network Class
===============
.. autosummary::
:toctree: generated/
Network
Network Representations
============================
.. autosummary::
:toctree: generated/
Network.s
Network.z
Network.y
Network.a
Network.t
Connecting Networks
===============================
.. autosummary::
:toctree: generated/
connect
innerconnect
cascade
cascade_list
de_embed
flip
Interpolation and Concatenation Along Frequency Axis
=====================================================
.. autosummary::
:toctree: generated/
stitch
overlap
Network.resample
Network.interpolate
Network.interpolate_self
Network.interpolate_from_f
Combining Networks
===================================
.. autosummary::
:toctree: generated/
n_oneports_2_nport
four_oneports_2_twoport
three_twoports_2_threeport
n_twoports_2_nport
IO
====
.. autosummary::
skrf.io.general.read
skrf.io.general.write
skrf.io.general.ntwk_2_spreadsheet
Network.write
Network.write_touchstone
Network.read
Network.write_spreadsheet
Noise
============
.. autosummary::
:toctree: generated/
Network.add_noise_polar
Network.add_noise_polar_flatband
Network.multiply_noise
Supporting Functions
======================
.. autosummary::
:toctree: generated/
inv
connect_s
innerconnect_s
s2z
s2y
s2t
s2a
z2s
z2y
z2t
z2a
y2s
y2z
y2t
t2s
t2z
t2y
fix_z0_shape
renormalize_s
passivity
reciprocity
Misc Functions
=====================
.. autosummary::
:toctree: generated/
average
two_port_reflect
chopinhalf
Network.nudge
Network.renormalize
'''
from six.moves import xrange
import os
import warnings
try:
import cPickle as pickle
from cPickle import UnpicklingError
except ImportError:
import pickle as pickle
from pickle import UnpicklingError
from copy import deepcopy as copy
import re
from numbers import Number
from itertools import product
import numpy as npy
from numpy.linalg import inv as npy_inv
import pylab as plb
from scipy import stats,signal # for Network.add_noise_*, and Network.windowed
from scipy.interpolate import interp1d # for Network.interpolate()
from numpy import fft
import unittest  # for unittest.skip
from . import mathFunctions as mf
from . frequency import Frequency
from . plotting import *#smith, plot_rectangular, plot_smith, plot_complex_polar
from . tlineFunctions import zl_2_Gamma0
from . util import get_fid, get_extn, find_nearest_index,slice_domain
## later imports. delayed to solve circular dependencies
#from io.general import read, write
#from io import touchstone
#from io.general import network_2_spreadsheet
from .constants import ZERO
class Network(object):
'''
A n-port electrical network [#]_.
For instructions on how to create Network see :func:`__init__`.
A n-port network may be defined by three quantities,
* network parameter matrix (s, z, or y-matrix)
* port characteristic impedance matrix
* frequency information
The :class:`Network` class stores these data structures internally
in the form of complex :class:`numpy.ndarray`'s. These arrays are not
interfaced directly but instead through the use of the properties:
===================== =============================================
Property Meaning
===================== =============================================
:attr:`s` scattering parameter matrix
:attr:`z0` characteristic impedance matrix
:attr:`f` frequency vector
===================== =============================================
Although these docs focus on s-parameters, other equivalent network
representations such as :attr:`z` and :attr:`y` are
available. Scalar projections of the complex network parameters
are accessible through properties as well. These also return
:class:`numpy.ndarray`'s.
===================== =============================================
Property Meaning
===================== =============================================
:attr:`s_re` real part of the s-matrix
:attr:`s_im` imaginary part of the s-matrix
:attr:`s_mag` magnitude of the s-matrix
:attr:`s_db` magnitude in log scale of the s-matrix
:attr:`s_deg` phase of the s-matrix in degrees
:attr:`s_gd` group delay derived from the s-matrix
===================== =============================================
The following operations act on the networks s-matrix.
===================== =============================================
Operator Function
===================== =============================================
\+ element-wise addition of the s-matrix
\- element-wise difference of the s-matrix
\* element-wise multiplication of the s-matrix
\/ element-wise division of the s-matrix
\*\* cascading (only for 2-ports)
\// de-embedding (for 2-ports, see :attr:`inv`)
===================== =============================================
Different components of the :class:`Network` can be visualized
through various plotting methods. These methods can be used to plot
individual elements of the s-matrix or all at once. For more info
about plotting see the :doc:`../../tutorials/plotting` tutorial.
========================= =============================================
Method Meaning
========================= =============================================
:func:`plot_s_smith` plot complex s-parameters on smith chart
:func:`plot_s_re` plot real part of s-parameters vs frequency
:func:`plot_s_im` plot imaginary part of s-parameters vs frequency
:func:`plot_s_mag` plot magnitude of s-parameters vs frequency
:func:`plot_s_db` plot magnitude (in dB) of s-parameters vs frequency
:func:`plot_s_deg` plot phase of s-parameters (in degrees) vs frequency
:func:`plot_s_deg_unwrap` plot phase of s-parameters (in unwrapped degrees) vs frequency
:func:`plot_s_gd` plot group delay of s-parameters (in s) vs frequency
========================= =============================================
:class:`Network` objects can be created from a touchstone or pickle
file (see :func:`__init__`), by a
:class:`~skrf.media.media.Media` object, or manually by assigning the
network properties directly. :class:`Network` objects
can be saved to disk in the form of touchstone files with the
:func:`write_touchstone` method.
An exhaustive list of :class:`Network` Methods and Properties
(Attributes) are given below
References
------------
.. [#] http://en.wikipedia.org/wiki/Two-port_network
'''
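    # Editor's sketch (hypothetical file names, not part of scikit-rf): the
    # cascading / de-embedding operators in the table above are used like
    #     line = rf.Network('thru.s2p')        # a 2-port
    #     dut = rf.Network('dut.s2p')
    #     cascaded = line ** dut               # port 1 of line into port 0 of dut
    #     de_embedded = cascaded // line       # strip the line back off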
global PRIMARY_PROPERTIES
PRIMARY_PROPERTIES = [ 's','z','y','a']
global COMPONENT_FUNC_DICT
COMPONENT_FUNC_DICT = {
're' : npy.real,
'im' : npy.imag,
'mag' : npy.abs,
'db' : mf.complex_2_db,
'db10' : mf.complex_2_db10,
'rad' : npy.angle,
'deg' : lambda x: npy.angle(x, deg=True),
'arcl' : lambda x: npy.angle(x) * npy.abs(x),
'rad_unwrap' : lambda x: mf.unwrap_rad(npy.angle(x)),
'deg_unwrap' : lambda x: mf.radian_2_degree(mf.unwrap_rad(\
npy.angle(x))),
'arcl_unwrap' : lambda x: mf.unwrap_rad(npy.angle(x)) *\
npy.abs(x),
'gd' : lambda x: -1 * npy.gradient(mf.unwrap_rad(npy.angle(x)))[0],
'vswr' : lambda x: (1+abs(x))/(1-abs(x)),
'time' : lambda x: fft.ifftshift(fft.ifft(x, axis=0), axes=0),
'time_db' : lambda x: mf.complex_2_db(fft.ifftshift(fft.ifft(x, axis=0),axes=0)),
'time_mag' : lambda x: mf.complex_2_magnitude(fft.ifftshift(fft.ifft(x, axis=0),axes=0)),
}
# provides y-axis labels to the plotting functions
global Y_LABEL_DICT
Y_LABEL_DICT = {
're' : 'Real Part',
'im' : 'Imag Part',
'mag' : 'Magnitude',
'abs' : 'Magnitude',
'db' : 'Magnitude (dB)',
'db10' : 'Magnitude (dB)',
'deg' : 'Phase (deg)',
'deg_unwrap' : 'Phase (deg)',
'rad' : 'Phase (rad)',
'rad_unwrap' : 'Phase (rad)',
'arcl' : 'Arc Length',
'arcl_unwrap' : 'Arc Length',
'gd' : 'Group Delay (s)',
'vswr' : 'VSWR',
'passivity' : 'Passivity',
'reciprocity' : 'Reciprocity',
'time': 'Time (real)',
'time_db': 'Magnitude (dB)',
'time_mag': 'Magnitude',
}
## CONSTRUCTOR
def __init__(self, file = None, name = None , comments = None, f_unit=None, **kwargs):
'''
Network constructor.
Creates an n-port microwave network from a `file` or directly
from data. If no file or data is given, then an empty Network
is created.
Parameters
------------
file : str or file-object
file to load information from. supported formats are:
* touchstone file (.s?p)
* pickled Network (.ntwk, .p) see :func:`write`
name : str
            Name of this Network. If None, the name is taken from `file`
            when `file` is a str
comments : str
Comments associated with the Network
\*\*kwargs :
key word arguments can be used to assign properties of the
Network, such as `s`, `f` and `z0`.
Examples
------------
From a touchstone
>>> n = rf.Network('ntwk1.s2p')
From a pickle file
>>> n = rf.Network('ntwk1.ntwk')
Create a blank network, then fill in values
>>> n = rf.Network()
>>> freq = rf.Frequency(1,3,3,'ghz')
>>> n.frequency, n.s, n.z0 = freq,[1,2,3], [1,2,3]
Directly from values
>>> n = rf.Network(f=[1,2,3],s=[1,2,3],z0=[1,2,3])
See Also
-----------
read : read a network from a file
write : write a network to a file, using pickle
write_touchstone : write a network to a touchstone file
'''
        # allow for old kwarg for backward compatibility
if 'touchstone_filename' in kwargs:
file = kwargs['touchstone_filename']
self.name = name
self.comments = comments
if file is not None:
# allows user to pass filename or file obj
# open file in 'binary' mode because we are going to try and
# unpickle it first
fid = get_fid(file,'rb')
try:
self.read(fid)
except(UnpicklingError):
# if unpickling doesn't work then, close fid, reopen in
# non-binary mode and try to read it as touchstone
fid.close()
fid = get_fid(file)
self.read_touchstone(fid)
if name is None and isinstance(file,str):
name = os.path.splitext(os.path.basename(file))[0]
if self.frequency is not None and f_unit is not None:
self.frequency.unit = f_unit
# allow properties to be set through the constructor
for attr in PRIMARY_PROPERTIES + ['frequency','z0','f']:
if attr in kwargs:
self.__setattr__(attr,kwargs[attr])
#self.nports = self.number_of_ports
##TODO: remove this as it takes up ~70% cpu time of this init
self.__generate_plot_functions()
## OPERATORS
def __pow__(self,other):
'''
cascade this network with another network
port 1 of this network is connected to port 0 or the other
network
'''
# if they pass a number then use power operator
if isinstance(other, Number):
out = self.copy()
out.s = out.s**other
return out
# else connect the two
return connect(self,1,other,0)
def __floordiv__(self,other):
'''
        de-embedding another network (or networks) from this network
See Also
----------
inv : inverse s-parameters
'''
try:
# if they passed 1 ntwks and a tuple of ntwks,
# then deEmbed like A.inv*C*B.inv
b = other[0]
c = other[1]
result = copy (self)
result.s = (b.inv**self**c.inv).s
#flip(de_embed( flip(de_embed(c.s,self.s)),b.s))
return result
except TypeError:
pass
if other.number_of_ports == 2:
result = self.copy()
result.s = (other.inv**self).s
#de_embed(self.s,other.s)
return result
else:
raise IndexError('Incorrect number of ports.')
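    # Editor's sketch (hypothetical networks, not part of scikit-rf): passing
    # a tuple de-embeds from both sides, i.e.
    #     dut_only = measured // (left_fixture, right_fixture)
    # is equivalent to left_fixture.inv ** measured ** right_fixture.inv.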
def __mul__(self,other):
'''
Element-wise complex multiplication of s-matrix
'''
result = self.copy()
if isinstance(other, Network):
self.__compatable_for_scalar_operation_test(other)
result.s = self.s * other.s
else:
# other may be an array or a number
result.s = self.s * npy.array(other).reshape(-1,self.nports,self.nports)
return result
def __rmul__(self,other):
'''
Element-wise complex multiplication of s-matrix
'''
result = self.copy()
if isinstance(other, Network):
self.__compatable_for_scalar_operation_test(other)
result.s = self.s * other.s
else:
# other may be an array or a number
result.s = self.s * npy.array(other).reshape(-1,self.nports,self.nports)
return result
def __add__(self,other):
'''
Element-wise complex addition of s-matrix
'''
result = self.copy()
if isinstance(other, Network):
self.__compatable_for_scalar_operation_test(other)
result.s = self.s + other.s
else:
# other may be an array or a number
result.s = self.s + npy.array(other).reshape(-1,self.nports,self.nports)
return result
def __radd__(self,other):
'''
Element-wise complex addition of s-matrix
'''
result = self.copy()
if isinstance(other, Network):
self.__compatable_for_scalar_operation_test(other)
result.s = self.s + other.s
else:
# other may be an array or a number
result.s = self.s + npy.array(other).reshape(-1,self.nports,self.nports)
return result
def __sub__(self,other):
'''
Element-wise complex subtraction of s-matrix
'''
result = self.copy()
if isinstance(other, Network):
self.__compatable_for_scalar_operation_test(other)
result.s = self.s - other.s
else:
# other may be an array or a number
result.s = self.s - npy.array(other).reshape(-1,self.nports,self.nports)
return result
def __rsub__(self,other):
'''
Element-wise complex subtraction of s-matrix
'''
result = self.copy()
if isinstance(other, Network):
self.__compatable_for_scalar_operation_test(other)
result.s = other.s - self.s
else:
# other may be an array or a number
result.s = npy.array(other).reshape(-1,self.nports,self.nports) - self.s
return result
def __truediv__(self,other):
return self.__div__(other)
def __div__(self,other):
'''
Element-wise complex multiplication of s-matrix
'''
result = self.copy()
if isinstance(other, Network):
self.__compatable_for_scalar_operation_test(other)
result.s = self.s / other.s
else:
# other may be an array or a number
result.s = self.s / npy.array(other).reshape(-1,self.nports,self.nports)
return result
def __eq__(self,other):
if other is None:
return False
if npy.all(npy.abs(self.s - other.s) < ZERO):
return True
else:
return False
def __ne__(self,other):
return (not self.__eq__(other))
def __getitem__(self,key):
'''
Slices a Network object based on an index, or human readable string
Parameters
-----------
key : str, or slice
if slice; like [2-10] then it is interpreted as the index of
the frequency.
if str, then should be like '50.1-75.5ghz', or just '50'.
If the frequency unit is omited then self.frequency.unit is
used.
Examples
-----------
>>> from skrf.data import ring_slot
>>> a = ring_slot['80-90ghz']
>>> a.plot_s_db()
'''
        a = self.z0  # HACK: force the z0 getter to re-shape it
sliced_frequency = self.frequency[key]
return self.interpolate(sliced_frequency)
def __str__(self):
'''
'''
f = self.frequency
if self.name is None:
name = ''
else:
name = self.name
if len(npy.shape(self.z0)) == 0:
z0 = str(self.z0)
else:
z0 = str(self.z0[0,:])
output = '%i-Port Network: \'%s\', %s, z0=%s' % (self.number_of_ports, name, str(f), z0)
return output
def __repr__(self):
return self.__str__()
def __len__(self):
'''
length of frequency axis
'''
return len(self.s)
## INTERNAL CODE GENERATION METHODS
def __compatable_for_scalar_operation_test(self, other):
'''
tests to make sure other network's s-matrix is of same shape
'''
if other.frequency != self.frequency:
raise IndexError('Networks must have same frequency. See `Network.interpolate`')
if other.s.shape != self.s.shape:
raise IndexError('Networks must have same number of ports.')
def __generate_secondary_properties(self):
'''
creates numerous `secondary properties` which are various
different scalar projects of the primary properties. the primary
properties are s,z, and y.
'''
for prop_name in PRIMARY_PROPERTIES:
for func_name in COMPONENT_FUNC_DICT:
func = COMPONENT_FUNC_DICT[func_name]
if 'gd' in func_name: # scaling of gradient by frequency
def fget(self, f=func, p = prop_name):
return f(getattr(self,p)) / (2 * npy.pi * self.frequency.step)
else:
def fget(self, f=func, p = prop_name):
return f(getattr(self,p))
doc = '''
The %s component of the %s-matrix
See Also
----------
%s
'''%(func_name, prop_name, prop_name)
setattr(self.__class__,'%s_%s'%(prop_name, func_name),\
property(fget, doc = doc))
def __generate_plot_functions(self):
'''
'''
for prop_name in PRIMARY_PROPERTIES:
def plot_prop_polar(self,
m=None, n=None, ax=None,
show_legend=True ,prop_name=prop_name,*args, **kwargs):
# create index lists, if not provided by user
if m is None:
M = range(self.number_of_ports)
else:
M = [m]
if n is None:
N = range(self.number_of_ports)
else:
N = [n]
if 'label' not in kwargs.keys():
gen_label = True
else:
gen_label = False
was_interactive = plb.isinteractive
if was_interactive:
plb.interactive(False)
for m in M:
for n in N:
# set the legend label for this trace to the networks
# name if it exists, and they didnt pass a name key in
# the kwargs
if gen_label:
if self.name is None:
if plb.rcParams['text.usetex']:
label_string = '$%s_{%i%i}$'%\
(prop_name[0].upper(),m+1,n+1)
else:
label_string = '%s%i%i'%\
(prop_name[0].upper(),m+1,n+1)
else:
if plb.rcParams['text.usetex']:
label_string = self.name+', $%s_{%i%i}$'%\
(prop_name[0].upper(),m+1,n+1)
else:
label_string = self.name+', %s%i%i'%\
(prop_name[0].upper(),m+1,n+1)
kwargs['label'] = label_string
# plot the desired attribute vs frequency
plot_complex_polar(
z = getattr(self,prop_name)[:,m,n],
show_legend = show_legend, ax = ax,
*args, **kwargs)
if was_interactive:
plb.interactive(True)
plb.draw()
plb.show()
plot_prop_polar.__doc__ = '''
plot the Network attribute :attr:`%s` vs frequency.
Parameters
-----------
m : int, optional
first index of s-parameter matrix, if None will use all
n : int, optional
                second index of the s-parameter matrix, if None will use all
ax : :class:`matplotlib.Axes` object, optional
An existing Axes object to plot on
show_legend : Boolean
draw legend or not
attribute : string
Network attribute to plot
y_label : string, optional
the y-axis label
\*args,\\**kwargs : arguments, keyword arguments
passed to :func:`matplotlib.plot`
Notes
-------
This function is dynamically generated upon Network
initialization. This is accomplished by calling
:func:`plot_vs_frequency_generic`
Examples
------------
>>> myntwk.plot_%s(m=1,n=0,color='r')
'''%(prop_name,prop_name)
setattr(self.__class__,'plot_%s_polar'%(prop_name), \
plot_prop_polar)
def plot_prop_rect(self,
m=None, n=None, ax=None,
show_legend=True,prop_name=prop_name,*args, **kwargs):
# create index lists, if not provided by user
if m is None:
M = range(self.number_of_ports)
else:
M = [m]
if n is None:
N = range(self.number_of_ports)
else:
N = [n]
if 'label' not in kwargs.keys():
gen_label = True
else:
gen_label = False
#was_interactive = plb.isinteractive
#if was_interactive:
# plb.interactive(False)
for m in M:
for n in N:
# set the legend label for this trace to the networks
# name if it exists, and they didnt pass a name key in
# the kwargs
if gen_label:
if self.name is None:
if plb.rcParams['text.usetex']:
label_string = '$%s_{%i%i}$'%\
(prop_name[0].upper(),m+1,n+1)
else:
label_string = '%s%i%i'%\
(prop_name[0].upper(),m+1,n+1)
else:
if plb.rcParams['text.usetex']:
label_string = self.name+', $%s_{%i%i}$'%\
(prop_name[0].upper(),m+1,n+1)
else:
label_string = self.name+', %s%i%i'%\
(prop_name[0].upper(),m+1,n+1)
kwargs['label'] = label_string
# plot the desired attribute vs frequency
plot_complex_rectangular(
z = getattr(self,prop_name)[:,m,n],
show_legend = show_legend, ax = ax,
*args, **kwargs)
#if was_interactive:
# plb.interactive(True)
# plb.draw()
# plb.show()
plot_prop_rect.__doc__ = '''
plot the Network attribute :attr:`%s` vs frequency.
Parameters
-----------
m : int, optional
first index of s-parameter matrix, if None will use all
n : int, optional
                second index of the s-parameter matrix, if None will use all
ax : :class:`matplotlib.Axes` object, optional
An existing Axes object to plot on
show_legend : Boolean
draw legend or not
attribute : string
Network attribute to plot
y_label : string, optional
the y-axis label
\*args,\\**kwargs : arguments, keyword arguments
passed to :func:`matplotlib.plot`
Notes
-------
This function is dynamically generated upon Network
initialization. This is accomplished by calling
:func:`plot_vs_frequency_generic`
Examples
------------
>>> myntwk.plot_%s(m=1,n=0,color='r')
'''%(prop_name,prop_name)
setattr(self.__class__,'plot_%s_complex'%(prop_name), \
plot_prop_rect)
for func_name in COMPONENT_FUNC_DICT:
attribute = '%s_%s'%(prop_name, func_name)
y_label = Y_LABEL_DICT[func_name]
def plot_func(self, m=None, n=None, ax=None,
show_legend=True,attribute=attribute,
y_label=y_label,*args, **kwargs):
# create index lists, if not provided by user
if m is None:
M = range(self.number_of_ports)
else:
M = [m]
if n is None:
N = range(self.number_of_ports)
else:
N = [n]
if 'label' not in kwargs.keys():
gen_label = True
else:
gen_label = False
#TODO: turn off interactive plotting for performance
# this didnt work because it required a show()
# to be called, which in turn, disrupted testCases
#
#was_interactive = plb.isinteractive
#if was_interactive:
# plb.interactive(False)
for m in M:
for n in N:
# set the legend label for this trace to the networks
# name if it exists, and they didnt pass a name key in
# the kwargs
if gen_label:
if self.name is None:
if plb.rcParams['text.usetex']:
label_string = '$%s_{%i%i}$'%\
(attribute[0].upper(),m+1,n+1)
else:
label_string = '%s%i%i'%\
(attribute[0].upper(),m+1,n+1)
else:
if plb.rcParams['text.usetex']:
label_string = self.name+', $%s_{%i%i}$'%\
(attribute[0].upper(),m+1,n+1)
else:
label_string = self.name+', %s%i%i'%\
(attribute[0].upper(),m+1,n+1)
kwargs['label'] = label_string
# plot the desired attribute vs frequency
if 'time' in attribute:
xlabel = 'Time (ns)'
x = self.frequency.t_ns
else:
xlabel = 'Frequency (%s)'%self.frequency.unit
x = self.frequency.f_scaled
plot_rectangular(
x = x,
y = getattr(self,attribute)[:,m,n],
x_label = xlabel,
y_label = y_label,
show_legend = show_legend, ax = ax,
*args, **kwargs)
#if was_interactive:
# plb.interactive(True)
# plb.draw()
# #plb.show()
plot_func.__doc__ = '''
plot the Network attribute :attr:`%s` vs frequency.
Parameters
-----------
m : int, optional
first index of s-parameter matrix, if None will use all
n : int, optional
                second index of the s-parameter matrix, if None will use all
ax : :class:`matplotlib.Axes` object, optional
An existing Axes object to plot on
show_legend : Boolean
draw legend or not
attribute : string
Network attribute to plot
y_label : string, optional
the y-axis label
\*args,\\**kwargs : arguments, keyword arguments
passed to :func:`matplotlib.plot`
Notes
-------
This function is dynamically generated upon Network
initialization. This is accomplished by calling
:func:`plot_vs_frequency_generic`
Examples
------------
>>> myntwk.plot_%s(m=1,n=0,color='r')
'''%(attribute,attribute)
setattr(self.__class__,'plot_%s'%(attribute), \
plot_func)
def __generate_subnetworks(self):
'''
generates all one-port sub-networks
'''
for m in range(self.number_of_ports):
for n in range(self.number_of_ports):
def fget(self,m=m,n=n):
ntwk = self.copy()
ntwk.s = self.s[:,m,n]
ntwk.z0 = self.z0[:,m]
return ntwk
doc = '''
one-port sub-network.
'''
setattr(self.__class__,'s%i%i'%(m+1,n+1),\
property(fget,doc=doc))
def plot_s_db_time(self,*args,**kwargs):
return self.windowed().plot_s_time_db(*args,**kwargs)
## PRIMARY PROPERTIES
@property
def s(self):
'''
Scattering parameter matrix.
The s-matrix[#]_ is a 3-dimensional :class:`numpy.ndarray` which has shape
`fxnxn`, where `f` is frequency axis and `n` is number of ports.
Note that indexing starts at 0, so s11 can be accessed by
taking the slice s[:,0,0].
Returns
---------
s : complex :class:`numpy.ndarray` of shape `fxnxn`
the scattering parameter matrix.
See Also
------------
s
y
z
t
a
References
------------
.. [#] http://en.wikipedia.org/wiki/Scattering_parameters
'''
return self._s
@s.setter
def s(self, s):
'''
the input s-matrix should be of shape fxnxn,
where f is frequency axis and n is number of ports
'''
s_shape= npy.shape(s)
if len(s_shape) <3:
if len(s_shape) == 2:
# reshape to kx1x1, this simplifies indexing in function
s = npy.reshape(s,(-1,s_shape[0],s_shape[0]))
else:
s = npy.reshape(s,(-1,1,1))
self._s = npy.array(s,dtype=complex)
self.__generate_secondary_properties()
self.__generate_subnetworks()
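    # Editor's note (illustrative, not part of scikit-rf): the setter above
    # coerces several input shapes to fxnxn. For a one-port with three
    # frequency points
    #     ntwk.s = [0.1, 0.2, 0.3]       # shape (3,)   -> (3, 1, 1)
    # while a single nxn matrix is treated as one frequency point
    #     ntwk.s = [[0, 1], [1, 0]]      # shape (2, 2) -> (1, 2, 2)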
@property
def y(self):
'''
Admittance parameter matrix.
The y-matrix [#]_ is a 3-dimensional :class:`numpy.ndarray` which has shape
`fxnxn`, where `f` is frequency axis and `n` is number of ports.
Note that indexing starts at 0, so y11 can be accessed by
taking the slice `y[:,0,0]`.
Returns
---------
y : complex :class:`numpy.ndarray` of shape `fxnxn`
the admittance parameter matrix.
See Also
------------
s
y
z
t
a
References
------------
.. [#] http://en.wikipedia.org/wiki/Admittance_parameters
'''
return s2y(self._s, self.z0)
@y.setter
def y(self, value):
self._s = y2s(value, self.z0)
@property
def z(self):
'''
Impedance parameter matrix.
The z-matrix [#]_ is a 3-dimensional :class:`numpy.ndarray` which has shape
`fxnxn`, where `f` is frequency axis and `n` is number of ports.
Note that indexing starts at 0, so z11 can be accessed by
taking the slice `z[:,0,0]`.
Returns
---------
z : complex :class:`numpy.ndarray` of shape `fxnxn`
the Impedance parameter matrix.
See Also
------------
s
y
z
t
a
References
------------
.. [#] http://en.wikipedia.org/wiki/impedance_parameters
'''
return s2z(self._s, self.z0)
@z.setter
def z(self, value):
self._s = z2s(value, self.z0)
@property
def t(self):
'''
Scattering transfer parameters
The t-matrix [#]_ is a 3-dimensional :class:`numpy.ndarray`
which has shape `fx2x2`, where `f` is frequency axis.
Note that indexing starts at 0, so t11 can be accessed by
taking the slice `t[:,0,0]`.
The t-matrix, also known as the wave cascading matrix, is
only defined for a 2-port Network.
Returns
--------
t : complex numpy.ndarry of shape `fx2x2`
t-parameters, aka scattering transfer parameters
See Also
------------
s
y
z
t
a
References
-----------
.. [#] http://en.wikipedia.org/wiki/Scattering_parameters#Scattering_transfer_parameters
'''
return s2t(self.s)
@property
def sa(self):
'''
Active scattering parameter matrix.
Active scattering parameters are simply inverted s-parameters,
defined as a = 1/s. Useful in analysis of active networks.
The a-matrix is a 3-dimensional :class:`numpy.ndarray` which has shape
`fxnxn`, where `f` is frequency axis and `n` is number of ports.
Note that indexing starts at 0, so a11 can be accessed by
taking the slice a[:,0,0].
Returns
---------
a : complex :class:`numpy.ndarray` of shape `fxnxn`
the active scattering parameter matrix.
See Also
------------
s
y
z
t
a
'''
return 1/self.s
@sa.setter
def sa(self, value):
raise (NotImplementedError)
@property
def a(self):
'''
abcd parameter matrix. Used to cascade two-ports
The abcd-matrix [#]_ is a 3-dimensional :class:`numpy.ndarray` which has shape
`fxnxn`, where `f` is frequency axis and `n` is number of ports.
Note that indexing starts at 0, so abcd11 can be accessed by
taking the slice `abcd[:,0,0]`.
Returns
---------
abcd : complex :class:`numpy.ndarray` of shape `fxnxn`
the Impedance parameter matrix.
See Also
------------
s
y
z
t
a
abcd
References
------------
.. [#] http://en.wikipedia.org/wiki/impedance_parameters
'''
return s2a(self.s, self.z0)
@a.setter
def a(self, value):
raise (NotImplementedError)
@property
def z0(self):
'''
Characteristic impedance[s] of the network ports.
This property stores the characteristic impedance of each port
of the network. Because it is possible that each port has
a different characteristic impedance each varying with
frequency, `z0` is stored internally as a `fxn` array.
However because `z0` is frequently simple (like 50ohm), it can
be set with just number as well.
Returns
--------
z0 : :class:`numpy.ndarray` of shape fxn
characteristic impedance for network
'''
        # This getter is written this way because we would like to allow the
        # user to set z0 before the s-parameters are set. In that case z0
        # cannot yet be re-shaped to fxn, so the re-shaping is attempted when
        # z0 is accessed rather than when it is set, which is what makes this
        # function confusing.
try:
if len(npy.shape(self._z0)) ==0:
try:
#try and re-shape z0 to match s
self._z0=self._z0*npy.ones(self.s.shape[:-1])
except(AttributeError):
print ('Warning: Network has improper \'z0\' shape.')
#they have yet to set s .
elif len(npy.shape(self._z0)) ==1:
try:
if len(self._z0) == self.frequency.npoints:
# this z0 is frequency dependent but not port dependent
self._z0 = \
npy.repeat(npy.reshape(self._z0,(-1,1)),self.number_of_ports,1)
elif len(self._z0) == self.number_of_ports:
# this z0 is port dependent but not frequency dependent
self._z0 = self._z0*npy.ones(\
(self.frequency.npoints,self.number_of_ports))
else:
raise(IndexError('z0 has bad shape'))
except(AttributeError):
# there is no self.frequency, or self.number_of_ports
                    raise(AttributeError('Error: cannot reshape z0 through inspection. You must provide a correctly shaped z0, or set the s-matrix first.'))
return self._z0
except(AttributeError):
#print('Warning: z0 is undefined. Defaulting to 50.')
self.z0=50
return self.z0 #this is not an error, its a recursive call
@z0.setter
def z0(self, z0):
'''z0=npy.array(z0)
if len(z0.shape) < 2:
try:
#try and re-shape z0 to match s
z0=z0*npy.ones(self.s.shape[:-1])
except(AttributeError):
print ('Warning: you should store a Network\'s \'s\' matrix before its \'z0\'')
#they have yet to set s .
pass
'''
self._z0 = npy.array(z0,dtype=complex)
@property
def frequency(self):
'''
frequency information for the network.
This property is a :class:`~skrf.frequency.Frequency` object.
It holds the frequency vector, as well frequency unit, and
provides other properties related to frequency information, such
as start, stop, etc.
Returns
--------
frequency : :class:`~skrf.frequency.Frequency` object
frequency information for the network.
See Also
---------
f : property holding frequency vector in Hz
change_frequency : updates frequency property, and
interpolates s-parameters if needed
interpolate : interpolate function based on new frequency
info
'''
try:
return self._frequency
except (AttributeError):
self._frequency = Frequency(0,0,0)
return self._frequency
@frequency.setter
def frequency(self, new_frequency):
'''
takes a Frequency object, see frequency.py
'''
if isinstance(new_frequency, Frequency):
self._frequency = new_frequency.copy()
else:
try:
self._frequency = Frequency.from_f(new_frequency)
except (TypeError):
raise TypeError('Could not convert argument to a frequency vector')
@property
def inv(self):
'''
a :class:`Network` object with 'inverse' s-parameters.
This is used for de-embedding. It is defined so that the inverse
of a Network cascaded with itself is unity.
Returns
---------
inv : a :class:`Network` object
a :class:`Network` object with 'inverse' s-parameters.
See Also
----------
inv : function which implements the inverse s-matrix
'''
if self.number_of_ports <2:
            raise(TypeError('One-Port Networks do not have inverses'))
out = self.copy()
out.s = inv(self.s)
return out
@property
def f(self):
'''
the frequency vector for the network, in Hz.
Returns
--------
f : :class:`numpy.ndarray`
frequency vector in Hz
See Also
---------
frequency : frequency property that holds all frequency
information
'''
return self.frequency.f
@f.setter
def f(self,f):
tmpUnit= self.frequency.unit
self.frequency = Frequency.from_f(f, unit=tmpUnit)
## SECONDARY PROPERTIES
@property
def number_of_ports(self):
'''
the number of ports the network has.
Returns
--------
number_of_ports : number
the number of ports the network has.
'''
try:
return self.s.shape[1]
except (AttributeError):
return 0
@property
def nports(self):
'''
the number of ports the network has.
Returns
--------
number_of_ports : number
the number of ports the network has.
'''
return self.number_of_ports
@property
def port_tuples(self):
'''
Returns a list of tuples, for each port index pair
        A convenience function for the common task of iterating over
        all s-parameter index pairs
This just calls:
`[(y,x) for x in range(self.nports) for y in range(self.nports)]`
'''
return [(y,x) for x in range(self.nports) for y in range(self.nports)]
@property
def passivity(self):
'''
passivity metric for a multi-port network.
        This returns a matrix whose diagonals are equal to the total
        power received at all ports, normalized to the power at a single
        excitation port.
mathematically, this is a test for unitary-ness of the
s-parameter matrix [#]_.
for two port this is
.. math::
( |S_{11}|^2 + |S_{21}|^2 \, , \, |S_{22}|^2+|S_{12}|^2)
in general it is
.. math::
S^H \\cdot S
where :math:`H` is conjugate transpose of S, and :math:`\\cdot`
is dot product.
Returns
---------
passivity : :class:`numpy.ndarray` of shape fxnxn
References
------------
.. [#] http://en.wikipedia.org/wiki/Scattering_parameters#Lossless_networks
'''
return passivity(self.s)
@property
def reciprocity(self):
'''
reciprocity metric for a multi-port network.
This returns the difference between the s-parameter matrix
and its transpose.
for two port this is
.. math::
S - S^T
where :math:`T` is transpose of S
Returns
---------
reciprocity : :class:`numpy.ndarray` of shape fxnxn
'''
return reciprocity(self.s)
@property
def reciprocity2(self):
'''
Reciprocity metric #2
.. math::
abs(1 - S/S^T )
for the two port case, this evaluates to the distance of the
determinant of the wave-cascading matrix from unity.
'''
return abs(1-self.s/self.s.swapaxes(1,2))
## NETWORK CLASIFIERs
def is_reciprocal(self):
'''
test for reciprocity
'''
raise(NotImplementedError)
def is_symmetric(self):
'''
test for symmetry
'''
raise(NotImplementedError)
def is_passive(self):
'''
test for passivity
'''
raise(NotImplementedError)
def is_lossless(self):
'''
test for losslessness
'''
raise(NotImplementedError)
## specific ploting functions
def plot_passivity(self, port = None,label_prefix=None, *args, **kwargs):
'''
Plot dB(diag(passivity metric)) vs frequency
Notes
-------
This plot does not completely capture the passivity metric, which
is a test for `unitary-ness` of the s-matrix. However, it may
        be used to display a measure of power dissipated in a network.
See Also
-----------
passivity
'''
name = '' if self.name is None else self.name
if port is None:
ports = range(self.nports)
else:
ports = [port]
for k in ports:
            if label_prefix is None:
label = name +', port %i'%(k+1)
else:
label = label_prefix+', port %i'%(k+1)
self.frequency.plot(mf.complex_2_db(self.passivity[:,k,k]),
label=label,
*args, **kwargs)
plb.legend()
plb.draw()
def plot_reciprocity(self, db= False, *args, **kwargs):
'''
Plot reciprocity metric
See Also
-----------
reciprocity
'''
for m in range(self.nports):
for n in range(self.nports):
if m>n:
if 'label' not in kwargs.keys():
kwargs['label'] = 'ports %i%i'%(m,n)
y = self.reciprocity[:,m,n].flatten()
if db:
y = mf.complex_2_db(y)
self.frequency.plot(y,*args, **kwargs)
plb.legend()
plb.draw()
def plot_reciprocity2(self, db= False, *args, **kwargs):
'''
Plot reciprocity metric #2
this is distance of the determinant of the wave-cascading matrix
from unity.
.. math::
abs(1 - S/S^T )
See Also
-----------
reciprocity
'''
for m in range(self.nports):
for n in range(self.nports):
if m>n:
if 'label' not in kwargs.keys():
kwargs['label'] = 'ports %i%i'%(m,n)
y = self.reciprocity2[:,m,n].flatten()
if db:
y = mf.complex_2_db(y)
self.frequency.plot(y,*args, **kwargs)
plb.legend()
plb.draw()
## CLASS METHODS
def copy(self):
'''
Returns a copy of this Network
Needed to allow pass-by-value for a Network instead of
pass-by-reference
'''
ntwk = Network(s = self.s,
frequency = self.frequency.copy(),
z0 = self.z0,
)
ntwk.name = self.name
return ntwk
def copy_from(self,other):
'''
Copies the contents of another Network into self
Uses copy, so that the data is passed-by-value, not reference
Parameters
-----------
other : Network
the network to copy the contents of
Examples
-----------
>>> a = rf.N()
>>> b = rf.N('my_file.s2p')
>>> a.copy_from (b)
'''
for attr in ['_s','frequency','_z0','name' ]:
self.__setattr__(attr,copy(other.__getattribute__(attr)))
# touchstone file IO
def read_touchstone(self, filename):
'''
loads values from a touchstone file.
The work of this function is done through the
:class:`~skrf.io.touchstone` class.
Parameters
----------
filename : str or file-object
touchstone file name.
Notes
------
only the scattering parameters format is supported at the
moment
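        Examples
        ---------
        A minimal sketch; the file name is hypothetical.
        >>> ntwk = rf.Network()
        >>> ntwk.read_touchstone('my_measurement.s2p')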
'''
from .io import touchstone
touchstoneFile = touchstone.Touchstone(filename)
if touchstoneFile.get_format().split()[1] != 's':
raise NotImplementedError('only s-parameters supported for now.')
self.comments = touchstoneFile.get_comments()
# set z0 before s so that y and z can be computed
self.z0 = complex(touchstoneFile.resistance)
f, self.s = touchstoneFile.get_sparameter_arrays() # note: freq in Hz
self.frequency = Frequency.from_f(f, unit='hz')
self.frequency.unit = touchstoneFile.frequency_unit
if self.name is None:
try:
self.name = os.path.basename( os.path.splitext(filename)[0])
# this may not work if filename is a file object
except(AttributeError):
# in case they pass a file-object instead of file name,
# get the name from the touchstone file
try:
self.name = os.path.basename( os.path.splitext(touchstoneFile.filename)[0])
                except Exception:
                    print('warning: could not inspect network name')
self.name=''
pass
#TODO: add Network property `comments` which is read from
# touchstone file.
def write_touchstone(self, filename=None, dir = None,
write_z0=False,skrf_comment=True,
form='ri'):
'''
        Write the contents of the :class:`Network` to a touchstone file.
Parameters
----------
filename : a string, optional
touchstone filename, without extension. if 'None', then
will use the network's :attr:`name`.
dir : string, optional
the directory to save the file in.
write_z0 : boolean
write impedance information into touchstone as comments,
like Ansoft HFSS does
skrf_comment : bool, optional
write `created by skrf` comment
form : 'db','ma','ri'
format to write data,
* db = db, deg
* ma = mag, deg
* ri = real, imag
Notes
-------
format supported at the moment are,
[Hz/kHz/MHz/GHz] S [DB/MA/RI]
Frequency unit can be changed by setting Network.frequency.unit property
The functionality of this function should take place in the
:class:`~skrf.touchstone.touchstone` class.
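        Examples
        ---------
        A minimal sketch; the file name and options are illustrative.
        >>> ntwk.write_touchstone('my_ntwk', form='ma', write_z0=True)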
'''
# according to Touchstone 2.0 spec
# [no tab, max. 4 coeffs per line, etc.]
if filename is None:
if self.name is not None:
filename= self.name
else:
raise ValueError('No filename given. Network must have a name, or you must provide a filename')
if get_extn(filename) is None:
filename = filename +'.s%ip'%self.number_of_ports
if dir is not None:
filename = os.path.join(dir, filename)
        # set internal variables according to form
form = form.upper()
if form == "RI":
formatDic = {"labelA":"Re", "labelB":"Im"}
funcA = npy.real
funcB = npy.imag
elif form == "DB":
formatDic = {"labelA":"dB", "labelB":"ang"}
funcA = mf.complex_2_db
funcB = mf.complex_2_degree
elif form == "MA":
formatDic = {"labelA":"mag", "labelB":"ang"}
funcA = mf.complex_2_magnitude
funcB = mf.complex_2_degree
else:
raise ValueError('`form` must be either `db`,`ma`,`ri`')
with open(filename,"w") as outputFile:
# Add '!' Touchstone comment delimiters to the start of every line
# in self.comments
commented_header = ''
if self.comments:
for comment_line in self.comments.split('\n'):
commented_header += '!{}\n'.format(comment_line)
if skrf_comment:
commented_header +='!Created with skrf (http://scikit-rf.org).\n'
outputFile.write(commented_header)
# write header file.
# the '#' line is NOT a comment it is essential and it must be
# exactly this format, to work
# [HZ/KHZ/MHZ/GHZ] [S/Y/Z/G/H] [MA/DB/RI] [R n]
outputFile.write('# {} S {} R {} \n'.format(self.frequency.unit, form, str(abs(self.z0[0,0]))))
if self.number_of_ports == 1 :
# write comment line for users (optional)
outputFile.write('!freq {labelA}S11 {labelB}S11\n'.format(**formatDic))
# write out data
for f in range(len(self.f)):
outputFile.write(str(self.frequency.f_scaled[f])+' '\
+ str(funcA(self.s[f,0,0])) + ' '\
+ str(funcB(self.s[f,0,0])) +'\n')
# write out the z0 following hfss's convention if desired
if write_z0:
outputFile.write('! Port Impedance ' )
for n in range(self.number_of_ports):
outputFile.write('%.14f %.14f '%(self.z0[f,n].real, self.z0[f,n].imag))
outputFile.write('\n')
elif self.number_of_ports == 2 :
# 2-port is a special case with
# - single line, and
# - S21,S12 in reverse order: legacy ?
# write comment line for users (optional)
outputFile.write('!freq {labelA}S11 {labelB}S11 {labelA}S21 {labelB}S21 {labelA}S12 {labelB}S12 {labelA}S22 {labelB}S22\n'.format(**formatDic))
# write out data
for f in range(len(self.f)):
outputFile.write(str(self.frequency.f_scaled[f])+' '\
+ str(funcA(self.s[f,0,0])) + ' '\
+ str(funcB(self.s[f,0,0])) + ' '\
+ str(funcA(self.s[f,1,0])) + ' '\
+ str(funcB(self.s[f,1,0])) + ' '\
+ str(funcA(self.s[f,0,1])) + ' '\
+ str(funcB(self.s[f,0,1])) + ' '\
+ str(funcA(self.s[f,1,1])) + ' '\
+ str(funcB(self.s[f,1,1])) +'\n')
# write out the z0 following hfss's convention if desired
if write_z0:
outputFile.write('! Port Impedance' )
for n in range(2):
outputFile.write(' %.14f %.14f'%(self.z0[f,n].real, self.z0[f,n].imag))
outputFile.write('\n')
elif self.number_of_ports == 3 :
# 3-port is written over 3 lines / matrix order
# write comment line for users (optional)
outputFile.write ('!freq')
for m in range(1,4):
for n in range(1,4):
outputFile.write(" {labelA}S{m}{n} {labelB}S{m}{n}".format(m=m, n=n, **formatDic))
outputFile.write('\n!')
outputFile.write('\n')
# write out data
for f in range(len(self.f)):
outputFile.write(str(self.frequency.f_scaled[f]))
for m in range(3):
for n in range(3):
outputFile.write( ' ' + str(funcA(self.s[f,m,n])) + ' '\
+ str(funcB(self.s[f,m,n])))
outputFile.write('\n')
# write out the z0 following hfss's convention if desired
if write_z0:
outputFile.write('! Port Impedance' )
for n in range(3):
outputFile.write(' %.14f %.14f'%(self.z0[f,n].real, self.z0[f,n].imag))
outputFile.write('\n')
elif self.number_of_ports >= 4 :
# general n-port
# - matrix is written line by line
# - 4 complex numbers / 8 real numbers max. for a single line
# - continuation lines (anything except first) go with indent
# this is not part of the spec, but many tools handle it this way
# -> allows to parse without knowledge of number of ports
# write comment line for users (optional)
outputFile.write ('!freq')
for m in range(1,1+self.number_of_ports):
for n in range(1,1+self.number_of_ports):
if (n > 0 and (n%4) == 0 ) :
outputFile.write('\n!')
outputFile.write(" {labelA}S{m}{n} {labelB}S{m}{n}".format(m=m, n=n, **formatDic))
outputFile.write('\n!')
outputFile.write('\n')
# write out data
for f in range(len(self.f)):
outputFile.write(str(self.frequency.f_scaled[f]))
for m in range(self.number_of_ports):
for n in range(self.number_of_ports):
if (n > 0 and (n%4) == 0 ) :
outputFile.write('\n')
outputFile.write( ' ' + str(funcA(self.s[f,m,n])) + ' '\
+ str(funcB(self.s[f,m,n])))
outputFile.write('\n')
# write out the z0 following hfss's convention if desired
if write_z0:
outputFile.write('! Port Impedance' )
for n in range(self.number_of_ports):
outputFile.write(' %.14f %.14f'%(self.z0[f,n].real, self.z0[f,n].imag))
outputFile.write('\n')
def write(self, file=None, *args, **kwargs):
'''
Write the Network to disk using the :mod:`pickle` module.
The resultant file can be read either by using the Networks
constructor, :func:`__init__` , the read method :func:`read`, or
the general read function :func:`skrf.io.general.read`
Parameters
-----------
file : str or file-object
filename or a file-object. If left as None then the
filename will be set to Network.name, if its not None.
If both are None, ValueError is raised.
\*args, \*\*kwargs :
passed through to :func:`~skrf.io.general.write`
Notes
------
        If self.name is not None, then file can be left as None
and the resultant file will have the `.ntwk` extension appended
to the filename.
Examples
---------
>>> n = rf.N(f=[1,2,3],s=[1,1,1],z0=50, name = 'open')
>>> n.write()
>>> n2 = rf.read('open.ntwk')
See Also
---------
skrf.io.general.write : write any skrf object
skrf.io.general.read : read any skrf object
'''
        # this import is delayed until here because of a circular dependency
        from .io.general import write
if file is None:
if self.name is None:
raise (ValueError('No filename given. You must provide a filename, or set the name attribute'))
file = self.name
write(file,self,*args, **kwargs)
def read(self, *args, **kwargs):
'''
Read a Network from a 'ntwk' file
A ntwk file is written with :func:`write`. It is just a pickled
file.
Parameters
-------------
\*args, \*\*kwargs : args and kwargs
passed to :func:`skrf.io.general.write`
Notes
------
This function calls :func:`skrf.io.general.read`.
Examples
-----------
>>> rf.read('myfile.ntwk')
>>> rf.read('myfile.p')
See Also
----------
write
skrf.io.general.write
skrf.io.general.read
'''
from .io.general import read
self.copy_from(read(*args, **kwargs))
def write_spreadsheet(self, *args, **kwargs):
'''
Write contents of network to a spreadsheet, for your boss to use.
See Also
---------
skrf.io.general.network_2_spreadsheet
'''
from .io.general import network_2_spreadsheet
network_2_spreadsheet(self, *args, **kwargs)
def to_dataframe(self, *args, **kwargs):
'''
Convert attributes of a Network to a pandas DataFrame
See Also
---------
skrf.io.general.network_2_dataframe
'''
from .io.general import network_2_dataframe
return network_2_dataframe(self, *args, **kwargs)
# interpolation
def interpolate(self, new_frequency,**kwargs):
'''
        Return an interpolated network, from a new :class:`~skrf.frequency.Frequency`.
Interpolate the networks s-parameters linearly in real and
imaginary components. Other interpolation types can be used
by passing appropriate `\*\*kwargs`. This function `returns` an
interpolated Network. Alternatively :func:`~Network.interpolate_self`
will interpolate self.
Parameters
-----------
new_frequency : :class:`~skrf.frequency.Frequency`
frequency information to interpolate
**kwargs : keyword arguments
passed to :func:`scipy.interpolate.interp1d` initializer.
Returns
----------
result : :class:`Network`
an interpolated Network
Notes
--------
See :func:`scipy.interpolate.interpolate.interp1d` for useful
kwargs. For example
**kind** : str or int
Specifies the kind of interpolation as a string ('linear',
            'nearest', 'zero', 'slinear', 'quadratic', 'cubic') or
as an integer specifying the order of the spline
interpolator to use.
See Also
----------
resample
interpolate_self
interpolate_from_f
Examples
-----------
.. ipython::
@suppress
In [21]: import skrf as rf
In [21]: n = rf.data.ring_slot
In [21]: n
In [21]: new_freq = rf.Frequency(75,110,501,'ghz')
In [21]: n.interpolate(new_freq, kind = 'cubic')
'''
# create interpolation objects
interpolation_s_re = \
interp1d(self.frequency.f,self.s_re,axis=0,**kwargs)
interpolation_s_im = \
interp1d(self.frequency.f,self.s_im,axis=0,**kwargs)
interpolation_z0_re = \
interp1d(self.frequency.f,self.z0.real,axis=0,**kwargs)
interpolation_z0_im = \
interp1d(self.frequency.f,self.z0.imag,axis=0,**kwargs)
# make new network and fill with interpolated s, and z0
result = self.copy()
result.frequency = new_frequency
result.s = interpolation_s_re(new_frequency.f) +\
1j*interpolation_s_im(new_frequency.f)
result.z0 = interpolation_z0_re(new_frequency.f) +\
1j*interpolation_z0_im(new_frequency.f)
return result
def interpolate_self_npoints(self, npoints, **kwargs):
'''
Interpolate network based on a new number of frequency points
Parameters
-----------
npoints : int
number of frequency points
**kwargs : keyword arguments
passed to :func:`scipy.interpolate.interp1d` initializer.
See Also
---------
interpolate_self : same functionality but takes a Frequency
object
interpolate : same functionality but takes a Frequency
object and returns a new Network, instead of updating
itself.
Notes
-------
The function :func:`~Network.resample` is an alias for
:func:`~Network.interpolate_self_npoints`.
Examples
-----------
.. ipython::
@suppress
In [21]: import skrf as rf
In [21]: n = rf.data.ring_slot
In [21]: n
In [21]: n.resample(501) # resample is an alias
In [21]: n
'''
new_frequency = self.frequency.copy()
new_frequency.npoints = npoints
self.interpolate_self(new_frequency, **kwargs)
##convenience
resample = interpolate_self_npoints
def interpolate_self(self, new_frequency, **kwargs):
'''
Interpolates s-parameters given a new
        :class:`~skrf.frequency.Frequency` object.
See :func:`~Network.interpolate` for more information.
Parameters
-----------
new_frequency : :class:`~skrf.frequency.Frequency`
frequency information to interpolate at
**kwargs : keyword arguments
passed to :func:`scipy.interpolate.interp1d` initializer.
See Also
----------
resample
interpolate
interpolate_from_f
'''
ntwk = self.interpolate(new_frequency, **kwargs)
self.frequency, self.s,self.z0 = ntwk.frequency, ntwk.s,ntwk.z0
def interpolate_from_f(self, f, interp_kwargs={}, **kwargs):
'''
Interpolates s-parameters from a frequency vector.
Given a frequency vector, and optionally a `unit` (see \*\*kwargs)
, interpolate the networks s-parameters linearly in real and
imaginary components.
See :func:`~Network.interpolate` for more information.
Parameters
-----------
new_frequency : :class:`~skrf.frequency.Frequency`
frequency information to interpolate at
interp_kwargs :
dictionary of kwargs to be passed through to
:func:`scipy.interpolate.interpolate.interp1d`
\*\*kwargs :
passed to :func:`scipy.interpolate.interp1d` initializer.
Notes
---------
This creates a new :class:`~skrf.frequency.Frequency`, object
using the method :func:`~skrf.frequency.Frequency.from_f`, and then calls
:func:`~Network.interpolate_self`.
See Also
----------
resample
interpolate
interpolate_self
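        Examples
        -----------
        A minimal sketch, assuming `ntwk` already covers 1-10 GHz; the
        frequency vector is illustrative.
        >>> import numpy as npy
        >>> ntwk.interpolate_from_f(npy.linspace(1, 10, 101), unit='ghz')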
'''
freq = Frequency.from_f(f,**kwargs)
self.interpolate_self(freq, **interp_kwargs)
def crop(self, f_start, f_stop):
'''
Crop Network based on start and stop frequencies.
No interpolation is done.
Parameters
-----------
f_start : number
start frequency of crop range, in units of self.frequency.unit
f_stop : number
stop frequency of crop range, in units of self.frequency.unit
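        Examples
        ---------
        A minimal sketch, assuming `ntwk` covers at least 2-18 of its own
        frequency unit (e.g. GHz):
        >>> ntwk.crop(2, 18)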
'''
if f_start < self.frequency.f_scaled.min():
raise ValueError('`f_start` is out of range.')
elif f_stop > self.frequency.f_scaled.max():
raise ValueError('`f_stop` is out of range.')
start_idx = find_nearest_index(self.frequency.f_scaled,f_start)
stop_idx = find_nearest_index(self.frequency.f_scaled,f_stop)
ntwk = self[start_idx:stop_idx+1]
self.frequency, self.s,self.z0 = ntwk.frequency, ntwk.s,ntwk.z0
def cropped(self, f_start, f_stop):
'''
returns a cropped network, leaves self alone.
See Also
---------
crop
'''
out = self.copy()
out.crop(f_start = f_start, f_stop = f_stop)
return out
def flip(self):
'''
swaps the ports of a two port Network
'''
if self.number_of_ports == 2:
self.renumber( [0,1], [1,0] )
else:
raise ValueError('you can only flip two-port Networks')
def flipped(self):
'''
returns a flipped network, leaves self alone.
See Also
---------
flip
'''
out = self.copy()
out.flip()
return out
def renormalize(self, z_new, powerwave=False):
'''
        Renormalize s-parameter matrix given new port impedances
Parameters
---------------
z_new : complex array of shape FxN, F, N or a scalar
new port impedances
powerwave : bool
if true this calls :func:`renormalize_s_pw`, which assumes
a powerwave formulation. Otherwise it calls
            :func:`renormalize_s` which implements the default pseudowave
            formulation. If z_new or self.z0 is complex, then these
produce different results.
See Also
----------
renormalize_s
renormalize_s_pw
fix_z0_shape
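        Examples
        ----------
        A minimal sketch; the impedance value is illustrative.
        >>> ntwk.renormalize(75)                   # pseudowave renormalization to 75 ohm
        >>> ntwk.renormalize(75, powerwave=True)   # power-wave definition instead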
'''
if powerwave:
self.s = renormalize_s_pw(self.s, self.z0, z_new)
else:
self.s = renormalize_s(self.s, self.z0, z_new)
self.z0 = fix_z0_shape(z_new,self.frequency.npoints, self.nports)
def renumber(self, from_ports, to_ports):
'''
renumbers ports of a Network
Parameters
-----------
from_ports : list-like
to_ports: list-like
Examples
---------
To flip the ports of a 2-port network 'foo':
>>> foo.renumber( [0,1], [1,0] )
To rotate the ports of a 3-port network 'bar' so that port 0 becomes port 1:
>>> bar.renumber( [0,1,2], [1,2,0] )
To swap the first and last ports of a network 'duck':
>>> duck.renumber( [0,-1], [-1,0] )
'''
from_ports = npy.array(from_ports)
to_ports = npy.array(to_ports)
if len(npy.unique(from_ports)) != len(from_ports):
raise ValueError('an index can appear at most once in from_ports or to_ports')
if any(npy.unique(from_ports) != npy.unique(to_ports)):
raise ValueError('from_ports and to_ports must have the same set of indices')
self.s[:,to_ports,:] = self.s[:,from_ports,:] # renumber rows
self.s[:,:,to_ports] = self.s[:,:,from_ports] # renumber columns
self.z0[:,to_ports] = self.z0[:,from_ports]
def windowed(self, window=('kaiser',6), normalize = True):
'''
Return a windowed version of s-matrix. Used in time-domain analysis.
When using time domain through :attr:`s_time_db`,
        or similar properties, the spectrum is usually windowed,
before the IFFT is taken. This is done to
compensate for the band-pass nature of a spectrum [1]_ .
This function calls :func:`scipy.signal.get_window` which gives
more details about the windowing.
Parameters
-----------
window : string, float, or tuple
The type of window to create. See :func:`scipy.signal.get_window`
for details.
normalize : bool
Normalize the window to preserve power. ie
sum(ntwk.s,axis=0) == sum(ntwk.windowed().s,axis=0)
Examples
-----------
>>> ntwk = rf.Network('myfile.s2p')
>>> ntwk_w = ntwk.windowed()
>>> ntwk_w.plot_s_time_db()
References
-------------
.. [1] Agilent Time Domain Analysis Using a Network Analyzer Application Note 1287-12
'''
window = signal.get_window(window, len(self))
window =window.reshape(-1,1,1) * npy.ones((len(self),
self.nports,
self.nports))
windowed = self * window
if normalize:
# normalize the s-parameters to account for power lost in windowing
windowed.s = windowed.s * npy.sum(self.s_mag,axis=0)/\
npy.sum(windowed.s_mag,axis=0)
return windowed
def time_gate(self, t_start, t_stop=None, window = ('kaiser',6)):
'''
Time-gate s-parameters
The gate can be defined with start/stop times, or by the gate
        width. If `t_stop` is None, then it will default to -`t_start`.
        In this case `t_start` == gate width/2.
See Warning!
Parameters
------------
t_start : number
start of time gate, (s). Or, if t_stop==None, then it is
1/2*gate width.
t_stop : number
stop of time gate (s), if None will be -t_start.
Returns
--------
ntwk : Network
copy of self with time-gated s-parameters
.. warning::
            This is not fully tested, and does not appear to preserve power
correctly
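        Examples
        ---------
        A rough sketch; the 200 ps gate width is illustrative.
        >>> gated = ntwk.time_gate(-100e-12, 100e-12)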
'''
if t_stop is None:
t_stop = -1*t_start
if t_start >t_stop:
t_start *=-1
t_stop *=-1
        # find start/stop gate indices
t = self.frequency.t
t_start_idx = find_nearest_index(t,t_start)
t_stop_idx = find_nearest_index(t,t_stop)
# create window
window_width = abs(t_stop_idx-t_start_idx)
window = signal.get_window(window, window_width)
# create the gate by padding the window with zeros
padded_window = npy.r_[npy.zeros(t_start_idx),
window,
npy.zeros(len(t)-t_stop_idx)]
# reshape the gate array so it operates on all s-parameters
padded_window = padded_window.reshape(-1,1,1) *\
npy.ones((len(self), self.nports, self.nports))
s_time = fft.ifftshift(fft.ifft(self.s, axis=0), axes=0)
s_time_windowed = self.s_time*padded_window
s_freq = fft.fft(fft.fftshift(s_time_windowed, axes=0), axis=0)
gated = self.copy()
gated.s = s_freq
return gated
# plotting
def plot_s_smith(self,m=None, n=None,r=1,ax = None, show_legend=True,\
chart_type='z', draw_labels=False, label_axes=False, *args,**kwargs):
'''
plots the scattering parameter on a smith chart
plots indices `m`, `n`, where `m` and `n` can be integers or
lists of integers.
Parameters
-----------
m : int, optional
first index
n : int, optional
second index
ax : matplotlib.Axes object, optional
axes to plot on. in case you want to update an existing
plot.
show_legend : boolean, optional
            whether or not to show the legend, optional
chart_type : ['z','y']
            draw impedance or admittance contours
draw_labels : Boolean
annotate chart with impedance values
label_axes : Boolean
Label axis with titles `Real` and `Imaginary`
border : Boolean
draw rectangular border around image with ticks
\*args : arguments, optional
passed to the matplotlib.plot command
\*\*kwargs : keyword arguments, optional
passed to the matplotlib.plot command
See Also
--------
plot_vs_frequency_generic - generic plotting function
smith - draws a smith chart
Examples
---------
>>> myntwk.plot_s_smith()
>>> myntwk.plot_s_smith(m=0,n=1,color='b', marker='x')
'''
        # TODO: prevent this from re-drawing smith chart if one already
        # exists on current set of axes
        # get current axis if user doesn't supply an axis
if ax is None:
ax = plb.gca()
if m is None:
M = range(self.number_of_ports)
else:
M = [m]
if n is None:
N = range(self.number_of_ports)
else:
N = [n]
if 'label' not in kwargs.keys():
generate_label=True
else:
generate_label=False
for m in M:
for n in N:
                # set the legend label for this trace to the network's name if it
                # exists, and they didn't pass a name key in the kwargs
if generate_label:
if self.name is None:
if plb.rcParams['text.usetex']:
label_string = '$S_{'+repr(m+1) + repr(n+1)+'}$'
else:
label_string = 'S'+repr(m+1) + repr(n+1)
else:
if plb.rcParams['text.usetex']:
label_string = self.name+', $S_{'+repr(m+1) + \
repr(n+1)+'}$'
else:
label_string = self.name+', S'+repr(m+1) + repr(n+1)
kwargs['label'] = label_string
# plot the desired attribute vs frequency
if len (ax.patches) == 0:
smith(ax=ax, smithR = r, chart_type=chart_type, draw_labels=draw_labels)
ax.plot(self.s[:,m,n].real, self.s[:,m,n].imag, *args,**kwargs)
#draw legend
if show_legend:
ax.legend()
ax.axis(npy.array([-1.1,1.1,-1.1,1.1])*r)
if label_axes:
ax.set_xlabel('Real')
ax.set_ylabel('Imaginary')
def plot_it_all(self,*args, **kwargs):
'''
Plots dB, deg, smith, and complex in subplots
Plots the magnitude in dB in subplot 1, the phase in degrees in
subplot 2, a smith chart in subplot 3, and a complex plot in
subplot 4.
Parameters
-----------
\*args : arguments, optional
passed to the matplotlib.plot command
\*\*kwargs : keyword arguments, optional
passed to the matplotlib.plot command
See Also
--------
plot_s_db - plot magnitude (in dB) of s-parameters vs frequency
plot_s_deg - plot phase of s-parameters (in degrees) vs frequency
plot_s_smith - plot complex s-parameters on smith chart
plot_s_complex - plot complex s-parameters in the complex plane
Examples
---------
>>> from skrf.data import ring_slot
>>> ring_slot.plot_it_all()
'''
plb.subplot(221)
getattr(self,'plot_s_db')(*args, **kwargs)
plb.subplot(222)
getattr(self,'plot_s_deg')(*args, **kwargs)
plb.subplot(223)
getattr(self,'plot_s_smith')(*args, **kwargs)
plb.subplot(224)
getattr(self,'plot_s_complex')(*args, **kwargs)
# noise
def add_noise_polar(self,mag_dev, phase_dev,**kwargs):
'''
adds a complex zero-mean gaussian white-noise.
adds a complex zero-mean gaussian white-noise of a given
standard deviation for magnitude and phase
Parameters
------------
mag_dev : number
standard deviation of magnitude
phase_dev : number
standard deviation of phase [in degrees]
'''
phase_rv= stats.norm(loc=0, scale=phase_dev).rvs(size = self.s.shape)
mag_rv = stats.norm(loc=0, scale=mag_dev).rvs(size = self.s.shape)
phase = (self.s_deg+phase_rv)
mag = self.s_mag + mag_rv
self.s = mag* npy.exp(1j*npy.pi/180.*phase)
def add_noise_polar_flatband(self,mag_dev, phase_dev,**kwargs):
'''
adds a flatband complex zero-mean gaussian white-noise signal of
given standard deviations for magnitude and phase
Parameters
------------
mag_dev : number
standard deviation of magnitude
phase_dev : number
standard deviation of phase [in degrees]
'''
phase_rv= stats.norm(loc=0, scale=phase_dev).rvs(size = self.s[0].shape)
mag_rv = stats.norm(loc=0, scale=mag_dev).rvs(size = self.s[0].shape)
phase = (self.s_deg+phase_rv)
mag = self.s_mag + mag_rv
self.s = mag* npy.exp(1j*npy.pi/180.*phase)
def multiply_noise(self,mag_dev, phase_dev, **kwargs):
'''
        multiplies a complex bivariate gaussian white-noise signal
of given standard deviations for magnitude and phase.
magnitude mean is 1, phase mean is 0
takes:
mag_dev: standard deviation of magnitude
phase_dev: standard deviation of phase [in degrees]
                n_ports: number of ports. defaults to 1
returns:
nothing
'''
phase_rv = stats.norm(loc=0, scale=phase_dev).rvs(\
size = self.s.shape)
mag_rv = stats.norm(loc=1, scale=mag_dev).rvs(\
size = self.s.shape)
self.s = mag_rv*npy.exp(1j*npy.pi/180.*phase_rv)*self.s
def nudge(self, amount=1e-12):
'''
Perturb s-parameters by small amount.
This is useful to work-around numerical bugs.
Notes
-----------
This function is
self.s = self.s + 1e-12
Parameters
------------
amount : number,
amount to add to s parameters
'''
self.s = self.s + amount
# other
def func_on_parameter(self, func, attr= 's',*args, **kwargs):
'''
        Applies a function to a parameter matrix, one frequency slice at a time
This is useful for functions that can only operate on 2d arrays,
like numpy.linalg.inv. This loops over f and calls
`func(ntwkA.s[f,:,:], *args, **kwargs)`
Parameters
------------
func : func
            function to apply to s-parameters, on a single-frequency slice.
            (ie func(ntwkA.s[0,:,:], *args, **kwargs))
\*args, \*\*kwargs :
passed to the func
Examples
-----------
>>> from numpy.linalg import inv
>>> ntwk.func_on_parameter(inv)
'''
ntwkB= self.copy()
p = self.__getattribute__(attr)
ntwkB.s = npy.r_[[func(p[k,:,:],*args,**kwargs) \
for k in range(len(p))]]
return ntwkB
def nonreciprocity(self,m,n, normalize = False):
'''
Normalized non-reciprocity metric.
This is a port-by-port measure of how non-reciprocal a n-port
network is. It is defined by,
.. math::
(S_{mn} - S_{nm}) / \\sqrt ( S_{mn} S_{nm} )
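        Examples
        ----------
        A minimal sketch, assuming `ntwk` is at least a 2-port:
        >>> delta = ntwk.nonreciprocity(2, 1, normalize=True)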
'''
forward = self.__getattribute__('s%i%i'%(m,n))
reverse = self.__getattribute__('s%i%i'%(n,m))
if normalize:
denom = forward*reverse
denom.s = npy.sqrt(denom.s)
return (forward-reverse)/denom
else:
return (forward-reverse)
# generalized mixed mode transformations
# XXX: experimental implementation of gmm s parameters
# TODO: automated test cases
def se2gmm(self, p, z0_mm=None):
'''
Transform network from single ended parameters to generalized mixed mode parameters [1]
[1] Ferrero and Pirola; Generalized Mixed-Mode S-Parameters; IEEE Transactions on
Microwave Theory and Techniques; Vol. 54; No. 1; Jan 2006
Parameters
------------
p : int, number of differential ports
z0_mm: f x n x n matrix of mixed mode impedances, optional
            if input is None, 100 Ohms differential and 25 Ohms common mode reference impedance
.. warning::
This is not fully tested, and should be considered as experimental
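        Examples
        ----------
        A rough sketch, assuming `ntwk` is a 4-port whose ports form two
        differential pairs in the expected order:
        >>> ntwk.se2gmm(p=2)    # first 2 ports differential-mode, next 2 common-mode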
'''
#XXX: assumes 'proper' order (first differential ports, then single ended ports)
if z0_mm is None:
z0_mm = self.z0.copy()
z0_mm[:,0:p] = 100 # differential mode impedance
z0_mm[:,p:2*p] = 25 # common mode impedance
Xi_tilde_11, Xi_tilde_12, Xi_tilde_21, Xi_tilde_22 = self._Xi_tilde(p, self.z0, z0_mm)
A = Xi_tilde_21 + npy.einsum('...ij,...jk->...ik', Xi_tilde_22, self.s)
B = Xi_tilde_11 + npy.einsum('...ij,...jk->...ik', Xi_tilde_12, self.s)
self.s = npy.transpose(npy.linalg.solve(npy.transpose(B, (0,2,1)).conj(), npy.transpose(A, (0,2,1)).conj()), (0,2,1)).conj() # (34)
self.z0 = z0_mm
def gmm2se(self, p, z0_se=None):
'''
Transform network from generalized mixed mode parameters [1] to single ended parameters
[1] Ferrero and Pirola; Generalized Mixed-Mode S-Parameters; IEEE Transactions on
Microwave Theory and Techniques; Vol. 54; No. 1; Jan 2006
Parameters
------------
p : int, number of differential ports
        z0_se: f x n x n matrix of single ended impedances, optional
if input is None, assumes 50 Ohm reference impedance
.. warning::
This is not fully tested, and should be considered as experimental
'''
# TODO: testing of reverse transformation
# XXX: assumes 'proper' order (differential ports, single ended ports)
if z0_se is None:
z0_se = self.z0.copy()
z0_se = 50
Xi_tilde_11, Xi_tilde_12, Xi_tilde_21, Xi_tilde_22 = self._Xi_tilde(p, z0_se, self.z0)
A = Xi_tilde_22 - npy.einsum('...ij,...jk->...ik', self.s, Xi_tilde_12)
B = Xi_tilde_21 - npy.einsum('...ij,...jk->...ik', self.s, Xi_tilde_11)
self.s = npy.linalg.solve(A, B) # (35)
self.z0 = z0_se
# generalized mixed mode supplement functions
_T = npy.array([[1, 0 , -1, 0], [0, 0.5, 0, -0.5], [0.5, 0, 0.5, 0], [0, 1, 0, 1]]) # (5)
def _m(self, z0):
scaling = npy.sqrt(z0.real) / (2 * npy.abs(z0))
Z = npy.ones((z0.shape[0], 2, 2), dtype=npy.complex128)
Z[:,0,1] = z0
Z[:,1,1] = -z0
return scaling[:,npy.newaxis,npy.newaxis] * Z
def _M(self, j, k, z0_se): # (14)
M = npy.zeros((self.f.shape[0],4,4), dtype=npy.complex128)
M[:,:2,:2] = self._m(z0_se[:,j])
M[:,2:,2:] = self._m(z0_se[:,k])
return M
def _M_circle(self, l, p, z0_mm): # (12)
M = npy.zeros((self.f.shape[0],4,4), dtype=npy.complex128)
M[:,:2,:2] = self._m(z0_mm[:,l]) # differential mode impedance of port pair
M[:,2:,2:] = self._m(z0_mm[:,p+l]) # common mode impedance of port pair
return M
def _X(self, j, k, l, p, z0_se, z0_mm): # (15)
return npy.einsum('...ij,...jk->...ik', self._M_circle(l, p, z0_mm).dot(self._T), npy.linalg.inv(self._M(j,k, z0_se))) # matrix multiplication elementwise for each frequency
def _P(self, p): # (27) (28)
n = self.nports
Pda = npy.zeros((p,2*n), dtype=npy.bool)
Pdb = npy.zeros((p,2*n), dtype=npy.bool)
Pca = npy.zeros((p,2*n), dtype=npy.bool)
Pcb = npy.zeros((p,2*n), dtype=npy.bool)
Pa = npy.zeros((n-2*p,2*n), dtype=npy.bool)
Pb = npy.zeros((n-2*p,2*n), dtype=npy.bool)
for l in npy.arange(p):
Pda[l,4*(l+1)-3-1] = True
Pca[l,4*(l+1)-1-1] = True
Pdb[l,4*(l+1)-2-1] = True
Pcb[l,4*(l+1)-1] = True
            if Pa.shape[0] != 0:
Pa[l,4*p+2*(l+1)-1-1] = True
Pb[l,4*p+2*(l+1)-1] = True
return npy.concatenate((Pda, Pca, Pa, Pdb, Pcb, Pb))
def _Q(self): # (29) error corrected
n = self.nports
Qa = npy.zeros((n,2*n), dtype=npy.bool)
Qb = npy.zeros((n,2*n), dtype=npy.bool)
for l in npy.arange(n):
Qa[l,2*(l+1)-1-1] = True
Qb[l,2*(l+1)-1] = True
return npy.concatenate((Qa, Qb))
def _Xi(self, p, z0_se, z0_mm): # (24)
n = self.nports
Xi = npy.ones(self.f.shape[0])[:,npy.newaxis,npy.newaxis] * npy.eye(2*n, dtype=npy.complex128)
for l in npy.arange(p):
Xi[:,4*l:4*l+4,4*l:4*l+4] = self._X(l*2, l*2+1, l, p, z0_se, z0_mm)
return Xi
def _Xi_tilde(self, p, z0_se, z0_mm): # (31)
n = self.nports
P = npy.ones(self.f.shape[0])[:,npy.newaxis,npy.newaxis] * self._P(p)
QT = npy.ones(self.f.shape[0])[:,npy.newaxis,npy.newaxis] * self._Q().T
Xi = self._Xi(p, z0_se, z0_mm)
Xi_tilde = npy.einsum('...ij,...jk->...ik', npy.einsum('...ij,...jk->...ik', P, Xi), QT)
return Xi_tilde[:,:n,:n], Xi_tilde[:,:n,n:], Xi_tilde[:,n:,:n], Xi_tilde[:,n:,n:]
## Functions operating on Network[s]
def connect(ntwkA, k, ntwkB, l, num=1):
'''
connect two n-port networks together.
specifically, connect ports `k` thru `k+num-1` on `ntwkA` to ports
`l` thru `l+num-1` on `ntwkB`. The resultant network has
(ntwkA.nports+ntwkB.nports-2*num) ports. The port indices ('k','l')
start from 0. Port impedances **are** taken into account.
Parameters
-----------
ntwkA : :class:`Network`
network 'A'
k : int
starting port index on `ntwkA` ( port indices start from 0 )
ntwkB : :class:`Network`
network 'B'
l : int
starting port index on `ntwkB`
num : int
number of consecutive ports to connect (default 1)
Returns
---------
ntwkC : :class:`Network`
new network of rank (ntwkA.nports + ntwkB.nports - 2*num)
See Also
-----------
connect_s : actual S-parameter connection algorithm.
innerconnect_s : actual S-parameter connection algorithm.
Notes
-------
the effect of mis-matched port impedances is handled by inserting
a 2-port 'mismatch' network between the two connected ports.
This mismatch Network is calculated with the
:func:`impedance_mismatch` function.
Examples
---------
To implement a *cascade* of two networks
>>> ntwkA = rf.Network('ntwkA.s2p')
>>> ntwkB = rf.Network('ntwkB.s2p')
>>> ntwkC = rf.connect(ntwkA, 1, ntwkB,0)
'''
# some checking
check_frequency_equal(ntwkA,ntwkB)
if (k+num-1> ntwkA.nports-1):
raise IndexError('Port `k` out of range')
if (l+num-1> ntwkB.nports-1):
raise IndexError('Port `l` out of range')
# create output Network, from copy of input
ntwkC = ntwkA.copy()
    # if networks' z0's are not identical, then connect an impedance
# mismatch, which takes into account the effect of differing port
# impedances.
#import pdb;pdb.set_trace()
if assert_z0_at_ports_equal(ntwkA,k,ntwkB,l) == False:
ntwkC.s = connect_s(
ntwkA.s, k,
impedance_mismatch(ntwkA.z0[:,k], ntwkB.z0[:,l]), 0)
# the connect_s() put the mismatch's output port at the end of
# ntwkC's ports. Fix the new port's impedance, then insert it
# at position k where it belongs.
ntwkC.z0[:,k:] = npy.hstack((ntwkC.z0[:,k+1:], ntwkB.z0[:,[l]]))
ntwkC.renumber(from_ports= [ntwkC.nports-1] + list(range(k, ntwkC.nports-1)),
to_ports=list(range(k, ntwkC.nports)))
# call s-matrix connection function
ntwkC.s = connect_s(ntwkC.s,k,ntwkB.s,l)
# combine z0 arrays and remove ports which were `connected`
ntwkC.z0 = npy.hstack(
(npy.delete(ntwkA.z0, range(k,k+1), 1), npy.delete(ntwkB.z0, range(l,l+1), 1)))
# if we're connecting more than one port, call innerconnect recursively
    # until all connections are made to finish the job
if num>1:
ntwkC = innerconnect(ntwkC, k, ntwkA.nports-1+l, num-1)
# if ntwkB is a 2port, then keep port indices where you expect.
if ntwkB.nports == 2 and ntwkA.nports>2:
from_ports = list(range(ntwkC.nports))
to_ports = list(range(ntwkC.nports))
to_ports.pop(k);
to_ports.append(k)
ntwkC.renumber(from_ports=from_ports,
to_ports=to_ports)
return ntwkC
def connect_fast(ntwkA, k, ntwkB, l):
'''
Connect two n-port networks together (using C-implementation)
    Specifically, connect port `k` on `ntwkA` to port
    `l` on `ntwkB`. The resultant network has
(ntwkA.nports+ntwkB.nports-2) ports. The port indices ('k','l')
start from 0. Port impedances **are** taken into account.
Parameters
-----------
ntwkA : :class:`Network`
network 'A'
k : int
starting port index on `ntwkA` ( port indices start from 0 )
ntwkB : :class:`Network`
network 'B'
l : int
starting port index on `ntwkB`
Returns
---------
ntwkC : :class:`Network`
new network of rank (ntwkA.nports + ntwkB.nports - 2)
See Also
-----------
:mod:`skrf.src`
Notes
-------
the effect of mis-matched port impedances is handled by inserting
a 2-port 'mismatch' network between the two connected ports.
This mismatch Network is calculated with the
:func:`impedance_mismatch` function.
Examples
---------
To implement a *cascade* of two networks
>>> ntwkA = rf.Network('ntwkA.s2p')
>>> ntwkB = rf.Network('ntwkB.s2p')
>>> ntwkC = rf.connect(ntwkA, 1, ntwkB,0)
'''
num = 1
    from .src import connect_s_fast
# some checking
check_frequency_equal(ntwkA,ntwkB)
# create output Network, from copy of input
ntwkC = ntwkA.copy()
    # if networks' z0's are not identical, then connect an impedance
# mismatch, which takes into account the effect of differing port
# impedances.
if assert_z0_at_ports_equal(ntwkA,k,ntwkB,l) == False:
ntwkC.s = connect_s(
ntwkA.s, k,
impedance_mismatch(ntwkA.z0[:,k], ntwkB.z0[:,l]), 0)
# the connect_s() put the mismatch's output port at the end of
# ntwkC's ports. Fix the new port's impedance, then insert it
# at position k where it belongs.
ntwkC.z0[:,k:] = npy.hstack((ntwkC.z0[:,k+1:], ntwkB.z0[:,[l]]))
        ntwkC.renumber(from_ports= [ntwkC.nports-1] + list(range(k, ntwkC.nports-1)),
                       to_ports=list(range(k, ntwkC.nports)))
# call s-matrix connection function
ntwkC.s = connect_s_fast(ntwkC.s,k,ntwkB.s,l)
# combine z0 arrays and remove ports which were `connected`
ntwkC.z0 = npy.hstack(
(npy.delete(ntwkA.z0, range(k,k+num), 1), npy.delete(ntwkB.z0, range(l,l+num), 1)))
return ntwkC
def innerconnect(ntwkA, k, l, num=1):
'''
connect ports of a single n-port network.
this results in a (n-2)-port network. remember port indices start
from 0.
Parameters
-----------
ntwkA : :class:`Network`
network 'A'
k,l : int
starting port indices on ntwkA ( port indices start from 0 )
num : int
number of consecutive ports to connect
Returns
---------
ntwkC : :class:`Network`
new network of rank (ntwkA.nports - 2*num)
See Also
-----------
connect_s : actual S-parameter connection algorithm.
innerconnect_s : actual S-parameter connection algorithm.
Notes
-------
a 2-port 'mismatch' network is inserted between the connected ports
if their impedances are not equal.
Examples
---------
To connect ports '0' and port '1' on ntwkA
>>> ntwkA = rf.Network('ntwkA.s3p')
>>> ntwkC = rf.innerconnect(ntwkA, 0,1)
'''
if (k+num-1> ntwkA.nports-1):
raise IndexError('Port `k` out of range')
if (l+num-1> ntwkA.nports-1):
raise IndexError('Port `l` out of range')
# create output Network, from copy of input
ntwkC = ntwkA.copy()
if not (ntwkA.z0[:,k] == ntwkA.z0[:,l]).all():
        # connect an impedance mismatch, which takes into account the
# effect of differing port impedances
mismatch = impedance_mismatch(ntwkA.z0[:,k], ntwkA.z0[:,l])
ntwkC.s = connect_s( ntwkA.s,k, mismatch, 0)
#print 'mismatch %i-%i'%(k,l)
# the connect_s() put the mismatch's output port at the end of
# ntwkC's ports. Fix the new port's impedance, then insert it
# at position k where it belongs.
ntwkC.z0[:,k:] = npy.hstack((ntwkC.z0[:,k+1:], ntwkC.z0[:,[l]]))
ntwkC.renumber(from_ports= [ntwkC.nports-1] + list(range(k, ntwkC.nports-1)),
to_ports=list(range(k, ntwkC.nports)))
# call s-matrix connection function
ntwkC.s = innerconnect_s(ntwkC.s,k,l)
# update the characteristic impedance matrix
ntwkC.z0 = npy.delete(ntwkC.z0, list(range(k,k+1)) + list(range(l,l+1)),1)
# recur if we're connecting more than one port
if num>1:
ntwkC = innerconnect(ntwkC, k, l-1, num-1)
return ntwkC
def cascade(ntwkA,ntwkB):
'''
Cascade two 2-port Networks together
Connects port 1 of `ntwkA` to port 0 of `ntwkB`. This calls
`connect(ntwkA,1, ntwkB,0)`, which is a more general function.
Parameters
-----------
ntwkA : :class:`Network`
network `ntwkA`
ntwkB : Network
network `ntwkB`
Returns
--------
C : Network
the resultant network of ntwkA cascaded with ntwkB
See Also
---------
connect : connects two Networks together at arbitrary ports.
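    Examples
    ---------
    A minimal sketch; the file names are illustrative.
    >>> line = rf.Network('line.s2p')
    >>> atten = rf.Network('atten.s2p')
    >>> both = rf.cascade(line, atten)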
'''
return connect(ntwkA,1, ntwkB,0)
def cascade_list(l):
'''
cascade a list of 2-port networks
all networks must have same frequency
Parameters
--------------
l : list-like
(ordered) list of networks
Returns
----------
out : 2-port Network
        the results of cascading all networks in the list `l`
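    Examples
    ----------
    A minimal sketch, assuming `a`, `b` and `c` are 2-ports on the same
    frequency axis:
    >>> abc = rf.cascade_list([a, b, c])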
'''
return reduce(cascade, l)
def de_embed(ntwkA,ntwkB):
'''
De-embed `ntwkA` from `ntwkB`.
This calls `ntwkA.inv ** ntwkB`. The syntax of cascading an inverse
    is more explicit; it is recommended that it be used instead of this
function.
Parameters
-----------
ntwkA : :class:`Network`
network `ntwkA`
ntwkB : :class:`Network`
network `ntwkB`
Returns
--------
C : Network
        the resultant network, with ntwkA de-embedded from ntwkB
See Also
---------
connect : connects two Networks together at arbitrary ports.
'''
return ntwkA.inv ** ntwkB
def stitch(ntwkA, ntwkB, **kwargs):
'''
Stitches ntwkA and ntwkB together.
Concatenates two networks' data. Given two networks that cover
different frequency bands this can be used to combine their data
into a single network.
Parameters
------------
ntwkA, ntwkB : :class:`Network` objects
Networks to stitch together
\*\*kwargs : keyword args
passed to :class:`Network` constructor, for output network
Returns
---------
ntwkC : :class:`Network`
result of stitching the networks `ntwkA` and `ntwkB` together
Examples
----------
>>> from skrf.data import wr2p2_line, wr1p5_line
>>> rf.stitch(wr2p2_line, wr1p5_line)
2-Port Network: 'wr2p2,line', 330-750 GHz, 402 pts, z0=[ 50.+0.j 50.+0.j]
'''
A,B = ntwkA, ntwkB
C = Network(
frequency = Frequency.from_f(npy.r_[A.f[:],B.f[:]], unit='hz'),
s = npy.r_[A.s,B.s],
z0 = npy.r_[A.z0, B.z0],
name = A.name,
**kwargs
)
C.frequency.unit = A.frequency.unit
return C
def overlap(ntwkA, ntwkB):
'''
Returns the overlapping parts of two Networks, interpolating if needed.
    If the frequency vectors for each ntwk don't perfectly overlap, then
ntwkB is interpolated so that the resultant networks have identical
frequencies.
Parameters
------------
ntwkA : :class:`Network`
a ntwk which overlaps `ntwkB`. (the `dominant` network)
ntwkB : :class:`Network`
a ntwk which overlaps `ntwkA`.
Returns
-----------
ntwkA_new : :class:`Network`
part of `ntwkA` that overlapped `ntwkB`
ntwkB_new : :class:`Network`
part of `ntwkB` that overlapped `ntwkA`, possibly interpolated
See Also
------------
:func:`skrf.frequency.overlap_freq`
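    Examples
    ----------
    A minimal sketch, assuming `a` and `b` have overlapping bands:
    >>> a_new, b_new = rf.overlap(a, b)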
'''
new_freq = ntwkA.frequency.overlap(ntwkB.frequency)
return ntwkA.interpolate(new_freq),ntwkB.interpolate(new_freq)
def average(list_of_networks, polar = False):
'''
Calculates the average network from a list of Networks.
This is complex average of the s-parameters for a list of Networks.
Parameters
-----------
list_of_networks : list of :class:`Network` objects
the list of networks to average
Returns
---------
ntwk : :class:`Network`
the resultant averaged Network
Notes
------
This same function can be accomplished with properties of a
:class:`~skrf.networkset.NetworkSet` class.
Examples
---------
>>> ntwk_list = [rf.Network('myntwk.s1p'), rf.Network('myntwk2.s1p')]
>>> mean_ntwk = rf.average(ntwk_list)
'''
out_ntwk = list_of_networks[0].copy()
if polar:
# average the mag/phase components individually
raise NotImplementedError
else:
# average the re/im components individually
for a_ntwk in list_of_networks[1:]:
out_ntwk += a_ntwk
out_ntwk.s = out_ntwk.s/(len(list_of_networks))
return out_ntwk
def one_port_2_two_port(ntwk):
'''
calculates the two-port network given a symmetric, reciprocal and
lossless one-port network.
takes:
ntwk: a symmetric, reciprocal and lossless one-port network.
returns:
ntwk: the resultant two-port Network
'''
result = ntwk.copy()
result.s = npy.zeros((result.frequency.npoints,2,2), dtype=complex)
s11 = ntwk.s[:,0,0]
result.s[:,0,0] = s11
result.s[:,1,1] = s11
## HACK: TODO: verify this mathematically
result.s[:,0,1] = npy.sqrt(1- npy.abs(s11)**2)*\
npy.exp(1j*(npy.angle(s11)+npy.pi/2.*(npy.angle(s11)<0) -npy.pi/2*(npy.angle(s11)>0)))
result.s[:,1,0] = result.s[:,0,1]
return result
def chopinhalf(ntwk, *args, **kwargs):
'''
    Chops a sandwich of identical, reciprocal 2-ports in half.
Given two identical, reciprocal 2-ports measured in series,
this returns one.
Notes
--------
    In other words, given
    .. math::
        B = A \\cdot A
    Return A, where A's port 2 is connected to A's port 1. The result may
    be found through signal flow graph analysis and is,
    .. math::
        a_{11} = \\frac{b_{11}}{1+b_{12}}
        a_{22} = \\frac{b_{22}}{1+b_{12}}
        a_{12}^2 = b_{21}(1-\\frac{b_{11}b_{22}}{(1+b_{12})^2})
Parameters
------------
ntwk : :class:`Network`
a 2-port that is equal to two identical two-ports in cascade
'''
if ntwk.nports != 2:
raise ValueError('Only valid on 2ports')
b11,b22,b12 = ntwk.s11,ntwk.s22,ntwk.s12
kwargs['name'] = kwargs.get('name', ntwk.name)
a11 = b11/(1+b12)
a22 = b22/(1+b12)
a21 = b12*(1-b11*b22/(1+b12)**2) # this is a21^2 here
a21.s = mf.sqrt_phase_unwrap(a21.s)
A = n_oneports_2_nport([a11,a21,a21,a22], *args, **kwargs)
return A
## Building composit networks from sub-networks
def n_oneports_2_nport(ntwk_list, *args, **kwargs):
'''
    Builds an N-port Network from a list of N**2 one-ports
Parameters
-----------
ntwk_list : list of :class:`Network` objects
must follow left-right, top-bottom order, ie, s11,s12,s21,s22
\*args, \*\*kwargs :
passed to :func:`Network.__init__` for the N-port
Returns
----------
nport : n-port :class:`Network`
result
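    Examples
    ----------
    A minimal sketch, assuming `s11`, `s12`, `s21`, `s22` are 1-port
    Networks on the same frequency axis:
    >>> twoport = rf.n_oneports_2_nport([s11, s12, s21, s22])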
'''
nports = int(npy.sqrt(len(ntwk_list)))
s_out = npy.concatenate(
[npy.concatenate(
[ntwk_list[(k+(l*nports))].s for k in range(nports)],2)\
for l in range(nports)],1)
z0 = npy.concatenate(
[ntwk_list[k].z0 for k in range(0,nports**2,nports+1)],1)
frequency = ntwk_list[0].frequency
return Network(s=s_out, z0=z0, frequency=frequency, *args, **kwargs)
def n_twoports_2_nport(ntwk_list,nports, offby=1, **kwargs):
'''
Builds a N-port Network from list of two-ports
By default all entries of result.s are filled with 0's, in case you
    don't fully specify the entire s-matrix of the resultant ntwk.
Parameters
-----------
ntwk_list : list of :class:`Network` objects
the names must contain the port index, ie 'p12' or 'p43'
offby : int
        starting value for s-parameter indices. ie a value of `1`,
        assumes that s21 = ntwk.s[:,1,0]
\*args, \*\*kwargs :
passed to :func:`Network.__init__` for the N-port
Returns
----------
nport : n-port :class:`Network`
result
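    Examples
    ----------
    A rough sketch; the Networks' names are assumed to carry port indices
    like 'p12', 'p13', 'p23':
    >>> threeport = rf.n_twoports_2_nport([p12, p13, p23], nports=3)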
'''
frequency = ntwk_list[0].frequency
nport = Network(frequency = frequency ,
s=npy.zeros(shape=(frequency.npoints,nports,nports)),
**kwargs)
for subntwk in ntwk_list:
for m,n in nport.port_tuples:
if m!=n and m>n:
if '%i%i'%(m+offby,n+offby) in subntwk.name:
pass
elif '%i%i'%(n+offby,m+offby) in subntwk.name:
subntwk = subntwk.flipped()
else:
continue
for mn,jk in zip(product((m,n), repeat=2),product((0,1), repeat=2)):
m,n,j,k = mn[0],mn[1],jk[0],jk[1]
nport.s[:,m,n] = subntwk.s[:,j,k]
nport.z0[:,m] = subntwk.z0[:,j]
return nport
def four_oneports_2_twoport(s11,s12,s21,s22, *args, **kwargs):
'''
Builds a 2-port Network from list of four 1-ports
Parameters
-----------
s11 : one-port :class:`Network`
s11
s12 : one-port :class:`Network`
s12
s21 : one-port :class:`Network`
s21
s22 : one-port :class:`Network`
s22
\*args, \*\*kwargs :
passed to :func:`Network.__init__` for the twoport
Returns
----------
twoport : two-port :class:`Network`
result
See Also
-----------
n_oneports_2_nport
three_twoports_2_threeport
'''
return n_oneports_2_nport([s11,s12,s21,s22], *args, **kwargs)
def three_twoports_2_threeport(ntwk_triplet, auto_order = True, *args,
**kwargs):
'''
Creates 3-port from three 2-port Networks
This function provides a convenient way to build a 3-port Network
    from a set of 2-port measurements, which may occur when measuring
    a three-port device on a 2-port VNA.
Notes
---------
if `auto_order` is False, ntwk_triplet must be of port orderings:
[p12, p13, p23]
    else if `auto_order` is True, then the 3 Networks in ntwk_triplet must
contain port identification in their names.
For example, their names may be like `me12`, `me13`, `me23`
Parameters
--------------
ntwk_triplet : list of 2-port Network objects
list of three 2-ports. see notes about order.
auto_order : bool
if True attempt to inspect port orderings from Network names.
Names must be like 'p12', 'p23', etc
contains : str
only files containing this string will be loaded.
\*args,\*\*kwargs :
passed to :func:`Network.__init__` for resultant network
Returns
------------
threeport : 3-port Network
See Also
-----------
n_oneports_2_nport
Examples
-----------
>>> rf.three_twoports_2_threeport(rf.read_all('.').values())
'''
    # deprecation: warn rather than raise, so the function still runs (assumed intent)
    import warnings
    warnings.warn('Use n_twoports_2_nport instead', DeprecationWarning)
if auto_order:
p12,p13,p23 = None,None,None
s11,s12,s13,s21,s22,s23,s31,s32,s33 = None,None,None,None,None,None,None,None,None
for k in ntwk_triplet:
if '12' in k.name:
p12 = k
elif '13' in k.name:
p13 = k
elif '23' in k.name:
p23 = k
elif '21' in k.name:
p12 = k.flipped()
elif '31' in k.name:
                p13 = k.flipped()
elif '32' in k.name:
p23 = k.flipped()
else:
p12,p13,p23 = ntwk_triplet
p21= p12.flipped()
p31= p13.flipped()
p32= p23.flipped()
    if p12 is not None:
s11 = p12.s11
s12 = p12.s12
s21 = p12.s21
s22 = p12.s22
    if p13 is not None:
s11 = p13.s11
s13 = p13.s12
s31 = p13.s21
s33 = p13.s22
    if p23 is not None:
s22 = p23.s11
s23 = p23.s12
s32 = p23.s21
s33 = p23.s22
ntwk_list = [s11,s12,s13,s21,s22,s23,s31,s32,s33]
for k in range(len(ntwk_list)):
        if ntwk_list[k] is None:
frequency = ntwk_triplet[0].frequency
s = npy.zeros((len(ntwk_triplet[0]),1,1))
ntwk_list[k] = Network(s=s, frequency=frequency)
threeport = n_oneports_2_nport( ntwk_list, *args, **kwargs)
return threeport
## Functions operating on s-parameter matrices
def connect_s(A,k,B,l):
'''
    connect two n-port networks' s-matrices together.
specifically, connect port `k` on network `A` to port `l` on network
`B`. The resultant network has nports = (A.rank + B.rank-2). This
    function operates on, and returns s-matrices. The function
:func:`connect` operates on :class:`Network` types.
Parameters
-----------
A : :class:`numpy.ndarray`
S-parameter matrix of `A`, shape is fxnxn
k : int
port index on `A` (port indices start from 0)
B : :class:`numpy.ndarray`
S-parameter matrix of `B`, shape is fxnxn
l : int
port index on `B`
Returns
-------
C : :class:`numpy.ndarray`
new S-parameter matrix
Notes
-------
internally, this function creates a larger composite network
and calls the :func:`innerconnect_s` function. see that function for more
details about the implementation
See Also
--------
connect : operates on :class:`Network` types
innerconnect_s : function which implements the connection
connection algorithm
'''
if k > A.shape[-1]-1 or l > B.shape[-1] - 1:
raise(ValueError('port indices are out of range'))
nf = A.shape[0] # num frequency points
nA = A.shape[1] # num ports on A
nB = B.shape[1] # num ports on B
nC = nA + nB # num ports on C
#create composite matrix, appending each sub-matrix diagonally
C = npy.zeros((nf, nC, nC), dtype='complex')
C[:, :nA, :nA] = A.copy()
C[:, nA:, nA:] = B.copy()
# call innerconnect_s() on composit matrix C
return innerconnect_s(C, k, nA + l)
def innerconnect_s(A, k, l):
'''
connect two ports of a single n-port network's s-matrix.
Specifically, connect port `k` to port `l` on `A`. This results in
a (n-2)-port network. This function operates on, and returns
s-matrices. The function :func:`innerconnect` operates on
:class:`Network` types.
Parameters
-----------
A : :class:`numpy.ndarray`
S-parameter matrix of `A`, shape is fxnxn
k : int
port index on `A` (port indices start from 0)
l : int
port index on `A`
Returns
-------
C : :class:`numpy.ndarray`
new S-parameter matrix
Notes
-----
The algorithm used to calculate the resultant network is called a
'sub-network growth', can be found in [#]_. The original paper
describing the algorithm is given in [#]_.
References
----------
.. [#] Compton, R.C.; , "Perspectives in microwave circuit analysis," Circuits and Systems, 1989., Proceedings of the 32nd Midwest Symposium on , vol., no., pp.716-718 vol.2, 14-16 Aug 1989. URL: http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=101955&isnumber=3167
.. [#] Filipsson, Gunnar; , "A New General Computer Algorithm for S-Matrix Calculation of Interconnected Multiports," Microwave Conference, 1981. 11th European , vol., no., pp.700-704, 7-11 Sept. 1981. URL: http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=4131699&isnumber=4131585
'''
if k > A.shape[-1] - 1 or l > A.shape[-1] - 1:
raise(ValueError('port indices are out of range'))
nA = A.shape[1] # num of ports on input s-matrix
# create an empty s-matrix, to store the result
C = npy.zeros(shape=A.shape, dtype='complex')
    # loop through ports and calculate resultant s-parameters
for i in range(nA):
for j in range(nA):
C[:,i,j] = \
A[:,i,j] + \
( A[:,k,j] * A[:,i,l] * (1 - A[:,l,k]) + \
A[:,l,j] * A[:,i,k] * (1 - A[:,k,l]) +\
A[:,k,j] * A[:,l,l] * A[:,i,k] + \
A[:,l,j] * A[:,k,k] * A[:,i,l])/\
((1 - A[:,k,l]) * (1 - A[:,l,k]) - A[:,k,k] * A[:,l,l])
# remove ports that were `connected`
C = npy.delete(C, (k,l), 1)
C = npy.delete(C, (k,l), 2)
return C
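# Illustrative sketch (editorial addition): connecting two ports of a single
# 3-port s-matrix together, leaving a 1-port. Assumes numpy is imported as
# `npy`, as elsewhere in this module.
#
#   >>> A = npy.random.rand(5, 3, 3) + 1j*npy.random.rand(5, 3, 3)
#   >>> C = innerconnect_s(A, 1, 2)   # connect port 1 to port 2
#   >>> C.shape                       # a (3 - 2) = 1 port network remains
#   (5, 1, 1)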
## network parameter conversion
def s2z(s,z0=50):
'''
Convert scattering parameters [1]_ to impedance parameters [2]_
.. math::
z = \\sqrt {z_0} \\cdot (I + s) (I - s)^{-1} \\cdot \\sqrt{z_0}
Parameters
------------
s : complex array-like
scattering parameters
z0 : complex array-like or number
port impedances.
Returns
---------
z : complex array-like
impedance parameters
References
----------
.. [1] http://en.wikipedia.org/wiki/S-parameters
.. [2] http://en.wikipedia.org/wiki/impedance_parameters
'''
nfreqs, nports, nports = s.shape
z0 = fix_z0_shape(z0, nfreqs, nports)
z = npy.zeros(s.shape, dtype='complex')
I = npy.mat(npy.identity(s.shape[1]))
s = s.copy() # to prevent the original array from being altered
s[s==1.] = 1. + 1e-12 # solve numerical singularity
s[s==-1.] = -1. + 1e-12 # solve numerical singularity
for fidx in xrange(s.shape[0]):
sqrtz0 = npy.mat(npy.sqrt(npy.diagflat(z0[fidx])))
z[fidx] = sqrtz0 * (I-s[fidx])**-1 * (I+s[fidx]) * sqrtz0
return z
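# Illustrative sketch (editorial addition): s2z and z2s (defined below) are
# inverse mappings, so converting there and back reproduces the original
# s-parameters up to numerical precision. Assumes numpy is available as `npy`.
#
#   >>> s = 0.1 * (npy.random.rand(7, 2, 2) + 1j*npy.random.rand(7, 2, 2))
#   >>> npy.allclose(z2s(s2z(s, z0=50), z0=50), s)
#   True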
def s2y(s,z0=50):
'''
convert scattering parameters [#]_ to admittance parameters [#]_
.. math::
y = \\sqrt {y_0} \\cdot (I - s)(I + s)^{-1} \\cdot \\sqrt{y_0}
Parameters
------------
s : complex array-like
scattering parameters
z0 : complex array-like or number
port impedances
Returns
---------
y : complex array-like
admittance parameters
See Also
----------
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
----------
.. [#] http://en.wikipedia.org/wiki/S-parameters
.. [#] http://en.wikipedia.org/wiki/Admittance_parameters
'''
nfreqs, nports, nports = s.shape
z0 = fix_z0_shape(z0, nfreqs, nports)
y = npy.zeros(s.shape, dtype='complex')
I = npy.mat(npy.identity(s.shape[1]))
s = s.copy() # to prevent the original array from being altered
s[s==-1.] = -1. + 1e-12 # solve numerical singularity
s[s==1.] = 1. + 1e-12 # solve numerical singularity
for fidx in xrange(s.shape[0]):
sqrty0 = npy.mat(npy.sqrt(npy.diagflat(1.0/z0[fidx])))
y[fidx] = sqrty0*(I-s[fidx])*(I+s[fidx])**-1*sqrty0
return y
def s2t(s):
'''
Converts scattering parameters [#]_ to scattering transfer parameters [#]_ .
    transfer parameters are also referred to as the
    'wave cascading matrix'; this function only operates on 2-port
    networks.
Parameters
-----------
s : :class:`numpy.ndarray` (shape fx2x2)
scattering parameter matrix
Returns
-------
t : numpy.ndarray
scattering transfer parameters (aka wave cascading matrix)
See Also
---------
inv : calculates inverse s-parameters
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
-----------
.. [#] http://en.wikipedia.org/wiki/S-parameters
.. [#] http://en.wikipedia.org/wiki/Scattering_transfer_parameters#Scattering_transfer_parameters
'''
#TODO: check rank(s) ==2
t = npy.array([
[-1*(s[:,0,0]*s[:,1,1]- s[:,1,0]*s[:,0,1])/s[:,1,0],
-s[:,1,1]/s[:,1,0]],
[s[:,0,0]/s[:,1,0],
1./s[:,1,0] ]
]).transpose()
return t
def z2s(z, z0=50):
'''
convert impedance parameters [1]_ to scattering parameters [2]_
.. math::
s = (\\sqrt{y_0} \\cdot z \\cdot \\sqrt{y_0} - I)(\\sqrt{y_0} \\cdot z \\cdot\\sqrt{y_0} + I)^{-1}
Parameters
------------
z : complex array-like
impedance parameters
z0 : complex array-like or number
port impedances
Returns
---------
s : complex array-like
scattering parameters
References
----------
.. [1] http://en.wikipedia.org/wiki/impedance_parameters
.. [2] http://en.wikipedia.org/wiki/S-parameters
'''
nfreqs, nports, nports = z.shape
z0 = fix_z0_shape(z0, nfreqs, nports)
s = npy.zeros(z.shape, dtype='complex')
I = npy.mat(npy.identity(z.shape[1]))
for fidx in xrange(z.shape[0]):
sqrty0 = npy.mat(npy.sqrt(npy.diagflat(1.0/z0[fidx])))
s[fidx] = (sqrty0*z[fidx]*sqrty0 - I) * (sqrty0*z[fidx]*sqrty0 + I)**-1
return s
def z2y(z):
'''
convert impedance parameters [#]_ to admittance parameters [#]_
.. math::
y = z^{-1}
Parameters
------------
z : complex array-like
impedance parameters
Returns
---------
y : complex array-like
admittance parameters
See Also
----------
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
----------
.. [#] http://en.wikipedia.org/wiki/impedance_parameters
.. [#] http://en.wikipedia.org/wiki/Admittance_parameters
'''
return npy.array([npy.mat(z[f,:,:])**-1 for f in xrange(z.shape[0])])
def z2t(z):
'''
Not Implemented yet
convert impedance parameters [#]_ to scattering transfer parameters [#]_
Parameters
------------
z : complex array-like or number
impedance parameters
Returns
---------
    t : complex array-like or number
        scattering transfer parameters
See Also
----------
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
----------
.. [#] http://en.wikipedia.org/wiki/impedance_parameters
.. [#] http://en.wikipedia.org/wiki/Scattering_transfer_parameters#Scattering_transfer_parameters
'''
raise (NotImplementedError)
def z2a(z):
'''
Converts impedance parameters to abcd parameters [#]_ .
Parameters
-----------
z : :class:`numpy.ndarray` (shape fx2x2)
impedance parameter matrix
Returns
-------
    abcd : numpy.ndarray
        abcd parameter matrix (aka chain or cascade parameters)
See Also
---------
inv : calculates inverse s-parameters
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
-----------
.. [#] https://en.wikipedia.org/wiki/Two-port_network
'''
abcd = npy.array([
[z[:,0,0]/z[:,1,0],
1./z[:,1,0]],
[(z[:,0,0]*z[:,1,1]- z[:,1,0]*z[:,0,1])/z[:,1,0],
z[:,1,1]/z[:,1,0]],
]).transpose()
return abcd
def s2a(s,z0):
'''
Converts scattering parameters to abcd parameters [#]_ .
Parameters
-----------
    s : :class:`numpy.ndarray` (shape fx2x2)
        scattering parameter matrix
    z0 : number or :class:`numpy.ndarray` (shape fx2)
        port impedance
    Returns
    -------
    abcd : numpy.ndarray
        abcd parameter matrix (aka chain or cascade parameters)
'''
return z2a(s2z(s,z0))
def y2s(y, z0=50):
'''
convert admittance parameters [#]_ to scattering parameters [#]_
.. math::
s = (I - \\sqrt{z_0} \\cdot y \\cdot \\sqrt{z_0})(I + \\sqrt{z_0} \\cdot y \\cdot \\sqrt{z_0})^{-1}
Parameters
------------
y : complex array-like
admittance parameters
z0 : complex array-like or number
port impedances
Returns
---------
s : complex array-like or number
scattering parameters
See Also
----------
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
----------
.. [#] http://en.wikipedia.org/wiki/Admittance_parameters
.. [#] http://en.wikipedia.org/wiki/S-parameters
'''
nfreqs, nports, nports = y.shape
z0 = fix_z0_shape(z0, nfreqs, nports)
s = npy.zeros(y.shape, dtype='complex')
I = npy.mat(npy.identity(s.shape[1]))
for fidx in xrange(s.shape[0]):
sqrtz0 = npy.mat(npy.sqrt(npy.diagflat(z0[fidx])))
s[fidx] = (I - sqrtz0*y[fidx]*sqrtz0) * (I + sqrtz0*y[fidx]*sqrtz0)**-1
return s
def y2z(y):
'''
convert admittance parameters [#]_ to impedance parameters [#]_
.. math::
z = y^{-1}
Parameters
------------
y : complex array-like
admittance parameters
Returns
---------
z : complex array-like
impedance parameters
See Also
----------
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
----------
.. [#] http://en.wikipedia.org/wiki/Admittance_parameters
.. [#] http://en.wikipedia.org/wiki/impedance_parameters
'''
return npy.array([npy.mat(y[f,:,:])**-1 for f in xrange(y.shape[0])])
def y2t(y):
'''
Not Implemented Yet
convert admittance parameters [#]_ to scattering-transfer parameters [#]_
Parameters
------------
    y : complex array-like or number
        admittance parameters
    Returns
    ---------
    t : complex array-like or number
        scattering transfer parameters
See Also
----------
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
----------
.. [#] http://en.wikipedia.org/wiki/Admittance_parameters
.. [#] http://en.wikipedia.org/wiki/Scattering_transfer_parameters#Scattering_transfer_parameters
'''
raise (NotImplementedError)
def t2s(t):
'''
converts scattering transfer parameters [#]_ to scattering parameters [#]_
    transfer parameters are also referred to as the
    'wave cascading matrix'; this function only operates on 2-port
    networks.
Parameters
-----------
t : :class:`numpy.ndarray` (shape fx2x2)
scattering transfer parameters
Returns
-------
s : :class:`numpy.ndarray`
scattering parameter matrix.
See Also
---------
inv : calculates inverse s-parameters
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
-----------
.. [#] http://en.wikipedia.org/wiki/Scattering_transfer_parameters#Scattering_transfer_parameters
.. [#] http://en.wikipedia.org/wiki/S-parameters
'''
#TODO: check rank(s) ==2
s = npy.array([
[t[:,0,1]/t[:,1,1],
1/t[:,1,1]],
[(t[:,0,0]*t[:,1,1]- t[:,1,0]*t[:,0,1])/t[:,1,1],
-1*t[:,1,0]/t[:,1,1] ]
]).transpose()
return s
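# Illustrative sketch (editorial addition): s2t and t2s round-trip for 2-port
# networks whose s21 is non-zero. Assumes numpy is available as `npy`.
#
#   >>> s = npy.random.rand(9, 2, 2) + 1j*npy.random.rand(9, 2, 2)
#   >>> npy.allclose(t2s(s2t(s)), s)
#   True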
def t2z(t):
'''
Not Implemented Yet
Convert scattering transfer parameters [#]_ to impedance parameters [#]_
Parameters
------------
    t : complex array-like or number
        scattering transfer parameters
    Returns
    ---------
    z : complex array-like or number
        impedance parameters
See Also
----------
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
----------
.. [#] http://en.wikipedia.org/wiki/Scattering_transfer_parameters#Scattering_transfer_parameters
.. [#] http://en.wikipedia.org/wiki/impedance_parameters
'''
raise (NotImplementedError)
def t2y(t):
'''
Not Implemented Yet
Convert scattering transfer parameters to admittance parameters [#]_
Parameters
------------
t : complex array-like or number
t-parameters
Returns
---------
y : complex array-like or number
admittance parameters
See Also
----------
s2z
s2y
s2t
z2s
z2y
z2t
y2s
y2z
y2z
t2s
t2z
t2y
Network.s
Network.y
Network.z
Network.t
References
----------
.. [#] http://en.wikipedia.org/wiki/Scattering_transfer_parameters#Scattering_transfer_parameters
'''
raise (NotImplementedError)
## these methods are used in the secondary properties
def passivity(s):
'''
Passivity metric for a multi-port network.
A metric which is proportional to the amount of power lost in a
multiport network, depending on the excitation port. Specifically,
    this returns a matrix whose diagonals are equal to the total
    power received at all ports, normalized to the power at a single
    excitation port.
    mathematically, this is a test for unitary-ness of the
s-parameter matrix [#]_.
for two port this is
.. math::
        \\sqrt( |S_{11}|^2 + |S_{21}|^2 \\, , \\, |S_{22}|^2+|S_{12}|^2)
in general it is
.. math::
\\sqrt( S^H \\cdot S)
where :math:`H` is conjugate transpose of S, and :math:`\\cdot`
is dot product.
Notes
---------
    The total amount of power dissipated in a network depends on the
    port matches. For example, given a matched attenuator, this metric
    will yield the attenuation value. However, if the attenuator is
    cascaded with a mismatch, the power dissipated will not be equivalent
to the attenuator value, nor equal for each excitation port.
Returns
---------
passivity : :class:`numpy.ndarray` of shape fxnxn
References
------------
.. [#] http://en.wikipedia.org/wiki/Scattering_parameters#Lossless_networks
'''
if s.shape[-1] == 1:
raise (ValueError('Doesn\'t exist for one ports'))
pas_mat = s.copy()
for f in range(len(s)):
pas_mat[f,:,:] = npy.sqrt(npy.dot(s[f,:,:].conj().T, s[f,:,:]))
return pas_mat
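# Illustrative sketch (editorial addition): for a lossless, matched 2-port
# (an ideal thru) the passivity metric is the identity matrix at every
# frequency. Assumes numpy is available as `npy`.
#
#   >>> s = npy.zeros((3, 2, 2), dtype=complex)
#   >>> s[:, 0, 1] = s[:, 1, 0] = 1.0
#   >>> npy.allclose(passivity(s), npy.eye(2))
#   True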
def reciprocity(s):
'''
Reciprocity metric for a multi-port network.
This returns the magnitude of the difference between the
s-parameter matrix and its transpose.
for two port this is
.. math::
| S - S^T |
where :math:`T` is transpose of S
Returns
---------
reciprocity : :class:`numpy.ndarray` of shape fxnxn
'''
if s.shape[-1] == 1:
raise (ValueError('Doesn\'t exist for one ports'))
rec_mat = s.copy()
for f in range(len(s)):
rec_mat[f,:,:] = abs(s[f,:,:]- s[f,:,:].T)
return rec_mat
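# Illustrative sketch (editorial addition): a reciprocal network has a
# symmetric s-matrix, so its reciprocity metric is zero. Assumes numpy is
# available as `npy`.
#
#   >>> s = npy.zeros((3, 2, 2), dtype=complex)
#   >>> s[:, 0, 1] = s[:, 1, 0] = 0.3   # symmetric -> reciprocal
#   >>> npy.allclose(reciprocity(s), 0)
#   True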
## renormalize
def renormalize_s(s, z_old, z_new):
'''
Renormalize a s-parameter matrix given old and new port impedances
In the Parameters descriptions, F,N,N = shape(s).
Notes
------
    This re-normalization assumes the pseudo-wave formulation. The
    function :func:`renormalize_s_pw` implements the power-wave
    formulation. The two implementations only differ
    for complex characteristic impedances.
See the [1]_ and [2]_ for more details.
Parameters
---------------
s : complex array of shape FxNxN
s-parameter matrix
z_old : complex array of shape FxN, F, N or a scalar
old (original) port impedances
z_new : complex array of shape FxN, F, N or a scalar
new port impedances
Notes
------
The impedance renormalization. This just calls ::
z2s(s2z(s,z0 = z_old), z0 = z_new)
However, you can see ref [1]_ or [2]_ for some theoretical background.
See Also
--------
renormalize_s_pw : renormalize using power wave formulation
Network.renormalize : method of Network to renormalize s
fix_z0_shape
    s2z
z2s
References
-------------
.. [1] R. B. Marks and D. F. Williams, "A general waveguide circuit theory," Journal of Research of the National Institute of Standards and Technology, vol. 97, no. 5, pp. 533-561, 1992.
.. [2] http://www.anritsu.com/en-gb/downloads/application-notes/application-note/dwl1334.aspx
Examples
------------
>>> s = zeros(shape=(101,2,2))
>>> renormalize_s(s, 50,25)
'''
# thats a heck of a one-liner!
return z2s(s2z(s, z0=z_old), z0=z_new)
def renormalize_s_pw(s, z_old, z_new):
'''
Renormalize a s-parameter matrix given old and new port impedances
by the power wave renormalization
In the Parameters descriptions, F,N,N = shape(s).
Parameters
---------------
s : complex array of shape FxNxN
s-parameter matrix
z_old : complex array of shape FxN, F, N or a scalar
old (original) port impedances
z_new : complex array of shape FxN, F, N or a scalar
new port impedances
Notes
------
    This re-normalization uses the power-wave formulation. The
    function :func:`renormalize_s` implements the pseudo-wave
    formulation. The two implementations only differ
    for complex characteristic impedances.
See the [1]_ and [2]_ for more details.
References
-------------
.. [1] http://www.anritsu.com/en-gb/downloads/application-notes/application-note/dwl1334.aspx
power-wave Eq 10,11,12 in page 10
See Also
----------
    renormalize_s : renormalize using pseudo wave formulation
    Network.renormalize : method of Network to renormalize s
    fix_z0_shape
    s2z
z2s
Examples
------------
>>> z_old = 50.+0.j # original reference impedance
>>> z_new = 50.+50.j # new reference impedance to change to
>>> load = rf.wr10.load(0.+0.j, nports=1, z0=z_old)
>>> s = load.s
    >>> renormalize_s_pw(s, z_old, z_new)
'''
nfreqs, nports, nports = s.shape
A = fix_z0_shape(z_old, nfreqs, nports)
B = fix_z0_shape(z_new, nfreqs, nports)
S_pw = npy.zeros(s.shape, dtype='complex')
I = npy.mat(npy.identity(s.shape[1]))
s = s.copy() # to prevent the original array from being altered
s[s==1.] = 1. + 1e-12 # solve numerical singularity
s[s==-1.] = -1. + 1e-12 # solve numerical singularity
# make sure real part of impedance is not zero
    A[A.real==0] = 1e-12 + 1.j*A.imag[A.real==0]
    B[B.real==0] = 1e-12 + 1.j*B.imag[B.real==0]
for fidx in xrange(s.shape[0]):
A_ii = A[fidx]
B_ii = B[fidx]
# Eq. 11, Eq. 12
Q_ii = npy.sqrt(npy.absolute(B_ii.real/A_ii.real)) * (A_ii + A_ii.conj()) / (B_ii.conj() + A_ii) # Eq(11)
G_ii = (B_ii - A_ii) / (B_ii + A_ii.conj()) # Eq(12)
Q = npy.mat(npy.diagflat(Q_ii))
G = npy.mat(npy.diagflat(G_ii))
S = s[fidx]
# Eq. 10
S_pw[fidx] = Q**-1 * (S - G.conj().T) * (I - G*S)**-1 * Q.conj().T
return S_pw
def fix_z0_shape( z0, nfreqs, nports):
'''
Make a port impedance of correct shape for a given network's matrix
    This attempts to broadcast z0 to satisfy
npy.shape(z0) == (nfreqs,nports)
Parameters
--------------
z0 : number, array-like
z0 can be:
* a number (same at all ports and frequencies)
* an array-like of length == number ports.
* an array-like of length == number frequency points.
* the correct shape ==(nfreqs,nports)
nfreqs : int
number of frequency points
    nports : int
number of ports
Returns
----------
z0 : array of shape ==(nfreqs,nports)
z0 with the right shape for a nport Network
Examples
----------
For a two-port network with 201 frequency points, possible uses may
be
>>> z0 = rf.fix_z0_shape(50 , 201,2)
>>> z0 = rf.fix_z0_shape([50,25] , 201,2)
>>> z0 = rf.fix_z0_shape(range(201) , 201,2)
'''
if npy.shape(z0) == (nfreqs, nports):
# z0 is of correct shape. super duper.return it quick.
return z0.copy()
elif npy.isscalar(z0):
# z0 is a single number
return npy.array(nfreqs*[nports * [z0]])
elif len(z0) == nports:
# assume z0 is a list of impedances for each port,
# but constant with frequency
return npy.array(nfreqs*[z0])
elif len(z0) == nfreqs:
# assume z0 is a list of impedances for each frequency,
# but constant with respect to ports
return npy.array(nports * [z0]).T
else:
raise IndexError('z0 is not an acceptable shape')
## cascading assistance functions
def inv(s):
'''
Calculates 'inverse' s-parameter matrix, used for de-embedding
This is not literally the inverse of the s-parameter matrix. Instead, it
is defined such that the inverse of the s-matrix cascaded
with itself is unity.
.. math::
inv(s) = t2s({s2t(s)}^{-1})
where :math:`x^{-1}` is the matrix inverse. In words, this
is the inverse of the scattering transfer parameters matrix
transformed into a scattering parameters matrix.
Parameters
-----------
s : :class:`numpy.ndarray` (shape fx2x2)
scattering parameter matrix.
Returns
-------
s' : :class:`numpy.ndarray`
inverse scattering parameter matrix.
See Also
---------
t2s : converts scattering transfer parameters to scattering parameters
s2t : converts scattering parameters to scattering transfer parameters
'''
# this idea is from lihan
i = s2t(s)
for f in range(len(i)):
i[f,:,:] = npy.linalg.inv(i[f,:,:]) # could also be written as
# npy.mat(i[f,:,:])**-1 -- Trey
i = t2s(i)
return i
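# Illustrative sketch (editorial addition): cascading a 2-port s-matrix with
# its `inv` through connect_s should recover an ideal thru (s11 ~ 0,
# s21 ~ 1), which is what makes `inv` useful for de-embedding. Assumes numpy
# is available as `npy`.
#
#   >>> s = 0.5 * (npy.random.rand(5, 2, 2) + 1j*npy.random.rand(5, 2, 2))
#   >>> thru = connect_s(s, 1, inv(s), 0)
#   >>> npy.allclose(thru[:, 1, 0], 1.0)
#   True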
def flip(a):
'''
invert the ports of a networks s-matrix, 'flipping' it over
Parameters
-----------
a : :class:`numpy.ndarray`
scattering parameter matrix. shape should be should be 2x2, or
fx2x2
Returns
-------
a' : :class:`numpy.ndarray`
flipped scattering parameter matrix, ie interchange of port 0
and port 1
Note
-----
only works for 2-ports at the moment
'''
c = a.copy()
if len (a.shape) > 2 :
for f in range(a.shape[0]):
c[f,:,:] = flip(a[f,:,:])
elif a.shape == (2,2):
c[0,0] = a[1,1]
c[1,1] = a[0,0]
c[0,1] = a[1,0]
c[1,0] = a[0,1]
else:
raise IndexError('matrices should be 2x2, or kx2x2')
return c
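# Illustrative sketch (editorial addition): flipping swaps the roles of
# port 0 and port 1, and flipping twice returns the original matrix.
# Assumes numpy is available as `npy`.
#
#   >>> a = npy.random.rand(4, 2, 2) + 1j*npy.random.rand(4, 2, 2)
#   >>> b = flip(a)
#   >>> npy.allclose(b[:, 0, 0], a[:, 1, 1])
#   True
#   >>> npy.allclose(flip(b), a)
#   True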
## COMMON CHECKS (raise exceptions)
def check_frequency_equal(ntwkA, ntwkB):
'''
checks if two Networks have same frequency
'''
if assert_frequency_equal(ntwkA,ntwkB) == False:
        raise IndexError("Networks don't have matching frequency. See `Network.interpolate`")
def check_z0_equal(ntwkA,ntwkB):
'''
checks if two Networks have same port impedances
'''
#note you should check frequency equal before you call this
if assert_z0_equal(ntwkA,ntwkB) == False:
        raise ValueError("Networks don't have matching z0.")
def check_nports_equal(ntwkA,ntwkB):
'''
checks if two Networks have same number of ports
'''
if assert_nports_equal(ntwkA,ntwkB) == False:
        raise ValueError("Networks don't have matching number of ports.")
## TESTs (return [usually boolean] values)
def assert_frequency_equal(ntwkA, ntwkB):
'''
'''
return (ntwkA.frequency == ntwkB.frequency)
def assert_z0_equal(ntwkA,ntwkB):
'''
'''
return (ntwkA.z0 == ntwkB.z0).all()
def assert_z0_at_ports_equal(ntwkA,k,ntwkB,l):
'''
'''
return (ntwkA.z0[:,k] == ntwkB.z0[:,l]).all()
def assert_nports_equal(ntwkA,ntwkB):
'''
'''
return (ntwkA.number_of_ports == ntwkB.number_of_ports)
## Other
# dont belong here, but i needed them quickly
# this is needed for port impedance mismatches
def impedance_mismatch(z1, z2):
'''
    creates a two-port s-matrix for an impedance mis-match
Parameters
-----------
z1 : number or array-like
complex impedance of port 1
z2 : number or array-like
complex impedance of port 2
Returns
---------
s' : 2-port s-matrix for the impedance mis-match
'''
gamma = zl_2_Gamma0(z1,z2)
result = npy.zeros(shape=(len(gamma),2,2), dtype='complex')
result[:,0,0] = gamma
result[:,1,1] = -gamma
result[:,1,0] = (1+gamma)*npy.sqrt(1.0*z1/z2)
result[:,0,1] = (1-gamma)*npy.sqrt(1.0*z2/z1)
return result
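# Illustrative sketch (editorial addition): a step from 50 ohm to 100 ohm.
# This assumes `zl_2_Gamma0(z1, z2)` returns (z2 - z1)/(z2 + z1), the usual
# definition in this package's transmission-line helpers, so the reflection
# seen at port 1 is (100 - 50)/(100 + 50) = 1/3. Assumes numpy as `npy`.
#
#   >>> s_step = impedance_mismatch(npy.array([50.]), npy.array([100.]))
#   >>> npy.allclose(s_step[0, 0, 0], 1.0/3)
#   True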
def two_port_reflect(ntwk1, ntwk2=None):
'''
    Generates a reflective two-port network from two one-ports.
Parameters
----------
ntwk1 : one-port Network object
network seen from port 1
ntwk2 : one-port Network object, or None
network seen from port 2. if None then will use ntwk1.
Returns
-------
result : Network object
two-port reflective network
Notes
-------
The resultant Network is copied from `ntwk1`, so its various
properties(name, frequency, etc) are inherited from that Network.
Examples
---------
    >>> short,open = rf.Network('short.s1p'), rf.Network('open.s1p')
>>> rf.two_port_reflect(short,open)
'''
result = ntwk1.copy()
if ntwk2 is None:
ntwk2 =ntwk1
s11 = ntwk1.s[:,0,0]
s22 = ntwk2.s[:,0,0]
s21 = npy.zeros(ntwk1.frequency.npoints, dtype=complex)
result.s = npy.array(\
[[s11, s21],\
[ s21, s22]]).\
transpose().reshape(-1,2,2)
result.z0 = npy.hstack([ntwk1.z0, ntwk2.z0])
try:
result.name = ntwk1.name+'-'+ntwk2.name
except(TypeError):
pass
return result
|
hohe/scikit-rf
|
skrf/network.py
|
Python
|
bsd-3-clause
| 141,218 | 0.00973 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ProviderOperationsOperations(object):
"""ProviderOperationsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.devtestlabs.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ProviderOperationResult"]
"""Result of the request to list REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProviderOperationResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.devtestlabs.models.ProviderOperationResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProviderOperationResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-09-15"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ProviderOperationResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/providers/Microsoft.DevTestLab/operations'} # type: ignore
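# Illustrative sketch (editorial addition, not part of the generated client):
# iterating the paged result of list(). The client construction and the model
# attributes are assumptions based on the usual azure-mgmt-* conventions, not
# taken from this file.
#
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.devtestlabs import DevTestLabsClient
#
#   client = DevTestLabsClient(DefaultAzureCredential(), "<subscription-id>")
#   for operation in client.provider_operations.list():
#       print(operation)   # each item is a deserialized ProviderOperation model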
|
Azure/azure-sdk-for-python
|
sdk/devtestlabs/azure-mgmt-devtestlabs/azure/mgmt/devtestlabs/operations/_provider_operations_operations.py
|
Python
|
mit
| 4,782 | 0.004391 |
#!/usr/bin/env python
# Copyright (C) 2015 Swift Navigation Inc.
# Contact: Ian Horn <ian@swiftnav.com>
#
# This source is subject to the license found in the file 'LICENSE' which must
# be be distributed together with this source. All other rights reserved.
#
# THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
from gnss_analysis.runner import run as single_run
import pandas as pd
import numpy as np
def main():
import argparse
parser = argparse.ArgumentParser(description='RTK Filter SITL tests.')
parser.add_argument('infile', help='Specify the HDF5 file to use for input.')
parser.add_argument('outfile', help='Specify the HDF5 file to output into.')
parser.add_argument('baselineX', help='The baseline north component.')
parser.add_argument('baselineY', help='The baseline east component.')
parser.add_argument('baselineZ', help='The baseline down component.')
parser.add_argument('--NED', action='store_true')
parser.add_argument('-k', '--key',
default='table', nargs=1,
help='The key for the output table to insert into.')
parser.add_argument('-r', '--row',
default=None, nargs=1,
                        help='The row label under which to insert the results.')
args = parser.parse_args()
hdf5_filename_in = args.infile
hdf5_filename_out = args.outfile
baselineX = args.baselineX
baselineY = args.baselineY
baselineZ = args.baselineZ
baseline = np.array(map(float, [baselineX, baselineY, baselineZ]))
out_key = args.key
row = args.row
if row is None:
row = hdf5_filename_in
reports = single_run(hdf5_filename_in, baseline, baseline_is_NED=args.NED)
out_store = pd.HDFStore(hdf5_filename_out)
if ('/' + out_key) in out_store.keys():
out_df = out_store[out_key]
else:
out_df = pd.DataFrame()
new_cols = [col for col in reports.keys() if col not in out_df.columns]
for new_col in new_cols:
out_df[new_col] = pd.Series(np.nan * np.empty_like(out_df.index),
index=out_df.index)
out_df.loc[row] = pd.Series(reports)
out_store[out_key] = out_df
out_store.close()
if __name__ == "__main__":
main()
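# Illustrative sketch (editorial addition; file names and baseline values are
# hypothetical): a typical invocation, passing the input/output HDF5 stores
# and the known baseline components.
#
#   python agg_run.py run001.hdf5 results.hdf5 10.0 0.0 0.0 --NED -k summary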
|
imh/gnss-analysis
|
gnss_analysis/agg_run.py
|
Python
|
lgpl-3.0
| 2,348 | 0.013203 |
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import
import hashlib
import six
from django.core.files.base import ContentFile
from django.forms.models import modelform_factory
from filer.models import File, Folder, Image
def filer_folder_from_path(path):
"""
Split `path` by slashes and create a hierarchy of Filer Folder objects accordingly.
Blank path components are ignored, so "/////foo//////bar///" is the same as "foo/bar".
The empty string (and `None`) are handled as "no folder", i.e. root folder.
:param path: Pathname or None
:type path: str|None
:return: Folder
:rtype: filer.models.Folder
"""
if path is None:
return None
folder = None
for component in six.text_type(path).split("/"):
if component:
folder = Folder.objects.get_or_create(name=component, parent=folder)[0]
return folder
def _filer_file_from_upload(model, request, path, upload_data, sha1=None):
"""
    Create some sort of Filer file (either File or Image, really) from the given upload data (ContentFile or UploadedFile)
:param model: Model class
:param request: Request, to figure out the owner for this file
:type request: django.http.request.HttpRequest|None
:param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
:type path: basestring|filer.models.Folder
:param upload_data: Upload data
:type upload_data: django.core.files.base.File
:param sha1: SHA1 checksum. If given and a matching `model` with the SHA1 is found, it is returned instead.
:type sha1: basestring
:return: Filer file
"""
if sha1:
upload = model.objects.filter(sha1=sha1).first()
if upload:
return upload
file_form_cls = modelform_factory(
model=model, fields=('original_filename', 'owner', 'file'))
upload_form = file_form_cls(
data={
'original_filename': upload_data.name,
'owner': (request.user.pk if (request and not request.user.is_anonymous()) else None)
},
files={
'file': upload_data
}
)
upload = upload_form.save(commit=False)
upload.is_public = True
if isinstance(path, Folder):
upload.folder = path
else:
upload.folder = filer_folder_from_path(path)
upload.save()
return upload
def filer_file_from_upload(request, path, upload_data, sha1=None):
"""
Create a filer.models.filemodels.File from an upload (UploadedFile or such).
If the `sha1` parameter is passed and a file with said SHA1 is found, it will be returned instead.
:param request: Request, to figure out the owner for this file
:type request: django.http.request.HttpRequest|None
:param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
:type path: basestring|filer.models.Folder
:param upload_data: Upload data
:type upload_data: django.core.files.base.File
:param sha1: SHA1 checksum. If given and a matching `model` with the SHA1 is found, it is returned instead.
:type sha1: basestring
:rtype: filer.models.filemodels.File
"""
return _filer_file_from_upload(model=File, request=request, path=path, upload_data=upload_data, sha1=sha1)
def filer_image_from_upload(request, path, upload_data, sha1=None):
"""
Create a Filer Image from an upload (UploadedFile or such).
If the `sha1` parameter is passed and an Image with said SHA1 is found, it will be returned instead.
:param request: Request, to figure out the owner for this file
:type request: django.http.request.HttpRequest|None
:param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
:type path: basestring|filer.models.Folder
:param upload_data: Upload data
:type upload_data: django.core.files.base.File
:param sha1: SHA-1 checksum of the data, if available, to do deduplication
:type sha1: basestring
:rtype: filer.models.imagemodels.Image
"""
return _filer_file_from_upload(model=Image, request=request, path=path, upload_data=upload_data, sha1=sha1)
def filer_image_from_data(request, path, file_name, file_data, sha1=None):
"""
Create a Filer Image from the given data string.
If the `sha1` parameter is passed and True (the value True, not a truey value), the SHA-1 of the data string
is calculated and passed to the underlying creation function.
If the `sha1` parameter is truthy (generally the SHA-1 hex string), it's passed directly to the creation function.
:param request: Request, to figure out the owner for this file
:type request: django.http.request.HttpRequest|None
:param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
:type path: basestring|filer.models.Folder
:param file_name: File name
    :type file_name: basestring
:param file_data: Upload data
:type file_data: bytes
:param sha1: SHA-1 checksum of the data, if available, to do deduplication.
May also be `True` to calculate the SHA-1 first.
:type sha1: basestring|bool
:rtype: filer.models.imagemodels.Image
"""
if sha1 is True:
sha1 = hashlib.sha1(file_data).hexdigest()
upload_data = ContentFile(file_data, file_name)
return _filer_file_from_upload(model=Image, request=request, path=path, upload_data=upload_data, sha1=sha1)
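# Illustrative sketch (editorial addition; folder path, file name and caller
# are hypothetical): storing raw PNG bytes as a Filer Image from within a
# view, letting the helper compute the SHA-1 so duplicate uploads are reused.
#
#   def save_logo(request, png_bytes):
#       return filer_image_from_data(
#           request=request,
#           path="Brand/Logos",   # split on "/" into a Folder hierarchy
#           file_name="logo.png",
#           file_data=png_bytes,
#           sha1=True,            # hash first, reuse an existing Image if found
#       )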
|
hrayr-artunyan/shuup
|
shuup/utils/filer.py
|
Python
|
agpl-3.0
| 5,631 | 0.003374 |
# -*-Python-*-
################################################################################
#
# File: frontend.py
# RCS: $Header: $
# Description: frontend:
# responsibility:
# init backend
# init processors
# handle two query types:
# 1) metadata
# response: metadata from backend and processors
# 2) informational
# response: proccess(proc(query))(backend(info(query)))
# Author: Staal Vinterbo
# Created: Wed May 8 16:28:56 2013
# Modified: Sun Jun 23 14:31:31 2013 (Staal Vinterbo) staal@mats
# Language: Python
# Package: N/A
# Status: Experimental
#
# (c) Copyright 2013, Staal Vinterbo, all rights reserved.
#
# frontend.py is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# frontend.py is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with frontend.py; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
################################################################################
from backend import init_backend, query_backend
def init_frontend(database, processors, reinit=False):
if len(processors) == 0:
raise Exception('Failed to initialize frontend: no processors given.')
try:
if reinit:
backend = reinit_backend(backend)
else:
backend = init_backend(database)
except Exception as e:
raise Exception('Could not initialize backend: ' + str(e))
pdict = {}
for (k,v) in processors.items():
pdict[k] = v['meta']
meta = dict(backend['meta'])
meta['processors'] = pdict
return {'backend' : backend, 'processors' : processors, 'meta' : meta}
def handle_query(frontend, eps, query):
if eps <= 0:
raise Exception('Privacy risk must be positive.')
try:
(ddesc, proc) = query
(pname, parms) = proc
(dname, sel, pro) = ddesc
except Exception as e:
raise Exception('Malformed data query.')
# check if data set exists and if processor is allowed
if dname not in frontend['backend']['meta']['datasets'].keys():
raise Exception('Requested data set not available.')
if pname not in frontend['backend']['meta']['datasets'][dname]['processors']:
raise Exception('Requested information not appropriate for data set.')
try:
proc = frontend['processors'][pname]
except Exception as e:
raise Exception('Could not find query type: ' + str(e))
try:
if proc.has_key('query_edit'):
parms += [('orig_query', {'predicate' :sel, 'attributes' : pro})]
(sel, pro) = proc['query_edit'](sel, pro)
ddesc = (dname, sel, pro)
except Exception as e:
raise Exception('Query edit failed: ' + str(e))
try:
res = query_backend(frontend['backend'], ddesc)
except Exception as e:
raise Exception('Data query failed: ' + str(e))
try:
pres = proc['f'](eps, parms, res)
except Exception as e:
raise Exception('Information processing failed: ' + str(e))
return pres
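# Illustrative sketch (editorial addition; dataset, processor and parameter
# names are hypothetical): the query passed to handle_query() is a nested
# tuple ((dname, selection, projection), (processor_name, parameters)).
#
#   frontend = init_frontend(database, processors)
#   query = (('census', 'age > 30', ['income']),
#            ('histogram', [('bins', 10)]))
#   answer = handle_query(frontend, eps=0.5, query=query)   # eps must be > 0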
|
laats/dpdq
|
src/qp/frontend.py
|
Python
|
gpl-3.0
| 3,696 | 0.005952 |
"""
Copyright (c) 2012, CCL Forensics
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the CCL Forensics nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CCL FORENSICS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import sys
import os
import struct
import datetime
__version__ = "0.11"
__description__ = "Converts Apple binary PList files into a native Python data structure"
__contact__ = "Alex Caithness"
class BplistError(Exception):
pass
class BplistUID:
def __init__(self, value):
self.value = value
def __repr__(self):
return "UID: {0}".format(self.value)
def __str__(self):
return self.__repr__()
def __decode_multibyte_int(b, signed=True):
if len(b) == 1:
fmt = ">B" # Always unsigned?
elif len(b) == 2:
fmt = ">h"
elif len(b) == 3:
if signed:
return ((b[0] << 16) | struct.unpack(">H", b[1:])[0]) - ((b[0] >> 7) * 2 * 0x800000)
else:
return (b[0] << 16) | struct.unpack(">H", b[1:])[0]
elif len(b) == 4:
fmt = ">i"
elif len(b) == 8:
fmt = ">q"
else:
raise BplistError("Cannot decode multibyte int of length {0}".format(len(b)))
if signed and len(b) > 1:
return struct.unpack(fmt.lower(), b)[0]
else:
return struct.unpack(fmt.upper(), b)[0]
def __decode_float(b, signed=True):
if len(b) == 4:
fmt = ">f"
elif len(b) == 8:
fmt = ">d"
else:
raise BplistError("Cannot decode float of length {0}".format(len(b)))
if signed:
return struct.unpack(fmt.lower(), b)[0]
else:
return struct.unpack(fmt.upper(), b)[0]
def __decode_object(f, offset, collection_offset_size, offset_table):
# Move to offset and read type
#print("Decoding object at offset {0}".format(offset))
f.seek(offset)
# A little hack to keep the script portable between py2.x and py3k
if sys.version_info[0] < 3:
type_byte = ord(f.read(1)[0])
else:
type_byte = f.read(1)[0]
#print("Type byte: {0}".format(hex(type_byte)))
if type_byte == 0x00: # Null 0000 0000
return None
elif type_byte == 0x08: # False 0000 1000
return False
elif type_byte == 0x09: # True 0000 1001
return True
elif type_byte == 0x0F: # Fill 0000 1111
raise BplistError("Fill type not currently supported at offset {0}".format(f.tell())) # Not sure what to return really...
elif type_byte & 0xF0 == 0x10: # Int 0001 xxxx
int_length = 2 ** (type_byte & 0x0F)
int_bytes = f.read(int_length)
return __decode_multibyte_int(int_bytes)
elif type_byte & 0xF0 == 0x20: # Float 0010 nnnn
float_length = 2 ** (type_byte & 0x0F)
float_bytes = f.read(float_length)
return __decode_float(float_bytes)
elif type_byte & 0xFF == 0x33: # Date 0011 0011
date_bytes = f.read(8)
date_value = __decode_float(date_bytes)
return datetime.datetime(2001,1,1) + datetime.timedelta(seconds = date_value)
elif type_byte & 0xF0 == 0x40: # Data 0100 nnnn
if type_byte & 0x0F != 0x0F:
# length in 4 lsb
data_length = type_byte & 0x0F
else:
# A little hack to keep the script portable between py2.x and py3k
if sys.version_info[0] < 3:
int_type_byte = ord(f.read(1)[0])
else:
int_type_byte = f.read(1)[0]
if int_type_byte & 0xF0 != 0x10:
raise BplistError("Long Data field definition not followed by int type at offset {0}".format(f.tell()))
int_length = 2 ** (int_type_byte & 0x0F)
int_bytes = f.read(int_length)
data_length = __decode_multibyte_int(int_bytes, False)
return f.read(data_length)
elif type_byte & 0xF0 == 0x50: # ASCII 0101 nnnn
if type_byte & 0x0F != 0x0F:
# length in 4 lsb
ascii_length = type_byte & 0x0F
else:
# A little hack to keep the script portable between py2.x and py3k
if sys.version_info[0] < 3:
int_type_byte = ord(f.read(1)[0])
else:
int_type_byte = f.read(1)[0]
if int_type_byte & 0xF0 != 0x10:
raise BplistError("Long ASCII field definition not followed by int type at offset {0}".format(f.tell()))
int_length = 2 ** (int_type_byte & 0x0F)
int_bytes = f.read(int_length)
ascii_length = __decode_multibyte_int(int_bytes, False)
return f.read(ascii_length).decode("ascii")
elif type_byte & 0xF0 == 0x60: # UTF-16 0110 nnnn
if type_byte & 0x0F != 0x0F:
# length in 4 lsb
utf16_length = (type_byte & 0x0F) * 2 # Length is characters - 16bit width
else:
# A little hack to keep the script portable between py2.x and py3k
if sys.version_info[0] < 3:
int_type_byte = ord(f.read(1)[0])
else:
int_type_byte = f.read(1)[0]
if int_type_byte & 0xF0 != 0x10:
raise BplistError("Long UTF-16 field definition not followed by int type at offset {0}".format(f.tell()))
int_length = 2 ** (int_type_byte & 0x0F)
int_bytes = f.read(int_length)
utf16_length = __decode_multibyte_int(int_bytes, False) * 2
return f.read(utf16_length).decode("utf_16_be")
elif type_byte & 0xF0 == 0x80: # UID 1000 nnnn
uid_length = (type_byte & 0x0F) + 1
uid_bytes = f.read(uid_length)
return BplistUID(__decode_multibyte_int(uid_bytes, signed=False))
elif type_byte & 0xF0 == 0xA0: # Array 1010 nnnn
if type_byte & 0x0F != 0x0F:
# length in 4 lsb
array_count = type_byte & 0x0F
else:
# A little hack to keep the script portable between py2.x and py3k
if sys.version_info[0] < 3:
int_type_byte = ord(f.read(1)[0])
else:
int_type_byte = f.read(1)[0]
if int_type_byte & 0xF0 != 0x10:
raise BplistError("Long Array field definition not followed by int type at offset {0}".format(f.tell()))
int_length = 2 ** (int_type_byte & 0x0F)
int_bytes = f.read(int_length)
array_count = __decode_multibyte_int(int_bytes, signed=False)
array_refs = []
for i in range(array_count):
array_refs.append(__decode_multibyte_int(f.read(collection_offset_size), False))
return [__decode_object(f, offset_table[obj_ref], collection_offset_size, offset_table) for obj_ref in array_refs]
    elif type_byte & 0xF0 == 0xC0: # Set 1100 nnnn
if type_byte & 0x0F != 0x0F:
# length in 4 lsb
set_count = type_byte & 0x0F
else:
# A little hack to keep the script portable between py2.x and py3k
if sys.version_info[0] < 3:
int_type_byte = ord(f.read(1)[0])
else:
int_type_byte = f.read(1)[0]
if int_type_byte & 0xF0 != 0x10:
raise BplistError("Long Set field definition not followed by int type at offset {0}".format(f.tell()))
int_length = 2 ** (int_type_byte & 0x0F)
int_bytes = f.read(int_length)
set_count = __decode_multibyte_int(int_bytes, signed=False)
set_refs = []
for i in range(set_count):
set_refs.append(__decode_multibyte_int(f.read(collection_offset_size), False))
return [__decode_object(f, offset_table[obj_ref], collection_offset_size, offset_table) for obj_ref in set_refs]
    elif type_byte & 0xF0 == 0xD0: # Dict 1101 nnnn
if type_byte & 0x0F != 0x0F:
# length in 4 lsb
dict_count = type_byte & 0x0F
else:
# A little hack to keep the script portable between py2.x and py3k
if sys.version_info[0] < 3:
int_type_byte = ord(f.read(1)[0])
else:
int_type_byte = f.read(1)[0]
#print("Dictionary length int byte: {0}".format(hex(int_type_byte)))
if int_type_byte & 0xF0 != 0x10:
raise BplistError("Long Dict field definition not followed by int type at offset {0}".format(f.tell()))
int_length = 2 ** (int_type_byte & 0x0F)
int_bytes = f.read(int_length)
dict_count = __decode_multibyte_int(int_bytes, signed=False)
key_refs = []
#print("Dictionary count: {0}".format(dict_count))
for i in range(dict_count):
key_refs.append(__decode_multibyte_int(f.read(collection_offset_size), False))
value_refs = []
for i in range(dict_count):
value_refs.append(__decode_multibyte_int(f.read(collection_offset_size), False))
dict_result = {}
for i in range(dict_count):
#print("Key ref: {0}\tVal ref: {1}".format(key_refs[i], value_refs[i]))
key = __decode_object(f, offset_table[key_refs[i]], collection_offset_size, offset_table)
val = __decode_object(f, offset_table[value_refs[i]], collection_offset_size, offset_table)
dict_result[key] = val
return dict_result
def load(f):
"""
Reads and converts a file-like object containing a binary property list.
Takes a file-like object (must support reading and seeking) as an argument
Returns a data structure representing the data in the property list
"""
# Check magic number
if f.read(8) != b"bplist00":
raise BplistError("Bad file header")
# Read trailer
f.seek(-32, os.SEEK_END)
trailer = f.read(32)
offset_int_size, collection_offset_size, object_count, top_level_object_index, offest_table_offset = struct.unpack(">6xbbQQQ", trailer)
# Read offset table
f.seek(offest_table_offset)
offset_table = []
for i in range(object_count):
offset_table.append(__decode_multibyte_int(f.read(offset_int_size), False))
return __decode_object(f, offset_table[top_level_object_index], collection_offset_size, offset_table)
def NSKeyedArchiver_convert(o, object_table):
if isinstance(o, list):
return NsKeyedArchiverList(o, object_table)
elif isinstance(o, dict):
return NsKeyedArchiverDictionary(o, object_table)
elif isinstance(o, BplistUID):
return NSKeyedArchiver_convert(object_table[o.value], object_table)
else:
return o
class NsKeyedArchiverDictionary(dict):
def __init__(self, original_dict, object_table):
super(NsKeyedArchiverDictionary, self).__init__(original_dict)
self.object_table = object_table
def __getitem__(self, index):
o = super(NsKeyedArchiverDictionary, self).__getitem__(index)
return NSKeyedArchiver_convert(o, self.object_table)
class NsKeyedArchiverList(list):
def __init__(self, original_iterable, object_table):
super(NsKeyedArchiverList, self).__init__(original_iterable)
self.object_table = object_table
def __getitem__(self, index):
o = super(NsKeyedArchiverList, self).__getitem__(index)
return NSKeyedArchiver_convert(o, self.object_table)
def __iter__(self):
for o in super(NsKeyedArchiverList, self).__iter__():
yield NSKeyedArchiver_convert(o, self.object_table)
def deserialise_NsKeyedArchiver(obj):
"""Deserialises an NSKeyedArchiver bplist rebuilding the structure.
obj should usually be the top-level object returned by the load()
function."""
# Check that this is an archiver and version we understand
if not isinstance(obj, dict):
raise TypeError("obj must be a dict")
if "$archiver" not in obj or obj["$archiver"] != "NSKeyedArchiver":
raise ValueError("obj does not contain an '$archiver' key or the '$archiver' is unrecognised")
if "$version" not in obj or obj["$version"] != 100000:
raise ValueError("obj does not contain a '$version' key or the '$version' is unrecognised")
object_table = obj["$objects"]
if "root" in obj["$top"]:
return NSKeyedArchiver_convert(obj["$top"]["root"], object_table)
else:
return NSKeyedArchiver_convert(obj["$top"], object_table)
# NSMutableDictionary convenience functions
def is_nsmutabledictionary(obj):
if not isinstance(obj, dict):
#print("not dict")
return False
if "$class" not in obj.keys():
#print("no class")
return False
if obj["$class"].get("$classname") != "NSMutableDictionary":
#print("wrong class")
return False
if "NS.keys" not in obj.keys():
#print("no keys")
return False
if "NS.objects" not in obj.keys():
#print("no objects")
return False
return True
def convert_NSMutableDictionary(obj):
"""Converts a NSKeyedArchiver serialised NSMutableDictionary into
a straight dictionary (rather than two lists as it is serialised
as)"""
# The dictionary is serialised as two lists (one for keys and one
# for values) which obviously removes all convenience afforded by
# dictionaries. This function converts this structure to an
# actual dictionary so that values can be accessed by key.
if not is_nsmutabledictionary(obj):
raise ValueError("obj does not have the correct structure for a NSMutableDictionary serialised to a NSKeyedArchiver")
keys = obj["NS.keys"]
vals = obj["NS.objects"]
# sense check the keys and values:
if not isinstance(keys, list):
raise TypeError("The 'NS.keys' value is an unexpected type (expected list; actual: {0}".format(type(keys)))
if not isinstance(vals, list):
raise TypeError("The 'NS.objects' value is an unexpected type (expected list; actual: {0}".format(type(vals)))
if len(keys) != len(vals):
raise ValueError("The length of the 'NS.keys' list ({0}) is not equal to that of the 'NS.objects ({1})".format(len(keys), len(vals)))
result = {}
for i,k in enumerate(keys):
if "k" in result:
raise ValueError("The 'NS.keys' list contains duplicate entries")
result[k] = vals[i]
return result
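# Illustrative sketch (editorial addition, file name is hypothetical): parse a
# binary plist and, when it is an NSKeyedArchiver archive, rebuild the object
# graph and unpack a serialised NSMutableDictionary at the top level.
#
#   with open("archive.bplist", "rb") as f:
#       top = load(f)
#   if isinstance(top, dict) and top.get("$archiver") == "NSKeyedArchiver":
#       root = deserialise_NsKeyedArchiver(top)
#       if is_nsmutabledictionary(root):
#           root = convert_NSMutableDictionary(root)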
|
Wonfee/pymobiledevice
|
util/ccl_bplist.py
|
Python
|
gpl-3.0
| 15,606 | 0.005318 |
import _plotly_utils.basevalidators
class MaxpointsValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name="maxpoints", parent_name="histogram.stream", **kwargs
):
super(MaxpointsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
max=kwargs.pop("max", 10000),
min=kwargs.pop("min", 0),
role=kwargs.pop("role", "info"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/histogram/stream/_maxpoints.py
|
Python
|
mit
| 550 | 0 |
import os
import shutil
import jinja2
from saliere.core import UsageError
class Templatizer:
"""Template manager.
Handles all the template related operations.
"""
def __init__(self, template_path_list=None, template_type=None):
"""Initializer.
        :param template_path_list: the list of paths where the templates are possibly located
        :param template_type: the default template type to use when locating a template
        """
# Use default template paths if none were specified.
self.template_path_list = template_path_list if template_path_list else ['data/templates',
'../data/templates',
'/usr/local/share/saliere/templates']
# Set the type if specified.
self.template_type = template_type
@staticmethod
def create_folder(folder, on_failure=None):
"""Creates a folder and the parent directories if needed.
:param folder: name/path of the folder to create
:param on_failure: function to execute in case of failure
"""
try:
os.makedirs(folder)
except OSError:
if on_failure:
on_failure()
def copy(self, project_name, output_dir, template_vars=None):
"""Creates the skeleton based on the chosen template.
        :param project_name: the name of the project
        :param output_dir: the path of the output directory
        :param template_vars: a dict of variables used to render the templates
"""
# Locate the template path.
template_path = self.locate_template()
if not template_path:
raise UsageError("A project type is required.")
# Ensure the template path ends with a "/".
template_folder_parent = os.path.abspath(template_path) + "/"
# Prepare the output directory.
output_folder_root = os.path.abspath(output_dir)
# List of the files in the template folder.
for root, subfolders, files in os.walk(template_path):
# Prepare the jinja environment.
template_loader = jinja2.FileSystemLoader(root)
jinja_env = jinja2.Environment(loader=template_loader)
# Recreate the folders with the formula name
template_folder_base = root.replace(template_folder_parent, "")
formula_folder_name = template_folder_base.replace("template", project_name)
formula_folder_path = os.path.join(output_folder_root, formula_folder_name)
Templatizer.create_folder(formula_folder_path)
# List the files.
for file in files:
dst = os.path.join(formula_folder_path, file)
# If there is no variables to replace, simply copy the file.
if not template_vars:
src = os.path.join(root, file)
shutil.copyfile(src, dst)
continue
# Otherwise jinjanize it.
jinjanized_content = Jinjanizer.jinjanize(jinja_env, file, template_vars)
# Create the file with the rendered content.
with open(dst, mode='w', encoding='utf-8') as jinjanized_file:
jinjanized_file.write(jinjanized_content)
def list_templates(self):
"""Returns a list of available templates ordered alphabetically.
:return: a list of available templates ordered alphabetically
"""
# Ensure we have a list of paths.
if not self.template_path_list:
return None
# Initialize an empty set of available templates.
available_templates = set()
# Go through the list of valid paths.
for path in self.template_path_list:
base_path = os.path.abspath(path)
try:
subdirs = os.listdir(base_path)
available_templates.update(subdirs)
except FileNotFoundError:
pass
# Return a list of available templates ordered alphabetically.
return sorted(available_templates)
def locate_template(self, template_type=None):
"""Returns the path of a template.
Given a template type the function will attempt to retrieve its full path. If instead of a template type, a
full path is given, the function will validate the full path, If the full path cannot be determined, the
function returns None.
:param template_type: the type of the template or its full path
:return: the path of the template or None if it does not exist
"""
# Ensure we have a template type.
if not template_type:
template_type = self.template_type
if not template_type:
return None
# If the template type is a valid custom path, return it.
if os.path.exists(template_type):
return template_type
# Ensure we have a list of paths.
if not self.template_path_list:
return None
# Go through the list of valid paths.
for path in self.template_path_list:
base_path = os.path.abspath(path)
template_path = os.path.join(base_path, template_type)
is_valid = os.path.exists(template_path)
if is_valid:
break
# Return the full path of the given template or None if it cannot be found.
return os.path.abspath(template_path) if is_valid else None
class Jinjanizer:
"""Handle the jinjanization of the templates.
"""
@staticmethod
def jinjanize(jinja_env, template_file, template_vars=None):
"""Renders a Jinja2 template.
:param jinja_env: the jinja environment
:param template_file: the full path of the template file to render
        :param template_vars: a dict of variables used to render the template
:return: a string representing the rendered template
"""
if not template_vars:
template_vars = {}
# Load the template
template = jinja_env.get_template(template_file)
# Render the template and return the result
return template.render(template_vars)
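# Illustrative sketch (editorial addition; template type, paths and variables
# are hypothetical): locate a template by name and render a project skeleton
# from it.
#
#   templatizer = Templatizer(template_type="salt-formula")
#   print(templatizer.list_templates())        # names found on the search paths
#   templatizer.copy(project_name="nginx",
#                    output_dir="/tmp/formulas",
#                    template_vars={"formula_name": "nginx"})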
|
TeamLovely/Saliere
|
saliere/templatizer.py
|
Python
|
mit
| 6,214 | 0.001609 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from io import BytesIO
from django.core.files.storage import Storage
class TestStorage(Storage):
def __init__(self, *args, **kwargs):
self.reset()
def _open(self, name, mode='rb'):
        if not self.exists(name):
            if 'w' in mode:
                # create an empty entry so writable opens succeed
                self.save(name, '')
            else:
                raise IOError("[Errno 2] No such file or directory: '{}'".format(name))
        return self._file_system[name]
def _save(self, name, content):
f = BytesIO()
file_content = content.read()
if isinstance(file_content, bytes):
f.write(file_content)
else:
f.write(file_content.encode('utf8'))
f.seek(0)
if self.exists(name):
name = self.get_available_name(name)
self._file_system[name] = f
return name
def delete(self, name):
"""
Deletes the specified file from the storage system.
"""
if self.exists(name):
            del self._file_system[name]
else:
raise OSError("[Errno 2] No such file or directory: '{}'".format(name))
def exists(self, name):
return name in self._file_system
def reset(self):
self._file_system = {}
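# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original test app). ContentFile
# comes from django.core.files.base; the file name and payload are examples.
def _example_storage_roundtrip():
    from django.core.files.base import ContentFile
    storage = TestStorage()
    name = storage.save('hello.txt', ContentFile(b'hello world'))
    assert storage.exists(name)
    assert storage.open(name).read() == b'hello world'
    storage.delete(name)
    assert not storage.exists(name)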
|
jsatt/django-db-email-backend
|
test_app/storage.py
|
Python
|
mit
| 1,353 | 0.001478 |
import urllib.request
pagina = urllib.request.urlopen(
'http://beans.itcarlow.ie/prices-loyalty.html')
texto = pagina.read().decode('utf8')
onde = texto.find('>$')
início = onde + 2
fim = início + 4
preço = texto[início:fim]
if float(preço) < 4.74:
    print('Buy, since it is cheap:', preço)
else:
    print('Wait')
|
wsricardo/mcestudos
|
treinamento-webScraping/Abraji/p08.py
|
Python
|
gpl-3.0
| 342 | 0.01194 |
from django.db import models
from datetime import date
class Restaurant(models.Model):
name = models.CharField(max_length=200)
transportation = models.BooleanField(default=False)
weatherSensetion = models.BooleanField(default=False)
status = models.BooleanField(default=True)
totalDay = models.IntegerField(default=0)
counter = models.IntegerField(default=0)
def __str__(self):
return self.name
    def deleteRest(self, deleteId):
        Restaurant.objects.filter(id=deleteId).delete()
    def updateStatus(self, newStatus, updateId):
        Restaurant.objects.filter(id=updateId).update(status=newStatus)
|
itucsProject2/Proje1
|
restaurant/models.py
|
Python
|
unlicense
| 639 | 0.00626 |
#!/usr/bin/python
"""
::
This experiment is used to study summing junctions built with op-amps
"""
from __future__ import print_function
from PSL_Apps.utilitiesClass import utilitiesClass
from PSL_Apps.templates import ui_template_graph_nofft as template_graph_nofft
from PyQt4 import QtGui,QtCore
import sys,time
params = {
'image' : 'clipping.png',
'name':"Summing Junction",
'hint':'''
Study summing junctions using op-amps
'''
}
class AppWindow(QtGui.QMainWindow, template_graph_nofft.Ui_MainWindow,utilitiesClass):
def __init__(self, parent=None,**kwargs):
super(AppWindow, self).__init__(parent)
self.setupUi(self)
self.I=kwargs.get('I',None)
self.setWindowTitle(self.I.H.version_string+' : '+params.get('name','').replace('\n',' ') )
from PSL.analyticsClass import analyticsClass
self.math = analyticsClass()
self.prescalerValue=0
self.plot=self.add2DPlot(self.plot_area,enableMenu=False)
self.enableCrossHairs(self.plot,[])
labelStyle = {'color': 'rgb(255,255,255)', 'font-size': '11pt'}
self.plot.setLabel('left','Voltage -->', units='V',**labelStyle)
self.plot.setLabel('bottom','Time -->', units='S',**labelStyle)
self.plot.setYRange(-8.5,8.5)
self.I.set_gain('CH1',1)
self.I.set_gain('CH2',1)
self.I.set_pv2(0);self.I.set_pv3(0)
self.plot.setLimits(yMax=8,yMin=-8,xMin=0,xMax=4e-3)
self.I.configure_trigger(0,'CH1',0,prescaler = self.prescalerValue)
self.tg=2
self.max_samples=2000
self.samples = self.max_samples
self.timer = QtCore.QTimer()
self.legend = self.plot.addLegend(offset=(-10,30))
self.curve1 = self.addCurve(self.plot,'INPUT 1(CH2)')
self.curve2 = self.addCurve(self.plot,'INPUT 2(CH3)')
self.curve3 = self.addCurve(self.plot,'OUTPUT (CH1)')
self.WidgetLayout.setAlignment(QtCore.Qt.AlignLeft)
#Utility widgets
#Widgets related to power supplies PV1,PVS2,PV3,PCS
self.supplySection = self.supplyWidget(self.I); self.WidgetLayout.addWidget(self.supplySection)
#Widgets related to Analog Waveform generators
self.sineSection = self.sineWidget(self.I); self.WidgetLayout.addWidget(self.sineSection)
#Control widgets
a1={'TITLE':'TIMEBASE','MIN':0,'MAX':9,'FUNC':self.set_timebase,'UNITS':'S','TOOLTIP':'Set Timebase of the oscilloscope'}
self.ControlsLayout.addWidget(self.dialIcon(**a1))
self.ControlsLayout.addWidget(self.gainIconCombined(FUNC=self.I.set_gain,LINK=self.gainChanged))
self.running=True
self.timer.singleShot(100,self.run)
def gainChanged(self,g):
self.autoRange()
def set_timebase(self,g):
timebases = [1.5,2,4,8,16,32,128,256,512,1024]
self.prescalerValue=[0,0,0,0,1,1,2,2,3,3,3][g]
samplescaling=[1,1,1,1,1,0.5,0.4,0.3,0.2,0.2,0.1]
self.tg=timebases[g]
self.samples = int(self.max_samples*samplescaling[g])
return self.autoRange()
def autoRange(self):
xlen = self.tg*self.samples*1e-6
self.plot.autoRange();
chan = self.I.analogInputSources['CH1']
R = [chan.calPoly10(0),chan.calPoly10(1023)]
R[0]=R[0]*.9;R[1]=R[1]*.9
self.plot.setLimits(yMax=max(R),yMin=min(R),xMin=0,xMax=xlen)
self.plot.setYRange(min(R),max(R))
self.plot.setXRange(0,xlen)
return self.samples*self.tg*1e-6
def run(self):
if not self.running: return
try:
self.I.configure_trigger(0,'CH1',0,prescaler = self.prescalerValue)
self.I.capture_traces(3,self.samples,self.tg)
if self.running:self.timer.singleShot(self.samples*self.I.timebase*1e-3+10,self.plotData)
except Exception as e:
print (e)
def plotData(self):
if not self.running: return
try:
n=0
while(not self.I.oscilloscope_progress()[0]):
time.sleep(0.1)
n+=1
if n>10:
self.timer.singleShot(100,self.run)
return
self.I.__fetch_channel__(1)
self.I.__fetch_channel__(2)
self.I.__fetch_channel__(3)
self.curve1.setData(self.I.achans[1].get_xaxis()*1e-6,self.I.achans[1].get_yaxis(),connect='finite')
self.curve2.setData(self.I.achans[2].get_xaxis()*1e-6,self.I.achans[2].get_yaxis(),connect='finite')
self.curve3.setData(self.I.achans[0].get_xaxis()*1e-6,self.I.achans[0].get_yaxis(),connect='finite')
self.displayCrossHairData(self.plot,False,self.samples,self.I.timebase,[self.I.achans[1].get_yaxis(),self.I.achans[2].get_yaxis(),self.I.achans[0].get_yaxis()],[(0,255,0),(255,0,0),(255,255,0)])
if self.running:self.timer.singleShot(100,self.run)
except Exception as e:
print (e)
def saveData(self):
self.saveDataWindow([self.curve1,self.curve2,self.curve3],self.plot)
def closeEvent(self, event):
self.running=False
self.timer.stop()
self.finished=True
def __del__(self):
self.timer.stop()
print('bye')
if __name__ == "__main__":
from PSL import sciencelab
app = QtGui.QApplication(sys.argv)
myapp = AppWindow(I=sciencelab.connect())
myapp.show()
sys.exit(app.exec_())
|
jithinbp/pslab-desktop-apps
|
psl_res/GUI/B_ELECTRONICS/B_Opamps/L_Summing.py
|
Python
|
gpl-3.0
| 4,802 | 0.05935 |
"""
Bob is a honest user. Bob creates transactions and smart contracts, like Alice.
Thread for sync must be started separately, wallet must be already created.
"""
from hodl import block
import logging as log
def main(wallet, keys=None):
log.info("Bob's main started")
log.debug("Bob's money: " + str(wallet.bch.money(keys['Bob'][1])))
# start blockchain checking thread
# create transaction
# create smart contract
# messages to smart contract
# decentralized internet request
pass # todo
|
leofnch/kc
|
tests/testnet/roles/Bob.py
|
Python
|
gpl-3.0
| 526 | 0 |
#!/usr/bin/python
# Copyright 2014 Google Inc. All Rights Reserved.
"""Package application for the given platform and build configs.
Depending on platform, this will create a package suitable for distribution.
If the buildbot is running the script, it will be uploaded to the
buildbot staging area.
Usage varies depending on the platform and the user.
If the buildbot is running the script, no parameters should be required
other than the platform and the path to the staging area.
However, users who want to build their own packages can specify options on the
command line.
"""
import argparse
import importlib
import logging
import os
import platform
import sys
import textwrap
import package_utils
def _ParseCommandLine(args):
"""Parse command line and return options."""
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent(__doc__))
if platform.system() == 'Linux':
valid_platforms = ('Android', 'Linux')
else:
valid_platforms = ('PS3', 'PS4', 'WiiU', 'XB1')
packagers = {}
# Import each packager module for the given platform, and
# store the platform-specific Packager class in a dict.
for plat in valid_platforms:
packagers[plat] = importlib.import_module(
'%s.packager' % plat.lower()).Packager
valid_configs = (
'Debug',
'Devel',
'QA',
'Gold',
)
subparsers = parser.add_subparsers(dest='platform', help='Platform name')
# We allow each platform to add its own command line arguments,
# as well as the common ones. Add the common args to each sub-parser
# to avoid confusing ordering requirements.
# So the user invokes this like $ package_application.py PLATFORM <args>
for plat, packager in packagers.iteritems():
sub_parser = subparsers.add_parser(plat)
packager.AddCommandLineArguments(sub_parser)
sub_parser.add_argument(
'-c', '--config',
dest='config_list',
required=not package_utils.IsBuildbot(),
choices=valid_configs, action='append',
        help='Build config. May be specified multiple times. '
             'For automated builds, the set of configs will be specified '
             'in the packager script.')
# The buildbot tells us the path to the staging directory, since it's
# based on the branch, time of day and buildnumber.
sub_parser.add_argument('-s', '--staging',
required=package_utils.IsBuildbot(),
help='Path to staging area on buildmaster. '
'(For use by buildbot.)')
sub_parser.add_argument('-u', '--user',
help='Override user for testing staging.')
sub_parser.add_argument('-v', '--verbose',
required=False, action='store_true')
sub_parser.set_defaults(packager=packager)
return parser.parse_args(args)
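# Example invocations (illustrative; paths, users and configs are placeholders):
#   package_application.py Linux -c Devel -c Gold
#   package_application.py Android -c QA -s /path/to/staging -u builduser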
def main(args):
options = _ParseCommandLine(args)
if options.verbose:
logging_level = logging.DEBUG
else:
logging_level = logging.INFO
logging_format = '%(asctime)s %(levelname)-8s %(message)s'
logging.basicConfig(level=logging_level,
format=logging_format,
datefmt='%m-%d %H:%M')
packager = options.packager(options)
try:
deployment_files = packager.PackageApplication()
except RuntimeError as e:
logging.error(e)
return 1
logging.debug('Paths for deployment: %s', deployment_files)
rc = 0
if package_utils.IsBuildbot() or options.user:
build_info = os.path.join(packager.GetOutDir(), 'build_info.txt')
if os.path.exists(build_info):
deployment_files.append(build_info)
else:
logging.error('%s not found.', build_info)
rc |= package_utils.DeployToStaging(
deployment_files, options.staging, options.user)
return rc
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
snibug/gyp_example
|
build/package_application.py
|
Python
|
apache-2.0
| 3,909 | 0.008186 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import warnings
from invenio.dbquery import run_sql
from invenio.textutils import wait_for_user
depends_on = ['invenio_release_1_1_0']
def info():
return "WebLinkback and BibCirculation updates"
def do_upgrade():
## Since Invenio Upgrader was committed to maint-1.1 and merged to
## master in 8d7ed84, some of the tables that were different in
## maint-1.1 and master at the time needed upgrade recipe. This
## commit fixes the situation in gentle manner (by checking column
## existence etc), since some sites may have upgraded DB schema in
## various times.
## Firstly, BibCirculation tables:
# crcBORROWER
create_statement = run_sql('SHOW CREATE TABLE crcBORROWER')[0][1]
if '`ccid` int(15)' not in create_statement:
run_sql("ALTER TABLE crcBORROWER ADD COLUMN ccid int(15) " \
"unsigned NULL default NULL AFTER id")
if 'KEY `ccid`' not in create_statement:
run_sql("ALTER TABLE crcBORROWER ADD UNIQUE KEY ccid (ccid)")
if 'KEY `name`' not in create_statement:
run_sql("ALTER TABLE crcBORROWER ADD KEY name (name)")
if 'KEY `email`' not in create_statement:
run_sql("ALTER TABLE crcBORROWER ADD KEY email (email)")
# crcILLREQUEST
create_statement = run_sql('SHOW CREATE TABLE crcILLREQUEST')[0][1]
if '`budget_code` varchar(60)' not in create_statement:
run_sql("ALTER TABLE crcILLREQUEST ADD COLUMN budget_code varchar(60) " \
"NOT NULL default '' AFTER cost")
# crcITEM.expected_arrival_date
create_statement = run_sql('SHOW CREATE TABLE crcITEM')[0][1]
if '`expected_arrival_date` varchar(60)' not in create_statement:
run_sql("ALTER TABLE crcITEM ADD COLUMN expected_arrival_date varchar(60) " \
"NOT NULL default '' AFTER status")
## Secondly, WebLinkback tables:
run_sql("""
CREATE TABLE IF NOT EXISTS lnkENTRY (
id int(15) NOT NULL auto_increment,
origin_url varchar(100) NOT NULL, -- url of the originating resource
id_bibrec mediumint(8) unsigned NOT NULL, -- bibrecord
additional_properties longblob,
type varchar(30) NOT NULL,
status varchar(30) NOT NULL default 'PENDING',
insert_time datetime default '0000-00-00 00:00:00',
PRIMARY KEY (id),
INDEX (id_bibrec),
INDEX (type),
INDEX (status),
INDEX (insert_time)
) ENGINE=MyISAM;
""")
run_sql("""
CREATE TABLE IF NOT EXISTS lnkENTRYURLTITLE (
id int(15) unsigned NOT NULL auto_increment,
url varchar(100) NOT NULL,
title varchar(100) NOT NULL,
manual_set boolean NOT NULL default 0,
broken_count int(5) default 0,
broken boolean NOT NULL default 0,
PRIMARY KEY (id),
UNIQUE (url),
INDEX (title)
) ENGINE=MyISAM;
""")
run_sql("""
CREATE TABLE IF NOT EXISTS lnkENTRYLOG (
id_lnkENTRY int(15) unsigned NOT NULL,
id_lnkLOG int(15) unsigned NOT NULL,
FOREIGN KEY (id_lnkENTRY) REFERENCES lnkENTRY(id),
FOREIGN KEY (id_lnkLOG) REFERENCES lnkLOG(id)
) ENGINE=MyISAM;
""")
run_sql("""
CREATE TABLE IF NOT EXISTS lnkLOG (
id int(15) unsigned NOT NULL auto_increment,
id_user int(15) unsigned,
action varchar(30) NOT NULL,
log_time datetime default '0000-00-00 00:00:00',
PRIMARY KEY (id),
INDEX (id_user),
INDEX (action),
INDEX (log_time)
) ENGINE=MyISAM;
""")
run_sql("""
CREATE TABLE IF NOT EXISTS lnkADMINURL (
id int(15) unsigned NOT NULL auto_increment,
url varchar(100) NOT NULL,
list varchar(30) NOT NULL,
PRIMARY KEY (id),
UNIQUE (url),
INDEX (list)
) ENGINE=MyISAM;
""")
run_sql("""
CREATE TABLE IF NOT EXISTS lnkADMINURLLOG (
id_lnkADMINURL int(15) unsigned NOT NULL,
id_lnkLOG int(15) unsigned NOT NULL,
FOREIGN KEY (id_lnkADMINURL) REFERENCES lnkADMINURL(id),
FOREIGN KEY (id_lnkLOG) REFERENCES lnkLOG(id)
) ENGINE=MyISAM;
""")
def estimate():
return 10
def pre_upgrade():
pass
def post_upgrade():
pass
|
Panos512/invenio
|
modules/miscutil/lib/upgrades/invenio_2012_11_04_circulation_and_linkback_updates.py
|
Python
|
gpl-2.0
| 4,682 | 0.006621 |
#!/usr/bin/env python3
import argparse
from pathlib import Path
from PIL import Image
parser = argparse.ArgumentParser(
prog='emoji-extractor',
description="""Resize extracted emojis to 128x128.""")
parser.add_argument(
'-e', '--emojis',
help='folder where emojis are stored',
default='output/',
required=False)
args = parser.parse_args()
path = Path(args.emojis)
for image_path in path.iterdir():
try:
print('Cropping {}...'.format(image_path.name))
image = Image.open(image_path)
width, height = image.size
box = (4, 0, width - 4, height)
crop = image.crop(box)
crop.save(image_path)
    except Exception:
print('Cannot crop {}...'.format(image_path.name))
|
SMSSecure/SMSSecure
|
scripts/emoji-extractor/remove-emoji-margins.py
|
Python
|
gpl-3.0
| 738 | 0.001355 |
"""Start/stop/manage workers."""
from __future__ import absolute_import, unicode_literals
import errno
import os
import shlex
import signal
import sys
from collections import OrderedDict, defaultdict
from functools import partial
from subprocess import Popen
from time import sleep
from kombu.utils.encoding import from_utf8
from kombu.utils.objects import cached_property
from celery.five import UserList, items
from celery.platforms import IS_WINDOWS, Pidfile, signal_name
from celery.utils.nodenames import (gethostname, host_format, node_format,
nodesplit)
from celery.utils.saferepr import saferepr
__all__ = ('Cluster', 'Node')
CELERY_EXE = 'celery'
def celery_exe(*args):
return ' '.join((CELERY_EXE,) + args)
def build_nodename(name, prefix, suffix):
hostname = suffix
if '@' in name:
nodename = host_format(name)
shortname, hostname = nodesplit(nodename)
name = shortname
else:
shortname = '%s%s' % (prefix, name)
nodename = host_format(
'{0}@{1}'.format(shortname, hostname),
)
return name, nodename, hostname
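# Illustrative examples (not part of the original module); the hostnames are
# placeholders:
#   build_nodename('worker1', 'celery', 'example.com')
#       -> ('worker1', 'celeryworker1@example.com', 'example.com')
#   build_nodename('foo@bar.example.com', 'celery', 'ignored')
#       -> ('foo', 'foo@bar.example.com', 'bar.example.com')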
def build_expander(nodename, shortname, hostname):
return partial(
node_format,
name=nodename,
N=shortname,
d=hostname,
h=nodename,
i='%i',
I='%I',
)
def format_opt(opt, value):
if not value:
return opt
if opt.startswith('--'):
return '{0}={1}'.format(opt, value)
return '{0} {1}'.format(opt, value)
def _kwargs_to_command_line(kwargs):
return {
('--{0}'.format(k.replace('_', '-'))
if len(k) > 1 else '-{0}'.format(k)): '{0}'.format(v)
for k, v in items(kwargs)
}
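# Illustrative examples (not part of the original module):
#   format_opt('--loglevel', 'INFO')  -> '--loglevel=INFO'
#   format_opt('-Q', 'images')        -> '-Q images'
#   _kwargs_to_command_line({'max_tasks_per_child': 30, 'Q': 'images'})
#       -> {'--max-tasks-per-child': '30', '-Q': 'images'}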
class NamespacedOptionParser(object):
def __init__(self, args):
self.args = args
self.options = OrderedDict()
self.values = []
self.passthrough = ''
self.namespaces = defaultdict(lambda: OrderedDict())
def parse(self):
rargs = list(self.args)
pos = 0
while pos < len(rargs):
arg = rargs[pos]
if arg == '--':
self.passthrough = ' '.join(rargs[pos:])
break
elif arg[0] == '-':
if arg[1] == '-':
self.process_long_opt(arg[2:])
else:
value = None
if len(rargs) > pos + 1 and rargs[pos + 1][0] != '-':
value = rargs[pos + 1]
pos += 1
self.process_short_opt(arg[1:], value)
else:
self.values.append(arg)
pos += 1
def process_long_opt(self, arg, value=None):
if '=' in arg:
arg, value = arg.split('=', 1)
self.add_option(arg, value, short=False)
def process_short_opt(self, arg, value=None):
self.add_option(arg, value, short=True)
def optmerge(self, ns, defaults=None):
if defaults is None:
defaults = self.options
return OrderedDict(defaults, **self.namespaces[ns])
def add_option(self, name, value, short=False, ns=None):
prefix = short and '-' or '--'
dest = self.options
if ':' in name:
name, ns = name.split(':')
dest = self.namespaces[ns]
dest[prefix + name] = value
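# Illustrative example (not part of the original module): parsing
# ``celery multi``-style arguments, where ``-c:2`` targets the second node.
#   p = NamespacedOptionParser(['worker1', 'worker2', '-c:2', '5', '--loglevel=INFO'])
#   p.parse()
#   p.values           -> ['worker1', 'worker2']
#   p.options          -> {'--loglevel': 'INFO'}
#   p.namespaces['2']  -> {'-c': '5'}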
class Node(object):
"""Represents a node in a cluster."""
def __init__(self, name,
cmd=None, append=None, options=None, extra_args=None):
self.name = name
self.cmd = cmd or '-m {0}'.format(celery_exe('worker', '--detach'))
self.append = append
self.extra_args = extra_args or ''
self.options = self._annotate_with_default_opts(
options or OrderedDict())
self.expander = self._prepare_expander()
self.argv = self._prepare_argv()
self._pid = None
def _annotate_with_default_opts(self, options):
options['-n'] = self.name
self._setdefaultopt(options, ['--pidfile', '-p'], '%n.pid')
self._setdefaultopt(options, ['--logfile', '-f'], '%n%I.log')
self._setdefaultopt(options, ['--executable'], sys.executable)
return options
def _setdefaultopt(self, d, alt, value):
for opt in alt[1:]:
try:
return d[opt]
except KeyError:
pass
return d.setdefault(alt[0], value)
def _prepare_expander(self):
shortname, hostname = self.name.split('@', 1)
return build_expander(
self.name, shortname, hostname)
def _prepare_argv(self):
argv = tuple(
[self.expander(self.cmd)] +
[format_opt(opt, self.expander(value))
for opt, value in items(self.options)] +
[self.extra_args]
)
if self.append:
argv += (self.expander(self.append),)
return argv
def alive(self):
return self.send(0)
def send(self, sig, on_error=None):
pid = self.pid
if pid:
try:
os.kill(pid, sig)
except OSError as exc:
if exc.errno != errno.ESRCH:
raise
maybe_call(on_error, self)
return False
return True
maybe_call(on_error, self)
def start(self, env=None, **kwargs):
return self._waitexec(
self.argv, path=self.executable, env=env, **kwargs)
def _waitexec(self, argv, path=sys.executable, env=None,
on_spawn=None, on_signalled=None, on_failure=None):
argstr = self.prepare_argv(argv, path)
maybe_call(on_spawn, self, argstr=' '.join(argstr), env=env)
pipe = Popen(argstr, env=env)
return self.handle_process_exit(
pipe.wait(),
on_signalled=on_signalled,
on_failure=on_failure,
)
def handle_process_exit(self, retcode, on_signalled=None, on_failure=None):
if retcode < 0:
maybe_call(on_signalled, self, -retcode)
return -retcode
elif retcode > 0:
maybe_call(on_failure, self, retcode)
return retcode
def prepare_argv(self, argv, path):
args = ' '.join([path] + list(argv))
return shlex.split(from_utf8(args), posix=not IS_WINDOWS)
def getopt(self, *alt):
for opt in alt:
try:
return self.options[opt]
except KeyError:
pass
raise KeyError(alt[0])
def __repr__(self):
return '<{name}: {0.name}>'.format(self, name=type(self).__name__)
@cached_property
def pidfile(self):
return self.expander(self.getopt('--pidfile', '-p'))
@cached_property
def logfile(self):
return self.expander(self.getopt('--logfile', '-f'))
@property
def pid(self):
if self._pid is not None:
return self._pid
try:
return Pidfile(self.pidfile).read_pid()
except ValueError:
pass
@pid.setter
def pid(self, value):
self._pid = value
@cached_property
def executable(self):
return self.options['--executable']
@cached_property
def argv_with_executable(self):
return (self.executable,) + self.argv
@classmethod
def from_kwargs(cls, name, **kwargs):
return cls(name, options=_kwargs_to_command_line(kwargs))
def maybe_call(fun, *args, **kwargs):
if fun is not None:
fun(*args, **kwargs)
class MultiParser(object):
Node = Node
def __init__(self, cmd='celery worker',
append='', prefix='', suffix='',
range_prefix='celery'):
self.cmd = cmd
self.append = append
self.prefix = prefix
self.suffix = suffix
self.range_prefix = range_prefix
def parse(self, p):
names = p.values
options = dict(p.options)
ranges = len(names) == 1
prefix = self.prefix
cmd = options.pop('--cmd', self.cmd)
append = options.pop('--append', self.append)
hostname = options.pop('--hostname', options.pop('-n', gethostname()))
prefix = options.pop('--prefix', prefix) or ''
suffix = options.pop('--suffix', self.suffix) or hostname
suffix = '' if suffix in ('""', "''") else suffix
if ranges:
try:
names, prefix = self._get_ranges(names), self.range_prefix
except ValueError:
pass
self._update_ns_opts(p, names)
self._update_ns_ranges(p, ranges)
return (
self._node_from_options(
p, name, prefix, suffix, cmd, append, options)
for name in names
)
def _node_from_options(self, p, name, prefix,
suffix, cmd, append, options):
namespace, nodename, _ = build_nodename(name, prefix, suffix)
namespace = nodename if nodename in p.namespaces else namespace
return Node(nodename, cmd, append,
p.optmerge(namespace, options), p.passthrough)
def _get_ranges(self, names):
noderange = int(names[0])
return [str(n) for n in range(1, noderange + 1)]
def _update_ns_opts(self, p, names):
# Numbers in args always refers to the index in the list of names.
# (e.g., `start foo bar baz -c:1` where 1 is foo, 2 is bar, and so on).
for ns_name, ns_opts in list(items(p.namespaces)):
if ns_name.isdigit():
ns_index = int(ns_name) - 1
if ns_index < 0:
raise KeyError('Indexes start at 1 got: %r' % (ns_name,))
try:
p.namespaces[names[ns_index]].update(ns_opts)
except IndexError:
raise KeyError('No node at index %r' % (ns_name,))
def _update_ns_ranges(self, p, ranges):
for ns_name, ns_opts in list(items(p.namespaces)):
if ',' in ns_name or (ranges and '-' in ns_name):
for subns in self._parse_ns_range(ns_name, ranges):
p.namespaces[subns].update(ns_opts)
p.namespaces.pop(ns_name)
def _parse_ns_range(self, ns, ranges=False):
ret = []
for space in ',' in ns and ns.split(',') or [ns]:
if ranges and '-' in space:
start, stop = space.split('-')
ret.extend(
str(n) for n in range(int(start), int(stop) + 1)
)
else:
ret.append(space)
return ret
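# Illustrative examples (not part of the original module):
#   MultiParser()._parse_ns_range('1-3,foo', ranges=True)  -> ['1', '2', '3', 'foo']
#   MultiParser()._parse_ns_range('a,b')                   -> ['a', 'b']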
class Cluster(UserList):
"""Represent a cluster of workers."""
def __init__(self, nodes, cmd=None, env=None,
on_stopping_preamble=None,
on_send_signal=None,
on_still_waiting_for=None,
on_still_waiting_progress=None,
on_still_waiting_end=None,
on_node_start=None,
on_node_restart=None,
on_node_shutdown_ok=None,
on_node_status=None,
on_node_signal=None,
on_node_signal_dead=None,
on_node_down=None,
on_child_spawn=None,
on_child_signalled=None,
on_child_failure=None):
self.nodes = nodes
self.cmd = cmd or celery_exe('worker')
self.env = env
self.on_stopping_preamble = on_stopping_preamble
self.on_send_signal = on_send_signal
self.on_still_waiting_for = on_still_waiting_for
self.on_still_waiting_progress = on_still_waiting_progress
self.on_still_waiting_end = on_still_waiting_end
self.on_node_start = on_node_start
self.on_node_restart = on_node_restart
self.on_node_shutdown_ok = on_node_shutdown_ok
self.on_node_status = on_node_status
self.on_node_signal = on_node_signal
self.on_node_signal_dead = on_node_signal_dead
self.on_node_down = on_node_down
self.on_child_spawn = on_child_spawn
self.on_child_signalled = on_child_signalled
self.on_child_failure = on_child_failure
def start(self):
return [self.start_node(node) for node in self]
def start_node(self, node):
maybe_call(self.on_node_start, node)
retcode = self._start_node(node)
maybe_call(self.on_node_status, node, retcode)
return retcode
def _start_node(self, node):
return node.start(
self.env,
on_spawn=self.on_child_spawn,
on_signalled=self.on_child_signalled,
on_failure=self.on_child_failure,
)
def send_all(self, sig):
for node in self.getpids(on_down=self.on_node_down):
maybe_call(self.on_node_signal, node, signal_name(sig))
node.send(sig, self.on_node_signal_dead)
def kill(self):
return self.send_all(signal.SIGKILL)
def restart(self, sig=signal.SIGTERM):
retvals = []
def restart_on_down(node):
maybe_call(self.on_node_restart, node)
retval = self._start_node(node)
maybe_call(self.on_node_status, node, retval)
retvals.append(retval)
self._stop_nodes(retry=2, on_down=restart_on_down, sig=sig)
return retvals
def stop(self, retry=None, callback=None, sig=signal.SIGTERM):
return self._stop_nodes(retry=retry, on_down=callback, sig=sig)
def stopwait(self, retry=2, callback=None, sig=signal.SIGTERM):
return self._stop_nodes(retry=retry, on_down=callback, sig=sig)
def _stop_nodes(self, retry=None, on_down=None, sig=signal.SIGTERM):
on_down = on_down if on_down is not None else self.on_node_down
nodes = list(self.getpids(on_down=on_down))
if nodes:
for node in self.shutdown_nodes(nodes, sig=sig, retry=retry):
maybe_call(on_down, node)
def shutdown_nodes(self, nodes, sig=signal.SIGTERM, retry=None):
P = set(nodes)
maybe_call(self.on_stopping_preamble, nodes)
to_remove = set()
for node in P:
maybe_call(self.on_send_signal, node, signal_name(sig))
if not node.send(sig, self.on_node_signal_dead):
to_remove.add(node)
yield node
P -= to_remove
if retry:
maybe_call(self.on_still_waiting_for, P)
its = 0
while P:
to_remove = set()
for node in P:
its += 1
maybe_call(self.on_still_waiting_progress, P)
if not node.alive():
maybe_call(self.on_node_shutdown_ok, node)
to_remove.add(node)
yield node
maybe_call(self.on_still_waiting_for, P)
break
P -= to_remove
if P and not its % len(P):
sleep(float(retry))
maybe_call(self.on_still_waiting_end)
def find(self, name):
for node in self:
if node.name == name:
return node
raise KeyError(name)
def getpids(self, on_down=None):
for node in self:
if node.pid:
yield node
else:
maybe_call(on_down, node)
def __repr__(self):
return '<{name}({0}): {1}>'.format(
len(self), saferepr([n.name for n in self]),
name=type(self).__name__,
)
@property
def data(self):
return self.nodes
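# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module); the node names
# are placeholders and no worker processes are spawned here.
def _example_cluster():
    nodes = [
        Node.from_kwargs('worker1@example.com', loglevel='INFO'),
        Node.from_kwargs('worker2@example.com', loglevel='INFO'),
    ]
    cluster = Cluster(nodes)
    print(cluster)          # e.g. <Cluster(2): ['worker1@example.com', ...]>
    print(nodes[0].argv)    # the expanded command-line arguments for one node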
|
kawamon/hue
|
desktop/core/ext-py/celery-4.2.1/celery/apps/multi.py
|
Python
|
apache-2.0
| 15,740 | 0 |
"""The Tile component."""
import asyncio
from datetime import timedelta
from pytile import async_login
from pytile.errors import SessionExpiredError, TileError
from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import DATA_COORDINATOR, DOMAIN, LOGGER
PLATFORMS = ["device_tracker"]
DEVICE_TYPES = ["PHONE", "TILE"]
DEFAULT_ATTRIBUTION = "Data provided by Tile"
DEFAULT_ICON = "mdi:view-grid"
DEFAULT_UPDATE_INTERVAL = timedelta(minutes=2)
CONF_SHOW_INACTIVE = "show_inactive"
async def async_setup(hass, config):
"""Set up the Tile component."""
hass.data[DOMAIN] = {DATA_COORDINATOR: {}}
return True
async def async_setup_entry(hass, config_entry):
"""Set up Tile as config entry."""
websession = aiohttp_client.async_get_clientsession(hass)
client = await async_login(
config_entry.data[CONF_USERNAME],
config_entry.data[CONF_PASSWORD],
session=websession,
)
async def async_update_data():
"""Get new data from the API."""
try:
return await client.tiles.all()
except SessionExpiredError:
LOGGER.info("Tile session expired; creating a new one")
await client.async_init()
except TileError as err:
raise UpdateFailed(f"Error while retrieving data: {err}") from err
coordinator = DataUpdateCoordinator(
hass,
LOGGER,
name=config_entry.title,
update_interval=DEFAULT_UPDATE_INTERVAL,
update_method=async_update_data,
)
await coordinator.async_refresh()
hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a Tile config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN][DATA_COORDINATOR].pop(config_entry.entry_id)
return unload_ok
class TileEntity(CoordinatorEntity):
"""Define a generic Tile entity."""
def __init__(self, coordinator):
"""Initialize."""
super().__init__(coordinator)
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._name = None
self._unique_id = None
@property
def device_state_attributes(self):
"""Return the device state attributes."""
return self._attrs
@property
def icon(self):
"""Return the icon."""
return DEFAULT_ICON
@property
def name(self):
"""Return the name."""
return self._name
@property
def unique_id(self):
"""Return the unique ID of the entity."""
return self._unique_id
@callback
def _handle_coordinator_update(self):
"""Respond to a DataUpdateCoordinator update."""
self._update_from_latest_data()
self.async_write_ha_state()
@callback
def _update_from_latest_data(self):
"""Update the entity from the latest data."""
raise NotImplementedError
async def async_added_to_hass(self):
"""Handle entity which will be added."""
await super().async_added_to_hass()
self._update_from_latest_data()
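# Illustrative sketch (not part of this integration): a hypothetical subclass
# showing how _update_from_latest_data() is meant to be overridden. The tile
# attribute names used on the coordinator data are assumptions.
#
#     class ExampleTileEntity(TileEntity):
#         def __init__(self, coordinator, tile_uuid):
#             super().__init__(coordinator)
#             self._tile_uuid = tile_uuid
#             self._unique_id = f"tile_{tile_uuid}"
#
#         @callback
#         def _update_from_latest_data(self):
#             tile = self.coordinator.data[self._tile_uuid]
#             self._name = tile.name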
|
tboyce1/home-assistant
|
homeassistant/components/tile/__init__.py
|
Python
|
apache-2.0
| 3,733 | 0.000536 |
from __future__ import print_function
import xml.dom.minidom
import DWML
import datetime
import pyxb.binding.datatypes as xsd
import urllib2
import time
import collections
import sys
# Get the next seven days forecast for two locations
zip = [ 85711, 55108 ]
if 1 < len(sys.argv):
zip = sys.argv[1:]
begin = xsd.dateTime.today()
end = xsd.dateTime(begin + datetime.timedelta(7))
# Create the REST URI for this query
uri = 'http://www.weather.gov/forecasts/xml/sample_products/browser_interface/ndfdXMLclient.php?zipCodeList=%s&product=time-series&begin=%s&end=%s&maxt=maxt&mint=mint' % ("+".join([ str(_zc) for _zc in zip ]), begin.xsdLiteral(), end.xsdLiteral())
print(uri)
# Retrieve the data
xmld = urllib2.urlopen(uri).read()
open('forecast.xml', 'wb').write(xmld)
#print xmld
# Convert it to DWML object
r = DWML.CreateFromDocument(xmld)
product = r.head.product
print('%s %s' % (product.title, product.category))
source = r.head.source
print(", ".join(source.production_center.content()))
data = r.data
if isinstance(data, collections.MutableSequence):
data = data.pop(0)
print(data)
for i in range(len(data.location)):
loc = data.location[i]
print('%s [%s %s]' % (loc.location_key, loc.point.latitude, loc.point.longitude))
for p in data.parameters:
if p.applicable_location != loc.location_key:
continue
mint = maxt = None
for t in p.temperature:
if 'maximum' == t.type:
maxt = t
elif 'minimum' == t.type:
mint = t
print('%s (%s): %s' % (t.name[0], t.units, " ".join([ str(_v) for _v in t.content() ])))
# Sometimes the service doesn't provide the same number of
# data points for min and max
mint_time_layout = maxt_time_layout = None
for tl in data.time_layout:
if tl.layout_key == mint.time_layout:
mint_time_layout = tl
if tl.layout_key == maxt.time_layout:
maxt_time_layout = tl
for ti in range(min(len(mint_time_layout.start_valid_time), len(maxt_time_layout.start_valid_time))):
start = mint_time_layout.start_valid_time[ti].value()
end = mint_time_layout.end_valid_time[ti]
print('%s: min %s, max %s' % (time.strftime('%A, %B %d %Y', start.timetuple()),
mint.value_[ti].value(), maxt.value_[ti].value()))
|
jonfoster/pyxb-upstream-mirror
|
examples/ndfd/forecast.py
|
Python
|
apache-2.0
| 2,426 | 0.005359 |
# Copyright 2004-2017 Tom Rothamel <pytom@bishoujo.us>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import renpy
import traceback
import subprocess
class Editor(object):
"""
This class is intended to be subclassed by editor subclasses. It provides a
number of editor related operations, which are called by Ren'Py (including
the Ren'Py Launcher).
Editor operations are grouped into transactions. An editor transaction
starts with a call to the begin() method. Ren'Py will then call some number
of command methods, each causing an operation to occur in the editor. Ren'Py
will call end() at the end of the transaction.
Although not required, it's reasonable than an implementation of this class
will batch the files together and send them to the editor at once. It's also
reasonable that an implementation will send the operations one at a time (and
do little-to-nothing in begin() and end().
Each operation takes a path to operate on. If the editor has a buffer
corresponding to that path, that buffer is used. Otherwise, the editor
is implicitly opened.
We reserve the right to add new keyword arguments to methods of this class,
so please ensure that subclasses accept and ignore unknown keyword
arguments.
"""
def begin(self, new_window=False, **kwargs):
"""
Begins an editor transaction.
`new_window`
If True, a new editor window will be created and presented to the
user. Otherwise, and existing editor window will be used.
"""
def end(self, **kwargs):
"""
Ends an editor transaction.
"""
def open(self, filename, line=None, **kwargs): # @ReservedAssignment
"""
Ensures `path` is open in the editor. This may be called multiple
times per transaction.
`line`
If not None, this should be a line number to open in the
editor.
The first open call in a transaction is somewhat special - that file
should be given focus in a tabbed editor environment.
"""
class SystemEditor(Editor):
def open(self, filename, line=None, **kwargs): # @ReservedAssignment
filename = renpy.exports.fsencode(filename)
try:
if renpy.windows:
os.startfile(filename) # @UndefinedVariable
elif renpy.macintosh:
subprocess.call([ "open", filename ]) # @UndefinedVariable
elif renpy.linux:
subprocess.call([ "xdg-open", filename ]) # @UndefinedVariable
except:
traceback.print_exc()
# The editor that Ren'Py is using. It should be a subclass of the Editor
# class.
editor = None
def init():
"""
Creates the editor object, based on the contents of the RENPY_EDIT_PY
file.
"""
global editor
editor = SystemEditor()
path = os.environ.get("RENPY_EDIT_PY", None)
if path is None:
return
with open(path, "r") as f:
source = f.read()
code = compile(source, path, "exec")
scope = { "__file__" : path }
exec code in scope, scope
if "Editor" in scope:
editor = scope["Editor"]()
return
raise Exception("{0} did not define an Editor class.".format(path))
def launch_editor(filenames, line=1, transient=False):
"""
Causes the editor to be launched.
"""
# On mobile devices, we will never be able to launch the editor.
if renpy.mobile:
return True
if editor is None:
init()
if editor is None:
return False
filenames = [ renpy.parser.unelide_filename(i) for i in filenames ]
try:
editor.begin(new_window=transient)
for i in filenames:
editor.open(i, line)
line = None # The line number only applies to the first filename.
editor.end()
return True
except:
traceback.print_exc()
return False
|
kfcpaladin/sze-the-game
|
renpy/editor.py
|
Python
|
mit
| 5,015 | 0.002792 |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: dummy.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='dummy.proto',
package='dummy',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x0b\x64ummy.proto\x12\x05\x64ummy\"\x1d\n\x0c\x44ummyRequest\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nDummyReply\x12\r\n\x05value\x18\x01 \x01(\t2\xfa\x01\n\x0c\x44ummyService\x12\x36\n\nUnaryUnary\x12\x13.dummy.DummyRequest\x1a\x11.dummy.DummyReply\"\x00\x12\x39\n\x0bUnaryStream\x12\x13.dummy.DummyRequest\x1a\x11.dummy.DummyReply\"\x00\x30\x01\x12\x39\n\x0bStreamUnary\x12\x13.dummy.DummyRequest\x1a\x11.dummy.DummyReply\"\x00(\x01\x12<\n\x0cStreamStream\x12\x13.dummy.DummyRequest\x1a\x11.dummy.DummyReply\"\x00(\x01\x30\x01\x62\x06proto3'
)
_DUMMYREQUEST = _descriptor.Descriptor(
name='DummyRequest',
full_name='dummy.DummyRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='dummy.DummyRequest.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=22,
serialized_end=51,
)
_DUMMYREPLY = _descriptor.Descriptor(
name='DummyReply',
full_name='dummy.DummyReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='dummy.DummyReply.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=53,
serialized_end=80,
)
DESCRIPTOR.message_types_by_name['DummyRequest'] = _DUMMYREQUEST
DESCRIPTOR.message_types_by_name['DummyReply'] = _DUMMYREPLY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DummyRequest = _reflection.GeneratedProtocolMessageType('DummyRequest', (_message.Message,), {
'DESCRIPTOR' : _DUMMYREQUEST,
'__module__' : 'dummy_pb2'
# @@protoc_insertion_point(class_scope:dummy.DummyRequest)
})
_sym_db.RegisterMessage(DummyRequest)
DummyReply = _reflection.GeneratedProtocolMessageType('DummyReply', (_message.Message,), {
'DESCRIPTOR' : _DUMMYREPLY,
'__module__' : 'dummy_pb2'
# @@protoc_insertion_point(class_scope:dummy.DummyReply)
})
_sym_db.RegisterMessage(DummyReply)
_DUMMYSERVICE = _descriptor.ServiceDescriptor(
name='DummyService',
full_name='dummy.DummyService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=83,
serialized_end=333,
methods=[
_descriptor.MethodDescriptor(
name='UnaryUnary',
full_name='dummy.DummyService.UnaryUnary',
index=0,
containing_service=None,
input_type=_DUMMYREQUEST,
output_type=_DUMMYREPLY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UnaryStream',
full_name='dummy.DummyService.UnaryStream',
index=1,
containing_service=None,
input_type=_DUMMYREQUEST,
output_type=_DUMMYREPLY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StreamUnary',
full_name='dummy.DummyService.StreamUnary',
index=2,
containing_service=None,
input_type=_DUMMYREQUEST,
output_type=_DUMMYREPLY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StreamStream',
full_name='dummy.DummyService.StreamStream',
index=3,
containing_service=None,
input_type=_DUMMYREQUEST,
output_type=_DUMMYREPLY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_DUMMYSERVICE)
DESCRIPTOR.services_by_name['DummyService'] = _DUMMYSERVICE
# @@protoc_insertion_point(module_scope)
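# Illustrative usage of the generated messages (appended note, not generated by
# protoc); the payload is an example only:
#   req = DummyRequest(value='ping')
#   data = req.SerializeToString()
#   assert DummyRequest.FromString(data).value == 'ping'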
|
vmagamedov/grpclib
|
tests/dummy_pb2.py
|
Python
|
bsd-3-clause
| 5,139 | 0.003308 |
__all__ = ["user_controller", "plant_controller"]
|
CHrycyna/LandscapeTracker
|
app/controllers/__init__.py
|
Python
|
mit
| 49 | 0.020408 |
# Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
import argparse
import sys
from os.path import dirname, exists, isdir
from mycroft.configuration import ConfigurationManager
from mycroft.messagebus.client.ws import WebsocketClient
from mycroft.skills.core import create_skill_descriptor, load_skill
from mycroft.skills.intent import Intent
from mycroft.util.log import getLogger
__author__ = 'seanfitz'
LOG = getLogger("SkillContainer")
class SkillContainer(object):
def __init__(self, args):
params = self.__build_params(args)
if params.config:
ConfigurationManager.load_local([params.config])
if exists(params.lib) and isdir(params.lib):
sys.path.append(params.lib)
sys.path.append(params.dir)
self.dir = params.dir
self.enable_intent = params.enable_intent
        self.skill = None
        self.__init_client(params)
@staticmethod
def __build_params(args):
parser = argparse.ArgumentParser()
parser.add_argument("--config", default="./mycroft.conf")
parser.add_argument("dir", nargs='?', default=dirname(__file__))
parser.add_argument("--lib", default="./lib")
parser.add_argument("--host", default=None)
parser.add_argument("--port", default=None)
parser.add_argument("--use-ssl", action='store_true', default=False)
parser.add_argument("--enable-intent", action='store_true',
default=False)
return parser.parse_args(args)
def __init_client(self, params):
config = ConfigurationManager.get().get("websocket")
if not params.host:
params.host = config.get('host')
if not params.port:
params.port = config.get('port')
self.ws = WebsocketClient(host=params.host,
port=params.port,
ssl=params.use_ssl)
def load_skill(self):
if self.enable_intent:
Intent(self.ws)
skill_descriptor = create_skill_descriptor(self.dir)
self.skill = load_skill(skill_descriptor, self.ws)
def run(self):
try:
self.ws.on('message', LOG.debug)
self.ws.on('open', self.load_skill)
self.ws.on('error', LOG.error)
self.ws.run_forever()
except Exception as e:
LOG.error("Error: {0}".format(e))
self.stop()
def stop(self):
if self.skill:
self.skill.shutdown()
def main():
container = SkillContainer(sys.argv[1:])
try:
container.run()
except KeyboardInterrupt:
container.stop()
finally:
sys.exit()
if __name__ == "__main__":
main()
|
jasonehines/mycroft-core
|
mycroft/skills/container.py
|
Python
|
gpl-3.0
| 3,372 | 0 |
#!/usr/bin/env python
# coding: utf-8
import datetime
import subprocess
import logging
import json
import os
import sys
from io import BytesIO
import requests
from bottle import route, run, request
from bottle import jinja2_view as view, jinja2_template as template
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
EXIFTOOL_PATH = 'exiftool/exiftool'
@route('/favicon.ico')
def get_favicon():
return ''
@route('/ads.txt')
def get_ads():
return 'google.com, pub-0745898310693904, DIRECT, f08c47fec0942fa0'
@route('/')
@view('index')
def fetch_data():
image_location = request.GET.get('img')
template_data = {
'state': 0,
'image_location': image_location,
'metadata': {}
}
# If no image location was specified, just return the initial page with no data
if not image_location:
logging.info("No image location specified")
return template_data
template_data['state'] = 1
logging.info("Fetching image at {}...".format(image_location))
response = requests.get(image_location)
if response.status_code != 200:
logging.error("Problem fetching image :(")
template_data['invalid_image'] = "Invalid image"
return template_data
logging.info("Image fetched properly")
f = BytesIO(response.content)
logging.info("Running exiftool process...")
process = subprocess.Popen([EXIFTOOL_PATH, '-g0', '-j', '-c', '%+.6f', '-'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
output, output_err = process.communicate(f.read())
# TODO: check for errors running process
logging.info("Decoding JSON from output...")
metadata = json.loads(output)[0]
# Filter metadata components that are not dictionaries
metadata = {k: v for k, v in metadata.items() if isinstance(v, dict)}
if 'ExifTool' in metadata:
del metadata['ExifTool']
# Try to build a summary of information
basic_info = {}
try:
basic_info['Dimensions'] = u"{} × {} {}".format(
metadata['File']['ImageWidth'],
metadata['File']['ImageHeight'],
metadata['File']['FileType']
)
except:
pass
if 'EXIF' in metadata:
if 'Artist' in metadata['EXIF']:
basic_info['Artist'] = metadata['EXIF']['Artist']
if 'Copyright' in metadata['EXIF']:
basic_info['Copyright'] = metadata['EXIF']['Copyright']
if 'Model' in metadata['EXIF']:
basic_info['Camera'] = metadata['EXIF']['Model']
if 'LensModel' in metadata['EXIF']:
basic_info['LensModel'] = metadata['EXIF']['LensModel']
if {'ExposureMode', 'ExposureTime', 'FNumber', 'ISO'} <= set(metadata['EXIF'].keys()):
m = metadata['EXIF']
basic_info['Exposure'] = '{}, {}, {}, ISO {}'.format(
m['ExposureMode'], m['ExposureTime'], m['FNumber'], m['ISO']
)
if 'Composite' in metadata:
if 'GPSLongitude' in metadata['Composite'] and 'GPSLatitude' in metadata['Composite']:
template_data['has_location'] = True
if 'LensID' in metadata['Composite']:
basic_info['Lens'] = metadata['Composite']['LensID']
metadata['Basic'] = basic_info
template_data['metadata'] = metadata
# Get a sorted list of metadata keys
template_data['metadata_sorted_keys'] = sorted(metadata.keys())
# Try to get the referer
referer = request.GET.get('page', request.headers.get('Referer', '/'))
return template_data
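# Example request (illustrative; the image URL is a placeholder):
#   GET /?img=https://example.com/photo.jpg
# The handler fetches the image, runs exiftool on it and renders the "index"
# template with the extracted metadata.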
run(host='0.0.0.0', port=os.environ.get('PORT', 5000))
|
JoseTomasTocino/image-metadata-viewer
|
main.py
|
Python
|
lgpl-3.0
| 3,693 | 0.002709 |